# ===== Aegeaner/spark :: python/pyspark/testing/utils.py (Apache-2.0) =====
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import glob
import os
import struct
import sys
import unittest
from pyspark import SparkContext, SparkConf
have_scipy = False
have_numpy = False
try:
import scipy.sparse
have_scipy = True
except ImportError:
    # No SciPy, but that's okay, we'll skip those tests
    pass
try:
import numpy as np
have_numpy = True
except ImportError:
    # No NumPy, but that's okay, we'll skip those tests
    pass
SPARK_HOME = os.environ["SPARK_HOME"]
def read_int(b):
return struct.unpack("!i", b)[0]
def write_int(i):
return struct.pack("!i", i)
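# Doctest-style sketch (illustrative, not part of the original module): the
# "!i" format is a signed 32-bit integer in network byte order, so the two
# helpers round-trip exactly four bytes.
#
#     >>> buf = write_int(42)
#     >>> len(buf)
#     4
#     >>> read_int(buf)
#     42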
class QuietTest(object):
def __init__(self, sc):
self.log4j = sc._jvm.org.apache.log4j
def __enter__(self):
self.old_level = self.log4j.LogManager.getRootLogger().getLevel()
self.log4j.LogManager.getRootLogger().setLevel(self.log4j.Level.FATAL)
def __exit__(self, exc_type, exc_val, exc_tb):
self.log4j.LogManager.getRootLogger().setLevel(self.old_level)
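# A minimal usage sketch (`sc`, `rdd` and `some_failing_function` are
# placeholders, not defined here): log4j's root logger is raised to FATAL
# inside the block and the previous level is restored on exit.
#
#     with QuietTest(sc):
#         rdd.map(some_failing_function).collect()  # expected error logs stay quiet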
class PySparkTestCase(unittest.TestCase):
def setUp(self):
self._old_sys_path = list(sys.path)
class_name = self.__class__.__name__
self.sc = SparkContext('local[4]', class_name)
def tearDown(self):
self.sc.stop()
sys.path = self._old_sys_path
class ReusedPySparkTestCase(unittest.TestCase):
@classmethod
def conf(cls):
"""
Override this in subclasses to supply a more specific conf
"""
return SparkConf()
@classmethod
def setUpClass(cls):
cls.sc = SparkContext('local[4]', cls.__name__, conf=cls.conf())
@classmethod
def tearDownClass(cls):
cls.sc.stop()
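# A hypothetical subclass sketch (class name and config value invented for
# illustration): override conf() so that the SparkContext shared across the
# whole test class is created with custom settings.
#
#     class NoUITests(ReusedPySparkTestCase):
#         @classmethod
#         def conf(cls):
#             return SparkConf().set("spark.ui.enabled", "false")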
class ByteArrayOutput(object):
def __init__(self):
self.buffer = bytearray()
def write(self, b):
self.buffer += b
def close(self):
pass
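# Illustrative sketch (not in the original file): ByteArrayOutput quacks like
# a write-only file object, so writers can target an in-memory buffer.
#
#     >>> out = ByteArrayOutput()
#     >>> out.write(write_int(7))
#     >>> out.close()
#     >>> len(out.buffer)
#     4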
def search_jar(project_relative_path, jar_name_prefix):
project_full_path = os.path.join(
os.environ["SPARK_HOME"], project_relative_path)
# We should ignore the following jars
ignored_jar_suffixes = ("javadoc.jar", "sources.jar", "test-sources.jar", "tests.jar")
# Search jar in the project dir using the jar name_prefix for both sbt build and maven
# build because the artifact jars are in different directories.
sbt_build = glob.glob(os.path.join(
project_full_path, "target/scala-*/%s*.jar" % jar_name_prefix))
maven_build = glob.glob(os.path.join(
project_full_path, "target/%s*.jar" % jar_name_prefix))
jar_paths = sbt_build + maven_build
jars = [jar for jar in jar_paths if not jar.endswith(ignored_jar_suffixes)]
if not jars:
return None
elif len(jars) > 1:
raise Exception("Found multiple JARs: %s; please remove all but one" % (", ".join(jars)))
else:
return jars[0]
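# A hedged usage sketch (the relative path and jar prefix below are examples,
# not taken from this file): search_jar() returns the one matching artifact,
# None when nothing has been built, and raises if the match is ambiguous.
#
#     jar = search_jar("external/kinesis-asl-assembly",
#                      "spark-streaming-kinesis-asl-assembly")
#     if jar is None:
#         raise RuntimeError("Assembly jar not found; build the project first.")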
# ===== rahulraj/web_projects :: assignment2/src/photogallery/generator/galleryitemfactory.py (MIT) =====
import os
import re
import os.path
from iptcinfo import IPTCInfo
from galleryitem import JpegPicture, JpegDirectory, directory_name_to_html_file_name
from ..utils.inject import assign_injectables
def is_jpeg_file(file_name):
"""
Determine if a file is labeled as a JPEG.
Args:
file_name the name of the file.
Returns:
True if the file ends with .jpg.
"""
return file_is_of_type(file_name, 'jpg')
def is_css_file(file_name):
"""
Determine if a file is labeled as CSS.
Args:
file_name the name of the file.
Returns:
True if the file ends with .css.
"""
return file_is_of_type(file_name, 'css')
def is_js_file(file_name):
"""
Determine if a file is labeled as JavaScript.
Args:
file_name the name of the file.
Returns:
True if the file ends with .js.
"""
return file_is_of_type(file_name, 'js')
def file_is_of_type(file_name, extension):
    """
    Return whether a file is of a certain type.
    Args:
      file_name the name of the file to test.
      extension the part of the name after the . which will be checked
          with a regular expression.
    Returns:
      True if file_name ends with extension.
    """
    # Anchor the pattern so e.g. 'archive.jpg.bak' is not treated as a JPEG.
    type_re = re.compile(r'\.%s$' % extension)
    return type_re.search(file_name) is not None
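# Doctest-style illustration (not in the original source):
#
#     >>> file_is_of_type('photo.jpg', 'jpg')
#     True
#     >>> file_is_of_type('style.css', 'jpg')
#     False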
class GalleryItemFactory(object):
"""
Class to bootstrap the application by reading the disk and
creating GalleryItems from the existing JPEGs and subdirectories.
"""
def __init__(self, lookup_table, should_prompt,
iptc_info_constructor=IPTCInfo,
list_directory=os.listdir, is_directory=os.path.isdir):
"""
Constructor for GalleryItemFactory
Args:
      lookup_table the table that gallery items use to search IPTCInfo.data.
should_prompt whether the program should prompt the user for directory
names.
iptc_info_constructor the constructor for IPTCInfo objects that the files
will use to lookup metadata (defaults to IPTCInfo).
list_directory the function that takes a path and lists the files in it,
defaults to os.listdir
is_directory a function that takes a file name and returns true if it
is a directory (defaults to os.path.isdir).
"""
assign_injectables(self, locals())
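    # A hedged construction sketch (the lookup-table contents are
    # hypothetical; collaborators default to IPTCInfo, os.listdir and
    # os.path.isdir as declared above):
    #
    #     factory = GalleryItemFactory({120: 'caption'}, should_prompt=False)
    #     top_level = factory.create_directory('/path/to/albums')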
def create_directory(self, path, parent_path=None):
"""
Creates a JpegDirectory object with the appropriate GalleryItems
Args:
path the path to the directory that the JPEGs are stored in.
parent_path the directory one level up of path; if we are creating
a subdirectory this will be used to populate back_href.
It can be None if we are creating the top-most directory.
Returns:
A JpegDirectory containing GalleryItems wrapped around all the appropriate
contents of the directory referred to by path.
Raises:
Any exception thrown when trying to extract IPTC information from a JPEG
file. See the documentation of try_create_jpeg_picture for details.
"""
file_names = self.list_directory(path)
jpeg_names = filter(is_jpeg_file, file_names)
path_contents = []
for name in jpeg_names:
maybe_jpeg_picture = self.try_create_jpeg_picture(path, name)
if maybe_jpeg_picture is not None:
path_contents.append(maybe_jpeg_picture)
subdirectories = self.create_subdirectories(file_names, path)
path_contents.extend(subdirectories)
back_href = self.maybe_get_back_href(parent_path)
return JpegDirectory(path, path_contents, self.should_prompt,
back_href=back_href)
def try_create_jpeg_picture(self, path, name):
"""
Given a path and the name of a file ending in .jpg, tries to create
a JpegPicture object out of it.
Args:
path the path to the directory the file is in.
name the name of the file.
Returns:
A JpegPicture object, if creating it was successful. None if creating
the JpegPicture failed for some reason that does not warrant crashing
the program.
Raises:
Any exception raised when trying to extract IPTC information from the
JPEG, that is not an IOError or an exception with the message
'No IPTC data found.' In those two cases, simply skips the file and
prints a message saying so.
"""
full_jpeg_name = os.path.join(path, name)
try:
return JpegPicture(name,
directory_name_to_html_file_name(path),
self.iptc_info_constructor(full_jpeg_name),
self.lookup_table)
except IOError:
print "I was unable to open the file ", name, " for some reason"
print "Maybe it's corrupted?"
print "Skipping it..."
return None
except Exception as possible_iptc_exception:
if str(possible_iptc_exception) == 'No IPTC data found.':
print "I was unable to get IPTC data from the file %s" % name
print "Skipping it..."
return None
else:
raise possible_iptc_exception # Some other exception
def maybe_get_back_href(self, path):
"""
Given a nullable path name, turns it into a href that can be used
to write an anchor tag pointing to a HTML file. If path
is None, propagates the None by returning it.
Args:
path the path name, or None if it is not applicable.
"""
if path is None:
return None
else:
return directory_name_to_html_file_name(path)
def create_subdirectories(self, file_names, path):
"""
Helper methods to find the subdirectories of path and create JpegDirectories
for them, fully initializing their contents too.
Args:
file_names the names of the files in path.
path the root directory path to process.
"""
full_file_names = [os.path.join(path, name) for name in file_names]
directory_names = filter(self.is_directory, full_file_names)
jpeg_directories = [self.create_directory(directory_name, parent_path=path) \
for directory_name in directory_names]
return jpeg_directories
# ===== rwl/muntjac :: muntjac/addon/invient/demo/invient_demo_win.py =====
# @INVIENT_COPYRIGHT@
# @MUNTJAC_LICENSE@
"""Window for Invient charts demo."""
from StringIO \
import StringIO
from random \
import random
from threading \
import Thread
from time \
import sleep
from muntjac.addon.invient.invient_charts_util \
import getDate
from datetime \
import datetime
from muntjac.util \
import totalseconds, OrderedSet
from muntjac.api \
import TextArea, VerticalLayout, HorizontalLayout, Label, \
HorizontalSplitPanel, Window, Tree, Alignment, Button, GridLayout, \
ProgressIndicator
from muntjac.ui \
import button
from muntjac.data.property \
import IValueChangeListener
from muntjac.data.util.hierarchical_container \
import HierarchicalContainer
from muntjac.terminal.sizeable \
import ISizeable
from muntjac.addon.invient.invient_charts \
import ChartZoomListener, DateTimePoint, InvientCharts, DateTimeSeries, \
SeriesType, XYSeries, DecimalPoint, PointClickListener, \
ChartSVGAvailableListener, ChartClickListener, ChartResetZoomListener, \
SeriesClickListerner, SeriesHideListerner, SeriesShowListerner, \
SeriesLegendItemClickListerner, PointRemoveListener, PointSelectListener, \
PointUnselectListener, PieChartLegendItemClickListener
from muntjac.addon.invient.invient_charts_config \
import DateTimePlotBand, DateTimeRange, InvientChartsConfig, Margin, \
DateTimeAxis, NumberYAxis, AxisTitle, LineConfig, SymbolMarker, \
MarkerState, ZoomType, YAxisDataLabel, Grid, AreaConfig, SeriesState, \
CategoryAxis, NumberPlotLine, Legend, Layout, Position, HorzAlign, \
VertAlign, NumberValue, NumberXAxis, ScatterConfig, DataLabel, \
SeriesConfig, Stacking, AxisTitleAlign, BarConfig, Tooltip, ColumnConfig, \
XAxisDataLabel, Spacing, Tick, TickmarkPlacement, Symbol, NumberPlotBand, \
NumberRange, AreaSplineConfig, PieConfig, PieDataLabel, PointConfig, \
SplineConfig, ImageMarker, MinorGrid, PlotLabel, ChartLabel, \
ChartLabelItem, DashStyle
from muntjac.addon.invient.color \
import RGBA, RGB
from muntjac.addon.invient.gradient \
import LinearColorStop, LinearGradient
def timestamp(dt):
return long(totalseconds(dt - datetime(1970, 1, 1)) * 1e03) # ms
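# Illustrative check (not part of the original file): timestamp() converts a
# naive datetime into milliseconds since the Unix epoch, the unit Highcharts
# expects on datetime axes.
#
#     >>> timestamp(datetime(1970, 1, 2))
#     86400000L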
class InvientChartsDemoWin(Window):
_TREE_ITEM_CAPTION_PROP_ID = 'ChartType'
_SEPARATOR = '|'
def __init__(self):
super(InvientChartsDemoWin, self).__init__()
self._eventLog = TextArea()
self._isAppRunningOnGAE = True
mainLayout = VerticalLayout()
self.setContent(mainLayout)
self.setSizeFull()
mainLayout.setSizeFull()
self.setCaption('Invient Charts')
infoBar = HorizontalLayout()
mainLayout.addComponent(infoBar)
infoBar.setHeight('50px')
infoBar.setWidth('100%')
lblAppTitle = Label('Demo Gallery for Invient Charts')
lblAppTitle.setSizeFull()
lblAppTitle.setStyleName('v-label-app-title')
infoBar.addComponent(lblAppTitle)
self._mainSplit = HorizontalSplitPanel()
self._mainSplit.setSizeFull()
mainLayout.addComponent(self._mainSplit)
mainLayout.setExpandRatio(self._mainSplit, 1)
self._leftLayout = VerticalLayout()
self._leftLayout.setSpacing(True)
self._mainSplit.setFirstComponent(self._leftLayout)
self._rightLayout = VerticalLayout()
self._rightLayout.setSpacing(True)
self._rightLayout.setMargin(True)
self._mainSplit.setSecondComponent(self._rightLayout)
self._mainSplit.setSplitPosition(200, ISizeable.UNITS_PIXELS)
self._navTree = self.createChartsTree()
self._leftLayout.addComponent(self._navTree)
self._eventLog.setReadOnly(True)
self._eventLog.setStyleName('v-textarea-chart-events-log')
self._eventLog.setSizeFull()
self._eventLog.setHeight('200px')
self.setTheme('chartdemo')
self._masterChartMinDate = self.getDateZeroTime(2006, 1, 1)
self._masterChartMaxDate = self.getDateZeroTime(2008, 12, 31)
self._detailChartPointStartDate = self.getDateZeroTime(2008, 8, 1)
self._splineThread = None
self._indicator = None
self._scatterMaleData = None
self._scatterFemaleData = None
def attach(self):
super(InvientChartsDemoWin, self).attach()
self._isAppRunningOnGAE = \
self.getInvientChartsDemoApp().isAppRunningOnGAE()
# Select line chart when the screen is loaded
self._navTree.select(DemoSeriesType.LINE.getName()
+ self._SEPARATOR + ChartName.BASIC.getName())
def isAppRunningOnGAE(self):
return self._isAppRunningOnGAE
def getInvientChartsDemoApp(self):
return self.getApplication()
def showChart(self, demoSeriesTypeName, chartNameString):
if not self._isAppRunningOnGAE:
self.stopSplineSelfUpdateThread()
demoSeriesType = self.getDemoSeriesType(demoSeriesTypeName)
chartName = self.getChartName(chartNameString)
if demoSeriesType is not None and chartName is not None:
if demoSeriesType == DemoSeriesType.COMBINATION:
if chartName == ChartName.COMBINATION_COLUMN_LINE_AND_PIE:
self.showCombination()
elif chartName == ChartName.SCATTER_WITH_REGRESSION_LINE:
self.showCombinationScatterWithRegressionLine()
elif chartName == ChartName.MULTIPLE_AXES:
self.showCombinationMultipleAxes()
elif demoSeriesType == DemoSeriesType.LINE:
if chartName == ChartName.BASIC:
self.showLine()
elif chartName == ChartName.CLICK_TO_ADD_POINT:
self.showClickToAddPoint()
elif chartName == ChartName.WITH_DATA_LABELS:
self.showLineWithDataLabels()
elif chartName == ChartName.TIMESERIES_ZOOMABLE:
self.showTimeSeriesZoomable()
elif chartName == ChartName.MASTER_DETAIL:
self.showMasterDetail()
elif demoSeriesType == DemoSeriesType.BAR:
if chartName == ChartName.BASIC:
self.showBarBasic()
elif chartName == ChartName.STACKED:
self.showBarStacked()
elif chartName == ChartName.WITH_NEGATIVE_STACK:
self.showBarWithNegStack()
elif demoSeriesType == DemoSeriesType.COLUMN:
if chartName == ChartName.BASIC:
self.showColumnBasic()
elif chartName == ChartName.WITH_NEGATIVE_VALUES:
self.showColumnWithNegValues()
elif chartName == ChartName.STACKED:
self.showColumnStacked()
elif chartName == ChartName.STACKED_AND_GROUPED:
self.showColumnStackedAndGrouped()
elif chartName == ChartName.STACKED_PERCENT:
self.showColumnStackedPercent()
elif chartName == ChartName.WITH_ROTATED_LABELS:
self.showColumnWithRotatedLabels()
elif demoSeriesType == DemoSeriesType.AREA:
if chartName == ChartName.BASIC:
self.showAreaBasic()
elif chartName == ChartName.WITH_NEGATIVE_VALUES:
self.showAreaWithNegValues()
elif chartName == ChartName.STACKED:
self.showAreaStacked()
elif chartName == ChartName.PERCENTAGE:
self.showAreaPercent()
elif chartName == ChartName.INVERTED_AXES:
self.showAreaInvertedAxes()
elif chartName == ChartName.WITH_MISSING_POINTS:
self.showAreaWithMissingPoints()
elif demoSeriesType == DemoSeriesType.AREASPLINE:
if chartName == ChartName.BASIC:
self.showAreaSpline()
elif demoSeriesType == DemoSeriesType.PIE:
if chartName == ChartName.BASIC:
self.showPie()
elif chartName == ChartName.WITH_LEGEND:
self.showPieWithLegend()
elif chartName == ChartName.DONUT:
self.showDonut()
elif demoSeriesType == DemoSeriesType.SCATTER:
if chartName == ChartName.BASIC:
self.showScatter()
elif demoSeriesType == DemoSeriesType.SPLINE:
if chartName == ChartName.BASIC:
self.showSpline()
elif chartName == ChartName.WITH_PLOTBANDS:
self.showSplineWithPlotBands()
elif chartName == ChartName.WITH_SYMBOLS:
self.showSplineWithSymbol()
elif chartName == ChartName.UPDATING_EACH_SECOND:
self.showSplineUpdatingEachSecond()
else:
self.getApplication().getMainWindow().showNotification(
'Error occurred during chart processing! Try again!!!')
else:
self.getApplication().getMainWindow().showNotification(
'Error occurred during chart processing! Try again!!!')
def showMasterDetail(self):
# Create the master chart
masterChart = self.getMasterChart()
# Create detail chart
detailChart = self.getDetailChart(masterChart)
# Register events
l = MasterChartZoomListener(self, masterChart, detailChart)
masterChart.addListener(l)
# Add master
self.addChart(masterChart, False, False, False, False)
# Add detail
self.addChart(detailChart, True, True, False)
def getDetailChart(self, masterChart):
detailChartConfig = InvientChartsConfig()
detailChartConfig.getGeneralChartConfig().setMargin(Margin())
detailChartConfig.getGeneralChartConfig().getMargin().setBottom(120)
detailChartConfig.getGeneralChartConfig().getMargin().setLeft(50)
detailChartConfig.getGeneralChartConfig().getMargin().setRight(20)
detailChartConfig.getGeneralChartConfig().setReflow(False)
detailChartConfig.getCredit().setEnabled(False)
detailChartConfig.getTitle().setText(
'Historical USD to EUR Exchange Rate')
detailChartConfig.getSubtitle().setText(
'Select an area by dragging across the lower chart')
detailXAxis = DateTimeAxis()
detailXAxes = OrderedSet()
detailXAxes.add(detailXAxis)
detailChartConfig.setXAxes(detailXAxes)
detailYAxis = NumberYAxis()
detailYAxis.setTitle(AxisTitle(''))
detailYAxes = OrderedSet()
detailYAxes.add(detailYAxis)
detailChartConfig.setYAxes(detailYAxes)
detailChartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' var point = this.points[0];'
+ ' return \'<b>\'+ point.series.name +\'</b><br/>\' + '
+ ' $wnd.Highcharts.dateFormat(\'%A %B %e %Y\', this.x) + \':<br/>\' + '
+ ' \'1 USD = \'+ $wnd.Highcharts.numberFormat(point.y, 2) +\' EUR\';'
+ '}')
detailChartConfig.getTooltip().setShared(True)
detailChartConfig.getLegend().setEnabled(False)
lineCfg = LineConfig()
marker = SymbolMarker(False)
lineCfg.setMarker(marker)
marker.setHoverState(MarkerState())
marker.getHoverState().setEnabled(True)
marker.getHoverState().setRadius(3)
detailChartConfig.addSeriesConfig(lineCfg)
detailChart = InvientCharts(detailChartConfig)
# Line instance configuration
lineSeriesCfg = LineConfig()
start = timestamp(self._detailChartPointStartDate)
lineSeriesCfg.setPointStart(start)
lineSeriesCfg.setPointInterval(24 * 3600 * 1000.0)
lineSeriesCfg.setColor(RGB(69, 114, 167))
detailSeries = DateTimeSeries(detailChart, 'USD to EUR',
SeriesType.LINE, lineSeriesCfg)
detailPoints = OrderedSet()
masterChartSeries = masterChart.getSeries('USD to EUR')
for point in masterChartSeries.getPoints():
if (timestamp(point.getX()) >=
timestamp(self._detailChartPointStartDate)):
detailPoints.add(DateTimePoint(detailSeries, point.getY()))
detailSeries.setSeriesPoints(detailPoints)
detailChart.addSeries(detailSeries)
return detailChart
def getMasterChart(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setReflow(False)
chartConfig.getGeneralChartConfig().setBorderWidth(0)
chartConfig.getGeneralChartConfig().setMargin(Margin())
chartConfig.getGeneralChartConfig().getMargin().setLeft(50)
chartConfig.getGeneralChartConfig().getMargin().setRight(20)
chartConfig.getGeneralChartConfig().setZoomType(ZoomType.X)
chartConfig.getGeneralChartConfig().setClientZoom(False)
chartConfig.getGeneralChartConfig().setHeight(80)
chartConfig.getTitle().setText('')
xAxis = DateTimeAxis()
xAxis.setShowLastLabel(True)
xAxis.setMaxZoom(14 * 24 * 3600 * 1000.0)
plotBand = DateTimePlotBand('mask-before')
plotBand.setRange(DateTimeRange(self._masterChartMinDate,
self._detailChartPointStartDate))
plotBand.setColor(RGBA(0, 0, 0, 0.2))
xAxis.addPlotBand(plotBand)
xAxis.setTitle(AxisTitle(''))
xAxes = set()
xAxes.add(xAxis)
chartConfig.setXAxes(xAxes)
yAxis = NumberYAxis()
yAxis.setShowFirstLabel(False)
yAxis.setMin(0.6)
yAxis.setGrid(Grid())
yAxis.getGrid().setLineWidth(0)
yAxis.setLabel(YAxisDataLabel(False))
yAxis.setTitle(AxisTitle(''))
yAxes = set()
yAxes.add(yAxis)
chartConfig.setYAxes(yAxes)
chartConfig.getTooltip().setFormatterJsFunc(
'function() { return false; }')
chartConfig.getLegend().setEnabled(False)
chartConfig.getCredit().setEnabled(False)
# Plot options
areaCfg = AreaConfig()
colorStops = list()
colorStops.append(LinearColorStop(0, RGB(69, 114, 167)))
colorStops.append(LinearColorStop(1, RGBA(0, 0, 0, 0)))
# Fill color
areaCfg.setFillColor(LinearGradient(0, 0, 0, 70, colorStops))
areaCfg.setLineWidth(1)
areaCfg.setMarker(SymbolMarker(False))
areaCfg.setShadow(False)
areaCfg.setEnableMouseTracking(False)
areaCfg.setHoverState(SeriesState())
areaCfg.getHoverState().setLineWidth(1)
chartConfig.addSeriesConfig(areaCfg)
chart = InvientCharts(chartConfig)
# Provide methods to set pointInterval and pointStart and delegate
# call to SeriesConfig
seriesDataCfg = AreaConfig()
seriesDataCfg.setPointInterval(24 * 3600.0 * 1000)
start = timestamp(self._masterChartMinDate)
seriesDataCfg.setPointStart(start)
masterChartSeries = DateTimeSeries(chart, 'USD to EUR',
SeriesType.AREA, seriesDataCfg)
masterChartSeries.setSeriesPoints(self.getMasterDetailData(
masterChartSeries))
chart.addSeries(masterChartSeries)
return chart
def showLine(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.LINE)
chartConfig.getGeneralChartConfig().setMargin(Margin())
chartConfig.getGeneralChartConfig().getMargin().setRight(130)
chartConfig.getGeneralChartConfig().getMargin().setBottom(25)
chartConfig.getTitle().setX(-20)
chartConfig.getTitle().setText('Monthly Average Temperature')
chartConfig.getSubtitle().setText('Source: WorldClimate.com')
chartConfig.getTitle().setX(-20)
categoryAxis = CategoryAxis()
categoryAxis.setCategories(['Jan', 'Feb', 'Mar', 'Apr', 'May',
'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
xAxesSet = set()
xAxesSet.add(categoryAxis)
chartConfig.setXAxes(xAxesSet)
numberYAxis = NumberYAxis()
numberYAxis.setTitle(AxisTitle(u'Temperature (\u2103)'.encode('utf-8')))
plotLine = NumberPlotLine('TempAt0')
plotLine.setValue(NumberValue(0.0))
plotLine.setWidth(1)
plotLine.setZIndex(1)
plotLine.setColor(RGB(128, 128, 128))
numberYAxis.addPlotLine(plotLine)
yAxesSet = set()
yAxesSet.add(numberYAxis)
chartConfig.setYAxes(yAxesSet)
legend = Legend()
legend.setLayout(Layout.VERTICAL)
legendPos = Position()
legendPos.setAlign(HorzAlign.RIGHT)
legendPos.setVertAlign(VertAlign.TOP)
legendPos.setX(-10)
legendPos.setY(100)
legend.setPosition(legendPos)
legend.setBorderWidth(0)
chartConfig.setLegend(legend)
# Series data label formatter
lineCfg = LineConfig()
chartConfig.addSeriesConfig(lineCfg)
# Tooltip formatter
chartConfig.getTooltip().setFormatterJsFunc(
'function() { '
+ u' return \'<b>\' + this.series.name + \'</b><br/>\' + this.x + \': \'+ this.y +\'\u2103\''.encode('utf-8')
+ '}')
chart = InvientCharts(chartConfig)
seriesData = XYSeries('Tokyo')
seriesData.setSeriesPoints(self.getPoints(seriesData, [7.0, 6.9, 9.5,
14.5, 18.2, 21.5, 25.2, 26.5, 23.3, 18.3, 13.9, 9.6]))
chart.addSeries(seriesData)
seriesData = XYSeries('New York')
seriesData.setSeriesPoints(self.getPoints(seriesData, [-0.2, 0.8, 5.7,
11.3, 17.0, 22.0, 24.8, 24.1, 20.1, 14.1, 8.6, 2.5]))
chart.addSeries(seriesData)
seriesData = XYSeries('Berlin')
seriesData.setSeriesPoints(self.getPoints(seriesData, [-0.9, 0.6, 3.5,
8.4, 13.5, 17.0, 18.6, 17.9, 14.3, 9.0, 3.9, 1.0]))
chart.addSeries(seriesData)
seriesData = XYSeries('London')
seriesData.setSeriesPoints(self.getPoints(seriesData, [3.9, 4.2, 5.7,
8.5, 11.9, 15.2, 17.0, 16.6, 14.2, 10.3, 6.6, 4.8]))
chart.addSeries(seriesData)
self.addChart(chart)
def showClickToAddPoint(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.SCATTER)
chartConfig.getGeneralChartConfig().setMargin(Margin(70, 50, 60, 80))
chartConfig.getTitle().setText('User supplied data')
chartConfig.getSubtitle().setText('Click the plot area to add a '
'point. Click a point to remove it.')
xAxis = NumberXAxis()
xAxis.setMinPadding(0.2)
xAxis.setMaxPadding(0.2)
xAxis.setMaxZoom(60)
xAxes = set()
xAxes.add(xAxis)
chartConfig.setXAxes(xAxes)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Value'))
yAxis.setMinPadding(0.2)
yAxis.setMaxPadding(0.2)
yAxis.setMaxZoom(60)
plotLine = NumberPlotLine('At0')
plotLine.setValue(NumberValue(0.0))
plotLine.setWidth(1)
plotLine.setColor(RGB(128, 128, 128))
yAxis.addPlotLine(plotLine)
yAxes = set()
yAxes.add(yAxis)
chartConfig.setYAxes(yAxes)
chartConfig.getLegend().setEnabled(False)
scatterCfg = ScatterConfig()
scatterCfg.setLineWidth(1)
chartConfig.addSeriesConfig(scatterCfg)
# chart data
chart = InvientCharts(chartConfig)
seriesData = XYSeries('User Supplied Data')
seriesData.addPoint(DecimalPoint(seriesData, 20, 20))
seriesData.addPoint(DecimalPoint(seriesData, 80, 80))
chart.addSeries(seriesData)
l = AddPointChartClickListener(self)
chart.addListener(l)
l = AddPointClickListener(self)
chart.addListener(l, [])
self.addChart(chart, False, False)
def showLineWithDataLabels(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setMargin(Margin())
chartConfig.getTitle().setText('Monthly Average Temperature')
chartConfig.getSubtitle().setText('Source: WorldClimate.com')
categoryAxis = CategoryAxis()
categoryAxis.setCategories(['Jan', 'Feb', 'Mar', 'Apr', 'May',
'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
xAxesSet = set()
xAxesSet.add(categoryAxis)
chartConfig.setXAxes(xAxesSet)
numberYAxis = NumberYAxis()
numberYAxis.setTitle(AxisTitle(u'Temperature (\u2103)'.encode('utf-8')))
yAxesSet = set()
yAxesSet.add(numberYAxis)
chartConfig.setYAxes(yAxesSet)
chartConfig.getTooltip().setEnabled(False)
# Series data label formatter
lineCfg = LineConfig()
lineCfg.setDataLabel(DataLabel())
lineCfg.getDataLabel().setEnabled(True)
lineCfg.setEnableMouseTracking(False)
chartConfig.addSeriesConfig(lineCfg)
chart = InvientCharts(chartConfig)
seriesData = XYSeries('Tokyo')
seriesData.setSeriesPoints(self.getPoints(seriesData, [7.0, 6.9, 9.5,
14.5, 18.4, 21.5, 25.2, 26.5, 23.3, 18.3, 13.9, 9.6]))
chart.addSeries(seriesData)
seriesData = XYSeries('London')
seriesData.setSeriesPoints(self.getPoints(seriesData, [3.9, 4.2, 5.7,
8.5, 11.9, 15.2, 17.0, 16.6, 14.2, 10.3, 6.6, 4.8]))
chart.addSeries(seriesData)
self.addChart(chart)
def showBarStacked(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.BAR)
chartConfig.getTitle().setText('Stacked bar chart')
xAxis = CategoryAxis()
categories = ['Apples', 'Oranges', 'Pears', 'Grapes', 'Bananas']
xAxis.setCategories(categories)
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
numberYAxis = NumberYAxis()
numberYAxis.setMin(0.0)
numberYAxis.setTitle(AxisTitle('Total fruit consumption'))
yAxesSet = set()
yAxesSet.add(numberYAxis)
chartConfig.setYAxes(yAxesSet)
legend = Legend()
legend.setBackgroundColor(RGB(255, 255, 255))
legend.setReversed(True)
chartConfig.setLegend(legend)
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'\'+ this.series.name +\': \'+ this.y +\'\'; '
+ '}')
seriesCfg = SeriesConfig()
seriesCfg.setStacking(Stacking.NORMAL)
chartConfig.addSeriesConfig(seriesCfg)
chart = InvientCharts(chartConfig)
seriesData = XYSeries('John')
seriesData.setSeriesPoints(self.getPoints(seriesData, [5, 3, 4, 7, 2]))
chart.addSeries(seriesData)
seriesData = XYSeries('Jane')
seriesData.setSeriesPoints(self.getPoints(seriesData, [2, 2, 3, 2, 1]))
chart.addSeries(seriesData)
seriesData = XYSeries('Joe')
seriesData.setSeriesPoints(self.getPoints(seriesData, [3, 4, 4, 2, 5]))
chart.addSeries(seriesData)
self.addChart(chart)
def showBarBasic(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.BAR)
chartConfig.getTitle().setText('Historic World Population by Region')
chartConfig.getSubtitle().setText('Source: Wikipedia.org')
xAxisMain = CategoryAxis()
categories = ['Africa', 'America', 'Asia', 'Europe', 'Oceania']
xAxisMain.setCategories(categories)
xAxesSet = set()
xAxesSet.add(xAxisMain)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setMin(0.0)
yAxis.setTitle(AxisTitle('Population (millions)'))
yAxis.getTitle().setAlign(AxisTitleAlign.HIGH)
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'\' + this.series.name +\': \'+ this.y +\' millions\';'
+ '}')
barCfg = BarConfig()
barCfg.setDataLabel(DataLabel())
chartConfig.addSeriesConfig(barCfg)
legend = Legend()
legend.setLayout(Layout.VERTICAL)
legend.setPosition(Position())
legend.getPosition().setAlign(HorzAlign.RIGHT)
legend.getPosition().setVertAlign(VertAlign.TOP)
legend.getPosition().setX(-100)
legend.getPosition().setY(100)
legend.setFloating(True)
legend.setBorderWidth(1)
legend.setBackgroundColor(RGB(255, 255, 255))
legend.setShadow(True)
chartConfig.setLegend(legend)
chartConfig.getCredit().setEnabled(False)
chart = InvientCharts(chartConfig)
seriesData = XYSeries('Year 1800')
seriesData.setSeriesPoints(self.getPoints(seriesData,
[107, 31, 635, 203, 2]))
chart.addSeries(seriesData)
seriesData = XYSeries('Year 1900')
seriesData.setSeriesPoints(self.getPoints(seriesData,
[133, 156, 947, 408, 6]))
chart.addSeries(seriesData)
seriesData = XYSeries('Year 2008')
seriesData.setSeriesPoints(self.getPoints(seriesData,
[973, 914, 4054, 732, 34]))
chart.addSeries(seriesData)
self.addChart(chart)
def showBarWithNegStack(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.BAR)
chartConfig.getTitle().setText(
'Population pyramid for Germany, midyear 2010')
chartConfig.getSubtitle().setText('Source: www.census.gov')
xAxisMain = CategoryAxis()
categories = ['0-4', '5-9', '10-14', '15-19', '20-24', '25-29',
'30-34', '35-39', '40-44', '45-49', '50-54', '55-59',
'60-64', '65-69', '70-74', '75-79', '80-84', '85-89',
'90-94', '95-99', '100 +']
xAxisMain.setCategories(categories)
xAxisMain.setReversed(False)
xAxesSet = set()
# Opposite axis
xAxesSet.add(xAxisMain)
xAxis = CategoryAxis()
xAxis.setCategories(categories)
xAxis.setOpposite(True)
xAxis.setReversed(False)
xAxis.setLinkedTo(xAxisMain)
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle(''))
yAxis.setMin(-4000000.0)
yAxis.setMax(4000000.0)
yAxis.setLabel(YAxisDataLabel())
yAxis.getLabel().setFormatterJsFunc(
'function() {'
+ ' return (Math.abs(this.value) / 1000000) + \'M\';'
+ ' }')
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
tooltip = Tooltip()
tooltip.setFormatterJsFunc(
'function() {'
+ ' return \'<b>\'+ this.series.name +\', age \'+ this.point.category +\'</b><br/>\' + '
            + ' \'Population: \'+ $wnd.Highcharts.numberFormat(Math.abs(this.point.y), 0); '
+ '}')
series = SeriesConfig()
series.setStacking(Stacking.NORMAL)
chartConfig.addSeriesConfig(series)
chart = InvientCharts(chartConfig)
seriesData = XYSeries('Male')
seriesData.setSeriesPoints(self.getPoints(seriesData,
[-1746181, -1884428, -2089758, -2222362, -2537431, -2507081,
-2443179, -2664537, -3556505, -3680231, -3143062, -2721122,
-2229181, -2227768, -2176300, -1329968, -836804, -354784,
-90569, -28367, -3878]))
chart.addSeries(seriesData)
seriesData = XYSeries('Female')
seriesData.setSeriesPoints(self.getPoints(seriesData,
[1656154, 1787564, 1981671, 2108575, 2403438, 2366003,
2301402, 2519874, 3360596, 3493473, 3050775, 2759560,
2304444, 2426504, 2568938, 1785638, 1447162, 1005011,
330870, 130632, 21208]))
chart.addSeries(seriesData)
self.addChart(chart)
def showColumnBasic(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.COLUMN)
chartConfig.getTitle().setText('Monthly Average Rainfall')
chartConfig.getSubtitle().setText('Source: WorldClimate.com')
xAxis = CategoryAxis()
xAxis.setCategories(['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setMin(0.0)
yAxis.setTitle(AxisTitle('Rainfall (mm)'))
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
legend = Legend()
legend.setFloating(True)
legend.setLayout(Layout.VERTICAL)
legend.setPosition(Position())
legend.getPosition().setAlign(HorzAlign.LEFT)
legend.getPosition().setVertAlign(VertAlign.TOP)
legend.getPosition().setX(100)
legend.getPosition().setY(70)
legend.setShadow(True)
legend.setBackgroundColor(RGB(255, 255, 255))
chartConfig.setLegend(legend)
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'\' + this.x +\': \'+ this.y +\' mm\'; '
+ '}')
colCfg = ColumnConfig()
colCfg.setPointPadding(0.2)
colCfg.setBorderWidth(0)
chartConfig.addSeriesConfig(colCfg)
chart = InvientCharts(chartConfig)
seriesData = XYSeries('Tokyo')
seriesData.setSeriesPoints(self.getPoints(seriesData,
[49.9, 71.5, 106.4, 129.2, 144.0, 176.0, 135.6, 148.5, 216.4,
194.1, 95.6, 54.4]))
chart.addSeries(seriesData)
seriesData = XYSeries('New York')
seriesData.setSeriesPoints(self.getPoints(seriesData,
[83.6, 78.8, 98.5, 93.4, 106.0, 84.5, 105.0, 104.3, 91.2,
83.5, 106.6, 92.3]))
chart.addSeries(seriesData)
seriesData = XYSeries('London')
seriesData.setSeriesPoints(self.getPoints(seriesData,
[48.9, 38.8, 39.3, 41.4, 47.0, 48.3, 59.0, 59.6, 52.4, 65.2,
59.3, 51.2]))
chart.addSeries(seriesData)
seriesData = XYSeries('Berlin')
seriesData.setSeriesPoints(self.getPoints(seriesData, [42.4, 33.2,
34.5, 39.7, 52.6, 75.5, 57.4, 60.4, 47.6, 39.1, 46.8, 51.1]))
chart.addSeries(seriesData)
self.addChart(chart)
def showColumnWithNegValues(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.COLUMN)
chartConfig.getTitle().setText('Column chart with negative values')
xAxis = CategoryAxis()
xAxis.setCategories(['Apples', 'Oranges', 'Pears', 'Grapes', 'Bananas'])
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
tooltip = Tooltip()
tooltip.setFormatterJsFunc(
'function() {'
+ ' return \'\' + this.series.name +\': \'+ this.y +\'\'; '
+ '}')
chartConfig.setTooltip(tooltip)
chartConfig.getCredit().setEnabled(False)
chart = InvientCharts(chartConfig)
seriesData = XYSeries('John')
seriesData.setSeriesPoints(self.getPoints(seriesData, [5, 3, 4, 7, 2]))
chart.addSeries(seriesData)
seriesData = XYSeries('Jane')
seriesData.setSeriesPoints(self.getPoints(seriesData, [2, -2, -3, 2, 1]))
chart.addSeries(seriesData)
seriesData = XYSeries('Joe')
seriesData.setSeriesPoints(self.getPoints(seriesData, [3, 4, 4, -2, 5]))
chart.addSeries(seriesData)
self.addChart(chart)
def showColumnStacked(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.COLUMN)
chartConfig.getTitle().setText('Stacked column chart')
xAxis = CategoryAxis()
xAxis.setCategories(['Apples', 'Oranges', 'Pears', 'Grapes', 'Bananas'])
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setMin(0.0)
yAxis.setTitle(AxisTitle('Total fruit consumption'))
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
legend = Legend()
legend.setPosition(Position())
legend.getPosition().setAlign(HorzAlign.RIGHT)
legend.getPosition().setVertAlign(VertAlign.TOP)
legend.getPosition().setX(-100)
legend.getPosition().setY(20)
legend.setFloating(True)
legend.setBackgroundColor(RGB(255, 255, 255))
legend.setBorderWidth(1)
legend.setShadow(True)
chartConfig.setLegend(legend)
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'<b>\'+ this.x +\'</b><br/>\'+ this.series.name +\': \'+ this.y +\'<br/>\'+'
+ ' \'Total: \'+ this.point.stackTotal; '
+ '}')
colCfg = ColumnConfig()
colCfg.setStacking(Stacking.NORMAL)
chartConfig.addSeriesConfig(colCfg)
chart = InvientCharts(chartConfig)
seriesData = XYSeries('John')
seriesData.setSeriesPoints(self.getPoints(seriesData, [5, 3, 4, 7, 2]))
chart.addSeries(seriesData)
seriesData = XYSeries('Jane')
seriesData.setSeriesPoints(self.getPoints(seriesData, [2, 2, 3, 2, 1]))
chart.addSeries(seriesData)
seriesData = XYSeries('Joe')
seriesData.setSeriesPoints(self.getPoints(seriesData, [3, 4, 4, 2, 5]))
chart.addSeries(seriesData)
self.addChart(chart)
def showColumnStackedAndGrouped(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.COLUMN)
chartConfig.getTitle().setText(
            'Total fruit consumption, grouped by gender')
xAxis = CategoryAxis()
xAxis.setCategories(['Apples', 'Oranges', 'Pears',
'Grapes', 'Bananas'])
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setAllowDecimals(False)
yAxis.setMin(0.0)
yAxis.setTitle(AxisTitle('Number of fruits'))
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
series = ColumnConfig()
series.setStacking(Stacking.NORMAL)
chartConfig.addSeriesConfig(series)
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'<b>\'+ this.x +\'</b><br/>\'+ this.series.name +\': \'+ this.y +\'<br/>\'+ \'Total: \'+ this.point.stackTotal;'
+ '}')
chart = InvientCharts(chartConfig)
seriesData = XYSeries('John')
seriesData.setSeriesPoints(self.getPoints(seriesData, [5, 3, 4, 7, 2]))
seriesData.setStack('male')
chart.addSeries(seriesData)
seriesData = XYSeries('Joe')
seriesData.setSeriesPoints(self.getPoints(seriesData, [3, 4, 4, 2, 5]))
seriesData.setStack('male')
chart.addSeries(seriesData)
seriesData = XYSeries('Jane')
seriesData.setSeriesPoints(self.getPoints(seriesData, [2, 5, 6, 2, 1]))
seriesData.setStack('female')
chart.addSeries(seriesData)
seriesData = XYSeries('Janet')
seriesData.setSeriesPoints(self.getPoints(seriesData, [3, 0, 4, 4, 3]))
seriesData.setStack('female')
chart.addSeries(seriesData)
self.addChart(chart)
def showColumnStackedPercent(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.COLUMN)
chartConfig.getTitle().setText('Stacked column chart')
xAxis = CategoryAxis()
xAxis.setCategories(['Apples', 'Oranges', 'Pears',
'Grapes', 'Bananas'])
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setMin(0.0)
yAxis.setTitle(AxisTitle('Total fruit consumption'))
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
series = ColumnConfig()
series.setStacking(Stacking.PERCENT)
chartConfig.addSeriesConfig(series)
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'\' + this.series.name +\': \'+ this.y +\' (\'+ Math.round(this.percentage) +\'%)\'; '
+ '}')
chart = InvientCharts(chartConfig)
seriesData = XYSeries('John')
seriesData.setSeriesPoints(self.getPoints(seriesData, [5, 3, 4, 7, 2]))
chart.addSeries(seriesData)
seriesData = XYSeries('Joe')
seriesData.setSeriesPoints(self.getPoints(seriesData, [3, 4, 4, 2, 5]))
chart.addSeries(seriesData)
seriesData = XYSeries('Jane')
seriesData.setSeriesPoints(self.getPoints(seriesData, [2, 2, 3, 2, 1]))
chart.addSeries(seriesData)
self.addChart(chart)
def showColumnWithRotatedLabels(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.COLUMN)
chartConfig.getGeneralChartConfig().setMargin(Margin())
chartConfig.getGeneralChartConfig().getMargin().setTop(50)
chartConfig.getGeneralChartConfig().getMargin().setRight(50)
chartConfig.getGeneralChartConfig().getMargin().setBottom(100)
chartConfig.getGeneralChartConfig().getMargin().setLeft(80)
chartConfig.getTitle().setText('World\'s largest cities per 2008')
xAxis = CategoryAxis()
xAxis.setCategories(['Tokyo', 'Jakarta', 'New York', 'Seoul',
            'Manila', 'Mumbai', 'Sao Paulo', 'Mexico City', 'Delhi',
'Osaka', 'Cairo', 'Kolkata', 'Los Angeles', 'Shanghai',
'Moscow', 'Beijing', 'Buenos Aires', 'Guangzhou',
'Shenzhen', 'Istanbul'])
xAxis.setLabel(XAxisDataLabel())
xAxis.getLabel().setRotation(-45)
xAxis.getLabel().setAlign(HorzAlign.RIGHT)
xAxis.getLabel().setStyle('{ font: \'normal 13px Verdana, sans-serif\' }')
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setMin(0.0)
yAxis.setTitle(AxisTitle('Population (millions)'))
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
chartConfig.setLegend(Legend(False))
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'<b>\'+ this.x +\'</b><br/>\'+ \'Population in 2008: \'+ $wnd.Highcharts.numberFormat(this.y, 1) + '
+ ' \' millions\' '
+ '}')
chart = InvientCharts(chartConfig)
colCfg = ColumnConfig()
colCfg.setDataLabel(DataLabel())
colCfg.getDataLabel().setRotation(-90)
colCfg.getDataLabel().setAlign(HorzAlign.RIGHT)
colCfg.getDataLabel().setX(-3)
colCfg.getDataLabel().setY(10)
colCfg.getDataLabel().setColor(RGB(255, 255, 255))
colCfg.getDataLabel().setFormatterJsFunc('function() {'
+ ' return this.y; '
+ '}')
colCfg.getDataLabel().setStyle(
' { font: \'normal 13px Verdana, sans-serif\' } ')
seriesData = XYSeries('Population', colCfg)
seriesData.setSeriesPoints(self.getPoints(seriesData,
[34.4, 21.8, 20.1, 20, 19.6, 19.5, 19.1, 18.4, 18, 17.3,
16.8, 15, 14.7, 14.5, 13.3, 12.8, 12.4, 11.8, 11.7, 11.2]))
chart.addSeries(seriesData)
self.addChart(chart)
def showAreaWithNegValues(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.AREA)
chartConfig.getTitle().setText('Area chart with negative values')
xAxis = CategoryAxis()
xAxis.setCategories(['Apples', 'Oranges', 'Pears',
'Grapes', 'Bananas'])
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
chartConfig.getCredit().setEnabled(False)
chart = InvientCharts(chartConfig)
series = XYSeries('John')
series.setSeriesPoints(self.getPoints(series, [5, 3, 4, 7, 2]))
chart.addSeries(series)
series = XYSeries('Jane')
series.setSeriesPoints(self.getPoints(series, [2, -2, -3, 2, 1]))
chart.addSeries(series)
series = XYSeries('Joe')
series.setSeriesPoints(self.getPoints(series, [3, 4, 4, -2, 5]))
chart.addSeries(series)
self.addChart(chart)
def showAreaInvertedAxes(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.AREA)
chartConfig.getGeneralChartConfig().setInverted(True)
chartConfig.getTitle().setText(
'Average fruit consumption during one week')
chartConfig.getSubtitle().setStyle(
'{ position: \'absolute\', right: \'0px\', bottom: \'10px\'}')
legend = Legend()
legend.setFloating(True)
legend.setLayout(Layout.VERTICAL)
legend.setPosition(Position())
legend.getPosition().setAlign(HorzAlign.RIGHT)
legend.getPosition().setVertAlign(VertAlign.TOP)
legend.getPosition().setX(-150)
legend.getPosition().setY(100)
legend.setBorderWidth(1)
legend.setBackgroundColor(RGB(255, 255, 255))
chartConfig.setLegend(legend)
xAxis = CategoryAxis()
xAxis.setCategories(['Monday', 'Tuesday', 'Wednesday', 'Thursday',
'Friday', 'Saturday', 'Sunday'])
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Number of units'))
yAxis.setMin(0.0)
yAxis.setLabel(YAxisDataLabel())
yAxis.getLabel().setFormatterJsFunc(
'function() {' + ' return this.value; ' + '}')
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
chartConfig.getTooltip().setFormatterJsFunc('function() {'
+ ' return \'\' + this.x + \': \' + this.y; '
+ '}')
areaCfg = AreaConfig()
areaCfg.setFillOpacity(0.5)
chartConfig.addSeriesConfig(areaCfg)
chart = InvientCharts(chartConfig)
series = XYSeries('John')
series.setSeriesPoints(self.getPoints(series, [3, 4, 3, 5, 4, 10, 12]))
chart.addSeries(series)
series = XYSeries('Jane')
series.setSeriesPoints(self.getPoints(series, [1, 3, 4, 3, 3, 5, 4]))
chart.addSeries(series)
self.addChart(chart)
def showAreaWithMissingPoints(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.AREA)
chartConfig.getGeneralChartConfig().setSpacing(Spacing())
chartConfig.getGeneralChartConfig().getSpacing().setBottom(30)
chartConfig.getTitle().setText('Fruit consumption *')
chartConfig.getSubtitle().setText(
'* Jane\'s banana consumption is unknown')
chartConfig.getSubtitle().setFloating(True)
chartConfig.getSubtitle().setAlign(HorzAlign.RIGHT)
chartConfig.getSubtitle().setVertAlign(VertAlign.BOTTOM)
chartConfig.getSubtitle().setY(15)
legend = Legend()
legend.setFloating(True)
legend.setLayout(Layout.VERTICAL)
legend.setPosition(Position())
legend.getPosition().setAlign(HorzAlign.LEFT)
legend.getPosition().setVertAlign(VertAlign.TOP)
legend.getPosition().setX(150)
legend.getPosition().setY(100)
legend.setBorderWidth(1)
legend.setBackgroundColor(RGB(255, 255, 255))
chartConfig.setLegend(legend)
xAxis = CategoryAxis()
xAxis.setCategories(['Apples', 'Pears', 'Oranges', 'Bananas',
'Grapes', 'Plums', 'Strawberries', 'Raspberries'])
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Y-Axis'))
yAxis.setLabel(YAxisDataLabel())
yAxis.getLabel().setFormatterJsFunc(
'function() {'
+ ' return this.value; '
+ '}')
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'<b>\'+ this.series.name +\'</b><br/>\'+ this.x +\': \'+ this.y;'
+ '}')
chartConfig.getCredit().setEnabled(False)
areaCfg = AreaConfig()
areaCfg.setFillOpacity(0.5)
chartConfig.addSeriesConfig(areaCfg)
chart = InvientCharts(chartConfig)
series = XYSeries('John')
series.setSeriesPoints(self.getPoints(series, [0, 1, 4, 4, 5, 2, 3, 7]))
chart.addSeries(series)
series = XYSeries('Jane')
series.addPoint([DecimalPoint(series, 1.0), DecimalPoint(series, 0.0),
DecimalPoint(series, 3.0), DecimalPoint(series),
DecimalPoint(series, 3.0), DecimalPoint(series, 1.0),
DecimalPoint(series, 2.0), DecimalPoint(series, 1.0)])
chart.addSeries(series)
self.addChart(chart)
def showAreaStacked(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.AREA)
chartConfig.getTitle().setText('Historic and Estimated Worldwide '
'Population Growth by Region')
chartConfig.getSubtitle().setText('Source: Wikipedia.org')
xAxis = CategoryAxis()
xAxis.setCategories(['1750', '1800', '1850', '1900', '1950',
'1999', '2050'])
tick = Tick()
tick.setPlacement(TickmarkPlacement.ON)
xAxis.setTick(tick)
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Billions'))
yAxis.setLabel(YAxisDataLabel())
yAxis.getLabel().setFormatterJsFunc('function() {'
+ ' return this.value / 1000; '
+ '}')
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
chartConfig.getTooltip().setFormatterJsFunc('function() {'
+ ' return \'\'+ this.x +\': \'+ $wnd.Highcharts.numberFormat(this.y, 0, \',\') +\' millions\';'
+ '}')
areaCfg = AreaConfig()
areaCfg.setStacking(Stacking.NORMAL)
areaCfg.setLineColor(RGB(102, 102, 102))
areaCfg.setLineWidth(1)
marker = SymbolMarker()
marker.setLineColor(RGB(102, 102, 102))
marker.setLineWidth(1)
areaCfg.setMarker(marker)
chartConfig.addSeriesConfig(areaCfg)
chart = InvientCharts(chartConfig)
series = XYSeries('Asia')
series.setSeriesPoints(self.getPoints(series,
[502, 635, 809, 947, 1402, 3634, 5268]))
chart.addSeries(series)
series = XYSeries('Africa')
series.setSeriesPoints(self.getPoints(series,
[106, 107, 111, 133, 221, 767, 1766]))
chart.addSeries(series)
series = XYSeries('Europe')
series.setSeriesPoints(self.getPoints(series,
[163, 203, 276, 408, 547, 729, 628]))
chart.addSeries(series)
series = XYSeries('America')
series.setSeriesPoints(self.getPoints(series,
[18, 31, 54, 156, 339, 818, 1201]))
chart.addSeries(series)
series = XYSeries('Oceania')
series.setSeriesPoints(self.getPoints(series,
[2, 2, 2, 6, 13, 30, 46]))
chart.addSeries(series)
self.addChart(chart)
def showAreaPercent(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.AREA)
chartConfig.getTitle().setText('Historic and Estimated Worldwide '
'Population Distribution by Region')
chartConfig.getSubtitle().setText('Source: Wikipedia.org')
xAxis = CategoryAxis()
xAxis.setCategories(['1750', '1800', '1850', '1900', '1950',
'1999', '2050'])
tick = Tick()
tick.setPlacement(TickmarkPlacement.ON)
xAxis.setTick(tick)
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Percent'))
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'\' + this.x +\': \' + $wnd.Highcharts.numberFormat(this.percentage, 1) + '
+ ' \'% (\'+ $wnd.Highcharts.numberFormat(this.y, 0, \',\') +\' millions)\'; '
+ '}')
areaCfg = AreaConfig()
areaCfg.setStacking(Stacking.PERCENT)
areaCfg.setLineColor(RGB(255, 255, 255))
areaCfg.setLineWidth(1)
marker = SymbolMarker()
marker.setLineColor(RGB(255, 255, 255))
marker.setLineWidth(1)
areaCfg.setMarker(marker)
chartConfig.addSeriesConfig(areaCfg)
chart = InvientCharts(chartConfig)
series = XYSeries('Asia')
series.setSeriesPoints(self.getPoints(series,
[502, 635, 809, 947, 1402, 3634, 5268]))
chart.addSeries(series)
series = XYSeries('Africa')
series.setSeriesPoints(self.getPoints(series,
[106, 107, 111, 133, 221, 767, 1766]))
chart.addSeries(series)
series = XYSeries('Europe')
series.setSeriesPoints(self.getPoints(series,
[163, 203, 276, 408, 547, 729, 628]))
chart.addSeries(series)
series = XYSeries('America')
series.setSeriesPoints(self.getPoints(series,
[18, 31, 54, 156, 339, 818, 1201]))
chart.addSeries(series)
series = XYSeries('Oceania')
series.setSeriesPoints(self.getPoints(series,
[2, 2, 2, 6, 13, 30, 46]))
chart.addSeries(series)
self.addChart(chart)
def showAreaBasic(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.AREA)
chartConfig.getTitle().setText('US and USSR nuclear stockpiles')
chartConfig.getSubtitle().setText(
'Source: <a href=\'http://thebulletin.metapress.com/content/c4120650912x74k7/fulltext.pdf\'>thebulletin.metapress.com</a>')
xAxis = NumberXAxis()
xAxis.setLabel(XAxisDataLabel())
xAxis.getLabel().setFormatterJsFunc(
'function() {'
+ ' return this.value;'
+ '}')
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Nuclear weapon states'))
yAxis.setLabel(YAxisDataLabel())
yAxis.getLabel().setFormatterJsFunc(
'function() {'
+ ' return this.value / 1000 +\'k\';'
+ '}')
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return this.series.name +\' produced <b>\'+'
+ ' $wnd.Highcharts.numberFormat(this.y, 0) +\'</b><br/>warheads in \'+ this.x;'
+ '}')
areaCfg = AreaConfig()
areaCfg.setPointStart(1940.0)
marker = SymbolMarker()
areaCfg.setMarker(marker)
marker.setEnabled(False)
marker.setSymbol(Symbol.CIRCLE)
marker.setRadius(2)
marker.setHoverState(MarkerState(True))
chartConfig.addSeriesConfig(areaCfg)
chart = InvientCharts(chartConfig)
        # USA series
usaAreaCfg = AreaConfig()
usaAreaCfg.setPointStart(1940.0)
series = XYSeries('USA', usaAreaCfg)
points = set()
self.addNullPoints(points, series, 5)
points = points.union(self.getPoints(series,
[6, 11, 32, 110, 235, 369, 640, 1005, 1436, 2063, 3057, 4618,
6444, 9822, 15468, 20434, 24126, 27387, 29459, 31056, 31982,
32040, 31233, 29224, 27342, 26662, 26956, 27912, 28999,
28965, 27826, 25579, 25722, 24826, 24605, 24304, 23464, 23708,
24099, 24357, 24237, 24401, 24344, 23586, 22380, 21004, 17287,
14747, 13076, 12555, 12144, 11009, 10950, 10871, 10824, 10577,
10527, 10475, 10421, 10358, 10295, 10104]))
series.setSeriesPoints(points)
chart.addSeries(series)
russiaAreaCfg = AreaConfig()
russiaAreaCfg.setPointStart(1940.0)
series = XYSeries('USSR/Russia', russiaAreaCfg)
points = set()
self.addNullPoints(points, series, 10)
points = points.union(self.getPoints(series,
[5, 25, 50, 120, 150, 200, 426, 660, 869, 1060, 1605, 2471,
3322, 4238, 5221, 6129, 7089, 8339, 9399, 10538, 11643,
13092, 14478, 15915, 17385, 19055, 21205, 23044, 25393,
27935, 30062, 32049, 33952, 35804, 37431, 39197, 45000,
43000, 41000, 39000, 37000, 35000, 33000, 31000, 29000,
27000, 25000, 24000, 23000, 22000, 21000, 20000, 19000,
18000, 18000, 17000, 16000]))
series.setSeriesPoints(points)
chart.addSeries(series)
self.addChart(chart)
def addNullPoints(self, points, series, howManyNullPoints):
for _ in range(howManyNullPoints):
points.add(DecimalPoint(series))
def showAreaSpline(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.AREASPLINE)
chartConfig.getTitle().setText('Average fruit consumption during '
'one week')
legend = Legend()
legend.setLayout(Layout.VERTICAL)
legendPos = Position()
legendPos.setAlign(HorzAlign.LEFT)
legendPos.setVertAlign(VertAlign.TOP)
legendPos.setX(150)
legendPos.setY(100)
legend.setPosition(legendPos)
legend.setFloating(True)
legend.setBorderWidth(1)
legend.setBackgroundColor(RGB(255, 255, 255))
chartConfig.setLegend(legend)
xAxis = CategoryAxis()
xAxis.setCategories(['Monday', 'Tuesday', 'Wednesday', 'Thursday',
'Friday', 'Saturday', 'Sunday'])
plotBand = NumberPlotBand('sat-sun')
plotBand.setRange(NumberRange(4.6, 6.5))
plotBand.setColor(RGBA(68, 170, 213, 0.2))
xAxis.addPlotBand(plotBand)
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Fruit units'))
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
chartConfig.getCredit().setEnabled(False)
areaSpline = AreaSplineConfig()
areaSpline.setFillOpacity(0.5)
chartConfig.addSeriesConfig(areaSpline)
chart = InvientCharts(chartConfig)
series = XYSeries('John')
series.setSeriesPoints(self.getPoints(series, [3, 4, 3, 5, 4, 10, 12]))
chart.addSeries(series)
series = XYSeries('Jane')
series.setSeriesPoints(self.getPoints(series, [1, 3, 4, 3, 3, 5, 4]))
chart.addSeries(series)
self.addChart(chart)
def showPieWithLegend(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.PIE)
chartConfig.getTitle().setText('Browser market shares at a specific website, 2010')
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'<b>\'+ this.point.name +\'</b>: \'+ this.y +\' %\'; '
+ '}')
pie = PieConfig()
pie.setAllowPointSelect(True)
pie.setCursor('pointer')
pie.setDataLabel(PieDataLabel(False))
pie.setShowInLegend(True)
chartConfig.addSeriesConfig(pie)
chart = InvientCharts(chartConfig)
series = XYSeries('Browser Share')
points = set()
points.add(DecimalPoint(series, 'Firefox', 45.0))
points.add(DecimalPoint(series, 'IE', 26.8))
config = PointConfig(True)
points.add(DecimalPoint(series, 'Chrome', 12.8, config))
points.add(DecimalPoint(series, 'Safari', 8.5))
points.add(DecimalPoint(series, 'Opera', 6.2))
points.add(DecimalPoint(series, 'Others', 0.7))
series.setSeriesPoints(points)
chart.addSeries(series)
self.addChart(chart)
def showDonut(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.PIE)
chartConfig.getGeneralChartConfig().setMargin(Margin())
chartConfig.getGeneralChartConfig().getMargin().setTop(50)
chartConfig.getGeneralChartConfig().getMargin().setRight(0)
chartConfig.getGeneralChartConfig().getMargin().setBottom(0)
chartConfig.getGeneralChartConfig().getMargin().setLeft(0)
chartConfig.getTitle().setText(
'Browser market shares at a specific website')
chartConfig.getSubtitle().setText(
'Inner circle: 2008, outer circle: 2010')
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'<b>\'+ this.series.name +\'</b><br/>\'+ '
+ ' this.point.name +\': \'+ this.y +\' %\'; '
+ '}')
chart = InvientCharts(chartConfig)
pieCfg = PieConfig()
pieCfg.setInnerSize(65)
pieCfg.setDataLabel(PieDataLabel(False))
series = XYSeries('2008', SeriesType.PIE, pieCfg)
points = set()
points.add(self.getPointWithColor(series, 'Firefox', 44.2,
RGB(69, 114, 167)))
points.add(self.getPointWithColor(series, 'IE', 46.6,
RGB(170, 70, 67)))
points.add(self.getPointWithColor(series, 'Chrome', 3.1,
RGB(137, 165, 78)))
points.add(self.getPointWithColor(series, 'Safari', 2.7,
RGB(128, 105, 155)))
points.add(self.getPointWithColor(series, 'Opera', 2.3,
RGB(128, 105, 155)))
points.add(self.getPointWithColor(series, 'Mozilla', 0.4,
RGB(219, 132, 61)))
series.setSeriesPoints(points)
chart.addSeries(series)
pieCfg = PieConfig()
pieCfg.setInnerSize(150)
pieCfg.setDataLabel(PieDataLabel())
pieCfg.setColor(RGB(0, 0, 0))
pieCfg.getDataLabel().setConnectorColor(RGB(0, 0, 0))
series = XYSeries('2010', SeriesType.PIE, pieCfg)
points = set()
points.add(self.getPointWithColor(series, 'Firefox', 45.0,
RGB(69, 114, 167)))
points.add(self.getPointWithColor(series, 'IE', 26.8,
RGB(170, 70, 67)))
points.add(self.getPointWithColor(series, 'Chrome', 12.8,
RGB(137, 165, 78)))
points.add(self.getPointWithColor(series, 'Safari', 8.5,
RGB(128, 105, 155)))
points.add(self.getPointWithColor(series, 'Opera', 6.2,
RGB(128, 105, 155)))
points.add(self.getPointWithColor(series, 'Mozilla', 0.2,
RGB(219, 132, 61)))
series.setSeriesPoints(points)
chart.addSeries(series)
self.addChart(chart)
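    # Helper: a DecimalPoint whose colour is set through a per-point
    # PointConfig.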
def getPointWithColor(self, series, name, y, color):
point = DecimalPoint(series, name, y)
point.setConfig(PointConfig(color))
return point
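    # Basic pie chart with data labels enabled and a custom label formatter.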
def showPie(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.PIE)
chartConfig.getTitle().setText('Browser market shares at a specific '
'website, 2010')
pieCfg = PieConfig()
pieCfg.setAllowPointSelect(True)
pieCfg.setCursor('pointer')
pieCfg.setDataLabel(PieDataLabel())
pieCfg.getDataLabel().setEnabled(True)
pieCfg.getDataLabel().setFormatterJsFunc(
'function() {'
+ ' return \'<b>\'+ this.point.name +\'</b>: \'+ this.y +\' %\';'
+ '}')
pieCfg.getDataLabel().setConnectorColor(RGB(0, 0, 0))
chartConfig.addSeriesConfig(pieCfg)
chart = InvientCharts(chartConfig)
series = XYSeries('Browser Share')
points = set()
points.add(DecimalPoint(series, 'Firefox', 45.0))
points.add(DecimalPoint(series, 'IE', 26.8))
config = PointConfig(True)
points.add(DecimalPoint(series, 'Chrome', 12.8, config))
points.add(DecimalPoint(series, 'Safari', 8.5))
points.add(DecimalPoint(series, 'Opera', 6.2))
points.add(DecimalPoint(series, 'Others', 0.7))
series.setSeriesPoints(points)
chart.addSeries(series)
self.addChart(chart)
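    # Height-vs-weight scatter with XY zooming, a floating legend and one
    # series (and colour) per gender.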
def showScatter(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.SCATTER)
chartConfig.getGeneralChartConfig().setZoomType(ZoomType.XY)
chartConfig.getTitle().setText(
'Height Versus Weight of Individuals by Gender')
chartConfig.getSubtitle().setText('Source: Heinz 2003')
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'\' + this.x + \' cm, \' + this.y + \' kg\'; '
+ '}')
xAxis = NumberXAxis()
xAxis.setTitle(AxisTitle('Height (cm)'))
xAxis.setStartOnTick(True)
xAxis.setEndOnTick(True)
xAxis.setShowLastLabel(True)
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Weight (kg)'))
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
legend = Legend()
legend.setLayout(Layout.VERTICAL)
legendPos = Position()
legendPos.setAlign(HorzAlign.LEFT)
legendPos.setVertAlign(VertAlign.TOP)
legendPos.setX(100)
legendPos.setY(70)
legend.setPosition(legendPos)
legend.setFloating(True)
legend.setBorderWidth(1)
legend.setBackgroundColor(RGB(255, 255, 255))
chartConfig.setLegend(legend)
scatterCfg = ScatterConfig()
marker = SymbolMarker(5)
scatterCfg.setMarker(marker)
marker.setHoverState(MarkerState())
marker.getHoverState().setEnabled(True)
marker.getHoverState().setLineColor(RGB(100, 100, 100))
chartConfig.addSeriesConfig(scatterCfg)
chart = InvientCharts(chartConfig)
femaleScatterCfg = ScatterConfig()
femaleScatterCfg.setColor(RGBA(223, 83, 83, 0.5))
series = XYSeries('Female', femaleScatterCfg)
series.setSeriesPoints(self.getScatterFemalePoints(series))
chart.addSeries(series)
maleScatterCfg = ScatterConfig()
maleScatterCfg.setColor(RGBA(119, 152, 191, 0.5))
series = XYSeries('Male', maleScatterCfg)
series.setSeriesPoints(self.getScatterMalePoints(series))
chart.addSeries(series)
self.addChart(chart)
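    # Combination: a two-point LINE series draws the regression line on top
    # of a SCATTER series of observations.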
def showCombinationScatterWithRegressionLine(self):
chartConfig = InvientChartsConfig()
chartConfig.getTitle().setText('Scatter plot with regression line')
xAxis = NumberXAxis()
xAxis.setMin(-0.5)
xAxis.setMax(5.5)
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setMin(0.0)
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
chart = InvientCharts(chartConfig)
# Line series
lineCfg = LineConfig()
lineCfg.setMarker(SymbolMarker(False))
lineCfg.setHoverState(SeriesState())
lineCfg.getHoverState().setLineWidth(0)
lineSeries = XYSeries('Regression Line', lineCfg)
lineSeries.setType(SeriesType.LINE)
lineSeries.setSeriesPoints(self.getPoints(lineSeries,
[[0, 1.11], [5, 4.51]]))
chart.addSeries(lineSeries)
# Scatter series
scatterCfg = ScatterConfig()
scatterCfg.setMarker(SymbolMarker(4))
scatterSeries = XYSeries('Observations', scatterCfg)
scatterSeries.setType(SeriesType.SCATTER)
scatterSeries.setSeriesPoints(self.getPoints(scatterSeries,
[1, 1.5, 2.8, 3.5, 3.9, 4.2]))
chart.addSeries(scatterSeries)
self.addChart(chart)
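    # Inverted spline; the axis and tooltip formatters append the units
    # (km, degrees Celsius) to the raw values.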
def showSpline(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.SPLINE)
chartConfig.getGeneralChartConfig().setInverted(True)
chartConfig.getGeneralChartConfig().setWidth(500)
chartConfig.getTitle().setText('Atmosphere Temperature by Altitude')
chartConfig.getSubtitle().setText(
'According to the Standard Atmosphere Model')
xAxis = NumberXAxis()
xAxis.setReversed(False)
xAxis.setTitle(AxisTitle('Altitude'))
xAxis.setLabel(XAxisDataLabel())
xAxis.getLabel().setFormatterJsFunc(
'function() {'
+ ' return this.value +\'km\';'
+ '}')
xAxis.setMaxPadding(0.05)
xAxis.setShowLastLabel(True)
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Temperature'))
yAxis.setLineWidth(2)
yAxis.setLabel(YAxisDataLabel())
yAxis.getLabel().setFormatterJsFunc(
'function() {'
+ u' return this.value + \'\u2103\';'.encode('utf-8')
+ '}')
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
tooltip = Tooltip()
tooltip.setFormatterJsFunc(
'function() {'
+ u' return \'\' + this.x +\' km: \'+ this.y +\'\u2103\';'.encode('utf-8')
+ '}')
chartConfig.setTooltip(tooltip)
legend = Legend()
legend.setEnabled(False)
chartConfig.setLegend(legend)
splineCfg = SplineConfig()
splineCfg.setMarker(SymbolMarker(True))
chartConfig.addSeriesConfig(splineCfg)
chart = InvientCharts(chartConfig)
series = XYSeries('Temperature')
series.setSeriesPoints(self.getPoints(series,
[[0, 15], [10, -50], [20, -56.5], [30, -46.5], [40, -22.1],
[50, -2.5], [60, -27.7], [70, -55.7], [80, -76.5]]))
chart.addSeries(series)
self.addChart(chart)
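    # Splines with per-series marker symbols; the hottest and coldest points
    # use ImageMarkers instead of the series symbol.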
def showSplineWithSymbol(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.SPLINE)
chartConfig.getTitle().setText('Monthly Average Temperature')
chartConfig.getSubtitle().setText('Source: WorldClimate.com')
xAxis = CategoryAxis()
xAxis.setCategories(['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Temperature'))
yAxis.setLabel(YAxisDataLabel())
yAxis.getLabel().setFormatterJsFunc(
'function() {' +
u' return this.value + \'\u2103\';'.encode('utf-8') +
'}')
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
tooltip = Tooltip()
tooltip.setCrosshairs(True)
tooltip.setShared(True)
chartConfig.setTooltip(tooltip)
splineCfg = SplineConfig()
symbolMarker = SymbolMarker(True)
symbolMarker.setRadius(4)
symbolMarker.setLineColor(RGB(102, 102, 102))
symbolMarker.setLineWidth(1)
splineCfg.setMarker(symbolMarker)
chartConfig.addSeriesConfig(splineCfg)
chart = InvientCharts(chartConfig)
splineCfg = SplineConfig()
splineCfg.setMarker(SymbolMarker(Symbol.SQUARE))
series = XYSeries('Tokyo', splineCfg)
series.setSeriesPoints(self.getPoints(series,
[7.0, 6.9, 9.5, 14.5, 18.2, 21.5, 25.2]))
config = PointConfig(ImageMarker('/graphics/sun.png'))
highest = DecimalPoint(series, 26.5, config)
series.addPoint(highest)
series.addPoint(DecimalPoint(series, 23.3))
series.addPoint(DecimalPoint(series, 18.3))
series.addPoint(DecimalPoint(series, 13.9))
series.addPoint(DecimalPoint(series, 9.6))
chart.addSeries(series)
splineCfg = SplineConfig()
splineCfg.setMarker(SymbolMarker(Symbol.DIAMOND))
series = XYSeries('London', splineCfg)
config = PointConfig(ImageMarker('/graphics/snow.png'))
lowest = DecimalPoint(series, 3.9, config)
series.addPoint(lowest)
series.addPoint(DecimalPoint(series, 4.2))
series.addPoint(DecimalPoint(series, 5.7))
series.addPoint(DecimalPoint(series, 8.5))
series.addPoint(DecimalPoint(series, 11.9))
series.addPoint(DecimalPoint(series, 15.2))
series.addPoint(DecimalPoint(series, 17.0))
series.addPoint(DecimalPoint(series, 16.6))
series.addPoint(DecimalPoint(series, 14.2))
series.addPoint(DecimalPoint(series, 10.3))
series.addPoint(DecimalPoint(series, 6.6))
series.addPoint(DecimalPoint(series, 4.8))
chart.addSeries(series)
self.addChart(chart)
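    # Live spline seeded with random points; a background thread keeps
    # appending points unless the app runs on Google App Engine, which does
    # not support threads.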
def showSplineUpdatingEachSecond(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.SPLINE)
chartConfig.getGeneralChartConfig().setMargin(Margin())
chartConfig.getGeneralChartConfig().getMargin().setRight(10)
chartConfig.getTitle().setText('Live random data')
xAxis = DateTimeAxis()
xAxis.setTick(Tick())
xAxis.getTick().setPixelInterval(150)
xAxes = set()
xAxes.add(xAxis)
chartConfig.setXAxes(xAxes)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Value'))
plotLine = NumberPlotLine('LineAt0')
yAxis.addPlotLine(plotLine)
plotLine.setValue(NumberValue(0.0))
plotLine.setWidth(1)
plotLine.setColor(RGB(128, 128, 128))
yAxes = set()
yAxes.add(yAxis)
chartConfig.setYAxes(yAxes)
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'<b>\'+ this.series.name +\'</b><br/>\'+ '
+ ' $wnd.Highcharts.dateFormat(\'%Y-%m-%d %H:%M:%S\', this.x) +\'<br/>\'+ '
+ ' $wnd.Highcharts.numberFormat(this.y, 2);'
+ '}')
chartConfig.getLegend().setEnabled(False)
chart = InvientCharts(chartConfig)
seriesData = DateTimeSeries(chart, 'Random Data', True)
points = set()
dtNow = datetime.now()
# Add random data.
for cnt in range(-19, 0):
points.add(DateTimePoint(seriesData,
self.getUpdatedDate(dtNow, cnt), random()))
seriesData.setSeriesPoints(points)
chart.addSeries(seriesData)
self.addChart(chart, False, False, False)
self._indicator = ProgressIndicator(0.2)
self._indicator.setPollingInterval(1000)
self._indicator.setStyleName('i-progressindicator-invisible')
self._rightLayout.addComponent(self._indicator)
if not self.isAppRunningOnGAE():
self._splineThread = SelfUpdateSplineThread(chart)
self._splineThread.start()
else:
self.getApplication().getMainWindow().showNotification(
'This chart does not auto-update because Google App '
'Engine does not support threads.')
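    # Stops the live-update thread, if one was started, and disables the
    # polling indicator.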
def stopSplineSelfUpdateThread(self):
if self._splineThread is not None:
self._splineThread.stopUpdating()
self._indicator.setEnabled(False)
self.getApplication().notifyAll()
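    # getDate() is assumed to return seconds since the epoch, so this shifts
    # dt by the given (possibly negative) number of seconds.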
@classmethod
def getUpdatedDate(cls, dt, seconds):
ts = getDate(dt) + seconds
return datetime.fromtimestamp(ts)
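    # Wind-speed spline; the y-axis is divided into named Beaufort-style
    # bands using alternating NumberPlotBands.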
def showSplineWithPlotBands(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setType(SeriesType.SPLINE)
chartConfig.getTitle().setText('Wind speed during two days')
chartConfig.getSubtitle().setText('October 6th and 7th 2009 at two '
'locations in Vik i Sogn, Norway')
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' return \'\' + $wnd.Highcharts.dateFormat(\'%e. %b %Y, %H:00\', this.x) +\': \'+ this.y +\' m/s\'; '
+ '}')
xAxis = DateTimeAxis()
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Wind speed (m/s)'))
yAxis.setMin(0.0)
yAxis.setMinorGrid(MinorGrid())
yAxis.getMinorGrid().setLineWidth(0)
yAxis.setGrid(Grid())
yAxis.getGrid().setLineWidth(0)
numberBand = NumberPlotBand('Light air')
numberBand.setRange(NumberRange(0.3, 1.5))
numberBand.setColor(RGBA(68, 170, 213, 0.1))
numberBand.setLabel(PlotLabel('Light air'))
numberBand.getLabel().setStyle('{ color: \'#606060\' }')
yAxis.getPlotBands().add(numberBand)
numberBand = NumberPlotBand('Light breeze')
numberBand.setRange(NumberRange(1.5, 3.3))
numberBand.setColor(RGBA(0, 0, 0, 0.0))
numberBand.setLabel(PlotLabel('Light breeze'))
numberBand.getLabel().setStyle('{ color: \'#606060\' }')
yAxis.getPlotBands().add(numberBand)
numberBand = NumberPlotBand('Gentle breeze')
numberBand.setRange(NumberRange(3.3, 5.5))
numberBand.setColor(RGBA(68, 170, 213, 0.1))
numberBand.setLabel(PlotLabel('Gentle breeze'))
numberBand.getLabel().setStyle('{ color: \'#606060\' }')
yAxis.getPlotBands().add(numberBand)
numberBand = NumberPlotBand('Moderate breeze')
numberBand.setRange(NumberRange(5.5, 8.0))
numberBand.setColor(RGBA(0, 0, 0, 0.0))
numberBand.setLabel(PlotLabel('Moderate breeze'))
numberBand.getLabel().setStyle('{ color: \'#606060\' }')
yAxis.getPlotBands().add(numberBand)
numberBand = NumberPlotBand('Fresh breeze')
numberBand.setRange(NumberRange(8.0, 11.0))
numberBand.setColor(RGBA(68, 170, 213, 0.1))
numberBand.setLabel(PlotLabel('Fresh breeze'))
numberBand.getLabel().setStyle('{ color: \'#606060\' }')
yAxis.getPlotBands().add(numberBand)
numberBand = NumberPlotBand('Strong breeze')
numberBand.setRange(NumberRange(11.0, 14.0))
numberBand.setColor(RGBA(0, 0, 0, 0.0))
numberBand.setLabel(PlotLabel('Strong breeze'))
numberBand.getLabel().setStyle('{ color: \'#606060\' }')
yAxis.getPlotBands().add(numberBand)
numberBand = NumberPlotBand('High wind')
numberBand.setRange(NumberRange(14.0, 15.0))
numberBand.setColor(RGBA(68, 170, 213, 0.1))
numberBand.setLabel(PlotLabel('High wind'))
numberBand.getLabel().setStyle('{ color: \'#606060\' }')
yAxis.getPlotBands().add(numberBand)
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
splineCfg = SplineConfig()
splineCfg.setLineWidth(4)
splineCfg.setHoverState(SeriesState())
splineCfg.getHoverState().setLineWidth(5)
symbolMarker = SymbolMarker(False)
splineCfg.setMarker(symbolMarker)
symbolMarker.setSymbol(Symbol.CIRCLE)
symbolMarker.setHoverState(MarkerState())
symbolMarker.getHoverState().setEnabled(True)
symbolMarker.getHoverState().setRadius(5)
symbolMarker.getHoverState().setLineWidth(1)
splineCfg.setPointStart(self.getPointStartDate(2009, 8, 6))
splineCfg.setPointInterval(3600.0 * 1000.0)
chartConfig.addSeriesConfig(splineCfg)
chart = InvientCharts(chartConfig)
series = DateTimeSeries(chart, 'Hestavollane', splineCfg, True)
series.setSeriesPoints(self.getDateTimePoints(series,
[4.3, 5.1, 4.3, 5.2, 5.4, 4.7, 3.5, 4.1, 5.6, 7.4, 6.9, 7.1,
7.9, 7.9, 7.5, 6.7, 7.7, 7.7, 7.4, 7.0, 7.1, 5.8, 5.9, 7.4,
8.2, 8.5, 9.4, 8.1, 10.9, 10.4, 10.9, 12.4, 12.1, 9.5, 7.5,
7.1, 7.5, 8.1, 6.8, 3.4, 2.1, 1.9, 2.8, 2.9, 1.3, 4.4, 4.2,
3.0, 3.0]))
chart.addSeries(series)
series = DateTimeSeries(chart, 'Voll', splineCfg, True)
series.setSeriesPoints(self.getDateTimePoints(series,
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1, 0.0, 0.3, 0.0,
0.0, 0.4, 0.0, 0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.6, 1.2, 1.7, 0.7, 2.9, 4.1, 2.6, 3.7, 3.9, 1.7, 2.3,
3.0, 3.3, 4.8, 5.0, 4.8, 5.0, 3.2, 2.0, 0.9, 0.4, 0.3, 0.5,
0.4]))
chart.addSeries(series)
self.addChart(chart)
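    # Combination: three COLUMN series, a SPLINE of their averages and a
    # small inline PIE of the totals placed via centerX/centerY.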
def showCombination(self):
chartConfig = InvientChartsConfig()
chartConfig.getTitle().setText('Combination chart')
tooltip = Tooltip()
tooltip.setFormatterJsFunc(
'function() {'
+ ' if (this.point.name) { // the pie chart '
+ ' return this.point.name +\': \'+ this.y +\' fruits\'; '
+ ' } else {'
+ ' return this.x +\': \'+ this.y; '
+ ' } '
+ '}')
xAxis = CategoryAxis()
xAxis.setCategories(['Apples', 'Oranges', 'Pears', 'Bananas', 'Plums'])
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setAllowDecimals(False)
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
chart = InvientCharts(chartConfig)
seriesData = XYSeries('Jane', SeriesType.COLUMN)
seriesData.setSeriesPoints(self.getPoints(seriesData, [3, 2, 1, 3, 4]))
chart.addSeries(seriesData)
seriesData = XYSeries('John', SeriesType.COLUMN)
seriesData.setSeriesPoints(self.getPoints(seriesData, [2, 3, 5, 7, 6]))
chart.addSeries(seriesData)
seriesData = XYSeries('Joe', SeriesType.COLUMN)
seriesData.setSeriesPoints(self.getPoints(seriesData, [4, 3, 3, 9, 0]))
chart.addSeries(seriesData)
seriesData = XYSeries('Average', SeriesType.SPLINE)
seriesData.setSeriesPoints(self.getPoints(seriesData,
[3, 2.67, 3, 6.33, 3.33]))
chart.addSeries(seriesData)
# Series Total consumption
pieCfg = PieConfig()
pieCfg.setCenterX(100)
pieCfg.setCenterY(80)
pieCfg.setSize(100)
pieCfg.setShowInLegend(False)
pieCfg.setDataLabel(PieDataLabel())
pieCfg.getDataLabel().setEnabled(False)
totalConsumpSeriesData = XYSeries('Total consumption',
SeriesType.PIE, pieCfg)
config = PointConfig(RGB(69, 114, 167))
point = DecimalPoint(totalConsumpSeriesData, 'Jane', 13, config)
totalConsumpSeriesData.addPoint(point)
config = PointConfig(RGB(170, 70, 67))
point = DecimalPoint(totalConsumpSeriesData, 'John', 23, config)
totalConsumpSeriesData.addPoint(point)
config = PointConfig(RGB(137, 165, 78))
point = DecimalPoint(totalConsumpSeriesData, 'Joe', 19, config)
totalConsumpSeriesData.addPoint(point)
chartLabel = ChartLabel()
chartLabel.addLabel(ChartLabelItem('Total fruit consumption',
'{ left: \'40px\', top: \'8px\', color: \'black\' }'))
chartConfig.setChartLabel(chartLabel)
chart.addSeries(totalConsumpSeriesData)
self.addChart(chart)
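    # Combination with three y-axes; the rainfall and pressure series are
    # bound to their own axes with setYAxis(), temperature falls back to the
    # default axis.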
def showCombinationMultipleAxes(self):
chartConfig = InvientChartsConfig()
chartConfig.getTitle().setText(
'Average Monthly Weather Data for Tokyo')
chartConfig.getSubtitle().setText('Source: WorldClimate.com')
chartConfig.getTooltip().setFormatterJsFunc(
'function() {'
+ ' var unit = { '
+ ' \'Rainfall\': \'mm\','
+ u' \'Temperature\': \'\u2103\','.encode('utf-8')
+ ' \'Sea-Level Pressure\': \'mb\''
+ ' }[this.series.name];'
+ ' return \'\' + this.x + \': \' + this.y + \' \' + unit; '
+ '}')
legend = Legend()
legend.setLayout(Layout.VERTICAL)
legend.setPosition(Position())
legend.getPosition().setAlign(HorzAlign.LEFT)
legend.getPosition().setVertAlign(VertAlign.TOP)
legend.getPosition().setX(120)
legend.getPosition().setY(80)
legend.setFloating(True)
legend.setBackgroundColor(RGB(255, 255, 255))
chartConfig.setLegend(legend)
xAxis = CategoryAxis()
xAxis.setCategories(['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
# Multiple axes
temperatureAxis = NumberYAxis()
temperatureAxis.setAllowDecimals(False)
temperatureAxis.setLabel(YAxisDataLabel())
temperatureAxis.getLabel().setFormatterJsFunc(
'function() {'
+ u' return this.value +\'\u2103\'; '.encode('utf-8')
+ '}')
temperatureAxis.getLabel().setStyle('{ color: \'#89A54E\' }')
temperatureAxis.setTitle(AxisTitle('Temperature'))
temperatureAxis.getTitle().setStyle(' { color: \'#89A54E\' }')
temperatureAxis.setOpposite(True)
yAxesSet = set()
yAxesSet.add(temperatureAxis)
# secondary y-axis
rainfallAxis = NumberYAxis()
rainfallAxis.setGrid(Grid())
rainfallAxis.getGrid().setLineWidth(0)
rainfallAxis.setTitle(AxisTitle('Rainfall'))
rainfallAxis.getTitle().setStyle(' { color: \'#4572A7\' }')
rainfallAxis.setLabel(YAxisDataLabel())
rainfallAxis.getLabel().setStyle('{ color: \'#4572A7\' }')
rainfallAxis.getLabel().setFormatterJsFunc(
'function() {'
+ ' return this.value +\' mm\'; '
+ '}')
yAxesSet.add(rainfallAxis)
# tertiary y-axis
sealevelPressureAxis = NumberYAxis()
sealevelPressureAxis.setGrid(Grid())
sealevelPressureAxis.getGrid().setLineWidth(0)
sealevelPressureAxis.setTitle(AxisTitle('Sea-Level Pressure'))
sealevelPressureAxis.getTitle().setStyle(' { color: \'#AA4643\' }')
sealevelPressureAxis.setLabel(YAxisDataLabel())
sealevelPressureAxis.getLabel().setStyle('{ color: \'#AA4643\' }')
sealevelPressureAxis.getLabel().setFormatterJsFunc(
'function() {'
+ ' return this.value +\' mb\'; '
+ '}')
sealevelPressureAxis.setOpposite(True)
yAxesSet.add(sealevelPressureAxis)
chartConfig.setYAxes(yAxesSet)
chart = InvientCharts(chartConfig)
# Configuration of Rainfall series
colCfg = ColumnConfig()
colCfg.setColor(RGB(69, 114, 167))
# Rainfall series
rainfallSeriesData = XYSeries('Rainfall', SeriesType.COLUMN, colCfg)
rainfallSeriesData.setSeriesPoints(self.getPoints(rainfallSeriesData,
[49.9, 71.5, 106.4, 129.2, 144.0, 176.0, 135.6, 148.5, 216.4,
194.1, 95.6, 54.4]))
rainfallSeriesData.setYAxis(rainfallAxis)
chart.addSeries(rainfallSeriesData)
# Configuration of Sealevel series
seaLevelSplineCfg = SplineConfig()
seaLevelSplineCfg.setColor(RGB(170, 70, 67))
seaLevelSplineCfg.setMarker(SymbolMarker(False))
seaLevelSplineCfg.setDashStyle(DashStyle.SHORT_DOT)
# Sealevel series
seaLevelSeriesData = XYSeries('Sea-Level Pressure', SeriesType.SPLINE,
seaLevelSplineCfg)
seaLevelSeriesData.setSeriesPoints(self.getPoints(seaLevelSeriesData,
[1016, 1016, 1015.9, 1015.5, 1012.3, 1009.5, 1009.6, 1010.2,
1013.1, 1016.9, 1018.2, 1016.7]))
seaLevelSeriesData.setYAxis(sealevelPressureAxis)
chart.addSeries(seaLevelSeriesData)
# Configuration of Temperature series
tempSplineCfg = SplineConfig()
tempSplineCfg.setColor(RGB(137, 165, 78))
# Temperature series
tempSeriesData = XYSeries('Temperature', SeriesType.SPLINE,
tempSplineCfg)
tempSeriesData.setSeriesPoints(self.getPoints(tempSeriesData,
[7.0, 6.9, 9.5, 14.5, 18.2, 21.5, 25.2, 26.5, 23.3, 18.3,
13.9, 9.6]))
chart.addSeries(tempSeriesData)
self.addChart(chart)
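    # Zoomable USD-to-EUR time series: x-zoom capped at fourteen days and an
    # area fill built from a linear colour gradient.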
def showTimeSeriesZoomable(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setZoomType(ZoomType.X)
chartConfig.getGeneralChartConfig().setSpacing(Spacing())
chartConfig.getGeneralChartConfig().getSpacing().setRight(20)
chartConfig.getSubtitle().setText(
'Click and drag in the plot area to zoom in')
xAxis = DateTimeAxis()
xAxis.setMaxZoom(14 * 24 * 3600 * 1000.0)
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Exchange rate'))
yAxis.setMin(0.6)
yAxis.setStartOnTick(True)
yAxis.setShowFirstLabel(False)
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
chartConfig.getTooltip().setShared(True)
chartConfig.getLegend().setEnabled(False)
# Set plot options
areaCfg = AreaConfig()
colorStops = list()
colorStops.append(LinearColorStop(0, RGB(69, 114, 167)))
colorStops.append(LinearColorStop(1, RGBA(2, 0, 0, 0)))
# Fill color
areaCfg.setFillColor(LinearGradient(0, 0, 0, 300, colorStops))
areaCfg.setLineWidth(1)
areaCfg.setShadow(False)
areaCfg.setHoverState(SeriesState())
areaCfg.getHoverState().setLineWidth(1)
marker = SymbolMarker(False)
areaCfg.setMarker(marker)
marker.setHoverState(MarkerState())
marker.getHoverState().setEnabled(True)
marker.getHoverState().setRadius(5)
chartConfig.addSeriesConfig(areaCfg)
chart = InvientCharts(chartConfig)
# Area configuration
        seriesAreaCfg = AreaConfig()
        seriesAreaCfg.setPointStart(self.getPointStartDate(2006, 1, 1))
        seriesAreaCfg.setPointInterval(24 * 3600 * 1000.0)
        # Series
        dateTimeSeries = DateTimeSeries(chart, 'USD to EUR', SeriesType.AREA,
                seriesAreaCfg)
points = self.getDateTimeSeriesPoints(dateTimeSeries)
dateTimeSeries.addPoint(points)
chart.addSeries(dateTimeSeries)
self.addChart(chart)
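    # Place a chart in the right-hand layout (prepending for the
    # master-detail demo) and wire up the event log and SVG/print buttons.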
def addChart(self, chart, isPrepend=False, isRegisterEvents=True,
isRegisterSVGEvent=True, isSetHeight=True):
if isRegisterEvents:
self.registerEvents(chart)
chart.setSizeFull()
chart.setStyleName('v-chart-min-width')
if isSetHeight:
chart.setHeight('410px')
if isPrepend:
self._rightLayout.setStyleName('v-chart-master-detail')
self._rightLayout.addComponentAsFirst(chart)
else:
self._rightLayout.removeStyleName('v-chart-master-detail')
self.emptyEventLog()
self._rightLayout.removeAllComponents()
# Add chart
self._rightLayout.addComponent(chart)
# Add "Get SVG" button and register SVG available event
if isRegisterSVGEvent:
self.registerSVGAndPrintEvent(chart)
# Server events log
lbl = Label('Events received by the server:')
self._rightLayout.addComponent(lbl)
self._rightLayout.addComponent(self._eventLog)
def registerSVGAndPrintEvent(self, chart):
gridLayout = GridLayout(2, 1)
gridLayout.setWidth('100%')
gridLayout.setSpacing(True)
svgBtn = Button('Get SVG')
gridLayout.addComponent(svgBtn)
gridLayout.setComponentAlignment(svgBtn, Alignment.MIDDLE_RIGHT)
printBtn = Button('Print')
gridLayout.addComponent(printBtn)
gridLayout.setComponentAlignment(printBtn, Alignment.MIDDLE_LEFT)
self._rightLayout.addComponent(gridLayout)
l = GetSvgClickListener(self, chart)
svgBtn.addListener(l, button.IClickListener)
l = PrintClickListener(chart)
printBtn.addListener(l, button.IClickListener)
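    # Attach the demo listeners for chart, series and point events; zoom
    # listeners are only registered when a zoom type is configured.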
def registerEvents(self, chart):
l = DemoChartClickListener(self)
chart.addListener(l)
if chart.getConfig().getGeneralChartConfig().getZoomType() is not None:
l = DemoChartZoomListener(self)
chart.addListener(l)
l = DemoChartResetZoomListener(self)
chart.addListener(l)
l = DemoSeriesClickListerner(self)
chart.addListener(l, [])
l = DemoSeriesHideListerner(self)
chart.addListener(l, [])
l = DemoSeriesShowListerner(self)
chart.addListener(l, [])
l = DemoSeriesLegendItemClickListerner(self)
chart.addListener(l, [])
l = DemoPointClickListener(self)
chart.addListener(l, [])
l = DemoPointRemoveListener(self)
chart.addListener(l, [])
l = DemoPointSelectListener(self)
chart.addListener(l, [])
l = DemoPointUnselectListener(self)
chart.addListener(l, [])
l = DemoPieChartLegendItemClickListener(self)
chart.addListener(l)
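    # Milliseconds since the UNIX epoch, as Highcharts expects for
    # pointStart; e.g. getPointStartDate(2006, 1, 1) == 1136073600000.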
@classmethod
def getPointStartDate(cls, year, month, day):
dt = datetime(year, month, day)
return long(totalseconds(dt - datetime(1970, 1, 1)) * 1e03)
@classmethod
def getDateZeroTime(cls, year, month, day):
return datetime(year, month, day)
def getDateTimePoints(self, series, values):
points = OrderedSet()
for value in values:
points.add(DateTimePoint(series, value))
return points
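    # Build DecimalPoints from either plain y-values or [x] / [x, y] pairs.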
    @classmethod
    def getPoints(cls, series, values):
        points = OrderedSet()
        if len(values) > 0 and isinstance(values[0], (float, int)):
            # Plain y-values: the x-value of each point is implicit
            for value in values:
                points.add(DecimalPoint(series, value))
        else:
            # [x] or [x, y] pairs
            for value in values:
                if len(value) == 0:
                    continue
                x = value[0]
                y = value[1] if len(value) == 2 else None
                points.add(DecimalPoint(series, x, y))
        return points
@classmethod
def getFormattedTimestamp(cls, dt):
if dt is None:
return None
fmt = '%y/%m/%d %H:%M:%S'
return dt.strftime(fmt)
@classmethod
def getCurrFormattedTimestamp(cls):
return cls.getFormattedTimestamp(datetime.now())
def getChartName(self, chartNameString):
for chartName in ChartName.values():
if chartNameString.lower() == chartName.getName().lower():
return chartName
return None
def getDemoSeriesType(self, demoSeriesTypeName):
for demoSeriesType in DemoSeriesType.values():
if demoSeriesTypeName.lower() == demoSeriesType.getName().lower():
return demoSeriesType
return None
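    # Navigation tree of series types and their chart demos, fully expanded,
    # with selection changes handled by ChartTypeChangeListener.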
def createChartsTree(self):
tree = Tree('Chart Type')
tree.setContainerDataSource(self.getContainer())
tree.setImmediate(True)
tree.setItemCaptionPropertyId(self._TREE_ITEM_CAPTION_PROP_ID)
tree.setItemCaptionMode(Tree.ITEM_CAPTION_MODE_PROPERTY)
tree.setNullSelectionAllowed(False)
for Id in tree.rootItemIds():
tree.expandItemsRecursively(Id)
l = ChartTypeChangeListener(self, tree)
tree.addListener(l, IValueChangeListener)
return tree
def showChartInstancesForSeriesType(self, demoSeriesTypeName):
self._rightLayout.removeAllComponents()
demoCharts = self.getDemoCharts(self.getDemoSeriesType(
demoSeriesTypeName))
for chartName in demoCharts:
l = SeriesTypeClickListener(self)
btn = Button(chartName.getName(), l)
self._rightLayout.addComponent(btn)
btn.setWidth('200px')
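    # Two-level container: one root item per series type, with its demo
    # chart names as leaf children.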
def getContainer(self):
container = HierarchicalContainer()
        container.addContainerProperty(self._TREE_ITEM_CAPTION_PROP_ID, str, '')
for demoSeriesType in DemoSeriesType.values():
itemId = demoSeriesType.getName()
item = container.addItem(itemId)
item.getItemProperty(self._TREE_ITEM_CAPTION_PROP_ID).setValue(
demoSeriesType.getName())
container.setChildrenAllowed(itemId, True)
# add child
self.addChartNamesForSeriesType(container, itemId, demoSeriesType)
return container
def addChartNamesForSeriesType(self, container, parentId, demoSeriesType):
for chartName in self.getDemoCharts(demoSeriesType):
childItemId = (demoSeriesType.getName() + self._SEPARATOR
+ chartName.getName())
childItem = container.addItem(childItemId)
childItem.getItemProperty(
self._TREE_ITEM_CAPTION_PROP_ID).setValue(
chartName.getName())
container.setParent(childItemId, parentId)
container.setChildrenAllowed(childItemId, False)
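    # Maps each series type to the list of chart demos available for it.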
def getDemoCharts(self, demoSeriesType):
chartNames = list()
if demoSeriesType == DemoSeriesType.LINE:
chartNames.append(ChartName.BASIC)
chartNames.append(ChartName.WITH_DATA_LABELS)
chartNames.append(ChartName.TIMESERIES_ZOOMABLE)
chartNames.append(ChartName.MASTER_DETAIL)
chartNames.append(ChartName.CLICK_TO_ADD_POINT)
elif demoSeriesType == DemoSeriesType.BAR:
chartNames.append(ChartName.BASIC)
chartNames.append(ChartName.STACKED)
chartNames.append(ChartName.WITH_NEGATIVE_STACK)
elif demoSeriesType == DemoSeriesType.COLUMN:
chartNames.append(ChartName.BASIC)
chartNames.append(ChartName.WITH_NEGATIVE_VALUES)
chartNames.append(ChartName.STACKED)
chartNames.append(ChartName.STACKED_AND_GROUPED)
chartNames.append(ChartName.STACKED_PERCENT)
chartNames.append(ChartName.WITH_ROTATED_LABELS)
elif demoSeriesType == DemoSeriesType.AREA:
chartNames.append(ChartName.BASIC)
chartNames.append(ChartName.WITH_NEGATIVE_VALUES)
chartNames.append(ChartName.STACKED)
chartNames.append(ChartName.PERCENTAGE)
chartNames.append(ChartName.WITH_MISSING_POINTS)
chartNames.append(ChartName.INVERTED_AXES)
elif demoSeriesType == DemoSeriesType.AREASPLINE:
chartNames.append(ChartName.BASIC)
elif demoSeriesType == DemoSeriesType.PIE:
chartNames.append(ChartName.BASIC)
chartNames.append(ChartName.WITH_LEGEND)
chartNames.append(ChartName.DONUT)
elif demoSeriesType == DemoSeriesType.SCATTER:
chartNames.append(ChartName.BASIC)
elif demoSeriesType == DemoSeriesType.SPLINE:
chartNames.append(ChartName.BASIC)
chartNames.append(ChartName.WITH_PLOTBANDS)
chartNames.append(ChartName.WITH_SYMBOLS)
chartNames.append(ChartName.UPDATING_EACH_SECOND)
elif demoSeriesType == DemoSeriesType.COMBINATION:
chartNames.append(ChartName.COMBINATION_COLUMN_LINE_AND_PIE)
chartNames.append(ChartName.SCATTER_WITH_REGRESSION_LINE)
chartNames.append(ChartName.MULTIPLE_AXES)
return chartNames
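    # Dispatches on the number (and types) of arguments, mirroring the
    # overloaded logEventInfo methods of the original Java code.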
def logEventInfo(self, *args):
nargs = len(args)
if nargs == 1:
eventInfo, = args
self.logEventInfo(eventInfo, True)
elif nargs == 2:
if isinstance(args[1], bool):
eventInfo, isAppend = args
self._eventLog.setReadOnly(False)
if isAppend:
self._eventLog.setValue('['
+ self.getCurrFormattedTimestamp() + '] '
+ eventInfo + '\n'
+ self._eventLog.getValue())
else:
self._eventLog.setValue('')
self._eventLog.setReadOnly(True)
else:
eventName, seriesName = args
sb = ''
sb += '[' + eventName + ']'
sb += ' series -> ' + seriesName
self.logEventInfo(sb)
elif nargs == 5:
if isinstance(args[1], float):
if isinstance(args[3], float):
eventName, xAxisMin, xAxisMax, yAxisMin, yAxisMax = args
sb = ''
sb += '[' + eventName + ']'
sb += ', xAxisMin -> ' + str(xAxisMin)
sb += ', xAxisMax -> ' + str(xAxisMax)
sb += ', yAxisMin -> ' + str(yAxisMin)
sb += ', yAxisMax -> ' + str(yAxisMax)
self.logEventInfo(sb)
else:
eventName, xAxisPos, yAxisPos, mouseX, mouseY = args
sb = ''
sb += '[' + eventName + ']'
sb += ', xAxisPos -> ' + str(xAxisPos)
sb += ', yAxisPos -> ' + str(yAxisPos)
sb += ', mouseX -> ' + str(mouseX)
sb += ', mouseY -> ' + str(mouseY)
self.logEventInfo(sb)
            else:
                # datetime and numeric x-values are logged the same way
                eventName, seriesName, category, x, y = args
                self.logEventInfo(eventName, seriesName, category,
                        x, y, None, None)
        elif nargs == 7:
            # datetime and numeric x-values are formatted the same way
            eventName, seriesName, category, x, y, mouseX, mouseY = args
            self.logStringEventInfo(eventName, seriesName, category,
                    str(x) if x is not None else None,
                    str(y) if y is not None else None,
                    str(mouseX) if mouseX is not None else None,
                    str(mouseY) if mouseY is not None else None)
        else:
            raise ValueError('logEventInfo: unexpected number of '
                    'arguments: %d' % nargs)
def logStringEventInfo(self, eventName, seriesName, category, x, y,
mouseX, mouseY):
sb = StringIO()
sb.write('[' + eventName + ']')
sb.write(' series -> ' + seriesName)
if category is not None and len(category) > 0:
sb.write(', category -> ' + category)
if x is not None:
sb.write(', x -> ' + str(x))
if y is not None:
sb.write(', y -> ' + str(y))
if mouseX is not None:
sb.write(', mouseX -> ' + str(mouseX))
if mouseY is not None:
sb.write(', mouseY -> ' + str(mouseY))
self.logEventInfo(sb.getvalue())
sb.close()
def emptyEventLog(self):
self.logEventInfo('', False)
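    # Height/weight sample data (Heinz 2003) for the female scatter series,
    # built once and then cached.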
def getScatterFemalePoints(self, series):
if self._scatterFemaleData is not None:
return self._scatterFemaleData
# Initialize data
self._scatterFemaleData = self.getPoints(series,
[[161.2, 51.6],
[167.5, 59.0], [159.5, 49.2],
[157.0, 63.0], [155.8, 53.6],
[170.0, 59.0], [159.1, 47.6],
[166.0, 69.8], [176.2, 66.8],
[160.2, 75.2], [172.5, 55.2],
[170.9, 54.2], [172.9, 62.5],
[153.4, 42.0], [160.0, 50.0],
[147.2, 49.8], [168.2, 49.2],
[175.0, 73.2], [157.0, 47.8],
[167.6, 68.8], [159.5, 50.6],
[175.0, 82.5], [166.8, 57.2],
[176.5, 87.8], [170.2, 72.8],
[174.0, 54.5], [173.0, 59.8],
[179.9, 67.3], [170.5, 67.8],
[160.0, 47.0], [154.4, 46.2],
[162.0, 55.0], [176.5, 83.0],
[160.0, 54.4], [152.0, 45.8],
[162.1, 53.6], [170.0, 73.2],
[160.2, 52.1], [161.3, 67.9],
[166.4, 56.6], [168.9, 62.3],
[163.8, 58.5], [167.6, 54.5],
[160.0, 50.2], [161.3, 60.3],
[167.6, 58.3], [165.1, 56.2],
[160.0, 50.2], [170.0, 72.9],
[157.5, 59.8], [167.6, 61.0],
[160.7, 69.1], [163.2, 55.9],
[152.4, 46.5], [157.5, 54.3],
[168.3, 54.8], [180.3, 60.7],
[165.5, 60.0], [165.0, 62.0],
[164.5, 60.3], [156.0, 52.7],
[160.0, 74.3], [163.0, 62.0],
[165.7, 73.1], [161.0, 80.0],
[162.0, 54.7], [166.0, 53.2],
[174.0, 75.7], [172.7, 61.1],
[167.6, 55.7], [151.1, 48.7],
[164.5, 52.3], [163.5, 50.0],
[152.0, 59.3], [169.0, 62.5],
[164.0, 55.7], [161.2, 54.8],
[155.0, 45.9], [170.0, 70.6],
[176.2, 67.2], [170.0, 69.4],
[162.5, 58.2], [170.3, 64.8],
[164.1, 71.6], [169.5, 52.8],
[163.2, 59.8], [154.5, 49.0],
[159.8, 50.0], [173.2, 69.2],
[170.0, 55.9], [161.4, 63.4],
[169.0, 58.2], [166.2, 58.6],
[159.4, 45.7], [162.5, 52.2],
[159.0, 48.6], [162.8, 57.8],
[159.0, 55.6], [179.8, 66.8],
[162.9, 59.4], [161.0, 53.6],
[151.1, 73.2], [168.2, 53.4],
[168.9, 69.0], [173.2, 58.4],
[171.8, 56.2], [178.0, 70.6],
[164.3, 59.8], [163.0, 72.0],
[168.5, 65.2], [166.8, 56.6],
[172.7, 105.2], [163.5, 51.8],
[169.4, 63.4], [167.8, 59.0],
[159.5, 47.6], [167.6, 63.0],
[161.2, 55.2], [160.0, 45.0],
[163.2, 54.0], [162.2, 50.2],
[161.3, 60.2], [149.5, 44.8],
[157.5, 58.8], [163.2, 56.4],
[172.7, 62.0], [155.0, 49.2],
[156.5, 67.2], [164.0, 53.8],
[160.9, 54.4], [162.8, 58.0],
[167.0, 59.8], [160.0, 54.8],
[160.0, 43.2], [168.9, 60.5],
[158.2, 46.4], [156.0, 64.4],
[160.0, 48.8], [167.1, 62.2],
[158.0, 55.5], [167.6, 57.8],
[156.0, 54.6], [162.1, 59.2],
[173.4, 52.7], [159.8, 53.2],
[170.5, 64.5], [159.2, 51.8],
[157.5, 56.0], [161.3, 63.6],
[162.6, 63.2], [160.0, 59.5],
[168.9, 56.8], [165.1, 64.1],
[162.6, 50.0], [165.1, 72.3],
[166.4, 55.0], [160.0, 55.9],
[152.4, 60.4], [170.2, 69.1],
[162.6, 84.5], [170.2, 55.9],
[158.8, 55.5], [172.7, 69.5],
[167.6, 76.4], [162.6, 61.4],
[167.6, 65.9], [156.2, 58.6],
[175.2, 66.8], [172.1, 56.6],
[162.6, 58.6], [160.0, 55.9],
[165.1, 59.1], [182.9, 81.8],
[166.4, 70.7], [165.1, 56.8],
[177.8, 60.0], [165.1, 58.2],
[175.3, 72.7], [154.9, 54.1],
[158.8, 49.1], [172.7, 75.9],
[168.9, 55.0], [161.3, 57.3],
[167.6, 55.0], [165.1, 65.5],
[175.3, 65.5], [157.5, 48.6],
[163.8, 58.6], [167.6, 63.6],
[165.1, 55.2], [165.1, 62.7],
[168.9, 56.6], [162.6, 53.9],
[164.5, 63.2], [176.5, 73.6],
[168.9, 62.0], [175.3, 63.6],
[159.4, 53.2], [160.0, 53.4],
[170.2, 55.0], [162.6, 70.5],
[167.6, 54.5], [162.6, 54.5],
[160.7, 55.9], [160.0, 59.0],
[157.5, 63.6], [162.6, 54.5],
[152.4, 47.3], [170.2, 67.7],
[165.1, 80.9], [172.7, 70.5],
[165.1, 60.9], [170.2, 63.6],
[170.2, 54.5], [170.2, 59.1],
[161.3, 70.5], [167.6, 52.7],
[167.6, 62.7], [165.1, 86.3],
[162.6, 66.4], [152.4, 67.3],
[168.9, 63.0], [170.2, 73.6],
[175.2, 62.3], [175.2, 57.7],
[160.0, 55.4], [165.1, 104.1],
[174.0, 55.5], [170.2, 77.3],
[160.0, 80.5], [167.6, 64.5],
[167.6, 72.3], [167.6, 61.4],
[154.9, 58.2], [162.6, 81.8],
[175.3, 63.6], [171.4, 53.4],
[157.5, 54.5], [165.1, 53.6],
[160.0, 60.0], [174.0, 73.6],
[162.6, 61.4], [174.0, 55.5],
[162.6, 63.6], [161.3, 60.9],
[156.2, 60.0], [149.9, 46.8],
[169.5, 57.3], [160.0, 64.1],
[175.3, 63.6], [169.5, 67.3],
[160.0, 75.5], [172.7, 68.2],
[162.6, 61.4], [157.5, 76.8],
[176.5, 71.8], [164.4, 55.5],
[160.7, 48.6], [174.0, 66.4],
[163.8, 67.3]])
return self._scatterFemaleData
def getScatterMalePoints(self, series):
if self._scatterMaleData is not None:
return self._scatterMaleData
self._scatterMaleData = self.getPoints(series,
[[174.0, 65.6],
[175.3, 71.8], [193.5, 80.7],
[186.5, 72.6], [187.2, 78.8],
[181.5, 74.8], [184.0, 86.4],
[184.5, 78.4], [175.0, 62.0],
[184.0, 81.6], [180.0, 76.6],
[177.8, 83.6], [192.0, 90.0],
[176.0, 74.6], [174.0, 71.0],
[184.0, 79.6], [192.7, 93.8],
[171.5, 70.0], [173.0, 72.4],
[176.0, 85.9], [176.0, 78.8],
[180.5, 77.8], [172.7, 66.2],
[176.0, 86.4], [173.5, 81.8],
[178.0, 89.6], [180.3, 82.8],
[180.3, 76.4], [164.5, 63.2],
[173.0, 60.9], [183.5, 74.8],
[175.5, 70.0], [188.0, 72.4],
[189.2, 84.1], [172.8, 69.1],
[170.0, 59.5], [182.0, 67.2],
[170.0, 61.3], [177.8, 68.6],
[184.2, 80.1], [186.7, 87.8],
[171.4, 84.7], [172.7, 73.4],
[175.3, 72.1], [180.3, 82.6],
[182.9, 88.7], [188.0, 84.1],
[177.2, 94.1], [172.1, 74.9],
[167.0, 59.1], [169.5, 75.6],
[174.0, 86.2], [172.7, 75.3],
[182.2, 87.1], [164.1, 55.2],
[163.0, 57.0], [171.5, 61.4],
[184.2, 76.8], [174.0, 86.8],
[174.0, 72.2], [177.0, 71.6],
[186.0, 84.8], [167.0, 68.2],
[171.8, 66.1], [182.0, 72.0],
[167.0, 64.6], [177.8, 74.8],
[164.5, 70.0], [192.0, 101.6],
[175.5, 63.2], [171.2, 79.1],
[181.6, 78.9], [167.4, 67.7],
[181.1, 66.0], [177.0, 68.2],
[174.5, 63.9], [177.5, 72.0],
[170.5, 56.8], [182.4, 74.5],
[197.1, 90.9], [180.1, 93.0],
[175.5, 80.9], [180.6, 72.7],
[184.4, 68.0], [175.5, 70.9],
[180.6, 72.5], [177.0, 72.5],
[177.1, 83.4], [181.6, 75.5],
[176.5, 73.0], [175.0, 70.2],
[174.0, 73.4], [165.1, 70.5],
[177.0, 68.9], [192.0, 102.3],
[176.5, 68.4], [169.4, 65.9],
[182.1, 75.7], [179.8, 84.5],
[175.3, 87.7], [184.9, 86.4],
[177.3, 73.2], [167.4, 53.9],
[178.1, 72.0], [168.9, 55.5],
[157.2, 58.4], [180.3, 83.2],
[170.2, 72.7], [177.8, 64.1],
[172.7, 72.3], [165.1, 65.0],
[186.7, 86.4], [165.1, 65.0],
[174.0, 88.6], [175.3, 84.1],
[185.4, 66.8], [177.8, 75.5],
[180.3, 93.2], [180.3, 82.7],
[177.8, 58.0], [177.8, 79.5],
[177.8, 78.6], [177.8, 71.8],
[177.8, 116.4], [163.8, 72.2],
[188.0, 83.6], [198.1, 85.5],
[175.3, 90.9], [166.4, 85.9],
[190.5, 89.1], [166.4, 75.0],
[177.8, 77.7], [179.7, 86.4],
[172.7, 90.9], [190.5, 73.6],
[185.4, 76.4], [168.9, 69.1],
[167.6, 84.5], [175.3, 64.5],
[170.2, 69.1], [190.5, 108.6],
[177.8, 86.4], [190.5, 80.9],
[177.8, 87.7], [184.2, 94.5],
[176.5, 80.2], [177.8, 72.0],
[180.3, 71.4], [171.4, 72.7],
[172.7, 84.1], [172.7, 76.8],
[177.8, 63.6], [177.8, 80.9],
[182.9, 80.9], [170.2, 85.5],
[167.6, 68.6], [175.3, 67.7],
[165.1, 66.4], [185.4, 102.3],
[181.6, 70.5], [172.7, 95.9],
[190.5, 84.1], [179.1, 87.3],
[175.3, 71.8], [170.2, 65.9],
[193.0, 95.9], [171.4, 91.4],
[177.8, 81.8], [177.8, 96.8],
[167.6, 69.1], [167.6, 82.7],
[180.3, 75.5], [182.9, 79.5],
[176.5, 73.6], [186.7, 91.8],
[188.0, 84.1], [188.0, 85.9],
[177.8, 81.8], [174.0, 82.5],
[177.8, 80.5], [171.4, 70.0],
[185.4, 81.8], [185.4, 84.1],
[188.0, 90.5], [188.0, 91.4],
[182.9, 89.1], [176.5, 85.0],
[175.3, 69.1], [175.3, 73.6],
[188.0, 80.5], [188.0, 82.7],
[175.3, 86.4], [170.5, 67.7],
[179.1, 92.7], [177.8, 93.6],
[175.3, 70.9], [182.9, 75.0],
[170.8, 93.2], [188.0, 93.2],
[180.3, 77.7], [177.8, 61.4],
[185.4, 94.1], [168.9, 75.0],
[185.4, 83.6], [180.3, 85.5],
[174.0, 73.9], [167.6, 66.8],
[182.9, 87.3], [160.0, 72.3],
[180.3, 88.6], [167.6, 75.5],
[186.7, 101.4], [175.3, 91.1],
[175.3, 67.3], [175.9, 77.7],
[175.3, 81.8], [179.1, 75.5],
[181.6, 84.5], [177.8, 76.6],
[182.9, 85.0], [177.8, 102.5],
[184.2, 77.3], [179.1, 71.8],
[176.5, 87.9], [188.0, 94.3],
[174.0, 70.9], [167.6, 64.5],
[170.2, 77.3], [167.6, 72.3],
[188.0, 87.3], [174.0, 80.0],
[176.5, 82.3], [180.3, 73.6],
[167.6, 74.1], [188.0, 85.9],
[180.3, 73.2], [167.6, 76.3],
[183.0, 65.9], [183.0, 90.9],
[179.1, 89.1], [170.2, 62.3],
[177.8, 82.7], [179.1, 79.1],
[190.5, 98.2], [177.8, 84.1],
[180.3, 83.2], [180.3, 83.2]])
return self._scatterMaleData
def getDateTimeSeriesPoints(self, series):
return self.getDateTimePoints(series, [0.8446, 0.8445, 0.8444, 0.8451,
0.8418, 0.8264, 0.8258, 0.8232, 0.8233, 0.8258, 0.8283, 0.8278,
0.8256, 0.8292, 0.8239, 0.8239, 0.8245, 0.8265, 0.8261, 0.8269,
0.8273, 0.8244, 0.8244, 0.8172, 0.8139, 0.8146, 0.8164, 0.82,
0.8269, 0.8269, 0.8269, 0.8258, 0.8247, 0.8286, 0.8289, 0.8316,
0.832, 0.8333, 0.8352, 0.8357, 0.8355, 0.8354, 0.8403, 0.8403,
0.8406, 0.8403, 0.8396, 0.8418, 0.8409, 0.8384, 0.8386, 0.8372,
0.839, 0.84, 0.8389, 0.84, 0.8423, 0.8423, 0.8435, 0.8422,
0.838, 0.8373, 0.8316, 0.8303, 0.8303, 0.8302, 0.8369, 0.84,
0.8385, 0.84, 0.8401, 0.8402, 0.8381, 0.8351, 0.8314, 0.8273,
0.8213, 0.8207, 0.8207, 0.8215, 0.8242, 0.8273, 0.8301, 0.8346,
0.8312, 0.8312, 0.8312, 0.8306, 0.8327, 0.8282, 0.824, 0.8255,
0.8256, 0.8273, 0.8209, 0.8151, 0.8149, 0.8213, 0.8273, 0.8273,
0.8261, 0.8252, 0.824, 0.8262, 0.8258, 0.8261, 0.826, 0.8199,
0.8153, 0.8097, 0.8101, 0.8119, 0.8107, 0.8105, 0.8084, 0.8069,
0.8047, 0.8023, 0.7965, 0.7919, 0.7921, 0.7922, 0.7934, 0.7918,
0.7915, 0.787, 0.7861, 0.7861, 0.7853, 0.7867, 0.7827, 0.7834,
0.7766, 0.7751, 0.7739, 0.7767, 0.7802, 0.7788, 0.7828, 0.7816,
0.7829, 0.783, 0.7829, 0.7781, 0.7811, 0.7831, 0.7826, 0.7855,
0.7855, 0.7845, 0.7798, 0.7777, 0.7822, 0.7785, 0.7744, 0.7743,
0.7726, 0.7766, 0.7806, 0.785, 0.7907, 0.7912, 0.7913, 0.7931,
0.7952, 0.7951, 0.7928, 0.791, 0.7913, 0.7912, 0.7941, 0.7953,
0.7921, 0.7919, 0.7968, 0.7999, 0.7999, 0.7974, 0.7942, 0.796,
0.7969, 0.7862, 0.7821, 0.7821, 0.7821, 0.7811, 0.7833, 0.7849,
0.7819, 0.7809, 0.7809, 0.7827, 0.7848, 0.785, 0.7873, 0.7894,
0.7907, 0.7909, 0.7947, 0.7987, 0.799, 0.7927, 0.79, 0.7878,
0.7878, 0.7907, 0.7922, 0.7937, 0.786, 0.787, 0.7838, 0.7838,
0.7837, 0.7836, 0.7806, 0.7825, 0.7798, 0.777, 0.777, 0.7772,
0.7793, 0.7788, 0.7785, 0.7832, 0.7865, 0.7865, 0.7853, 0.7847,
0.7809, 0.778, 0.7799, 0.78, 0.7801, 0.7765, 0.7785, 0.7811,
0.782, 0.7835, 0.7845, 0.7844, 0.782, 0.7811, 0.7795, 0.7794,
0.7806, 0.7794, 0.7794, 0.7778, 0.7793, 0.7808, 0.7824, 0.787,
0.7894, 0.7893, 0.7882, 0.7871, 0.7882, 0.7871, 0.7878, 0.79,
0.7901, 0.7898, 0.7879, 0.7886, 0.7858, 0.7814, 0.7825, 0.7826,
0.7826, 0.786, 0.7878, 0.7868, 0.7883, 0.7893, 0.7892, 0.7876,
0.785, 0.787, 0.7873, 0.7901, 0.7936, 0.7939, 0.7938, 0.7956,
0.7975, 0.7978, 0.7972, 0.7995, 0.7995, 0.7994, 0.7976, 0.7977,
0.796, 0.7922, 0.7928, 0.7929, 0.7948, 0.797, 0.7953, 0.7907,
0.7872, 0.7852, 0.7852, 0.786, 0.7862, 0.7836, 0.7837, 0.784,
0.7867, 0.7867, 0.7869, 0.7837, 0.7827, 0.7825, 0.7779, 0.7791,
0.779, 0.7787, 0.78, 0.7807, 0.7803, 0.7817, 0.7799, 0.7799,
0.7795, 0.7801, 0.7765, 0.7725, 0.7683, 0.7641, 0.7639, 0.7616,
0.7608, 0.759, 0.7582, 0.7539, 0.75, 0.75, 0.7507, 0.7505,
0.7516, 0.7522, 0.7531, 0.7577, 0.7577, 0.7582, 0.755, 0.7542,
0.7576, 0.7616, 0.7648, 0.7648, 0.7641, 0.7614, 0.757, 0.7587,
0.7588, 0.762, 0.762, 0.7617, 0.7618, 0.7615, 0.7612, 0.7596,
0.758, 0.758, 0.758, 0.7547, 0.7549, 0.7613, 0.7655, 0.7693,
0.7694, 0.7688, 0.7678, 0.7708, 0.7727, 0.7749, 0.7741, 0.7741,
0.7732, 0.7727, 0.7737, 0.7724, 0.7712, 0.772, 0.7721, 0.7717,
0.7704, 0.769, 0.7711, 0.774, 0.7745, 0.7745, 0.774, 0.7716,
0.7713, 0.7678, 0.7688, 0.7718, 0.7718, 0.7728, 0.7729, 0.7698,
0.7685, 0.7681, 0.769, 0.769, 0.7698, 0.7699, 0.7651, 0.7613,
0.7616, 0.7614, 0.7614, 0.7607, 0.7602, 0.7611, 0.7622, 0.7615,
0.7598, 0.7598, 0.7592, 0.7573, 0.7566, 0.7567, 0.7591, 0.7582,
0.7585, 0.7613, 0.7631, 0.7615, 0.76, 0.7613, 0.7627, 0.7627,
0.7608, 0.7583, 0.7575, 0.7562, 0.752, 0.7512, 0.7512, 0.7517,
0.752, 0.7511, 0.748, 0.7509, 0.7531, 0.7531, 0.7527, 0.7498,
0.7493, 0.7504, 0.75, 0.7491, 0.7491, 0.7485, 0.7484, 0.7492,
0.7471, 0.7459, 0.7477, 0.7477, 0.7483, 0.7458, 0.7448, 0.743,
0.7399, 0.7395, 0.7395, 0.7378, 0.7382, 0.7362, 0.7355, 0.7348,
0.7361, 0.7361, 0.7365, 0.7362, 0.7331, 0.7339, 0.7344, 0.7327,
0.7327, 0.7336, 0.7333, 0.7359, 0.7359, 0.7372, 0.736, 0.736,
0.735, 0.7365, 0.7384, 0.7395, 0.7413, 0.7397, 0.7396, 0.7385,
0.7378, 0.7366, 0.74, 0.7411, 0.7406, 0.7405, 0.7414, 0.7431,
0.7431, 0.7438, 0.7443, 0.7443, 0.7443, 0.7434, 0.7429, 0.7442,
0.744, 0.7439, 0.7437, 0.7437, 0.7429, 0.7403, 0.7399, 0.7418,
0.7468, 0.748, 0.748, 0.749, 0.7494, 0.7522, 0.7515, 0.7502,
0.7472, 0.7472, 0.7462, 0.7455, 0.7449, 0.7467, 0.7458, 0.7427,
0.7427, 0.743, 0.7429, 0.744, 0.743, 0.7422, 0.7388, 0.7388,
0.7369, 0.7345, 0.7345, 0.7345, 0.7352, 0.7341, 0.7341, 0.734,
0.7324, 0.7272, 0.7264, 0.7255, 0.7258, 0.7258, 0.7256, 0.7257,
0.7247, 0.7243, 0.7244, 0.7235, 0.7235, 0.7235, 0.7235, 0.7262,
0.7288, 0.7301, 0.7337, 0.7337, 0.7324, 0.7297, 0.7317, 0.7315,
0.7288, 0.7263, 0.7263, 0.7242, 0.7253, 0.7264, 0.727, 0.7312,
0.7305, 0.7305, 0.7318, 0.7358, 0.7409, 0.7454, 0.7437, 0.7424,
0.7424, 0.7415, 0.7419, 0.7414, 0.7377, 0.7355, 0.7315, 0.7315,
0.732, 0.7332, 0.7346, 0.7328, 0.7323, 0.734, 0.734, 0.7336,
0.7351, 0.7346, 0.7321, 0.7294, 0.7266, 0.7266, 0.7254, 0.7242,
0.7213, 0.7197, 0.7209, 0.721, 0.721, 0.721, 0.7209, 0.7159,
0.7133, 0.7105, 0.7099, 0.7099, 0.7093, 0.7093, 0.7076, 0.707,
0.7049, 0.7012, 0.7011, 0.7019, 0.7046, 0.7063, 0.7089, 0.7077,
0.7077, 0.7077, 0.7091, 0.7118, 0.7079, 0.7053, 0.705, 0.7055,
0.7055, 0.7045, 0.7051, 0.7051, 0.7017, 0.7, 0.6995, 0.6994,
0.7014, 0.7036, 0.7021, 0.7002, 0.6967, 0.695, 0.695, 0.6939,
0.694, 0.6922, 0.6919, 0.6914, 0.6894, 0.6891, 0.6904, 0.689,
0.6834, 0.6823, 0.6807, 0.6815, 0.6815, 0.6847, 0.6859, 0.6822,
0.6827, 0.6837, 0.6823, 0.6822, 0.6822, 0.6792, 0.6746, 0.6735,
0.6731, 0.6742, 0.6744, 0.6739, 0.6731, 0.6761, 0.6761, 0.6785,
0.6818, 0.6836, 0.6823, 0.6805, 0.6793, 0.6849, 0.6833, 0.6825,
0.6825, 0.6816, 0.6799, 0.6813, 0.6809, 0.6868, 0.6933, 0.6933,
0.6945, 0.6944, 0.6946, 0.6964, 0.6965, 0.6956, 0.6956, 0.695,
0.6948, 0.6928, 0.6887, 0.6824, 0.6794, 0.6794, 0.6803, 0.6855,
0.6824, 0.6791, 0.6783, 0.6785, 0.6785, 0.6797, 0.68, 0.6803,
0.6805, 0.676, 0.677, 0.677, 0.6736, 0.6726, 0.6764, 0.6821,
0.6831, 0.6842, 0.6842, 0.6887, 0.6903, 0.6848, 0.6824, 0.6788,
0.6814, 0.6814, 0.6797, 0.6769, 0.6765, 0.6733, 0.6729, 0.6758,
0.6758, 0.675, 0.678, 0.6833, 0.6856, 0.6903, 0.6896, 0.6896,
0.6882, 0.6879, 0.6862, 0.6852, 0.6823, 0.6813, 0.6813, 0.6822,
0.6802, 0.6802, 0.6784, 0.6748, 0.6747, 0.6747, 0.6748, 0.6733,
0.665, 0.6611, 0.6583, 0.659, 0.659, 0.6581, 0.6578, 0.6574,
0.6532, 0.6502, 0.6514, 0.6514, 0.6507, 0.651, 0.6489, 0.6424,
0.6406, 0.6382, 0.6382, 0.6341, 0.6344, 0.6378, 0.6439, 0.6478,
0.6481, 0.6481, 0.6494, 0.6438, 0.6377, 0.6329, 0.6336, 0.6333,
0.6333, 0.633, 0.6371, 0.6403, 0.6396, 0.6364, 0.6356, 0.6356,
0.6368, 0.6357, 0.6354, 0.632, 0.6332, 0.6328, 0.6331, 0.6342,
0.6321, 0.6302, 0.6278, 0.6308, 0.6324, 0.6324, 0.6307, 0.6277,
0.6269, 0.6335, 0.6392, 0.64, 0.6401, 0.6396, 0.6407, 0.6423,
0.6429, 0.6472, 0.6485, 0.6486, 0.6467, 0.6444, 0.6467, 0.6509,
0.6478, 0.6461, 0.6461, 0.6468, 0.6449, 0.647, 0.6461, 0.6452,
0.6422, 0.6422, 0.6425, 0.6414, 0.6366, 0.6346, 0.635, 0.6346,
0.6346, 0.6343, 0.6346, 0.6379, 0.6416, 0.6442, 0.6431, 0.6431,
0.6435, 0.644, 0.6473, 0.6469, 0.6386, 0.6356, 0.634, 0.6346,
0.643, 0.6452, 0.6467, 0.6506, 0.6504, 0.6503, 0.6481, 0.6451,
0.645, 0.6441, 0.6414, 0.6409, 0.6409, 0.6428, 0.6431, 0.6418,
0.6371, 0.6349, 0.6333, 0.6334, 0.6338, 0.6342, 0.632, 0.6318,
0.637, 0.6368, 0.6368, 0.6383, 0.6371, 0.6371, 0.6355, 0.632,
0.6277, 0.6276, 0.6291, 0.6274, 0.6293, 0.6311, 0.631, 0.6312,
0.6312, 0.6304, 0.6294, 0.6348, 0.6378, 0.6368, 0.6368, 0.6368,
0.636, 0.637, 0.6418, 0.6411, 0.6435, 0.6427, 0.6427, 0.6419,
0.6446, 0.6468, 0.6487, 0.6594, 0.6666, 0.6666, 0.6678, 0.6712,
0.6705, 0.6718, 0.6784, 0.6811, 0.6811, 0.6794, 0.6804, 0.6781,
0.6756, 0.6735, 0.6763, 0.6762, 0.6777, 0.6815, 0.6802, 0.678,
0.6796, 0.6817, 0.6817, 0.6832, 0.6877, 0.6912, 0.6914, 0.7009,
0.7012, 0.701, 0.7005, 0.7076, 0.7087, 0.717, 0.7105, 0.7031,
0.7029, 0.7006, 0.7035, 0.7045, 0.6956, 0.6988, 0.6915, 0.6914,
0.6859, 0.6778, 0.6815, 0.6815, 0.6843, 0.6846, 0.6846, 0.6923,
0.6997, 0.7098, 0.7188, 0.7232, 0.7262, 0.7266, 0.7359, 0.7368,
0.7337, 0.7317, 0.7387, 0.7467, 0.7461, 0.7366, 0.7319, 0.7361,
0.7437, 0.7432, 0.7461, 0.7461, 0.7454, 0.7549, 0.7742, 0.7801,
0.7903, 0.7876, 0.7928, 0.7991, 0.8007, 0.7823, 0.7661, 0.785,
0.7863, 0.7862, 0.7821, 0.7858, 0.7731, 0.7779, 0.7844, 0.7866,
0.7864, 0.7788, 0.7875, 0.7971, 0.8004, 0.7857, 0.7932, 0.7938,
0.7927, 0.7918, 0.7919, 0.7989, 0.7988, 0.7949, 0.7948, 0.7882,
0.7745, 0.771, 0.775, 0.7791, 0.7882, 0.7882, 0.7899, 0.7905,
0.7889, 0.7879, 0.7855, 0.7866, 0.7865, 0.7795, 0.7758, 0.7717,
0.761, 0.7497, 0.7471, 0.7473, 0.7407, 0.7288, 0.7074, 0.6927,
0.7083, 0.7191, 0.719, 0.7153, 0.7156, 0.7158, 0.714, 0.7119,
0.7129, 0.7129, 0.7049, 0.7095])
    def getMasterDetailData(self, series):
        # Same USD-to-EUR data set as the zoomable time-series demo, so
        # reuse getDateTimeSeriesPoints() instead of duplicating the values.
        return self.getDateTimeSeriesPoints(series)
class MasterChartZoomListener(ChartZoomListener):
def __init__(self, window, masterChart, detailChart):
self._window = window
self._masterChart = masterChart
self._detailChart = detailChart
def chartZoom(self, chartZoomEvent):
# chartZoomEvent.getChartArea().get
masterChartSeries = self._masterChart.getSeries('USD to EUR')
min_ = chartZoomEvent.getChartArea().getxAxisMin()
max_ = chartZoomEvent.getChartArea().getxAxisMax()
detailPoints = set()
detailChartSeries = self._detailChart.getSeries('USD to EUR')
self._detailChart.removeSeries(detailChartSeries)
for point in masterChartSeries.getPoints():
if (timestamp(point.getX()) > min_
and timestamp(point.getX()) < max_):
dtp = DateTimePoint(detailChartSeries,
point.getX(), point.getY())
detailPoints.add(dtp)
# Update series with new points
detailChartSeries.setSeriesPoints(detailPoints)
self._detailChart.addSeries(detailChartSeries)
self._detailChart.refresh()
# Update plotbands
        masterDateTimeAxis = next(iter(self._masterChart.getConfig().getXAxes()))
masterDateTimeAxis.removePlotBand('mask-before')
plotBandBefore = DateTimePlotBand('mask-before')
plotBandBefore.setRange(DateTimeRange(self._window._masterChartMinDate,
datetime.fromtimestamp(min_ / 1e03)))
plotBandBefore.setColor(RGBA(0, 0, 0, 0.2))
masterDateTimeAxis.addPlotBand(plotBandBefore)
masterDateTimeAxis.removePlotBand('mask-after')
plotBandAfter = DateTimePlotBand('mask-after')
plotBandAfter.setRange(DateTimeRange(
datetime.fromtimestamp(max_ / 1e03),
self._window._masterChartMaxDate))
plotBandAfter.setColor(RGBA(0, 0, 0, 0.2))
masterDateTimeAxis.addPlotBand(plotBandAfter)
self._masterChart.refresh()
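# Hedged usage sketch (illustrative, not from the original demo): the
# master/detail zoom listener above would be attached with the same
# addListener() call used for the other listeners in this file; 'window',
# 'masterChart' and 'detailChart' are assumed to be built elsewhere.
def _attachMasterDetailZoomListener(window, masterChart, detailChart):
    masterChart.addListener(MasterChartZoomListener(window, masterChart,
        detailChart))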
class AddPointChartClickListener(ChartClickListener):
def __init__(self, window):
self._window = window
def chartClick(self, chartClickEvent):
self._window.logEventInfo('chartClick',
chartClickEvent.getPoint().getX(),
chartClickEvent.getPoint().getY(),
chartClickEvent.getMousePosition().getMouseX(),
chartClickEvent.getMousePosition().getMouseY())
xySeries = chartClickEvent.getChart().getSeries('User Supplied Data')
xySeries.addPoint(DecimalPoint(xySeries,
chartClickEvent.getPoint().getX(),
chartClickEvent.getPoint().getY()))
class AddPointClickListener(PointClickListener):
def __init__(self, window):
self._window = window
def pointClick(self, pointClickEvent):
self._window.logEventInfo('pointClick',
pointClickEvent.getPoint().getSeries().getName(),
pointClickEvent.getCategory(),
pointClickEvent.getPoint().getX(),
pointClickEvent.getPoint().getY(),
pointClickEvent.getMousePosition().getMouseX(),
pointClickEvent.getMousePosition().getMouseY())
xySeries = pointClickEvent.getChart().getSeries('User Supplied Data')
if len(xySeries.getPoints()) > 1:
# remove the clicked point
xySeries.removePoint(pointClickEvent.getPoint())
class SelfUpdateSplineThread(Thread):
def __init__(self, chart):
super(SelfUpdateSplineThread, self).__init__()
self._chart = chart
self._keepUpdating = True ## FIXME: volatile
def stopUpdating(self):
self._keepUpdating = False
        print 'stopUpdating ' + str(self._keepUpdating)
def keepUpdating(self):
return self._keepUpdating
def run(self):
while self.keepUpdating():
            # Sleep for one second between updates (time.sleep takes seconds,
            # not milliseconds)
            try:
                sleep(1)
            except KeyboardInterrupt, e:
                print ('KeyboardInterrupt occurred. Exception message '
                    + str(e))
seriesData = self._chart.getSeries('Random Data')
            seriesData.addPoint(DateTimePoint(seriesData, datetime.now(),
                random()), True)
            print 'Inside run() keepUpdating ' + str(self._keepUpdating)
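# Hedged usage sketch: driving the updater thread defined above; the
# one-second cadence comes from the sleep() call in run(). Names here are
# illustrative only.
def _demoSelfUpdatingSpline(chart, seconds):
    updater = SelfUpdateSplineThread(chart)
    updater.start()
    sleep(seconds)  # let the chart update for a while
    updater.stopUpdating()
    updater.join()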
class GetSvgClickListener(button.IClickListener):
def __init__(self, window, chart):
self._window = window
self._chart = chart
def buttonClick(self, event):
l = DemoChartSVGAvailableListener(self._window)
self._chart.addListener(l)
class DemoChartSVGAvailableListener(ChartSVGAvailableListener):
def __init__(self, window):
self._window = window
def svgAvailable(self, chartSVGAvailableEvent):
self._window.logEventInfo('[svgAvailable]' + ' svg -> '
+ chartSVGAvailableEvent.getSVG())
class PrintClickListener(button.IClickListener):
def __init__(self, chart):
self._chart = chart
def buttonClick(self, event):
self._chart.print_()
class DemoChartClickListener(ChartClickListener):
def __init__(self, window):
self._window = window
def chartClick(self, chartClickEvent):
self._window.logEventInfo('chartClick',
chartClickEvent.getPoint().getX(),
chartClickEvent.getPoint().getY(),
chartClickEvent.getMousePosition().getMouseX(),
chartClickEvent.getMousePosition().getMouseY())
class DemoChartZoomListener(ChartZoomListener):
def __init__(self, window):
self._window = window
def chartZoom(self, chartZoomEvent):
self._window.logEventInfo('chartSelection',
chartZoomEvent.getChartArea().getxAxisMin(),
chartZoomEvent.getChartArea().getxAxisMax(),
chartZoomEvent.getChartArea().getyAxisMin(),
chartZoomEvent.getChartArea().getyAxisMax())
class DemoChartResetZoomListener(ChartResetZoomListener):
def __init__(self, window):
self._window = window
def chartResetZoom(self, chartResetZoomEvent):
self._window.logEventInfo('[chartSelectionReset]')
class DemoSeriesClickListerner(SeriesClickListerner):
def __init__(self, window):
self._window = window
def seriesClick(self, seriesClickEvent):
EVENT_NAME = 'seriesClick'
if isinstance(seriesClickEvent.getNearestPoint(), DecimalPoint):
self._window.logEventInfo(EVENT_NAME,
seriesClickEvent.getSeries().getName(),
None,
seriesClickEvent.getNearestPoint().getX(),
seriesClickEvent.getNearestPoint().getY(),
seriesClickEvent.getMousePosition().getMouseX(),
seriesClickEvent.getMousePosition().getMouseY())
else:
self._window.logEventInfo(EVENT_NAME,
seriesClickEvent.getSeries().getName(),
None,
seriesClickEvent.getNearestPoint().getX(),
seriesClickEvent.getNearestPoint().getY(),
seriesClickEvent.getMousePosition().getMouseX(),
seriesClickEvent.getMousePosition().getMouseY())
class DemoSeriesHideListerner(SeriesHideListerner):
def __init__(self, window):
self._window = window
def seriesHide(self, seriesHideEvent):
self._window.logEventInfo('seriesHide',
seriesHideEvent.getSeries().getName())
class DemoSeriesShowListerner(SeriesShowListerner):
def __init__(self, window):
self._window = window
def seriesShow(self, seriesShowEvent):
self._window.logEventInfo('seriesShow',
seriesShowEvent.getSeries().getName())
class DemoSeriesLegendItemClickListerner(SeriesLegendItemClickListerner):
def __init__(self, window):
self._window = window
def seriesLegendItemClick(self, seriesLegendItemClickEvent):
self._window.logEventInfo('seriesLegendItemClick',
seriesLegendItemClickEvent.getSeries().getName())
class DemoPointClickListener(PointClickListener):
def __init__(self, window):
self._window = window
def pointClick(self, pointClickEvent):
EVENT_NAME = 'pointClick'
if isinstance(pointClickEvent.getPoint(), DecimalPoint):
self._window.logEventInfo(EVENT_NAME,
pointClickEvent.getPoint().getSeries().getName(),
pointClickEvent.getCategory(),
pointClickEvent.getPoint().getX(),
pointClickEvent.getPoint().getY(),
pointClickEvent.getMousePosition().getMouseX(),
pointClickEvent.getMousePosition().getMouseY())
else:
self._window.logEventInfo(EVENT_NAME,
pointClickEvent.getPoint().getSeries().getName(),
pointClickEvent.getCategory(),
pointClickEvent.getPoint().getX(),
pointClickEvent.getPoint().getY(),
pointClickEvent.getMousePosition().getMouseX(),
pointClickEvent.getMousePosition().getMouseY())
class DemoPointRemoveListener(PointRemoveListener):
def __init__(self, window):
self._window = window
def pointRemove(self, pointRemoveEvent):
EVENT_NAME = 'pointRemove'
if isinstance(pointRemoveEvent.getPoint(), DecimalPoint):
self._window.logEventInfo(EVENT_NAME,
pointRemoveEvent.getPoint().getSeries().getName(),
pointRemoveEvent.getCategory(),
pointRemoveEvent.getPoint().getX(),
pointRemoveEvent.getPoint().getY())
else:
self._window.logEventInfo(EVENT_NAME,
pointRemoveEvent.getPoint().getSeries().getName(),
pointRemoveEvent.getCategory(),
pointRemoveEvent.getPoint().getX(),
pointRemoveEvent.getPoint().getY())
class DemoPointSelectListener(PointSelectListener):
def __init__(self, window):
self._window = window
def pointSelected(self, pointSelectEvent):
EVENT_NAME = 'pointSelected'
if isinstance(pointSelectEvent.getPoint(), DecimalPoint):
self._window.logEventInfo(EVENT_NAME,
pointSelectEvent.getPoint().getSeries().getName(),
pointSelectEvent.getCategory(),
pointSelectEvent.getPoint().getX(),
pointSelectEvent.getPoint().getY())
else:
self._window.logEventInfo(EVENT_NAME,
pointSelectEvent.getPoint().getSeries().getName(),
pointSelectEvent.getCategory(),
pointSelectEvent.getPoint().getX(),
pointSelectEvent.getPoint().getY())
class DemoPointUnselectListener(PointUnselectListener):
def __init__(self, window):
self._window = window
def pointUnSelect(self, pointUnSelectEvent):
EVENT_NAME = 'pointUnSelected'
if isinstance(pointUnSelectEvent.getPoint(), DecimalPoint):
self._window.logEventInfo(EVENT_NAME,
pointUnSelectEvent.getPoint().getSeries().getName(),
pointUnSelectEvent.getCategory(),
pointUnSelectEvent.getPoint().getX(),
pointUnSelectEvent.getPoint().getY())
else:
self._window.logEventInfo(EVENT_NAME,
pointUnSelectEvent.getPoint().getSeries().getName(),
pointUnSelectEvent.getCategory(),
pointUnSelectEvent.getPoint().getX(),
pointUnSelectEvent.getPoint().getY())
class DemoPieChartLegendItemClickListener(PieChartLegendItemClickListener):
def __init__(self, window):
self._window = window
def legendItemClick(self, legendItemClickEvent):
EVENT_NAME = 'pieLegendItemClick'
if isinstance(legendItemClickEvent.getPoint(), DecimalPoint):
self._window.logEventInfo(EVENT_NAME,
legendItemClickEvent.getPoint().getSeries().getName(),
None,
legendItemClickEvent.getPoint().getX(),
legendItemClickEvent.getPoint().getY())
class ChartName(object):
BASIC = None
DONUT = None
CLICK_TO_ADD_POINT = None
MASTER_DETAIL = None
TIMESERIES_ZOOMABLE = None
WITH_DATA_LABELS = None
STACKED = None
WITH_NEGATIVE_STACK = None
WITH_NEGATIVE_VALUES = None
STACKED_AND_GROUPED = None
STACKED_PERCENT = None
WITH_ROTATED_LABELS = None
WITH_MISSING_POINTS = None
INVERTED_AXES = None
WITH_LEGEND = None
WITH_PLOTBANDS = None
WITH_SYMBOLS = None
UPDATING_EACH_SECOND = None
COMBINATION_COLUMN_LINE_AND_PIE = None
PERCENTAGE = None
SCATTER_WITH_REGRESSION_LINE = None
MULTIPLE_AXES = None
def __init__(self, name):
self._name = name
def getName(self):
return self._name
@classmethod
def values(cls):
return [cls.BASIC, cls.DONUT, cls.CLICK_TO_ADD_POINT, cls.MASTER_DETAIL,
cls.TIMESERIES_ZOOMABLE, cls.WITH_DATA_LABELS, cls.STACKED,
cls.WITH_NEGATIVE_STACK, cls.WITH_NEGATIVE_VALUES,
cls.STACKED_AND_GROUPED, cls.STACKED_PERCENT,
cls.WITH_ROTATED_LABELS, cls.WITH_MISSING_POINTS,
cls.INVERTED_AXES, cls.WITH_LEGEND, cls.WITH_PLOTBANDS, cls.WITH_SYMBOLS,
cls.UPDATING_EACH_SECOND, cls.COMBINATION_COLUMN_LINE_AND_PIE,
cls.PERCENTAGE, cls.SCATTER_WITH_REGRESSION_LINE, cls.MULTIPLE_AXES]
ChartName.BASIC = ChartName('Basic')
ChartName.DONUT = ChartName('Donut')
ChartName.CLICK_TO_ADD_POINT = ChartName('Click to add a point')
ChartName.MASTER_DETAIL = ChartName('Master-detail')
ChartName.TIMESERIES_ZOOMABLE = ChartName('Time series, zoomable')
ChartName.WITH_DATA_LABELS = ChartName('With data labels')
ChartName.STACKED = ChartName('Stacked')
ChartName.WITH_NEGATIVE_STACK = ChartName('With negative stack')
ChartName.WITH_NEGATIVE_VALUES = ChartName('With negative values')
ChartName.STACKED_AND_GROUPED = ChartName('Stacked and grouped')
ChartName.STACKED_PERCENT = ChartName('Stacked percentage')
ChartName.WITH_ROTATED_LABELS = ChartName('With rotated labels')
ChartName.WITH_MISSING_POINTS = ChartName('With missing points')
ChartName.INVERTED_AXES = ChartName('Inverted axes')
ChartName.WITH_LEGEND = ChartName('With legend')
ChartName.WITH_PLOTBANDS = ChartName('With plot bands')
ChartName.WITH_SYMBOLS = ChartName('With symbols')
ChartName.UPDATING_EACH_SECOND = ChartName('Updating each second')
ChartName.COMBINATION_COLUMN_LINE_AND_PIE = ChartName('Column, spline and pie')
ChartName.PERCENTAGE = ChartName('Percentage')
ChartName.SCATTER_WITH_REGRESSION_LINE = ChartName('Scatter with regression line')
ChartName.MULTIPLE_AXES = ChartName('Multiple axes')
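# Hedged usage sketch: ChartName emulates a Java-style enum, so the demo can
# iterate every chart name like this (illustrative only):
#     for chartName in ChartName.values():
#         print chartName.getName()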
class DemoSeriesType(object):
LINE = None
SPLINE = None
SCATTER = None
AREA = None
AREASPLINE = None
BAR = None
COLUMN = None
PIE = None
COMBINATION = None
def __init__(self, seriesType, name):
self._seriesType = seriesType
self._name = name
def getSeriesType(self):
return self._seriesType
def getName(self):
return self._name
@classmethod
def values(cls):
return [cls.LINE, cls.SPLINE, cls.SCATTER, cls.AREA, cls.AREASPLINE,
cls.BAR, cls.COLUMN, cls.PIE, cls.COMBINATION]
DemoSeriesType.LINE = DemoSeriesType(SeriesType.LINE, 'Line')
DemoSeriesType.SPLINE = DemoSeriesType(SeriesType.SPLINE, 'Spline')
DemoSeriesType.SCATTER = DemoSeriesType(SeriesType.SCATTER, 'Scatter')
DemoSeriesType.AREA = DemoSeriesType(SeriesType.AREA, 'Area - Line')
DemoSeriesType.AREASPLINE = DemoSeriesType(SeriesType.AREASPLINE, 'Area - Spline')
DemoSeriesType.BAR = DemoSeriesType(SeriesType.BAR, 'Bar')
DemoSeriesType.COLUMN = DemoSeriesType(SeriesType.COLUMN, 'Column')
DemoSeriesType.PIE = DemoSeriesType(SeriesType.PIE, 'Pie')
DemoSeriesType.COMBINATION = DemoSeriesType(SeriesType.COMMONSERIES, 'Combination')
class ChartTypeChangeListener(IValueChangeListener):
def __init__(self, window, tree):
self._window = window
self._tree = tree
def valueChange(self, event):
# try:
selectedId = event.getProperty().getValue()
if self._tree.getParent(selectedId) is not None:
parentId = self._tree.getParent(selectedId)
demoSeriesTypeName = self._tree.getContainerProperty(parentId,
self._window._TREE_ITEM_CAPTION_PROP_ID).getValue()
seriesInstanceName = self._tree.getContainerProperty(selectedId,
self._window._TREE_ITEM_CAPTION_PROP_ID).getValue()
print ('parent : ' + demoSeriesTypeName
+ ', selected : ' + seriesInstanceName)
self._window.showChart(demoSeriesTypeName, seriesInstanceName)
else:
demoSeriesTypeName = self._tree.getContainerProperty(selectedId,
self._window._TREE_ITEM_CAPTION_PROP_ID).getValue()
print 'Selected ' + demoSeriesTypeName
self._window.showChartInstancesForSeriesType(demoSeriesTypeName)
# except Exception, e:
# e.printStackTrace()
class SeriesTypeClickListener(button.IClickListener):
def __init__(self, window):
self._window = window
def buttonClick(self, event):
self._window._navTree.select(self.demoSeriesTypeName
+ self._window._SEPARATOR + event.getButton().getCaption())
| apache-2.0 | 8,711,462,497,910,326,000 | 39.505624 | 144 | 0.567964 | false |
mjtamlyn/archery-scoring | scores/migrations/0001_initial.py | 1 | 2398 | # -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('entries', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Arrow',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('arrow_value', models.PositiveIntegerField()),
('arrow_of_round', models.PositiveIntegerField()),
('is_x', models.BooleanField(default=False)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Dozen',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('total', models.PositiveIntegerField()),
('dozen', models.PositiveIntegerField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Score',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('score', models.PositiveIntegerField(default=0, db_index=True)),
('hits', models.PositiveIntegerField(default=0)),
('golds', models.PositiveIntegerField(default=0)),
('xs', models.PositiveIntegerField(default=0)),
('alteration', models.IntegerField(default=0)),
('retired', models.BooleanField(default=False)),
('disqualified', models.BooleanField(default=False)),
('target', models.OneToOneField(to='entries.TargetAllocation', on_delete=models.CASCADE)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='dozen',
name='score',
field=models.ForeignKey(to='scores.Score', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='arrow',
name='score',
field=models.ForeignKey(to='scores.Score', on_delete=models.CASCADE),
preserve_default=True,
),
]
| bsd-3-clause | 7,393,244,661,349,838,000 | 35.892308 | 114 | 0.525855 | false |
gemrb/gemrb | gemrb/GUIScripts/bg1/ImportFile.py | 1 | 2330 | # GemRB - Infinity Engine Emulator
# Copyright (C) 2003 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
#character generation, import (GUICG20)
import GemRB
from GUIDefines import *
import GUICommon
import CharGenCommon
#import from a character sheet
ImportWindow = 0
TextAreaControl = 0
def OnLoad():
global ImportWindow, TextAreaControl
ImportWindow = GemRB.LoadWindow(20, "GUICG")
TextAreaControl = ImportWindow.GetControl(4)
TextAreaControl.SetText(10963)
TextAreaControl = ImportWindow.GetControl(2)
TextAreaControl.ListResources(CHR_EXPORTS)
DoneButton = ImportWindow.GetControl(0)
DoneButton.SetText (11973)
DoneButton.SetState(IE_GUI_BUTTON_DISABLED)
CancelButton = ImportWindow.GetControl(1)
CancelButton.SetText (13727)
DoneButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, DonePress)
CancelButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, CancelPress)
TextAreaControl.SetEvent(IE_GUI_TEXTAREA_ON_SELECT, SelectPress)
ImportWindow.ShowModal(MODAL_SHADOW_NONE)
return
def SelectPress():
DoneButton = ImportWindow.GetControl(0)
DoneButton.SetState(IE_GUI_BUTTON_ENABLED)
return
def DonePress():
ImportWindow.Close()
FileName = TextAreaControl.QueryText()
Slot = GemRB.GetVar("Slot")
GemRB.CreatePlayer(FileName, Slot| 0x8000, 1)
GemRB.SetToken ("CHARNAME", GemRB.GetPlayerName (Slot))
GemRB.SetToken ("SmallPortrait", GemRB.GetPlayerPortrait (Slot, 1)["ResRef"])
GemRB.SetToken ("LargePortrait", GemRB.GetPlayerPortrait (Slot, 0)["ResRef"])
GemRB.SetVar ("ImportedChar", 1)
CharGenCommon.jumpTo("appearance")
return
def CancelPress():
ImportWindow.Close()
GemRB.SetNextScript(GemRB.GetToken("NextScript"))
return
| gpl-2.0 | -4,098,847,976,789,444,000 | 29.657895 | 81 | 0.777682 | false |
datafolklabs/cement | cement/core/extension.py | 1 | 3997 | """Cement core extensions module."""
import sys
from abc import abstractmethod
from ..core import exc
from ..core.interface import Interface
from ..core.handler import Handler
from ..utils.misc import minimal_logger
LOG = minimal_logger(__name__)
class ExtensionInterface(Interface):
"""
This class defines the Extension Interface. Handlers that implement this
interface must provide the methods and attributes defined below. In
general, most implementations should sub-class from the provided
:class:`ExtensionHandler` base class as a starting point.
"""
class Meta:
"""Handler meta-data."""
#: The string identifier of the interface.
interface = 'extension'
@abstractmethod
def load_extension(self, ext_module):
"""
Load an extension whose module is ``ext_module``. For example,
``cement.ext.ext_json``.
Args:
ext_module (str): The name of the extension to load
"""
pass # pragma: no cover
@abstractmethod
def load_extensions(self, ext_list):
"""
Load all extensions from ``ext_list``.
Args:
ext_list (list): A list of extension modules to load. For example:
``['cement.ext.ext_json', 'cement.ext.ext_logging']``
"""
pass # pragma: no cover
class ExtensionHandler(ExtensionInterface, Handler):
"""
    This handler implements the Extension Interface, which handles loading
framework extensions. All extension handlers should sub-class from
here, or ensure that their implementation meets the requirements of this
base class.
"""
class Meta:
"""
Handler meta-data (can be passed as keyword arguments to the parent
class).
"""
#: The string identifier of the handler.
label = 'cement'
def __init__(self, **kw):
super().__init__(**kw)
self.app = None
self._loaded_extensions = []
def get_loaded_extensions(self):
"""
Get all loaded extensions.
Returns:
list: A list of loaded extensions.
"""
return self._loaded_extensions
def list(self):
"""
Synonymous with ``get_loaded_extensions()``.
Returns:
list: A list of loaded extensions.
"""
return self._loaded_extensions
def load_extension(self, ext_module):
"""
Given an extension module name, load or in other-words ``import`` the
extension.
Args:
ext_module (str): The extension module name. For example:
``cement.ext.ext_logging``.
Raises:
cement.core.exc.FrameworkError: Raised if ``ext_module`` can not be
loaded.
"""
        # If it's not a full module path, prepend the default path
if ext_module.find('.') == -1:
ext_module = 'cement.ext.ext_%s' % ext_module
if ext_module in self._loaded_extensions:
LOG.debug("framework extension '%s' already loaded" % ext_module)
return
LOG.debug("loading the '%s' framework extension" % ext_module)
try:
if ext_module not in sys.modules:
__import__(ext_module, globals(), locals(), [], 0)
if hasattr(sys.modules[ext_module], 'load'):
sys.modules[ext_module].load(self.app)
if ext_module not in self._loaded_extensions:
self._loaded_extensions.append(ext_module)
except ImportError as e:
raise exc.FrameworkError(e.args[0])
def load_extensions(self, ext_list):
"""
Given a list of extension modules, iterate over the list and pass
individually to ``self.load_extension()``.
Args:
ext_list (list): A list of extension module names (str).
"""
for ext in ext_list:
self.load_extension(ext)
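# Hedged usage sketch (assumes a configured cement app object): extensions can
# be loaded by short name or full module path, and duplicates are skipped by
# load_extension() above.
#     handler = ExtensionHandler()
#     handler.app = app  # normally wired up by the framework
#     handler.load_extensions(['json', 'cement.ext.ext_logging'])
#     assert 'cement.ext.ext_json' in handler.get_loaded_extensions()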
| bsd-3-clause | 2,490,373,445,105,531,400 | 26.565517 | 79 | 0.589192 | false |
nkiraly/koadstation | tiledraweru14/provisioning/roles/maptile-import-tools/files/populate.py | 1 | 16836 | #!/usr/bin/env python
from os import chdir, remove
from sys import stderr, stdout
from optparse import OptionParser
from subprocess import Popen, PIPE
from xml.etree.ElementTree import parse, SubElement
from os.path import dirname, basename, splitext, join
from urlparse import urlparse, urljoin
from tempfile import mkstemp
from StringIO import StringIO
from zipfile import ZipFile
from urllib import urlopen
from time import strftime
import sys, traceback
import json
import cascadenik
import mapnik
epsg3857 = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null'
parser = OptionParser(usage="""%prog [options] [url...]""")
defaults = dict(style='https://raw.github.com/nkiraly/OSM-Solar/tiledrawer/tiledrawer.cfg',
bbox=(37.777, -122.352, 37.839, -122.226))
parser.set_defaults(**defaults)
parser.add_option('-s', '--style', dest='style',
help='URL of a style description, default %(style)s.' % defaults)
parser.add_option('-b', '--bbox', dest='bbox',
help='Bounding box in floating point geographic coordinates: south west north east.',
type='float', nargs=4)
def download_file(url):
""" Shell out to curl to download extract and return its local filename.
"""
s, h, path, p, q, f = urlparse(url)
base, ext = splitext(basename(path))
handle, filename = mkstemp(dir='progress', prefix=base+'-', suffix=ext)
curl = 'curl', '-s', '-o', filename, '-L', url
print >> stderr, '+', ' '.join(curl)
curl = Popen(curl, stdout=stdout, stderr=PIPE)
curl.wait()
if curl.returncode:
raise Exception('curl command returned %d' % curl.returncode)
return filename
def download_file_local(url):
return url
def combine_extracts(bbox, files):
""" Shell out to osmosis to combine extracts and pull out a bounding box.
"""
osmosis = ['osmosis']
for file in files:
osmosis += ['--rb', file, '--log-progress']
osmosis += ['--merge'] * len(files[1:])
osmosis += ['--bb'] + ['%s=%.6f' % kv for kv in zip('bottom left top right'.split(), bbox)]
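    # e.g. with the default bbox this adds "--bb bottom=37.777000
    # left=-122.352000 top=37.839000 right=-122.226000" to the command line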
osmosis += ['--wx', '-']
handle, filename = mkstemp(dir='progress', prefix='out-', suffix='.osm.bz2')
print >> stderr, '+', ' '.join(osmosis), '| bzip2 >', filename
osmosis = Popen(osmosis, stderr=open('progress/osmosis.log', 'w'), stdout=PIPE)
bzout = Popen(['bzip2'], stdin=osmosis.stdout, stdout=open(filename, 'w'))
osmosis.wait()
bzout.wait()
if osmosis.returncode:
raise Exception('osmosis command returned %d' % osmosis.returncode)
if bzout.returncode:
raise Exception('bzout command returned %d' % bzout.returncode)
return filename
def import_extract_osm2pgsql(filename):
""" Shell out to osm2pgsql to import extract file to Postgis.
"""
# Remove possible existing line table to get rid of its High Road views
psql = Popen('psql -U osm planet_osm'.split(), stdin=PIPE, stderr=PIPE, stdout=PIPE)
psql.stdin.write('DROP TABLE IF EXISTS planet_osm_line CASCADE;')
psql.stdin.close()
psql.wait()
if psql.returncode:
raise Exception('psql command returned %d' % psql.returncode)
# Import new OSM data
# TODO: is it safe to ask for 4GB of RAM here? Check /proc/meminfo MemFree.
osm2pgsql = 'osm2pgsql -smucK -C 4096 -U osm -d planet_osm -S osm2pgsql/default.style'.split()
osm2pgsql += [filename]
print >> stderr, '+', ' '.join(osm2pgsql)
logfile = open('progress/osm2pgsql.log', 'w')
osm2pgsql = Popen(osm2pgsql, stdout=logfile, stderr=logfile)
osm2pgsql.wait()
if osm2pgsql.returncode:
raise Exception('osm2pgsql command returned %d' % osm2pgsql.returncode)
# Apply new High Road views
highroad_sql = urlopen('https://raw.github.com/nkiraly/HighRoad/master/high_road_views-setup.pgsql').read()
psql = Popen('psql -U osm planet_osm'.split(), stdin=PIPE, stderr=PIPE, stdout=PIPE)
psql.stdin.write(highroad_sql)
psql.stdin.close()
psql.wait()
if psql.returncode:
raise Exception('psql command returned %d' % psql.returncode)
def import_extract_imposm(filename):
""" Shell out to imposm to import extract file to Postgis.
"""
imposm = 'imposm --read --write --table-prefix=imposm_'.split()
imposm += '--connect postgis://osm:@127.0.0.1/planet_osm'.split()
imposm += ['--cache-dir=/usr/local/tiledrawer/progress', filename]
print >> stderr, '+', ' '.join(imposm)
logfile = open('progress/imposm.log', 'w')
imposm = Popen(imposm, stdout=logfile, stderr=logfile)
imposm.wait()
if imposm.returncode:
raise Exception('imposm command returned %d' % imposm.returncode)
def download_coastline():
""" Download and unpack an unprojected "good" coastline from metro.teczno.com.
"""
curl = 'curl -sL http://osm-metro-extracts.s3.amazonaws.com/coastline-good-latlon.tar.bz2'.split()
print >> stderr, '+', ' '.join(curl), '| bzcat | tar -C progress -xf -'
curl = Popen(curl, stdout=PIPE, stderr=PIPE)
bzcat = Popen('bzcat'.split(), stdin=curl.stdout, stdout=PIPE, stderr=PIPE)
tar = Popen('tar -C progress -xf -'.split(), stdin=bzcat.stdout, stderr=PIPE)
curl.wait()
bzcat.wait()
tar.wait()
if curl.returncode:
raise Exception('curl command returned %d' % curl.returncode)
if bzcat.returncode:
raise Exception('bzcat command returned %d' % bzcat.returncode)
if tar.returncode:
raise Exception('tar command returned %d' % tar.returncode)
return 'progress/coastline-good.shp'
def import_coastline(filename, bbox=None):
""" Shell out to shp2pgsql to import a coastline file to Postgis.
The coastline file is understood to be unprojected (EPSG:4326).
"""
handle, extract_filename = mkstemp(dir='progress', prefix='coastline-', suffix='.shp')
remove(extract_filename)
ogr2ogr = 'ogr2ogr -t_srs EPSG:3857'.split()
if bbox is not None:
ogr2ogr += ['-spat']
ogr2ogr += map(str, [bbox[1], bbox[0], bbox[3], bbox[2]])
ogr2ogr += [extract_filename, filename]
print >> stderr, '+', ' '.join(ogr2ogr)
ogr2ogr = Popen(ogr2ogr)
ogr2ogr.wait()
if ogr2ogr.returncode:
raise Exception('ogr2ogr command returned %d' % ogr2ogr.returncode)
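    # shp2pgsql flags: -d drops and recreates the table, -I builds a GiST
    # index, -D emits PostgreSQL dump format for faster loading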
shp2pgsql = 'shp2pgsql', '-dID', '-s', '3857', extract_filename, 'coastline'
psql = 'psql -U osm planet_osm'.split()
print >> stderr, '+', ' '.join(shp2pgsql), '|', ' '.join(psql)
shp2pgsql = Popen(shp2pgsql, stdout=PIPE, stderr=PIPE)
psql = Popen(psql, stdin=shp2pgsql.stdout, stdout=PIPE, stderr=PIPE)
shp2pgsql.wait()
psql.wait()
if shp2pgsql.returncode:
raise Exception('shp2pgsql command returned %d' % shp2pgsql.returncode)
if psql.returncode:
raise Exception('psql command returned %d' % psql.returncode)
def import_style(url):
"""
"""
if url.endswith('.zip'):
import_style_tilemill(url)
update_status('Building Mapnik 2.0 (populate.py)')
build_mapnik2()
elif url.endswith('.cfg'):
import_style_tdcfg(url)
elif url.endswith('.mml'):
import_style_mml(url)
def build_mapnik2():
"""
"""
print >> stderr, '+ ./mapnik2.sh'
mapnik2 = Popen('./mapnik2.sh')
mapnik2.wait()
# Mapping from Tile Drawer shapefile names to the PostGIS tables they mirror.
_SHAPEFILE_TABLES = {
    'tile-drawer.osm2psgsql-polygon.shp': 'planet_osm_polygon',
    'tile-drawer.osm2psgsql-point.shp': 'planet_osm_point',
    'tile-drawer.osm2psgsql-line.shp': 'planet_osm_line',
    'tile-drawer.imposm-admin.shp': 'imposm_admin',
    'tile-drawer.imposm-aeroways.shp': 'imposm_aeroways',
    'tile-drawer.imposm-amenities.shp': 'imposm_amenities',
    'tile-drawer.imposm-buildings.shp': 'imposm_buildings',
    'tile-drawer.imposm-landusages-gen0.shp': 'imposm_landusages_gen0',
    'tile-drawer.imposm-landusages-gen1.shp': 'imposm_landusages_gen1',
    'tile-drawer.imposm-landusages.shp': 'imposm_landusages',
    'tile-drawer.imposm-mainroads-gen0.shp': 'imposm_mainroads_gen0',
    'tile-drawer.imposm-mainroads-gen1.shp': 'imposm_mainroads_gen1',
    'tile-drawer.imposm-mainroads.shp': 'imposm_mainroads',
    'tile-drawer.imposm-minorroads.shp': 'imposm_minorroads',
    'tile-drawer.imposm-motorways-gen0.shp': 'imposm_motorways_gen0',
    'tile-drawer.imposm-motorways-gen1.shp': 'imposm_motorways_gen1',
    'tile-drawer.imposm-motorways.shp': 'imposm_motorways',
    'tile-drawer.imposm-places.shp': 'imposm_places',
    'tile-drawer.imposm-railways-gen0.shp': 'imposm_railways_gen0',
    'tile-drawer.imposm-railways-gen1.shp': 'imposm_railways_gen1',
    'tile-drawer.imposm-railways.shp': 'imposm_railways',
    'tile-drawer.imposm-roads-gen0.shp': 'imposm_roads_gen0',
    'tile-drawer.imposm-roads-gen1.shp': 'imposm_roads_gen1',
    'tile-drawer.imposm-roads.shp': 'imposm_roads',
    'tile-drawer.imposm-transport-areas.shp': 'imposm_transport_areas',
    'tile-drawer.imposm-transport-points.shp': 'imposm_transport_points',
    'tile-drawer.imposm-waterareas-gen0.shp': 'imposm_waterareas_gen0',
    'tile-drawer.imposm-waterareas-gen1.shp': 'imposm_waterareas_gen1',
    'tile-drawer.imposm-waterareas.shp': 'imposm_waterareas',
    'tile-drawer.imposm-waterways.shp': 'imposm_waterways',
    'tile-drawer.coastline.shp': 'coastline',
}
def get_shapefile_tablename(filepath):
    """ Map a Tile Drawer shapefile path to its PostGIS table name.
        Unknown filenames return an empty string, as before.
    """
    return _SHAPEFILE_TABLES.get(basename(filepath), '')
def import_style_tilemill(url):
""" Load a zipped-up stylesheet created from Tilemill.
"""
archive = ZipFile(StringIO(urlopen(url).read()))
xmlname = [name for name in archive.namelist() if name.endswith('.xml')][0]
doc = parse(StringIO(archive.read(xmlname)))
# Map shapefiles to PostGIS datasources.
def add_parameter(datasource, parameter, value):
SubElement(datasource, 'Parameter', dict(name=parameter)).text = value
for layer in doc.findall('Layer'):
for ds in layer.findall('Datasource'):
params = dict( [(p.attrib['name'], p.text)
for p in ds.findall('Parameter')] )
if params.get('type', None) == 'shape' and 'file' in params:
ds.clear()
add_parameter(ds, 'type', 'postgis')
add_parameter(ds, 'host', 'localhost')
add_parameter(ds, 'user', 'osm')
add_parameter(ds, 'dbname', 'planet_osm')
add_parameter(ds, 'table', get_shapefile_tablename(params['file']))
add_parameter(ds, 'extent', '-20037508,-20037508,20037508,20037508')
add_parameter(ds, 'estimate_extent', 'false')
out = open('gunicorn/mapnik2.xml', 'w')
out.write('<?xml version="1.0" encoding="utf-8"?>\n')
doc.write(out)
# Build a new TileStache configuration file.
config = json.load(open('gunicorn/tilestache.cfg'))
config['layers'] = {'tiles': {'provider': {}}}
layer = config['layers']['tiles']
layer['provider']['name'] = 'mapnik'
layer['provider']['mapfile'] = 'mapnik2.xml'
layer['bounds'] = dict(zip('south west north east'.split(), options.bbox))
layer['bounds'].update(dict(low=0, high=18))
layer['preview'] = dict(zoom=15, lat=(options.bbox[0]/2 + options.bbox[2]/2), lon=(options.bbox[1]/2 + options.bbox[3]/2))
# Done.
json.dump(config, open('gunicorn/tilestache.cfg', 'w'), indent=2)
def import_style_tdcfg(url):
""" Load a Cascadenik style and its constituent pieces from a URL.
"""
style = json.loads(urlopen(url).read())
mapfile = urljoin(options.style, style['mapfile'])
# Create a local style.xml file by way of a dummy mapnik.Map instance.
mmap = mapnik.Map(1, 1)
mmap.srs = epsg3857
cascadenik.load_map(mmap, mapfile, 'gunicorn', verbose=False)
mapnik.save_map(mmap, 'gunicorn/style.xml')
# Build a new TileStache configuration file.
config = json.load(open('gunicorn/tilestache.cfg'))
config['layers'] = {'tiles': {'provider': {}}}
layer = config['layers']['tiles']
layer['provider']['name'] = 'mapnik'
layer['provider']['mapfile'] = 'style.xml'
layer['bounds'] = dict(zip('south west north east'.split(), options.bbox))
layer['bounds'].update(dict(low=0, high=18))
layer['preview'] = dict(zoom=15, lat=(options.bbox[0]/2 + options.bbox[2]/2), lon=(options.bbox[1]/2 + options.bbox[3]/2))
# Apply various layer options.
for (parameter, value) in style['layer'].items():
if parameter == 'png options' and 'palette' in value:
palette_url = urljoin(url, value['palette'])
palette_data = urlopen(palette_url).read()
palette_file = 'gunicorn/palette.act'
print >> stderr, ' ', palette_file, '<--', palette_url
open(palette_file, 'w').write(palette_data)
value['palette'] = 'palette.act'
layer[parameter] = value
# Done.
json.dump(config, open('gunicorn/tilestache.cfg', 'w'), indent=2)
def import_style_mml(url):
"""
"""
# Create a local style.xml file by way of a dummy mapnik.Map instance.
mmap = mapnik.Map(1, 1)
mmap.srs = epsg3857
cascadenik.load_map(mmap, url, 'gunicorn', verbose=False)
mapnik.save_map(mmap, 'gunicorn/style.xml')
# Build a new TileStache configuration file.
config = json.load(open('gunicorn/tilestache.cfg'))
config['layers'] = {'tiles': {'provider': {}}}
layer = config['layers']['tiles']
layer['provider']['name'] = 'mapnik'
layer['provider']['mapfile'] = 'style.xml'
layer['bounds'] = dict(zip('south west north east'.split(), options.bbox))
layer['bounds'].update(dict(low=0, high=18))
layer['preview'] = dict(zoom=15, lat=(options.bbox[0]/2 + options.bbox[2]/2), lon=(options.bbox[1]/2 + options.bbox[3]/2))
# Done.
json.dump(config, open('gunicorn/tilestache.cfg', 'w'), indent=2)
def update_status(message):
"""
"""
status_file = open('/usr/local/tiledrawer/progress/status.txt', 'a')
status_ts = strftime('%a %b %d %H:%M:%S %Z %Y')
print "%s %s" % ( status_ts, message )
print >> status_file, status_ts, message
if __name__ == '__main__':
options, urls = parser.parse_args()
if dirname(__file__):
print >> stderr, '+ chdir', dirname(__file__)
chdir(dirname(__file__))
try:
update_status('Preparing database (populate.py)')
import_extract_osm2pgsql('postgres/init-data/null.osm')
import_coastline('postgres/init-data/null.shp')
update_status('Importing map style (populate.py)')
import_style(options.style)
update_status('Importing OpenStreetMap data (populate.py)')
osm_files = map(download_file, urls)
osm_filename = combine_extracts(options.bbox, osm_files)
import_extract_osm2pgsql(osm_filename)
import_extract_imposm(osm_filename)
update_status('Importing coastline data (populate.py)')
coast_filename = download_coastline()
import_coastline(coast_filename, options.bbox)
except Exception as ex:
update_status("populate.py exception: %s" % ex)
traceback.print_exc(file=sys.stdout)
exit(1)
else:
update_status('Finished (populate.py)')
| bsd-2-clause | 8,744,793,812,280,144,000 | 32.60479 | 126 | 0.616892 | false |
CroatianMeteorNetwork/RMS | RMS/Astrometry/CheckFit.py | 1 | 25717 | """ Automatic refining of astrometry calibration. The initial astrometric calibration is needed, which will be
refined by using all stars from a given night.
"""
from __future__ import print_function, division, absolute_import
import os
import sys
import copy
import shutil
import random
import argparse
import numpy as np
import scipy.optimize
import matplotlib.pyplot as plt
import RMS.ConfigReader as cr
from RMS.Formats import Platepar
from RMS.Formats import CALSTARS
from RMS.Formats import StarCatalog
from RMS.Formats import FFfile
from RMS.Astrometry.ApplyAstrometry import raDecToXYPP, xyToRaDecPP, rotationWrtHorizon, getFOVSelectionRadius
from RMS.Astrometry.Conversions import date2JD, jd2Date, raDec2AltAz
from RMS.Astrometry.FFTalign import alignPlatepar
from RMS.Math import angularSeparation
# Import Cython functions
import pyximport
pyximport.install(setup_args={'include_dirs':[np.get_include()]})
from RMS.Astrometry.CyFunctions import matchStars, subsetCatalog
def computeMinimizationTolerances(config, platepar, star_dict_len):
""" Compute tolerances for minimization. """
# Calculate the function tolerance, so the desired precision can be reached (the number is calculated
# in the same regard as the cost function)
fatol = (config.dist_check_threshold**2)/np.sqrt(star_dict_len*config.min_matched_stars + 1)
# Parameter estimation tolerance for angular values
fov_w = platepar.X_res/platepar.F_scale
xatol_ang = config.dist_check_threshold*fov_w/platepar.X_res
return fatol, xatol_ang
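# Note on the tolerances above: fatol mirrors the cost function's units (the
# squared pixel threshold scaled by 1/sqrt(n_images*min_matched_stars + 1)),
# and xatol_ang algebraically reduces to dist_check_threshold/F_scale, i.e.
# the pixel matching threshold expressed in degrees on the sky.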
def matchStarsResiduals(config, platepar, catalog_stars, star_dict, match_radius, ret_nmatch=False, \
sky_coords=False, lim_mag=None, verbose=False):
""" Match the image and catalog stars with the given astrometry solution and estimate the residuals
between them.
Arguments:
config: [Config structure]
platepar: [Platepar structure] Astrometry parameters.
catalog_stars: [ndarray] An array of catalog stars (ra, dec, mag).
star_dict: [ndarray] A dictionary where the keys are JDs when the stars were recorded and values are
2D list of stars, each entry is (X, Y, bg_level, level, fwhm).
match_radius: [float] Maximum radius for star matching (pixels).
Keyword arguments:
ret_nmatch: [bool] If True, the function returns the number of matched stars and the average
deviation. False by default.
sky_coords: [bool] If True, sky coordinate residuals in RA, dec will be used to compute the cost,
function, not image coordinates.
lim_mag: [float] Override the limiting magnitude from config. None by default.
        verbose: [bool] Print results. False by default.
Return:
cost: [float] The cost function which weights the number of matched stars and the average deviation.
"""
if lim_mag is None:
lim_mag = config.catalog_mag_limit
# Estimate the FOV radius
fov_radius = getFOVSelectionRadius(platepar)
# Dictionary containing the matched stars, the keys are JDs of every image
matched_stars = {}
# Go through every FF image and its stars
for jd in star_dict:
# Estimate RA,dec of the centre of the FOV
_, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(jd)], [platepar.X_res/2], [platepar.Y_res/2], [1], \
platepar, extinction_correction=False)
RA_c = RA_c[0]
dec_c = dec_c[0]
# Get stars from the catalog around the defined center in a given radius
_, extracted_catalog = subsetCatalog(catalog_stars, RA_c, dec_c, jd, platepar.lat, platepar.lon, \
fov_radius, lim_mag)
ra_catalog, dec_catalog, mag_catalog = extracted_catalog.T
# Extract stars for the given Julian date
stars_list = star_dict[jd]
stars_list = np.array(stars_list)
# Convert all catalog stars to image coordinates
cat_x_array, cat_y_array = raDecToXYPP(ra_catalog, dec_catalog, jd, platepar)
# Take only those stars which are within the FOV
x_indices = np.argwhere((cat_x_array >= 0) & (cat_x_array < platepar.X_res))
y_indices = np.argwhere((cat_y_array >= 0) & (cat_y_array < platepar.Y_res))
cat_good_indices = np.intersect1d(x_indices, y_indices).astype(np.uint32)
# cat_x_array = cat_x_array[good_indices]
# cat_y_array = cat_y_array[good_indices]
# # Plot image stars
# im_y, im_x, _, _ = stars_list.T
# plt.scatter(im_y, im_x, facecolors='none', edgecolor='g')
# # Plot catalog stars
# plt.scatter(cat_y_array[cat_good_indices], cat_x_array[cat_good_indices], c='r', s=20, marker='+')
# plt.show()
# Match image and catalog stars
matched_indices = matchStars(stars_list, cat_x_array, cat_y_array, cat_good_indices, match_radius)
# Skip this image is no stars were matched
if len(matched_indices) < config.min_matched_stars:
continue
matched_indices = np.array(matched_indices)
matched_img_inds, matched_cat_inds, dist_list = matched_indices.T
# Extract data from matched stars
matched_img_stars = stars_list[matched_img_inds.astype(np.int)]
matched_cat_stars = extracted_catalog[matched_cat_inds.astype(np.int)]
# Put the matched stars to a dictionary
matched_stars[jd] = [matched_img_stars, matched_cat_stars, dist_list]
# # Plot matched stars
# im_y, im_x, _, _ = matched_img_stars.T
# cat_y = cat_y_array[matched_cat_inds.astype(np.int)]
# cat_x = cat_x_array[matched_cat_inds.astype(np.int)]
# plt.scatter(im_x, im_y, c='r', s=5)
# plt.scatter(cat_x, cat_y, facecolors='none', edgecolor='g')
# plt.xlim([0, platepar.X_res])
# plt.ylim([platepar.Y_res, 0])
# plt.show()
# If residuals on the image should be computed
if not sky_coords:
unit_label = 'px'
# Extract all distances
global_dist_list = []
# level_list = []
# mag_list = []
for jd in matched_stars:
# matched_img_stars, matched_cat_stars, dist_list = matched_stars[jd]
_, _, dist_list = matched_stars[jd]
global_dist_list += dist_list.tolist()
# # TEST
# level_list += matched_img_stars[:, 3].tolist()
# mag_list += matched_cat_stars[:, 2].tolist()
# # Plot levels vs. magnitudes
# plt.scatter(mag_list, np.log10(level_list))
# plt.xlabel('Magnitude')
# plt.ylabel('Log10 level')
# plt.show()
# Compute the residuals on the sky
else:
unit_label = 'arcmin'
global_dist_list = []
# Go through all matched stars
for jd in matched_stars:
matched_img_stars, matched_cat_stars, dist_list = matched_stars[jd]
# Go through all stars on the image
for img_star_entry, cat_star_entry in zip(matched_img_stars, matched_cat_stars):
# Extract star coords
star_y = img_star_entry[0]
star_x = img_star_entry[1]
cat_ra = cat_star_entry[0]
cat_dec = cat_star_entry[1]
# Convert image coordinates to RA/Dec
_, star_ra, star_dec, _ = xyToRaDecPP([jd2Date(jd)], [star_x], [star_y], [1], \
platepar, extinction_correction=False)
# Compute angular distance between the predicted and the catalog position
ang_dist = np.degrees(angularSeparation(np.radians(cat_ra), np.radians(cat_dec), \
np.radians(star_ra[0]), np.radians(star_dec[0])))
# Store the angular separation in arc minutes
global_dist_list.append(ang_dist*60)
# Number of matched stars
n_matched = len(global_dist_list)
if n_matched == 0:
if verbose:
print('No matched stars with radius {:.1f} px!'.format(match_radius))
if ret_nmatch:
return 0, 9999.0, 9999.0, {}
else:
return 9999.0
# Calculate the average distance
avg_dist = np.median(global_dist_list)
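    # The cost trades accuracy against completeness: the squared median
    # residual penalizes scatter, while the 1/sqrt(n_matched + 1) factor
    # rewards solutions that match more stars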
cost = (avg_dist**2)*(1.0/np.sqrt(n_matched + 1))
if verbose:
print()
print("Matched {:d} stars with radius of {:.1f} px".format(n_matched, match_radius))
print(" Average distance = {:.3f} {:s}".format(avg_dist, unit_label))
print(" Cost function = {:.5f}".format(cost))
if ret_nmatch:
return n_matched, avg_dist, cost, matched_stars
else:
return cost
def checkFitGoodness(config, platepar, catalog_stars, star_dict, match_radius, verbose=False):
""" Checks if the platepar is 'good enough', given the extracted star positions. Returns True if the
fit is deemed good, False otherwise. The goodness of fit is determined by 2 criteria: the average
star residual (in pixels) has to be below a certain threshold, and an average number of matched stars
per image has to be above a predefined threshold as well.
Arguments:
config: [Config structure]
platepar: [Platepar structure] Initial astrometry parameters.
catalog_stars: [ndarray] An array of catalog stars (ra, dec, mag).
star_dict: [ndarray] A dictionary where the keys are JDs when the stars were recorded and values are
2D list of stars, each entry is (X, Y, bg_level, level).
match_radius: [float] Maximum radius for star matching (pixels).
Keyword arguments:
verbose: [bool] If True, fit status will be printed on the screen. False by default.
Return:
[bool] True if the platepar is good, False otherwise.
"""
if verbose:
print()
print("CHECK FIT GOODNESS:")
# Match the stars and calculate the residuals
n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
star_dict, match_radius, ret_nmatch=True, verbose=verbose)
# ### Plot zenith distance vs. residual
# # Go through all images
# for jd in matched_stars:
# _, cat_stars, dists = matched_stars[jd]
# # Extract RA/Dec
# ra, dec, _ = cat_stars.T
# zangle_list = []
# for ra_t, dec_t in zip(ra, dec):
# # Compute zenith distance
# azim, elev = raDec2AltAz(ra_t, dec_t, jd, platepar.lat, platepar.lon)
# zangle = 90 - elev
# zangle_list.append(zangle)
# # Plot zangle vs. distance
# plt.scatter(zangle_list, dists, c='k', s=0.1)
# plt.xlabel('Zenith angle')
# plt.ylabel('Residual (px)')
# plt.show()
# ###
# Check that the average distance is within the threshold
if avg_dist <= config.dist_check_threshold:
if verbose:
print()
print('The minimum residual is satisfied!')
# Check that the minimum number of stars is matched per every image
if n_matched >= len(star_dict)*1:
return True
else:
if verbose:
print('But there are not enough stars on every image, recalibrating...')
return False
def _calcImageResidualsAstro(params, config, platepar, catalog_stars, star_dict, match_radius):
""" Calculates the differences between the stars on the image and catalog stars in image coordinates with
the given astrometrical solution.
Arguments:
params: [list] Fit parameters - reference RA, Dec, position angle, and scale.
config: [Config]
platepar: [Platepar]
catalog_stars: [list] List of (ra, dec, mag) entries (angles in degrees).
star_dict: [dict] Dictionary which contains the JD, and a list of (X, Y, bg_intens, intens) of the
stars on the image.
match_radius: [float] Star match radius (px).
Return:
[float] The average pixel residual (difference between image and catalog positions) normalized
by the square root of the total number of matched stars.
"""
# Make a copy of the platepar
pp = copy.deepcopy(platepar)
# Extract fitting parameters
ra_ref, dec_ref, pos_angle_ref, F_scale = params
# Set the fitting parameters to the platepar clone
pp.RA_d = ra_ref
pp.dec_d = dec_ref
pp.pos_angle_ref = pos_angle_ref
pp.F_scale = F_scale
# Match stars and calculate image residuals
return matchStarsResiduals(config, pp, catalog_stars, star_dict, match_radius, verbose=False)
def starListToDict(config, calstars_list, max_ffs=None):
""" Converts the list of calstars into dictionary where the keys are FF file JD and the values is
a list of (X, Y, bg_intens, intens) of stars.
"""
# Convert the list to a dictionary
calstars = {ff_file: star_data for ff_file, star_data in calstars_list}
# Dictionary which will contain the JD, and a list of (X, Y, bg_intens, intens) of the stars
star_dict = {}
# Take only those files with enough stars on them
for ff_name in calstars:
stars_list = calstars[ff_name]
# Check if there are enough stars on the image
if len(stars_list) >= config.ff_min_stars:
# Calculate the JD time of the FF file
dt = FFfile.getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
jd = date2JD(*dt)
# Add the time and the stars to the dict
star_dict[jd] = stars_list
if max_ffs is not None:
# Limit the number of FF files used
if len(star_dict) > max_ffs:
# Randomly choose calstars_files_N image files from the whole list
rand_keys = random.sample(list(star_dict), max_ffs)
star_dict = {key: star_dict[key] for key in rand_keys}
return star_dict
def autoCheckFit(config, platepar, calstars_list, _fft_refinement=False):
""" Attempts to refine the astrometry fit with the given stars and and initial astrometry parameters.
Arguments:
config: [Config structure]
platepar: [Platepar structure] Initial astrometry parameters.
calstars_list: [list] A list containing stars extracted from FF files. See RMS.Formats.CALSTARS for
more details.
Keyword arguments:
_fft_refinement: [bool] Internal flag indicating that autoCF is running the second time recursively
after FFT platepar adjustment.
Return:
(platepar, fit_status):
platepar: [Platepar structure] Estimated/refined platepar.
            fit_status: [bool] True if the fit was successful, False otherwise.
"""
def _handleFailure(config, platepar, calstars_list, catalog_stars, _fft_refinement):
""" Run FFT alignment before giving up on ACF. """
if not _fft_refinement:
print()
print("-------------------------------------------------------------------------------")
print('The initial platepar is bad, trying to refine it using FFT phase correlation...')
print()
# Prepare data for FFT image registration
calstars_dict = {ff_file: star_data for ff_file, star_data in calstars_list}
# Extract star list from CALSTARS file from FF file with most stars
max_len_ff = max(calstars_dict, key=lambda k: len(calstars_dict[k]))
# Take only X, Y (change order so X is first)
calstars_coords = np.array(calstars_dict[max_len_ff])[:, :2]
calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]
# Get the time of the FF file
calstars_time = FFfile.getMiddleTimeFF(max_len_ff, config.fps, ret_milliseconds=True)
# Try aligning the platepar using FFT image registration
platepar_refined = alignPlatepar(config, platepar, calstars_time, calstars_coords)
print()
### If there are still not enough stars matched, try FFT again ###
min_radius = 10
# Prepare star dictionary to check the match
dt = FFfile.getMiddleTimeFF(max_len_ff, config.fps, ret_milliseconds=True)
jd = date2JD(*dt)
star_dict_temp = {}
star_dict_temp[jd] = calstars_dict[max_len_ff]
# Check the number of matched stars
n_matched, _, _, _ = matchStarsResiduals(config, platepar_refined, catalog_stars, \
star_dict_temp, min_radius, ret_nmatch=True, verbose=True)
# Realign again if necessary
if n_matched < config.min_matched_stars:
print()
print("-------------------------------------------------------------------------------")
print('Doing a second FFT pass as the number of matched stars was too small...')
print()
platepar_refined = alignPlatepar(config, platepar_refined, calstars_time, calstars_coords)
print()
### ###
# Redo autoCF
return autoCheckFit(config, platepar_refined, calstars_list, _fft_refinement=True)
else:
print('Auto Check Fit failed completely, please redo the plate manually!')
return platepar, False
if _fft_refinement:
print('Second ACF run with an updated platepar via FFT phase correlation...')
# Load catalog stars (overwrite the mag band ratios if specific catalog is used)
catalog_stars, _, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(config.star_catalog_path, \
config.star_catalog_file, lim_mag=config.catalog_mag_limit, \
mag_band_ratios=config.star_catalog_band_ratios)
# Dictionary which will contain the JD, and a list of (X, Y, bg_intens, intens) of the stars
star_dict = starListToDict(config, calstars_list, max_ffs=config.calstars_files_N)
    # There has to be a minimum number of FF files (config.calstars_files_N) for star fitting
if len(star_dict) < config.calstars_files_N:
print('Not enough FF files in CALSTARS for ACF!')
return platepar, False
# Calculate the total number of calibration stars used
total_calstars = sum([len(star_dict[key]) for key in star_dict])
print('Total calstars:', total_calstars)
if total_calstars < config.calstars_min_stars:
print('Not enough calibration stars, need at least', config.calstars_min_stars)
return platepar, False
print()
    # A list of matching radii to try
min_radius = 0.5
radius_list = [10, 5, 3, 1.5, min_radius]
# Calculate the function tolerance, so the desired precision can be reached (the number is calculated
# in the same regard as the cost function)
fatol, xatol_ang = computeMinimizationTolerances(config, platepar, len(star_dict))
    ### If the initial match is good enough, do only a quick recalibration ###
# Match the stars and calculate the residuals
n_matched, avg_dist, cost, _ = matchStarsResiduals(config, platepar, catalog_stars, star_dict, \
min_radius, ret_nmatch=True)
if n_matched >= config.calstars_files_N:
# Check if the average distance with the tightest radius is close
if avg_dist < config.dist_check_quick_threshold:
print("Using quick fit with smaller radiia...")
# Use a reduced set of initial radius values
radius_list = [1.5, min_radius]
##########
    # Match increasingly smaller search radii around image stars
for i, match_radius in enumerate(radius_list):
# Match the stars and calculate the residuals
n_matched, avg_dist, cost, _ = matchStarsResiduals(config, platepar, catalog_stars, star_dict, \
match_radius, ret_nmatch=True)
print()
print("-------------------------------------------------------------")
print("Refining camera pointing with max pixel deviation = {:.1f} px".format(match_radius))
print("Initial values:")
print(" Matched stars = {:>6d}".format(n_matched))
print(" Average deviation = {:>6.2f} px".format(avg_dist))
        # The initial number of matched stars has to be at least the number of FF images, otherwise it means
# that the initial platepar is no good
if n_matched < config.calstars_files_N:
print("The total number of initially matched stars is too small! Please manually redo the plate or make sure there are enough calibration stars.")
# Try to refine the platepar with FFT phase correlation and redo the ACF
return _handleFailure(config, platepar, calstars_list, catalog_stars, _fft_refinement)
# Check if the platepar is good enough and do not estimate further parameters
if checkFitGoodness(config, platepar, catalog_stars, star_dict, min_radius, verbose=True):
# Print out notice only if the platepar is good right away
if i == 0:
print("Initial platepar is good enough!")
return platepar, True
# Initial parameters for the astrometric fit
p0 = [platepar.RA_d, platepar.dec_d, platepar.pos_angle_ref, platepar.F_scale]
# Fit the astrometric parameters
res = scipy.optimize.minimize(_calcImageResidualsAstro, p0, args=(config, platepar, catalog_stars, \
star_dict, match_radius), method='Nelder-Mead', \
options={'fatol': fatol, 'xatol': xatol_ang})
print(res)
# If the fit was not successful, stop further fitting
if not res.success:
# Try to refine the platepar with FFT phase correlation and redo the ACF
return _handleFailure(config, platepar, calstars_list, catalog_stars, _fft_refinement)
else:
# If the fit was successful, use the new parameters from now on
ra_ref, dec_ref, pos_angle_ref, F_scale = res.x
platepar.RA_d = ra_ref
platepar.dec_d = dec_ref
platepar.pos_angle_ref = pos_angle_ref
platepar.F_scale = F_scale
# Check if the platepar is good enough and do not estimate further parameters
if checkFitGoodness(config, platepar, catalog_stars, star_dict, min_radius, verbose=True):
return platepar, True
# Match the stars and calculate the residuals
n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
star_dict, min_radius, ret_nmatch=True)
print("FINAL SOLUTION with radius {:.1} px:".format(min_radius))
print(" Matched stars = {:>6d}".format(n_matched))
print(" Average deviation = {:>6.2f} px".format(avg_dist))
# Mark the platepar to indicate that it was automatically refined with CheckFit
platepar.auto_check_fit_refined = True
# Recompute alt/az of the FOV centre
platepar.az_centre, platepar.alt_centre = raDec2AltAz(platepar.RA_d, platepar.dec_d, platepar.JD, \
platepar.lat, platepar.lon)
# Recompute the rotation wrt horizon
platepar.rotation_from_horiz = rotationWrtHorizon(platepar)
return platepar, True
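# Illustrative use of autoCheckFit outside the CLI below (assumes config,
# platepar and calstars_list are loaded the same way as in __main__; the
# output file name here is hypothetical):
#
#   pp, fit_ok = autoCheckFit(config, platepar, calstars_list)
#   if fit_ok:
#       pp.write('platepar_refined.cal')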
if __name__ == "__main__":
### COMMAND LINE ARGUMENTS
# Init the command line arguments parser
arg_parser = argparse.ArgumentParser(description="Check if the calibration file matches the stars, and improve it.")
arg_parser.add_argument('dir_path', nargs=1, metavar='DIR_PATH', type=str, \
help='Path to the folder with FF or image files. This folder also has to contain the platepar file.')
arg_parser.add_argument('-c', '--config', nargs=1, metavar='CONFIG_PATH', type=str, \
help="Path to a config file which will be used instead of the default one.")
# Parse the command line arguments
cml_args = arg_parser.parse_args()
#########################
dir_path = cml_args.dir_path[0]
# Check if the given directory is OK
if not os.path.exists(dir_path):
print('No such directory:', dir_path)
sys.exit()
# Load the config file
config = cr.loadConfigFromDirectory(cml_args.config, dir_path)
# Get a list of files in the night folder
file_list = os.listdir(dir_path)
# Find and load the platepar file
if config.platepar_name in file_list:
# Load the platepar
platepar = Platepar.Platepar()
platepar.read(os.path.join(dir_path, config.platepar_name), use_flat=config.use_flat)
else:
print('Cannot find the platepar file in the night directory: ', config.platepar_name)
sys.exit()
# Find the CALSTARS file in the given folder
    calstars_file = None
    for file_name in file_list:
        if ('CALSTARS' in file_name) and ('.txt' in file_name):
            calstars_file = file_name
            break
if calstars_file is None:
print('CALSTARS file could not be found in the given directory!')
sys.exit()
# Load the calstars file
calstars_list = CALSTARS.readCALSTARS(dir_path, calstars_file)
print('CALSTARS file: ' + calstars_file + ' loaded!')
# Run the automatic astrometry fit
pp, fit_status = autoCheckFit(config, platepar, calstars_list)
    # If the fit succeeded, save the platepar
    if fit_status:
        print('ACF successful!')
# Save the old platepar
shutil.move(os.path.join(dir_path, config.platepar_name), os.path.join(dir_path,
config.platepar_name + '.old'))
# Save the new platepar
pp.write(os.path.join(dir_path, config.platepar_name)) | gpl-3.0 | 8,900,784,430,323,704,000 | 33.848238 | 158 | 0.631722 | false |
akshaybabloo/gollahalli-com | gollahalli_cms/editor/tests/test_models.py | 1 | 26159 | import datetime
import os
import shutil
import unittest.mock as mock
from io import BytesIO
import pytz
from PIL import Image
from django.conf import settings
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.core.files.uploadedfile import SimpleUploadedFile
from django.test import TestCase
from gollahalli_cms.editor.models import ContentModel, EducationModel, ProjectsModel, TutorialsModel, ExperienceModel, SkillsModel, \
SkillsContentModel, PublicationsModel, PublicationsContentModel, MetaContentModel
def mock_datetime_now():
"""
Date and time with timezone.
Returns
-------
datetime: datetime
Datetime object.
"""
return datetime.datetime(2013, 11, 20, 20, 8, 7, 127325, tzinfo=pytz.UTC)
def mock_date():
"""
Mocks date.
Returns
-------
datetime: datetime
Datetime object.
"""
return datetime.date(2013, 11, 20)
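def make_test_image(size=(200, 200), fmt='JPEG'):
    """Illustrative helper, not part of the original suite: builds the same
    in-memory image that the fixtures below construct inline with PIL and
    BytesIO. The name and signature are this sketch's own."""
    im = Image.new(mode='RGB', size=size)  # blank RGB image
    im_io = BytesIO()  # in-memory buffer standing in for a file
    im.save(im_io, fmt)
    im_io.seek(0)  # rewind so readers see the full payload
    return im_io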
class ContentModelTest(TestCase):
"""
Test case for `ContentModel`
"""
@mock.patch('django.utils.timezone.now', mock_datetime_now)
def setUp(self):
"""
Sets up the `ContentModel` and mocks django `timezone`
"""
im = Image.new(mode='RGB', size=(200, 200)) # create a new image using PIL
im_io = BytesIO() # a BytesIO object for saving image
im.save(im_io, 'JPEG') # save the image to im_io
im_io.seek(0)
ContentModel.objects.create(ref_id=1,
website_name="Akshay Raj Gollahalli",
cv=SimpleUploadedFile('best_file_eva.txt',
'these are the file contents!'.encode('utf-8')),
bio="bio",
url="https://www.example.com",
first_name="Some name",
last_name="last name",
email_id="[email protected]",
github="https://www.github.com",
twitter="https://www.twitter.com",
linkedin="https://www.linkedin.com",
file=SimpleUploadedFile('content_model.txt',
'these are the file contents!'.encode('utf-8')),
image=InMemoryUploadedFile(im_io, None, 'content_model.jpg', 'image/jpeg', im_io,
None))
def test_model(self):
"""
Tests `ref_id`, `website_name`, `bio`, `url`, `first_name`, `last_name`, `email_id`, `github`, `twitter` and
`linkedin`.
"""
content = ContentModel.objects.get(ref_id=1)
self.assertEqual(content.ref_id, 1)
self.assertEqual(content.website_name, "Akshay Raj Gollahalli")
self.assertEqual(content.bio, "bio")
self.assertEqual(content.url, "https://www.example.com")
self.assertEqual(content.first_name, "Some name")
self.assertEqual(content.last_name, "last name")
self.assertEqual(content.email_id, "[email protected]")
self.assertEqual(content.github, "https://www.github.com")
self.assertEqual(content.twitter, "https://www.twitter.com")
self.assertEqual(content.linkedin, "https://www.linkedin.com")
def test_timedate(self):
"""
Tests `created` and `updated` date and time.
"""
content = ContentModel.objects.get(ref_id=1)
self.assertEqual(content.updated, mock_datetime_now())
def test_uploads(self):
"""
Tests file uploads of `cv`, `file`, and `image`
"""
content = ContentModel.objects.get(ref_id=1)
self.assertEqual(content.cv, content.cv.name)
self.assertEqual(content.file, content.file.name)
self.assertEqual(content.image, content.image.name)
def test_re_upload(self):
"""
Testing re-uploads for `cv`, `file`, and `image`
"""
im = Image.new(mode='RGB', size=(200, 200)) # create a new image using PIL
im_io = BytesIO() # a BytesIO object for saving image
im.save(im_io, 'JPEG') # save the image to im_io
im_io.seek(0)
ContentModel.objects.update(ref_id=1,
cv=SimpleUploadedFile('best_file_eva_1.txt',
'these are the file contents!'.encode('utf-8')),
file=SimpleUploadedFile('content_model_1.txt',
'these are the file contents!'.encode('utf-8')),
image=InMemoryUploadedFile(im_io, None, 'content_model_1.jpg', 'image/jpeg', im_io,
None))
content = ContentModel.objects.get(ref_id=1)
self.assertEqual(content.cv, content.cv.name)
self.assertEqual(content.file, content.file.name)
self.assertEqual(content.image, content.image.name)
def tearDown(self):
for file_object in os.listdir(settings.MEDIA_ROOT):
file_object_path = os.path.join(settings.MEDIA_ROOT, file_object)
if os.path.isfile(file_object_path):
os.unlink(file_object_path)
else:
shutil.rmtree(file_object_path)
class EducationModelTest(TestCase):
"""
Test case for `EducationModel`
"""
@mock.patch('django.utils.timezone.now', mock_datetime_now)
def setUp(self):
"""
Sets up the `EducationModel` and mocks django `timezone`
"""
im = Image.new(mode='RGB', size=(200, 200)) # create a new image using PIL
im_io = BytesIO() # a BytesIO object for saving image
im.save(im_io, 'JPEG') # save the image to im_io
im_io.seek(0)
model = ContentModel.objects.create(ref_id=1)
EducationModel.objects.create(id=1,
ref_id=model,
title="some title",
from_date=mock_date(),
to_date=mock_date(),
where="somewhere",
current=True,
file=SimpleUploadedFile('education_model.txt',
'these are the file contents!'.encode('utf-8')),
image=InMemoryUploadedFile(im_io, None, 'education_model.jpg', 'image/jpeg',
im_io,
None))
def test_model(self):
"""
Tests `id`, `title`, `from_date`, `to_date`, `where` and `current`.
"""
content = EducationModel.objects.get(id=1)
self.assertEqual(content.id, 1)
self.assertEqual(content.title, "some title")
self.assertEqual(content.from_date, mock_date())
self.assertEqual(content.to_date, mock_date())
self.assertEqual(content.where, "somewhere")
self.assertEqual(content.current, True)
def test_files(self):
"""
Tests `file` and `image`.
"""
content = EducationModel.objects.get(id=1)
self.assertEqual(content.file, content.file.name)
self.assertEqual(content.image, content.image.name)
def test_re_upload(self):
"""
Testing re-uploads for `file`, and `image`
"""
im = Image.new(mode='RGB', size=(200, 200)) # create a new image using PIL
im_io = BytesIO() # a BytesIO object for saving image
im.save(im_io, 'JPEG') # save the image to im_io
im_io.seek(0)
EducationModel.objects.update(id=1,
file=SimpleUploadedFile('education_model_1.txt',
'these are the file contents!'.encode('utf-8')),
image=InMemoryUploadedFile(im_io, None, 'education_model_1.jpg', 'image/jpeg',
im_io,
None))
content = EducationModel.objects.get(id=1)
self.assertEqual(content.file, content.file.name)
self.assertEqual(content.image, content.image.name)
def tearDown(self):
for file_object in os.listdir(settings.MEDIA_ROOT):
file_object_path = os.path.join(settings.MEDIA_ROOT, file_object)
if os.path.isfile(file_object_path):
os.unlink(file_object_path)
else:
shutil.rmtree(file_object_path)
class ProjectsModelTest(TestCase):
"""
Test case for `ProjectsModel`
"""
@mock.patch('django.utils.timezone.now', mock_datetime_now)
def setUp(self):
"""
Sets up the `ProjectsModel` and mocks django `timezone`
"""
im = Image.new(mode='RGB', size=(200, 200)) # create a new image using PIL
im_io = BytesIO() # a BytesIO object for saving image
im.save(im_io, 'JPEG') # save the image to im_io
im_io.seek(0)
model = ContentModel.objects.create(ref_id=1)
ProjectsModel.objects.create(id=1,
ref_id=model,
link="https://www.example.com",
title="some title",
category="some category",
long_description="very long description\n yes very long",
short_description="short description",
file=SimpleUploadedFile('project_model.txt',
'these are the file contents!'.encode('utf-8')),
image=InMemoryUploadedFile(im_io, None, 'project_model.jpg', 'image/jpeg',
im_io,
None))
def test_model(self):
"""
Tests `id`, `link`, `title`, `category`, `long_description`, and `short_description`
"""
content = ProjectsModel.objects.get(id=1)
self.assertEqual(content.id, 1)
self.assertEqual(content.link, "https://www.example.com")
self.assertEqual(content.title, "some title")
self.assertEqual(content.category, "some category")
self.assertEqual(content.long_description, "very long description\n yes very long")
self.assertEqual(content.short_description, "short description")
def test_files(self):
"""
Tests `file` and `image`.
"""
content = ProjectsModel.objects.get(id=1)
self.assertEqual(content.file, content.file.name)
self.assertEqual(content.image, content.image.name)
def test_re_upload(self):
"""
Testing re-uploads for `file`, and `image`
"""
im = Image.new(mode='RGB', size=(200, 200)) # create a new image using PIL
im_io = BytesIO() # a BytesIO object for saving image
im.save(im_io, 'JPEG') # save the image to im_io
im_io.seek(0)
ProjectsModel.objects.update(id=1,
file=SimpleUploadedFile('project_model_1.txt',
'these are the file contents!'.encode('utf-8')),
image=InMemoryUploadedFile(im_io, None, 'project_model_1.jpg', 'image/jpeg',
im_io,
None))
content = ProjectsModel.objects.get(id=1)
self.assertEqual(content.file, content.file.name)
self.assertEqual(content.image, content.image.name)
def tearDown(self):
for file_object in os.listdir(settings.MEDIA_ROOT):
file_object_path = os.path.join(settings.MEDIA_ROOT, file_object)
if os.path.isfile(file_object_path):
os.unlink(file_object_path)
else:
shutil.rmtree(file_object_path)
class TutorialsModelTest(TestCase):
"""
Test case for `TutorialsModel`
"""
@mock.patch('django.utils.timezone.now', mock_datetime_now)
def setUp(self):
"""
Sets up the `TutorialsModel` and mocks django `timezone`
"""
im = Image.new(mode='RGB', size=(200, 200)) # create a new image using PIL
im_io = BytesIO() # a BytesIO object for saving image
im.save(im_io, 'JPEG') # save the image to im_io
im_io.seek(0)
model = ContentModel.objects.create(ref_id=1)
TutorialsModel.objects.create(id=1,
ref_id=model,
link="https://www.example.com",
title="some title",
long_description="very long description\n yes very long",
file=SimpleUploadedFile('tutorials_model.txt',
'these are the file contents!'.encode('utf-8')),
image=InMemoryUploadedFile(im_io, None, 'tutorials_model.jpg', 'image/jpeg',
im_io,
None))
def test_model(self):
"""
Tests `id`, `link`, `title` and `long_description`
"""
content = TutorialsModel.objects.get(id=1)
self.assertEqual(content.id, 1)
self.assertEqual(content.link, "https://www.example.com")
self.assertEqual(content.title, "some title")
self.assertEqual(content.long_description, "very long description\n yes very long")
def test_files(self):
"""
Tests `file` and `image`.
"""
content = TutorialsModel.objects.get(id=1)
self.assertEqual(content.file, content.file.name)
self.assertEqual(content.image, content.image.name)
def test_re_upload(self):
"""
Testing re-uploads for `file`, and `image`
"""
im = Image.new(mode='RGB', size=(200, 200)) # create a new image using PIL
im_io = BytesIO() # a BytesIO object for saving image
im.save(im_io, 'JPEG') # save the image to im_io
im_io.seek(0)
TutorialsModel.objects.update(id=1,
file=SimpleUploadedFile('tutorial_model_1.txt',
'these are the file contents!'.encode('utf-8')),
image=InMemoryUploadedFile(im_io, None, 'tutorial_model_1.jpg', 'image/jpeg',
im_io,
None))
content = TutorialsModel.objects.get(id=1)
self.assertEqual(content.file, content.file.name)
self.assertEqual(content.image, content.image.name)
def tearDown(self):
for file_object in os.listdir(settings.MEDIA_ROOT):
file_object_path = os.path.join(settings.MEDIA_ROOT, file_object)
if os.path.isfile(file_object_path):
os.unlink(file_object_path)
else:
shutil.rmtree(file_object_path)
class ExperienceModelTest(TestCase):
"""
Test case for `ExperienceModel`
"""
@mock.patch('django.utils.timezone.now', mock_datetime_now)
def setUp(self):
"""
Sets up the `ExperienceModel` and mocks django `timezone`
"""
model = ContentModel.objects.create(ref_id=1)
ExperienceModel.objects.create(id=1,
ref_id=model,
title="some title",
from_date=mock_date(),
to_date=mock_date(),
where_city="some city",
where_country="some country",
current=True,
company="some company")
def test_model(self):
"""
Tests `id`, `title`, `from_date`, `to_date`, `where_city`, `where_country`, `company` and `current`.
"""
content = ExperienceModel.objects.get(id=1)
self.assertEqual(content.id, 1)
self.assertEqual(content.title, "some title")
self.assertEqual(content.from_date, mock_date())
self.assertEqual(content.to_date, mock_date())
self.assertEqual(content.where_city, "some city")
self.assertEqual(content.where_country, "some country")
self.assertEqual(content.company, "some company")
self.assertEqual(content.current, True)
class SkillsModelTest(TestCase):
"""
Test case for `SkillsModel`
"""
@mock.patch('django.utils.timezone.now', mock_datetime_now)
def setUp(self):
"""
Sets up the `SkillsModel` and mocks django `timezone`
"""
model = ContentModel.objects.create(ref_id=1)
SkillsModel.objects.create(ref_id=model, type_of_skill="some type")
def test_model(self):
"""
Tests `type_of_skill`
"""
content = SkillsModel.objects.get(type_of_skill="some type")
self.assertEqual(content.type_of_skill, "some type")
class SkillsContentModelTest(TestCase):
"""
Test case for `SkillsContentModel`
"""
@mock.patch('django.utils.timezone.now', mock_datetime_now)
def setUp(self):
"""
Sets up the `SkillsContentModel` and mocks django `timezone`
"""
im = Image.new(mode='RGB', size=(200, 200)) # create a new image using PIL
im_io = BytesIO() # a BytesIO object for saving image
im.save(im_io, 'JPEG') # save the image to im_io
im_io.seek(0)
model = ContentModel.objects.create(ref_id=1)
skills_model = SkillsModel.objects.create(ref_id=model, type_of_skill="some type")
SkillsContentModel.objects.create(id=1,
type_of_skill=skills_model,
content="some content",
file=SimpleUploadedFile('skills_content_model.txt',
'these are the file contents!'.encode('utf-8')),
image=InMemoryUploadedFile(im_io, None, 'skills_content_model.jpg',
'image/jpeg',
im_io,
None))
def test_model(self):
"""
Tests `id` and `content`
"""
content = SkillsContentModel.objects.get(id=1)
self.assertEqual(content.id, 1)
self.assertEqual(content.content, "some content")
def test_files(self):
"""
Tests `file` and `image`.
"""
content = SkillsContentModel.objects.get(id=1)
self.assertEqual(content.file, content.file.name)
self.assertEqual(content.image, content.image.name)
def test_re_upload(self):
"""
Testing re-uploads for `file`, and `image`
"""
im = Image.new(mode='RGB', size=(200, 200)) # create a new image using PIL
im_io = BytesIO() # a BytesIO object for saving image
im.save(im_io, 'JPEG') # save the image to im_io
im_io.seek(0)
SkillsContentModel.objects.update(id=1,
file=SimpleUploadedFile('skills_content_model_1.txt',
'these are the file contents!'.encode('utf-8')),
image=InMemoryUploadedFile(im_io, None, 'skills_content_model_1.jpg',
'image/jpeg', im_io, None))
content = SkillsContentModel.objects.get(id=1)
self.assertEqual(content.file, content.file.name)
self.assertEqual(content.image, content.image.name)
def tearDown(self):
for file_object in os.listdir(settings.MEDIA_ROOT):
file_object_path = os.path.join(settings.MEDIA_ROOT, file_object)
if os.path.isfile(file_object_path):
os.unlink(file_object_path)
else:
shutil.rmtree(file_object_path)
class PublicationsModelTest(TestCase):
"""
Test case for `PublicationsModel`
"""
@mock.patch('django.utils.timezone.now', mock_datetime_now)
def setUp(self):
"""
Sets up the `PublicationsModel` and mocks django `timezone`
"""
model = ContentModel.objects.create(ref_id=1)
PublicationsModel.objects.create(ref_id=model, type_of_publication="some publication")
def test_model(self):
"""
Tests `type_of_publication`
"""
content = PublicationsModel.objects.get(type_of_publication="some publication")
self.assertEqual(content.type_of_publication, "some publication")
class PublicationsContentModelTest(TestCase):
"""
    Test case for `PublicationsContentModel`
"""
@mock.patch('django.utils.timezone.now', mock_datetime_now)
def setUp(self):
"""
        Sets up the `PublicationsContentModel` and mocks django `timezone`
"""
im = Image.new(mode='RGB', size=(200, 200)) # create a new image using PIL
im_io = BytesIO() # a BytesIO object for saving image
im.save(im_io, 'JPEG') # save the image to im_io
im_io.seek(0)
model = ContentModel.objects.create(ref_id=1)
publication_model = PublicationsModel.objects.create(ref_id=model, type_of_publication="some publication")
PublicationsContentModel.objects.create(id=1,
type_of_publication=publication_model,
content="some content",
file=SimpleUploadedFile('publication_content_model.txt',
'these are the file contents!'.encode('utf-8')),
image=InMemoryUploadedFile(im_io, None, 'publication_content_model.jpg',
'image/jpeg',
im_io,
None))
def test_model(self):
"""
Tests `id` and `content`
"""
content = PublicationsContentModel.objects.get(id=1)
self.assertEqual(content.id, 1)
self.assertEqual(content.content, "some content")
def test_files(self):
"""
Tests `file` and `image`.
"""
content = PublicationsContentModel.objects.get(id=1)
self.assertEqual(content.file, content.file.name)
self.assertEqual(content.image, content.image.name)
def test_re_upload(self):
"""
Testing re-uploads for `file`, and `image`
"""
im = Image.new(mode='RGB', size=(200, 200)) # create a new image using PIL
im_io = BytesIO() # a BytesIO object for saving image
im.save(im_io, 'JPEG') # save the image to im_io
im_io.seek(0)
PublicationsContentModel.objects.update(id=1,
file=SimpleUploadedFile('publication_content_model_1.txt',
'these are the file contents!'.encode('utf-8')),
image=InMemoryUploadedFile(im_io, None,
'publication_content_model_1.jpg',
'image/jpeg', im_io, None))
content = PublicationsContentModel.objects.get(id=1)
self.assertEqual(content.file, content.file.name)
self.assertEqual(content.image, content.image.name)
def tearDown(self):
for file_object in os.listdir(settings.MEDIA_ROOT):
file_object_path = os.path.join(settings.MEDIA_ROOT, file_object)
if os.path.isfile(file_object_path):
os.unlink(file_object_path)
else:
shutil.rmtree(file_object_path)
class MetaContentModelTest(TestCase):
"""
Test case for `MetaContentModel`
"""
@mock.patch('django.utils.timezone.now', mock_datetime_now)
def setUp(self):
"""
Sets up `MetaContentModel`
"""
MetaContentModel.objects.create(ref_id=1,
header="some header",
footer="some footer",
meta="some meta")
def test_model(self):
"""
Tests `id`, `header`, `footer` and `meta`
"""
content = MetaContentModel.objects.get(ref_id=1)
self.assertEqual(content.ref_id, 1)
self.assertEqual(content.header, "some header")
self.assertEqual(content.footer, "some footer")
self.assertEqual(content.meta, "some meta")
| mit | 7,561,253,899,140,527,000 | 38.218891 | 133 | 0.512176 | false |
xjw1001001/IGCexpansion | test/Ancestral_reconstruction/PAML/parse reconstructed fasta.py | 1 | 7314 | # -*- coding: utf-8 -*-
"""
Created on Thu Aug 10 08:23:33 2017
@author: xjw1001001
"""
# Only run when the PAML output in Desktop is available (the yeast version only)
from Bio import Seq, SeqIO, AlignIO
from Bio.Phylo.PAML import codeml, baseml
import numpy as np
paralog_list = [['YLR406C', 'YDL075W'],
['YER131W', 'YGL189C'],
['YML026C', 'YDR450W'],
['YNL301C', 'YOL120C'],
['YNL069C', 'YIL133C'],
['YMR143W', 'YDL083C'],
['YJL177W', 'YKL180W'],
['YBR191W', 'YPL079W'],
['YER074W', 'YIL069C'],
['YDR418W', 'YEL054C'],
['YBL087C', 'YER117W'],
['YLR333C', 'YGR027C'],
['YMR142C', 'YDL082W'],
['YER102W', 'YBL072C'],
]
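# codeml labels ancestral nodes as 'node #N'; the loops below rewrite those
# labels into readable FASTA headers (Root/N0/N1... suffixed with the paralog
# name) and re-insert the line breaks that make each record valid FASTA.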
for pair in paralog_list:
primalline=[]
fastaline=[]
with open('/Users/xjw1001001/Desktop/PAML/output/' + '_'.join(pair) +'/out/construct.fasta','r') as f:
for line in f.readlines():
primalline.append(line)
sline = '>' + line
sline=sline.replace('node #14','Root'+pair[0])
sline=sline.replace(' ','')
sline=sline.replace('\n','')
sline=sline.replace('node#15','N0'+pair[0])
for i in range(5):
sline=sline.replace('node#' + str(15+1+i),'N'+str(1+i)+pair[1])
sline=sline.replace('node#' + str(20+1+i),'N'+str(1+i)+pair[0])
sline=sline.replace(pair[0],pair[0] + '\n')
sline=sline.replace(pair[1],pair[1] + '\n')
fastaline.append(sline)
f1 = open('/Users/xjw1001001/Desktop/PAML/PAMLfasta/PAML_' + '_'.join(pair) +'.fasta','w+')
for line in fastaline:
f1.write(line)
f1.write('\n')
f1.close()
#ERa_ERb
pair = ['ERa','ERb']
primalline=[]
fastaline=[]
substitution_dict = {'node#39':'N14ERa','node#38':'N8ERa','node#37':'N7ERa','node#36':'N6ERa','node#41':'N9ERa','node#40':'N5ERa'
,'node#35':'N4ERa','node#44':'N13ERa','node#46':'N12ERa','node#47':'N11ERa','node#45':'N10ERa'
,'node#43':'N3ERa','node#42':'N2ERa','node#34':'N1ERa'
,'node#53':'N14ERb','node#52':'N8ERb','node#51':'N7ERb','node#50':'N6ERb','node#55':'N9ERb','node#54':'N5ERb'
,'node#49':'N4ERb','node#58':'N13ERb','node#60':'N12ERb','node#61':'N11ERb','node#59':'N10ERb'
,'node#57':'N3ERb','node#56':'N2ERb','node#48':'N1ERb'}
with open('/Users/xjw1001001/Desktop/PAML/output/' + '_'.join(pair) +'/out/construct.fasta','r') as f:
for line in f.readlines():
primalline.append(line)
sline = '>' + line
sline=sline.replace('node #32','Root'+pair[0])
sline=sline.replace(' ','')
sline=sline.replace('\n','')
sline=sline.replace('node#33','N0'+pair[0])
for i in substitution_dict.keys():
sline=sline.replace(i,substitution_dict[i])
sline=sline.replace(pair[0],pair[0] + '\n')
sline=sline.replace(pair[1],pair[1] + '\n')
fastaline.append(sline)
f1 = open('/Users/xjw1001001/Desktop/PAML/PAMLfasta/PAML_' + '_'.join(pair) +'.fasta','w+')
for line in fastaline:
f1.write(line)
f1.write('\n')
f1.close()
#ARa_ERa
pair = ['ARa','ERa']
primalline=[]
fastaline=[]
substitution_dict = {'node#36':'N12ERa','node#35':'N11ERa','node#34':'N7ERa','node#33':'N6ERa','node#32':'N5ERa','node#37':'N8ERa'
,'node#31':'N4ERa','node#41':'N10ERa','node#40':'N9ERa','node#39':'N3ERa','node#38':'N2ERa'
,'node#30':'N1ERa'
,'node#48':'N12ARa','node#47':'N11ARa','node#46':'N7ARa','node#45':'N6ARa','node#44':'N5ARa','node#49':'N8ARa'
,'node#43':'N4ARa','node#53':'N10ARa','node#52':'N9ARa','node#51':'N3ARa','node#50':'N2ARa'
,'node#42':'N1ARa','node#29':'N0ERa','node#28':'RootERa'}
with open('/Users/xjw1001001/Desktop/PAML/output/' + '_'.join(pair) +'/out/construct.fasta','r') as f:
for line in f.readlines():
primalline.append(line)
sline = '>' + line
sline=sline.replace(' ','')
sline=sline.replace('\n','')
for i in substitution_dict.keys():
sline=sline.replace(i,substitution_dict[i])
sline=sline.replace(pair[0],pair[0] + '\n')
sline=sline.replace(pair[1],pair[1] + '\n')
fastaline.append(sline)
f1 = open('/Users/xjw1001001/Desktop/PAML/PAMLfasta/PAML_' + '_'.join(pair) +'.fasta','w+')
for line in fastaline:
f1.write(line)
f1.write('\n')
f1.close()
# AR/GR/MR/PR pairs
pairlist = [['AR', 'MR'],
['AR', 'GR'],
['AR', 'PR'],
['MR', 'GR'],
['MR', 'PR'],
['PR', 'GR']]
for pair in pairlist:
primalline=[]
fastaline=[]
substitution_dict = {'node#25':'N4'+pair[0],'node#31':'N9'+pair[0],'node#30':'N7'+pair[0]
,'node#32':'N8'+pair[0],'node#29':'N6'+pair[0],'node#28':'N5'+pair[0]
,'node#27':'N3'+pair[0],'node#26':'N2'+pair[0],'node#24':'N1'+pair[0]
,'node#34':'N4'+pair[1],'node#40':'N9'+pair[1],'node#39':'N7'+pair[1]
,'node#41':'N8'+pair[1],'node#38':'N6'+pair[1],'node#37':'N5'+pair[1]
,'node#36':'N3'+pair[1],'node#35':'N2'+pair[1],'node#33':'N1'+pair[1]
,'node#23':'N0'+pair[0],'node#22':'ROOT'+pair[0]
}
with open('/Users/xjw1001001/Desktop/PAML/output/' + '_'.join(pair) +'/out/construct.fasta','r') as f:
for line in f.readlines():
primalline.append(line)
sline = '>' + line
sline=sline.replace(' ','')
sline=sline.replace('\n','')
for i in substitution_dict.keys():
sline=sline.replace(i,substitution_dict[i])
sline=sline.replace(pair[0],pair[0] + '\n')
sline=sline.replace(pair[1],pair[1] + '\n')
fastaline.append(sline)
f1 = open('/Users/xjw1001001/Desktop/PAML/PAMLfasta/PAML_' + '_'.join(pair) +'.fasta','w+')
for line in fastaline:
f1.write(line)
f1.write('\n')
f1.close()
PAML_parameter_dict = {}
path = '/Users/xjw1001001/Desktop/PAML/'
paralog_list = [['YLR406C', 'YDL075W'],  # pairs  # TODO: other data
['YER131W', 'YGL189C'], ['YML026C', 'YDR450W'], ['YNL301C', 'YOL120C'], ['YNL069C', 'YIL133C'],
['YMR143W', 'YDL083C'], ['YJL177W', 'YKL180W'], ['YBR191W', 'YPL079W'], ['YER074W', 'YIL069C'],
['YDR418W', 'YEL054C'], ['YBL087C', 'YER117W'], ['YLR333C', 'YGR027C'], ['YMR142C', 'YDL082W'],
['YER102W', 'YBL072C'], ['EDN', 'ECP'],['ERa', 'ERb'],['AR', 'MR'],['AR', 'GR'],['AR', 'PR'],
['MR', 'GR'],['MR', 'PR'],['PR', 'GR'] ]
for pair in paralog_list:  # parameters: kappa (index -5), omega (index -1), tau, branches
PAML_parameter_dict['_'.join(pair)] = {}
codeml_result = codeml.read(path+'output/' + '_'.join(pair) + '/out/' + '_'.join(pair) + '_codeml')
#baseml_result = baseml.read('/Users/xjw1001001/Documents/GitHub/IGCexpansion2/test/Ancestral_reconstruction/PAML/output/' + '_'.join(pair) + '/' + '_'.join(pair) + '_baseml')
parameter_list = codeml_result['NSsites'][0]['parameters']['parameter list'].split(' ')
PAML_parameter_dict['_'.join(pair)]['kappa'] = parameter_list[-5]
PAML_parameter_dict['_'.join(pair)]['omega'] = parameter_list[-1]
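# Illustrative check (commented out; assumes the loop above has populated the
# dictionary):
# for name, pars in PAML_parameter_dict.items():
#     print(name, 'kappa =', pars['kappa'], 'omega =', pars['omega'])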
| gpl-3.0 | -1,790,204,341,473,735,700 | 45.592357 | 179 | 0.537462 | false |
qinjian623/dlnotes | tutorials/tensorflow/mnist_softmax.py | 1 | 2619 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A very simple MNIST classifier.
See extensive documentation at
http://tensorflow.org/tutorials/mnist/beginners/index.md
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
# Import data
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
FLAGS = None
def main(_):
mnist = input_data.read_data_sets(FLAGS.data_dir, one_hot=True)
# Create the model
x = tf.placeholder(tf.float32, [None, 784])
W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
y = tf.matmul(x, W) + b
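  # Shapes: x is [batch, 784] flattened pixels, W is [784, 10], b is [10],
  # so y holds the [batch, 10] unnormalized class scores (logits)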
# Define loss and optimizer
y_ = tf.placeholder(tf.float32, [None, 10])
# The raw formulation of cross-entropy,
#
  #   tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(tf.nn.softmax(y)),
  #                                 reduction_indices=[1]))
#
# can be numerically unstable.
#
# So here we use tf.nn.softmax_cross_entropy_with_logits on the raw
# outputs of 'y', and then average across the batch.
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(y, y_))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
sess = tf.InteractiveSession()
# Train
tf.initialize_all_variables().run()
for _ in range(50000):
batch_xs, batch_ys = mnist.train.next_batch(100)
sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
# Test trained model
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print(sess.run(accuracy, feed_dict={x: mnist.test.images,
y_: mnist.test.labels}))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--data_dir', type=str, default='/tmp/data',
help='Directory for storing data')
FLAGS = parser.parse_args()
tf.app.run()
| gpl-3.0 | -4,167,562,545,988,799,000 | 33.012987 | 80 | 0.66323 | false |
Eigenstate/dabble | dabble/test/dor_abssel/test_dor_absolute_sel.py | 1 | 1915 | """
Tests absolute box size with ignored selection
"""
import os
import pytest
dir = os.path.dirname(__file__) + "/"
#==============================================================================
@pytest.mark.skip(reason="Missing input file")
def test_absolute_box(tmpdir):
"""
Tests the absolute box size for a system with ligands far from
the box
"""
from vmd import atomsel, molecule
from dabble import DabbleBuilder
# Build the system
p = str(tmpdir)
b = DabbleBuilder(solute_filename=os.path.join(dir, "dor_ligs.mae"),
output_filename=os.path.join(p, "test.mae"),
user_x=75., user_y=75., user_z=115.,
overwrite=True, tmp_dir=p,
exclude_sel="same fragment as resname FRAG")
b.write()
# Load the built system
m2 = molecule.load("mae", os.path.join(p, "test.mae"))
molecule.set_top(m2)
# Check all the ligands are there
assert len(set(atomsel("resname FRAG").residue)) == 3
#==============================================================================
#def test_absolute_box_noexclude(tmpdir):
# """
# Tests the absolute box size for a system with ligands far from
# the box, without using an exclude selection
# """
# from vmd import atomsel, molecule
#
# from dabble import DabbleBuilder
# p = str(tmpdir)
#
# # Building the system should raise a valueerror in sanity check
# # as resids are duplicated in protein chain
# with pytest.raises(ValueError):
# b = DabbleBuilder(solute_filename=os.path.join(dir, "dor_ligs.mae"),
# output_filename=os.path.join(p, "test.mae"),
# user_x=75., user_y=75., user_z=115.,
# overwrite=True, tmp_dir=p)
# b.write()
#
#==============================================================================
| gpl-2.0 | 6,778,799,397,482,887,000 | 33.196429 | 79 | 0.526371 | false |
rinigus/osmscout-server | scripts/import/prepare_distribution.py | 1 | 5119 | #!/usr/bin/env python
# This script prepares files before uploading them for distribution
# This has to be run after all imports are finished
import json, pickle, os, stat, shutil
from mapbox_country_pack import world_pack as mapboxgl_world_pack
root_dir = "distribution"
bucket = open("bucket_name", "r").read().strip()
url_base = "http://data.modrana.org/osm_scout_server"
#url_base = "https://kuqrhldx.e24files.com"
url_specs = {
"base": url_base,
"type": "url",
#"osmscout": "osmscout-27",
"geocoder_nlp": "geocoder-nlp-29",
"postal_global": "postal-global-2",
"postal_country": "postal-country-2",
"mapnik_global": "mapnik-global-1",
"mapnik_country": "mapnik-country-24",
"mapboxgl_country": "mapboxgl-16",
"mapboxgl_global": "mapboxgl-16",
"mapboxgl_glyphs": "mapboxgl-16",
"valhalla": "valhalla-24",
}
dist = json.loads( open("countries.json", "r").read() )
dist["postal/global"] = {
"id": "postal/global",
"type": "postal/global",
"postal_global": { "path": "postal/global-v1" }
}
dist["mapnik/global"] = {
"id": "mapnik/global",
"type": "mapnik/global",
"mapnik_global": { "path": "mapnik/global" }
}
dist["mapboxgl/glyphs"] = {
"id": "mapboxgl/glyphs",
"type": "mapboxgl/glyphs",
"mapboxgl_glyphs": { "path": "mapboxgl/glyphs" }
}
dist["url"] = url_specs
# This could be made smarter in the future by checking whether the files
# have changed since the last upload
toupload = []
upload_commands = "#!/bin/bash\nset -e\nrm -f digest.md5\n"
def uploader(dirname, targetname, extra="/"):
global toupload, upload_commands
toupload.append([dirname, targetname])
upload_commands += "echo\necho " + dirname + "\n"
sd = dirname.replace("/", "\/")
st = targetname.replace("/", "\/")
upload_commands += "md5deep -t -l -r " + dirname + " | sed 's/%s/%s/g' >> digest.md5\n" % (sd,st)
upload_commands += "s3cmd --config=.s3cfg sync " + dirname + extra + " s3://" + bucket + "/" + targetname + extra + " --acl-public --signature-v2 " + "\n"
def getprop(dirname):
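    # Read the sidecar metadata files written next to each package,
    # e.g. "<dirname>.size" and "<dirname>.timestamp"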
props = {}
for p in ["size", "size-compressed", "timestamp", "version"]:
v = open(dirname + "." + p, "r").read().split()[0]
props[p] = v
return props
# fill database details
for d in dist:
for sub in dist[d]:
if "packages" in dist[d][sub]:
continue # this item is distributed via packages
try:
rpath = dist[d][sub]["path"]
print(rpath)
except:
continue
locdir = root_dir + "/" + rpath
remotedir = url_specs[sub] + "/" + rpath
dist[d][sub].update( getprop(locdir) )
uploader(locdir, remotedir)
uploader(root_dir + "/valhalla", url_specs["valhalla"] + "/valhalla")
uploader(root_dir + "/mapboxgl/packages", url_specs["mapboxgl_country"] + "/mapboxgl/packages")
# add mapbox global object after uploader commands are ready
dist["mapboxgl/global"] = {
"id": "mapboxgl/global",
"type": "mapboxgl/global",
"mapboxgl_global": mapboxgl_world_pack()
}
# save provided countries
fjson = open("provided/countries_provided.json", "w")
fjson.write( json.dumps( dist, sort_keys=True, indent=4, separators=(',', ': ')) )
fjson.close()
uploader("provided/countries_provided.json", "countries_provided.json", extra = "")
upload_commands += "bzip2 -f digest.md5\n"
uploader("digest.md5.bz2", "digest.md5.bz2", extra = "")
upload_commands += "echo\necho 'Set S3 permissions'\n"
upload_commands += "s3cmd --config=.s3cfg setacl s3://" + bucket + "/ --acl-public --recursive\n"
upload_commands += "mv digest.md5 digest.md5.bz2.md5\n"
uploader("digest.md5.bz2.md5", "digest.md5.bz2.md5", extra = "")
# save uploader script
fscript = open("uploader.sh", "w")
fscript.write( upload_commands )
fscript.write( "echo\necho 'Set S3 permissions'\n" )
fscript.write( "s3cmd --config=.s3cfg setacl s3://" + bucket + "/ --acl-public --recursive\n" )
fscript.write( "s3cmd --config=.s3cfg setacl s3://" + bucket + "/ --acl-private\n" )
fscript.close()
st = os.stat('uploader.sh')
os.chmod('uploader.sh', st.st_mode | stat.S_IEXEC)
print("Check uploader script and run it")
# generate public_html folder for testing
testing_mirror = "public_http"
shutil.rmtree(testing_mirror, ignore_errors=True)
os.mkdir(testing_mirror)
os.symlink("../provided/countries_provided.json",
os.path.join(testing_mirror, "countries_provided.json"))
distlink = { "geocoder_nlp": "geocoder-nlp",
"mapboxgl_country": "mapboxgl",
"mapnik_country": "mapnik",
"mapnik_global": "mapnik",
#"osmscout": "osmscout",
"postal_country": "postal",
"postal_global": "postal",
"valhalla": "valhalla" }
for t in ["geocoder_nlp", "mapboxgl_country",
"mapnik_country", "mapnik_global",
#"osmscout",
"postal_country", "postal_global", "valhalla" ]:
d = os.path.join(testing_mirror, url_specs[t])
os.mkdir(d)
os.symlink( "../../distribution/" + distlink[t], os.path.join(d, distlink[t]) )
| gpl-3.0 | 1,752,026,904,201,722,600 | 33.126667 | 158 | 0.621606 | false |
jose-caballero/cvmfsreplica | cvmfsreplica/cvmfsreplicaex.py | 1 | 1122 | #! /usr/bin/env python
#
# exception classes for cvmfsreplica project
class ServiceConfigurationFailure(Exception):
"""
Exception to be raised when basic service configuration
cannot be read
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class RepositoriesConfigurationFailure(Exception):
"""
Exception to be raised when basic repositories configuration
cannot be read
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class PluginConfigurationFailure(Exception):
"""
Exception to be raised when a plugin configuration
cannot be read
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class AcceptancePluginFailed(Exception):
"""
Exception to be raised when an Acceptance Plugin
failed and it has an attribute should_abort = True
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
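# Illustrative usage (the reader function in this sketch is hypothetical):
#
#   try:
#       conf = read_service_config(path)
#   except ServiceConfigurationFailure as ex:
#       print('cannot read service configuration: %s' % ex)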
| gpl-3.0 | -3,819,787,098,476,026,400 | 22.87234 | 64 | 0.635472 | false |
lohner/Praktomat | src/tasks/views.py | 1 | 5082 | # -*- coding: utf-8 -*-
import tempfile
import zipfile
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from django.shortcuts import render, get_object_or_404
from django.http import Http404
from django.http import HttpResponseRedirect, HttpResponse
from datetime import datetime
from django import forms
from django.core import urlresolvers
from django.contrib import messages
import django.utils.timezone
from tasks.models import Task
from solutions.forms import ModelSolutionFormSet
from solutions.models import Solution, SolutionFile
from accounts.models import User
from accounts.views import access_denied
from attestation.models import Attestation
from attestation.views import user_task_attestation_map
from configuration import get_settings
@login_required
def taskList(request):
now = django.utils.timezone.now()
tasks = Task.objects.filter(publication_date__lte = now).order_by('submission_date')
expired_Tasks = Task.objects.filter(submission_date__lt = now).order_by('publication_date','submission_date')
try:
tutors = request.user.tutorial.tutors.all()
except:
tutors = None
trainers = User.objects.filter(groups__name="Trainer")
# we only have a single user here, so the rating_list only contains a single row;
# this row belongs to that user
(_,attestations,threshold,calculated_grade) = user_task_attestation_map([request.user], tasks)[0]
attestations = map(lambda a, b: (a,)+b, tasks, attestations)
def tasksWithSolutions(tasks):
return map(lambda t: (t, t.final_solution(request.user)), tasks)
return render(request,
'tasks/task_list.html',
{
'tasks':tasksWithSolutions(tasks),
'expired_tasks': tasksWithSolutions(expired_Tasks),
'attestations':attestations,
'show_final_grade': get_settings().final_grades_published,
'tutors':tutors,
'trainers':trainers,
'threshold':threshold,
'calculated_grade':calculated_grade,
})
@login_required
def taskDetail(request,task_id):
task = get_object_or_404(Task,pk=task_id)
if task.publication_date >= datetime.now() and not request.user.is_trainer:
raise Http404
my_solutions = Task.objects.get(pk=task_id).solution_set.filter(author = request.user)
return render(request,
'tasks/task_detail.html',
{
'task': task,
'solutions': my_solutions,
})
class ImportForm(forms.Form):
file = forms.FileField()
@staff_member_required
def import_tasks(request):
""" View in the admin """
if request.method == 'POST':
form = ImportForm(request.POST, request.FILES)
if form.is_valid():
try:
Task.import_Tasks(form.files['file'], request.user)
                messages.success(request, "The import was successful.")
return HttpResponseRedirect(urlresolvers.reverse('admin:tasks_task_changelist'))
except Exception, e:
from django.forms.utils import ErrorList
msg = "An Error occured. The import file was propably malformed.: %s" % str(e)
form._errors["file"] = ErrorList([msg])
else:
form = ImportForm()
return render(request, 'admin/tasks/task/import.html', {'form': form, 'title':"Import Task" })
@staff_member_required
def download_final_solutions(request, task_id):
""" download all final solutions of a task from the admin interface """
zip_file = tempfile.SpooledTemporaryFile()
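    # SpooledTemporaryFile keeps the archive in memory and only spills to
    # disk if it grows large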
zip = zipfile.ZipFile(zip_file,'w')
for solution_file in SolutionFile.objects.filter(solution__task=task_id):
if solution_file.solution.final:
zip.write(solution_file.file.path, solution_file.file.name)
zip.close()
zip_file.seek(0)
response = HttpResponse(zip_file.read(), content_type="application/zip")
response['Content-Disposition'] = 'attachment; filename=FinalSolutions.zip'
return response
@staff_member_required
def model_solution(request, task_id):
""" View in the admin """
task = get_object_or_404(Task,pk=task_id)
if request.method == "POST":
solution = Solution(task = task, author=request.user)
formset = ModelSolutionFormSet(request.POST, request.FILES, instance=solution)
if formset.is_valid():
try:
                solution.save()
                # No deleting of the old model solution here: a delete would
                # cascade at the DB level and remove checker results and
                # checkers. As this isn't easily prevented, the old solution
                # is kept around until the task itself is deleted.
formset.save()
solution.check_solution(request.session)
                task.model_solution = solution
task.save()
except:
solution.delete() # delete files
                raise # don't commit db changes
else:
formset = ModelSolutionFormSet()
context = {"formset": formset, "task": task, 'title': "Model Solution", 'is_popup': True, }
return render(request, "admin/tasks/task/model_solution.html", context)
| gpl-2.0 | -790,482,774,938,512,000 | 36.925373 | 110 | 0.684967 | false |
SchulzLab/SOS | install_script.py | 1 | 11581 | #!/usr/bin/env python
import os
from optparse import OptionParser
import subprocess
import sys
class install_script():
def __init__(self):
self.prog_installed = []
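        # collects the paths of every component downloaded or built in this run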
def obtaining_tar(self, prog, path):
if (prog == 6):
os.chdir(path)
            # Before obtaining the tar file of the corresponding tool, we always check whether the folder exists in the path. If it exists then we print an error and exit, otherwise we download the tool
#Checking and downloading oases
chk = self.checkfolder("oases")
if(chk == False):
os.system("git clone --recursive http://github.com/dzerbino/oases.git")
else:
print ("The path already contains a folder named oases. Please rename the folder or remove it from the path")
sys.exit()
#Checking and downloading SEECER. This is not the version mentioned in the manuscript of SEECER. This is the modified version which was used for the SOS manuscript.
chk1 = self.checkfolder("SEECER.tar.gz")
if(chk1 == False):
os.system("wget https://zenodo.org/record/3686150/files/SEECER.tar.gz?download=1")
os.system("tar -zxvf SEECER.tar.gz")
else:
print ("The path already contains a folder named SEECER.tar.gz. Please rename it or remove it from the path")
#Checking and downloading salmon
chk2 = self.checkfolder("salmon-1.1.0_linux_x86_64.tar.gz")
if(chk2 == False):
#To get the latest version of salmon, please change the link in the next three lines
print("-----salmon installation-------")
os.system("wget https://github.com/COMBINE-lab/salmon/releases/download/v1.1.0/salmon-1.1.0_linux_x86_64.tar.gz >"+path+"/LogFiles/salmon.txt 2> "+path+"/LogFiles/salmonError.txt")
os.system("tar -zxvf salmon-1.1.0_linux_x86_64.tar.gz >"+path+"/LogFiles/salmon.txt 2> "+path+"/LogFiles/salmonError.txt")
self.prog_installed.append(path+"/salmon-1.1.0_linux_x86_64.tar.gz")
else:
print ("The path already contains a folder named salmon-1.1.0_linux_x86_64.tar.gz. Please rename it or remove it from the path")
sys.exit()
chk3 = self.checkfolder("ORNA")
if(chk3 == False):
os.system("git clone https://github.com/SchulzLab/ORNA")
self.prog_installed.append(path+"/ORNA")
else:
print ("The path already contains a folder named ORNA. Please rename it or remove it from the path")
chk4 = self.checkfolder("KREATION")
if(chk4 == False):
print("-----KREATION installation-------")
os.system("git clone https://github.com/SchulzLab/KREATION >"+path+"/LogFiles/KREATION.txt 2> "+path+"/LogFiles/KreationError.txt")
self.prog_installed.append(path+"/KREATION")
else:
print ("The path already contains a folder named KREATION. Please rename it or remove it from the path")
if(prog==1):
os.chdir(path)
chk6 = self.checkfolder("oases")
if(chk6 == False):
os.system("git clone http://github.com/dzerbino/oases.git >"+path+"/LogFiles/Oases.txt 2> "+path+"/LogFiles/OasesError.txt")
else:
print ("The path already contains a folder named oases. please rename the folder or remove it from the path")
sys.exit()
if(prog==2):
os.chdir(path)
output = subprocess.check_output("uname")
chk2 = self.checkfolder("salmon-1.1.0_linux_x86_64")
if(chk2 == False):
print("-----salmon installation-------")
os.system("wget https://github.com/COMBINE-lab/salmon/releases/download/v1.1.0/salmon-1.1.0_linux_x86_64.tar.gz >"+path+"/LogFiles/salmon.txt 2> "+path+"/LogFiles/salmonError.txt")
os.system("tar -zxvf salmon-1.1.0_linux_x86_64.tar.gz >"+path+"/LogFiles/salmon.txt 2> "+path+"/LogFiles/salmonError.txt")
self.prog_installed.append(path+"/salmon-1.1.0_linux_x86_64.tar.gz")
chksalmon=self.checkfolder(path+"/salmon-latest_linux_x86_64/bin/salmon")
if(chksalmon==False):
print("Salmon did not install correctly. Please try again")
sys.exit()
else:
print("Salmon installed successfully")
else:
print ("The path already contains a folder named salmon-1.1.0_linux_x86_64.tar.gz. please rename it or remove it from the path")
sys.exit()
if (prog == 3):
os.chdir(path)
chk2 = self.checkfolder("ORNA")
if(chk2 == False):
os.system("git clone https://github.com/SchulzLab/ORNA >"+path+"/LogFiles/ORNA.txt 2> "+path+"/LogFiles/ORNAError.txt")
self.prog_installed.append(path+"/ORNA")
else:
print ("The path already contains a folder named ORNA. Please rename it or remove it from the path")
if (prog == 4):
os.chdir(path)
            # 'which' exits with a non-zero status when cd-hit-est is not on the PATH
            status = subprocess.call("which cd-hit-est", shell=True,
                                     stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
            if(status != 0):
uc = input("cd-hit is not found in the environment variables. Do you want to install (y/n) : ")
if(uc == "y"):
os.system("git clone https://github.com/weizhongli/cdhit >"+path+"/LogFiles/cdhit.txt 2> "+path+"/LogFiles/cdhitError.txt")
self.install_cdhit(path)
os.chdir(path)
else:
print ("Please remember that cd-hit-est is required for the running of KREATION and must be in the environment variable $PATH")
chk2 = self.checkfolder("KREATION")
if(chk2 == False):
print("-----KREATION installation-------")
os.system("git clone https://github.com/SchulzLab/KREATION >"+path+"/LogFiles/KREATION.txt 2> "+path+"/LogFiles/KreationError.txt")
self.prog_installed.append(path+"/KREATION")
chkkreation=self.checkfolder(path+"/KREATION/KREATION.py")
if(chkkreation==False):
print("KREATION did not install correctly. Please try again")
sys.exit()
else:
print("KREATION installed successfully")
else:
print ("The path already contains a folder named KREATION. Please rename it or remove it from the path")
if (prog == 5):
os.chdir(path)
chk1 = self.checkfolder("SEECER.tar.gz")
if(chk1 == False):
print("-----SEECER installation-----")
os.system("wget https://zenodo.org/record/3686150/files/SEECER.tar.gz > "+path+"/LogFiles/Seecer.txt 2> "+path+"/LogFiles/SeecerError.txt")
os.system("tar -zxvf SEECER.tar.gz > "+path+"/LogFiles/Seecer.txt 2> "+path+"/LogFiles/SeecerError.txt")
chkkreation=self.checkfolder(path+"/SEECER-0.1.3/SEECER/bin/run_seecer.sh")
if(chkkreation==False):
print("SEECER did not install correctly. Please try again")
sys.exit()
else:
print("SEECER installed successfully")
else:
print ("The path already contains a folder named SEECER.tar.gz. Please rename it or remove it from the path")
if(prog==8):
os.chdir(path)
chk5 = self.checkfolder("velvet")
if(chk5 == False):
os.system("git clone http://github.com/dzerbino/velvet.git >"+path+"/LogFiles/Velvet.txt 2> "+path+"/LogFiles/VelvetError.txt")
else:
print ("The path already contains a folder named velvet. please rename the folder or remove it from the path")
sys.exit()
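    # The install_* helpers below assume obtaining_tar() has already cloned or
    # unpacked the corresponding sources under 'path'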
def install_oases(self, path, cs):
print("------Oases installation------")
path2 = path + "/oases"
os.chdir(path2)
os.system("make "+cs+" > "+path+"/LogFiles/Oases.txt 2> "+path+"/LogFiles/OasesError.txt")
self.prog_installed.append(path2)
chk=self.checkfolder(path+"/oases/oases")
if(chk==False):
print("Oases did not install correctly. Please try again")
sys.exit()
else:
print("Oases installed successfully")
def install_orna(self, path):
print("------ORNA installation------")
path2 = path + "/ORNA"
os.chdir(path2)
os.system("bash install.sh > "+path+"/LogFiles/ORNA.txt 2> "+path+"/LogFiles/ORNAError.txt")
self.prog_installed.append(path2)
chk=self.checkfolder(path+"/ORNA/build/bin/ORNA")
if(chk==False):
print("ORNA did not install correctly. Please try again")
sys.exit()
else:
print("ORNA installed successfully")
def install_velvet(self,path, cs):
path1 = path + "/velvet"
os.chdir(path1)
print("------Velvet installation------")
os.system("make "+cs+" > "+path+"/LogFiles/velvet.txt 2> "+path+"/LogFiles/VelvetError.txt")
self.prog_installed.append(path1)
chk=self.checkfolder(path+"/velvet/velvetg") and self.checkfolder(path+"/velvet/velveth")
if(chk==False):
print("velvet did not install correctly. Please try again")
sys.exit()
else:
print("velvet installed successfully")
def install_cdhit(self, path):
path1 = path + "/cdhit"
os.chdir(path1)
print("------cd-hit-est installation------")
os.system("make > "+path+"/LogFiles/cdhit.txt 2> "+path+"/LogFiles/cdHitError.txt")
def getoptions(self):
parser = OptionParser()
parser.add_option("-f", "--folder", dest="foldername", help="destination folder")
(options, args) = parser.parse_args()
return options
def checkfolder(self, program):
var = os.path.exists(program)
return var
########### MAIN PROGRAM ###########
x = install_script()
y1 = x.getoptions()
if(y1.foldername != None):
try:
os.chdir(y1.foldername)
except:
uc = input("folder "+ y1.foldername + " does not exists. Do you want to create one (y/n) : ")
if(uc == "y"):
os.system("mkdir " +y1.foldername)
os.chdir(y1.foldername)
else:
sys.exit()
pwd = os.getcwd()
os.system("mkdir LogFiles")
print ("Programs to install :")
print ("1. OASES")
print ("2. SALMON")
print ("3. ORNA")
print ("4. KREATION")
print ("5. SEECER")
print ("6. ALL")
print ("7. QUIT")
x1 = input("Enter the option number (if multiple options then separate it by comma): ")
y = x1.split(",")
acs = ""
vd = ""
flg = 0
cs = ""
a13 = ""
if("7" in y):
print("Thank you. It was nice working for you")
sys.exit()
if "6" in y:
#Obtaining and installing oases and velvet
vc = input("Execution of Oases requires velvet. Do you want to install velvet (y/n) : ")
if(vc == "y"):
ch = input("Do you want to include additional compilation settings for velvet (refer to velvet manual for details) y/n : ")
if(ch == "y"):
print("Enter the additional compilation settings of velvet seperated by space (for instance - \'MAXKMERLENGTH=57\'):")
a1 = input()
a11 = a1.split()
for a2 in a11:
a2 = a2.replace("'","")
a2 = "\'" + a2 + "\'"
a13 = a13 + " " + a2
cs = cs + a13
flg = 1
cs = cs + "\'VELVET_DIR="+pwd+"/velvet\'"
if(vc == "n"):
vd = input("Enter the location of velvet : ")
cs = cs + " \'VELVET_DIR=" + vd +"\'"
x.obtaining_tar(1, pwd)
if (flg == 1):
x.obtaining_tar(8, pwd)
x.install_velvet(pwd, cs)
x.install_oases(pwd, cs)
#Obtaining salmon
x.obtaining_tar(2, pwd)
#Obtaining ORNA
x.obtaining_tar(3, pwd)
x.install_orna(pwd)
#Obtaining KREATION
x.obtaining_tar(4, pwd)
#Obtaining SEECER
x.obtaining_tar(5, pwd)
else:
for i in y:
if(int(i) == 1):
vc = input("Execution of Oases requires velvet. Do you want to install velvet (y/n) : ")
if(vc == "y"):
ch = input("Do you want to include additional compilation settings for velvet (refer to velvet manual for details) y/n : ")
if(ch == "y"):
print("Enter the additional compilation settings of velvet seperated by space (for instance - \'MAXKMERLENGTH=57\'):")
a1 = input()
a11 = a1.split()
for a2 in a11:
a2 = a2.replace("'","")
a2 = "\'" + a2 + "\'"
a13 = a13 + " " + a2
cs = cs + a13
flg = 1
cs = cs + " \'VELVET_DIR="+pwd+"/velvet\'"
if(vc == "n"):
vd = input("Enter the location of velvet : ")
if("\\" not in vd):
cs = cs + " \'VELVET_DIR=" +pwd+"\\"+ vd +"\'"
else:
cs = cs + " \'VELVET_DIR=" + vd +"\'"
x.obtaining_tar(1,pwd)
if(flg == 1):
x.obtaining_tar(8,pwd)
x.install_velvet(pwd, cs)
x.install_oases(pwd, cs)
elif(int(i)==3):
x.obtaining_tar(3,pwd)
x.install_orna(pwd)
else:
x.obtaining_tar(int(i), pwd)
| mit | 928,657,617,958,597,400 | 36.723127 | 186 | 0.656075 | false |
spirali/elphie | elphie/textparser.py | 1 | 1946 |
def normalize_tokens(tokens):
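    """Drop empty text tokens, merge runs of consecutive newline tokens by
    summing their counts, and strip a trailing newline token."""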
# Remove empty texts
tokens = [kv for kv in tokens if kv[0] != "text" or kv[1]]
# Merge lines
i = 1
while i < len(tokens):
token_name, value = tokens[i]
if token_name == "newline" and tokens[i - 1][0] == "newline":
value2 = tokens[i - 1][1]
del tokens[i]
del tokens[i - 1]
tokens.insert(i - 1, ("newline", value + value2))
continue
i += 1
# Remove trailing empty lines
if tokens and tokens[-1][0] == "newline":
tokens = tokens[:-1]
return tokens
def parse_text(text, escape_char="~", begin_char="{", end_char="}"):
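    """Tokenize `text` into a list of (token, value) pairs.

    Example (illustrative):
        parse_text("hello ~emph{world}")
        -> [("text", "hello "), ("begin", "emph"),
            ("text", "world"), ("end", None)]
    """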
result = []
start = 0
i = 0
counter = 0
while i < len(text):
c = text[i]
if c == escape_char:
result.append(("text", text[start:i]))
i += 1
start = i
while i < len(text) and text[i] != begin_char:
i += 1
result.append(("begin", text[start:i]))
i += 1
start = i
counter += 1
elif c == end_char:
result.append(("text", text[start:i]))
result.append(("end", None))
i += 1
start = i
counter -= 1
if counter < 0:
raise Exception("Invalid format, too many closing characters")
else:
i += 1
if i != start:
result.append(("text", text[start:i]))
final_result = []
for r in result:
if r[0] != "text":
final_result.append(r)
continue
lines = r[1].split("\n")
final_result.append(("text", lines[0]))
for line in lines[1:]:
final_result.append(("newline", 1))
final_result.append(("text", line))
if counter > 0:
raise Exception("Invalid format, unclosed command")
return normalize_tokens(final_result)
| bsd-2-clause | 5,328,669,194,339,669,000 | 28.044776 | 78 | 0.482014 | false |
abacuspix/NFV_project | Build_Web_With_Flask/Building web applications with Flask_Code/chapter08/ex05.py | 1 | 1529 | # coding:utf-8
from flask import Flask, render_template, session, flash
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# strong secret key!!
app.config['SECRET_KEY'] = '\xa6\xb5\x0e\x7f\xd3}\x0b-\xaa\x03\x03\x82\x10\xbe\x1e0u\x93,{\xd4Z\xa3\x8f'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///ex05.sqlite'
db = SQLAlchemy(app)
class Product(db.Model):
__tablename__ = 'products'
id = db.Column(db.Integer, primary_key=True)
sku = db.Column(db.String(30), unique=True)
name = db.Column(db.String(255), nullable=False)
def __unicode__(self):
return self.name
@app.route("/cart/add/<sku>")
def add_to_cart_view(sku):
product = Product.query.filter_by(sku=sku).first()
if product is not None:
session['cart'] = session.get('cart') or dict()
item = session['cart'].get(product.sku) or dict()
item['qty'] = item.get('qty', 0) + 1
session['cart'][product.sku] = item
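        # reassigning session['cart'] above marks the session as modified,
        # so the updated cart is persisted back into the cookie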
flash(u'%s add to cart. Total: %d' % (product, item['qty']))
return render_template('cart.html')
def init():
"""
Initializes and populates the database
"""
db.create_all()
if Product.query.count() == 0:
db.session.add_all([
Product(sku='010', name='Boots'),
Product(sku='020', name='Gauntlets'),
Product(sku='030', name='Helmets'),
])
db.session.commit()
if __name__ == '__main__':
app.debug = True
with app.test_request_context():
init()
app.run() | mit | -965,825,808,537,951,900 | 24.5 | 104 | 0.59843 | false |
DeepThoughtTeam/tensorflow | tensorflow/python/kernel_tests/tensor_array_ops_test.py | 1 | 16965 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.tensor_array_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,g-bad-import-order
import tensorflow.python.platform
# pylint: enable=unused-import,g-bad-import-order
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import errors
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import gen_data_flow_ops
class TensorArrayTest(tf.test.TestCase):
def _testTensorArrayWriteRead(self, use_gpu):
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
w0 = h.write(0, [[4.0, 5.0]])
w1 = w0.write(1, [[1.0]])
w2 = w1.write(2, -3.0)
r0 = w2.read(0)
r1 = w2.read(1)
r2 = w2.read(2)
d0, d1, d2 = sess.run([r0, r1, r2])
self.assertAllEqual([[4.0, 5.0]], d0)
self.assertAllEqual([[1.0]], d1)
self.assertAllEqual(-3.0, d2)
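  # A minimal standalone sketch of the pattern above: each write() returns a
  # new TensorArray object whose flow tensor carries the write dependency, so
  # reads should go through the object returned by the last write:
  #   ta = data_flow_ops.TensorArray(dtype=tf.float32, size=2)
  #   w = ta.write(0, 1.0).write(1, 2.0)
  #   r = w.read(0)  # evaluates to 1.0 when run in a session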
def testTensorArrayWriteRead(self):
self._testTensorArrayWriteRead(use_gpu=False)
self._testTensorArrayWriteRead(use_gpu=True)
def _testTensorArrayWritePack(self, tf_dtype, use_gpu):
dtype = tf_dtype.as_numpy_dtype()
with self.test_session(use_gpu=use_gpu):
h = data_flow_ops.TensorArray(
dtype=tf_dtype, tensor_array_name="foo", size=3)
if tf_dtype == tf.string:
convert = lambda x: np.asarray(x).astype(np.str)
else:
convert = lambda x: np.asarray(x).astype(dtype)
w0 = h.write(0, convert([[4.0, 5.0]]))
w1 = w0.write(1, convert([[6.0, 7.0]]))
w2 = w1.write(2, convert([[8.0, 9.0]]))
c0 = w2.pack()
self.assertAllEqual(
convert([[[4.0, 5.0]], [[6.0, 7.0]], [[8.0, 9.0]]]), c0.eval())
def _testTensorArrayWritePackWithType(self, tf_dtype):
self._testTensorArrayWritePack(tf_dtype=tf_dtype, use_gpu=False)
self._testTensorArrayWritePack(tf_dtype=tf_dtype, use_gpu=True)
def testTensorArrayWritePack(self):
self._testTensorArrayWritePackWithType(tf.float32)
self._testTensorArrayWritePackWithType(tf.float64)
self._testTensorArrayWritePackWithType(tf.int32)
self._testTensorArrayWritePackWithType(tf.int64)
self._testTensorArrayWritePackWithType(tf.complex64)
self._testTensorArrayWritePackWithType(tf.string)
def testTensorArrayUnpackWrongMajorSizeFails(self):
with self.test_session():
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
with self.assertRaisesOpError(
r"Input value must have first dimension "
r"equal to the array size \(2 vs. 3\)"):
h.unpack([1.0, 2.0]).flow.eval()
def testTensorArrayPackNotAllValuesAvailableFails(self):
with self.test_session():
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
with self.assertRaisesOpError(
"Could not read from TensorArray index 1 "
"because it has not yet been written to."):
h.write(0, [[4.0, 5.0]]).pack().eval()
def _testTensorArrayUnpackRead(self, tf_dtype, use_gpu):
dtype = tf_dtype.as_numpy_dtype()
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf_dtype, tensor_array_name="foo", size=3)
if tf_dtype == tf.string:
convert = lambda x: np.asarray(x).astype(np.str)
else:
convert = lambda x: np.asarray(x).astype(dtype)
# Unpack a vector into scalars
w0 = h.unpack(convert([1.0, 2.0, 3.0]))
r0 = w0.read(0)
r1 = w0.read(1)
r2 = w0.read(2)
d0, d1, d2 = sess.run([r0, r1, r2])
self.assertAllEqual(convert(1.0), d0)
self.assertAllEqual(convert(2.0), d1)
self.assertAllEqual(convert(3.0), d2)
# Unpack a matrix into vectors
w1 = h.unpack(convert([[1.0, 1.1], [2.0, 2.1], [3.0, 3.1]]))
r0 = w1.read(0)
r1 = w1.read(1)
r2 = w1.read(2)
d0, d1, d2 = sess.run([r0, r1, r2])
self.assertAllEqual(convert([1.0, 1.1]), d0)
self.assertAllEqual(convert([2.0, 2.1]), d1)
self.assertAllEqual(convert([3.0, 3.1]), d2)
def _testTensorArrayUnpackReadWithType(self, tf_dtype):
self._testTensorArrayUnpackRead(tf_dtype=tf_dtype, use_gpu=False)
self._testTensorArrayUnpackRead(tf_dtype=tf_dtype, use_gpu=True)
def testTensorArrayUnpackRead(self):
self._testTensorArrayUnpackReadWithType(tf.float32)
self._testTensorArrayUnpackReadWithType(tf.float64)
self._testTensorArrayUnpackReadWithType(tf.int32)
self._testTensorArrayUnpackReadWithType(tf.int64)
self._testTensorArrayUnpackReadWithType(tf.complex64)
self._testTensorArrayUnpackReadWithType(tf.string)
def _testTensorGradArrayWriteRead(self, use_gpu):
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
g_h = h.grad()
w0 = h.write(0, [[4.0, 5.0]])
w1 = w0.write(1, [[1.0]])
w2 = w1.write(2, -3.0)
g_w0 = g_h.write(0, [[5.0, 6.0]])
g_w1 = g_w0.write(1, [[2.0]])
g_w2 = g_w1.write(2, -2.0)
r0 = w2.read(0)
r1 = w2.read(1)
r2 = w2.read(2)
g_r0 = g_w2.read(0)
g_r1 = g_w2.read(1)
g_r2 = g_w2.read(2)
d0, d1, d2, g_d0, g_d1, g_d2 = sess.run([r0, r1, r2, g_r0, g_r1, g_r2])
self.assertAllEqual([[4.0, 5.0]], d0)
self.assertAllEqual([[1.0]], d1)
self.assertAllEqual(-3.0, d2)
self.assertAllEqual([[5.0, 6.0]], g_d0)
self.assertAllEqual([[2.0]], g_d1)
self.assertAllEqual(-2.0, g_d2)
def testTensorGradArrayWriteRead(self):
self._testTensorGradArrayWriteRead(use_gpu=False)
self._testTensorGradArrayWriteRead(use_gpu=True)
def _testTensorGradAccessTwiceReceiveSameObject(self, use_gpu):
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
g_h_0 = h.grad()
g_h_1 = h.grad()
with tf.control_dependencies([g_h_0.write(0, [[4.0, 5.0]]).flow]):
# Write with one gradient handle, read with another copy of it
r1_0 = g_h_1.read(0)
t_g_h_0, t_g_h_1, d_r1_0 = sess.run([g_h_0.handle, g_h_1.handle, r1_0])
self.assertAllEqual(t_g_h_0, t_g_h_1)
self.assertAllEqual([[4.0, 5.0]], d_r1_0)
def testTensorGradAccessTwiceReceiveSameObject(self):
self._testTensorGradAccessTwiceReceiveSameObject(False)
self._testTensorGradAccessTwiceReceiveSameObject(True)
def _testTensorArrayWriteWrongIndexOrDataTypeFails(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
# Test writing the wrong datatype
with self.assertRaisesOpError(
"TensorArray dtype is float but Op is trying to write dtype string"):
h.write(-1, "wrong_type_scalar").flow.eval()
# Test writing to a negative index
with self.assertRaisesOpError(
"Tried to write to index -1 but array size is: 3"):
h.write(-1, 3.0).flow.eval()
# Test reading from too large an index
with self.assertRaisesOpError(
"Tried to write to index 3 but array size is: 3"):
h.write(3, 3.0).flow.eval()
def testTensorArrayWriteWrongIndexOrDataTypeFails(self):
self._testTensorArrayWriteWrongIndexOrDataTypeFails(use_gpu=False)
self._testTensorArrayWriteWrongIndexOrDataTypeFails(use_gpu=True)
def _testTensorArrayReadWrongIndexOrDataTypeFails(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
w0 = h.write(0, [[4.0, 5.0]])
# Test reading wrong datatype
r0_bad = gen_data_flow_ops._tensor_array_read(
handle=w0.handle, index=0, dtype=tf.int64, flow_in=w0.flow)
with self.assertRaisesOpError(
"TensorArray dtype is float but Op requested dtype int64."):
r0_bad.eval()
# Test reading from a different index than the one we wrote to
r1 = w0.read(1)
with self.assertRaisesOpError(
"Could not read from TensorArray index 1 because "
"it has not yet been written to."):
r1.eval()
# Test reading from a negative index
with self.assertRaisesOpError(
r"Tried to read from index -1 but array size is: 3"):
h.read(-1).eval()
# Test reading from too large an index
with self.assertRaisesOpError(
"Tried to read from index 3 but array size is: 3"):
h.read(3).eval()
def testTensorArrayReadWrongIndexOrDataTypeFails(self):
self._testTensorArrayReadWrongIndexOrDataTypeFails(use_gpu=False)
self._testTensorArrayReadWrongIndexOrDataTypeFails(use_gpu=True)
def _testTensorArrayWriteMultipleFails(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
with self.assertRaisesOpError(
"Could not write to TensorArray index 2 because "
"it has already been written to."):
h.write(2, 3.0).write(2, 3.0).flow.eval()
def testTensorArrayWriteMultipleFails(self):
self._testTensorArrayWriteMultipleFails(use_gpu=False)
self._testTensorArrayWriteMultipleFails(use_gpu=True)
def _testTensorArrayWriteGradientAddMultipleAddsType(self, use_gpu, dtype):
with self.test_session(use_gpu=use_gpu):
h = data_flow_ops.TensorArray(
dtype=dtype, tensor_array_name="foo", size=3)
h._gradient_add = True
c = lambda x: np.asarray(x, dtype=dtype.as_numpy_dtype)
w0 = h.write(2, c(3.0))
w1 = w0.write(2, c(4.0))
self.assertAllEqual(c(7.00), w1.read(2).eval())
def _testTensorArrayWriteGradientAddMultipleAdds(self, use_gpu):
for dtype in [tf.int32, tf.int64, tf.float32, tf.float64, tf.complex64]:
self._testTensorArrayWriteGradientAddMultipleAddsType(use_gpu, dtype)
def testTensorArrayWriteGradientAddMultipleAdds(self):
self._testTensorArrayWriteGradientAddMultipleAdds(use_gpu=False)
self._testTensorArrayWriteGradientAddMultipleAdds(use_gpu=True)
def _testMultiTensorArray(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
h1 = data_flow_ops.TensorArray(
size=1, dtype=tf.float32, tensor_array_name="foo")
w1 = h1.write(0, 4.0)
r1 = w1.read(0)
h2 = data_flow_ops.TensorArray(
size=1, dtype=tf.float32, tensor_array_name="bar")
w2 = h2.write(0, 5.0)
r2 = w2.read(0)
r = r1 + r2
self.assertAllClose(9.0, r.eval())
def testMultiTensorArray(self):
self._testMultiTensorArray(use_gpu=False)
self._testMultiTensorArray(use_gpu=True)
def _testDuplicateTensorArrayFails(self, use_gpu):
with self.test_session(use_gpu=use_gpu) as sess:
h1 = data_flow_ops.TensorArray(
size=1, dtype=tf.float32, tensor_array_name="foo")
c1 = h1.write(0, 4.0)
h2 = data_flow_ops.TensorArray(
size=1, dtype=tf.float32, tensor_array_name="foo")
c2 = h2.write(0, 5.0)
with self.assertRaises(errors.AlreadyExistsError):
sess.run([c1.flow, c2.flow])
def testDuplicateTensorArrayFails(self):
self._testDuplicateTensorArrayFails(use_gpu=False)
self._testDuplicateTensorArrayFails(use_gpu=True)
def _testTensorArrayGradientWriteReadType(self, use_gpu, dtype):
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf.as_dtype(dtype), tensor_array_name="foo", size=3)
c = lambda x: np.array(x, dtype=dtype)
value_0 = tf.constant(c([[4.0, 5.0]]))
value_1 = tf.constant(c(3.0))
w0 = h.write(0, value_0)
w1 = w0.write(1, value_1)
r0 = w1.read(0)
r1 = w1.read(1)
r0_2 = w1.read(0)
# Test individual components' gradients
grad_just_r0 = tf.gradients(
ys=[r0], xs=[value_0], grad_ys=[c([[2.0, 3.0]])])
grad_just_r0_vals = sess.run(grad_just_r0)
self.assertAllEqual(c([[2.0, 3.0]]), grad_just_r0_vals[0])
grad_r0_r0_2 = tf.gradients(
ys=[r0, r0_2], xs=[value_0],
grad_ys=[c([[2.0, 3.0]]), c([[1.0, -1.0]])])
grad_r0_r0_2_vals = sess.run(grad_r0_r0_2)
self.assertAllEqual(c([[3.0, 2.0]]), grad_r0_r0_2_vals[0])
grad_just_r1 = tf.gradients(
ys=[r1], xs=[value_1], grad_ys=[c(-2.0)])
grad_just_r1_vals = sess.run(grad_just_r1)
self.assertAllEqual(c(-2.0), grad_just_r1_vals[0])
# Test combined gradients
grad = tf.gradients(
ys=[r0, r0_2, r1], xs=[value_0, value_1],
grad_ys=[c(-1.0), c(-2.0), c([[2.0, 3.0]])])
grad_vals = sess.run(grad)
self.assertEqual(len(grad_vals), 2)
self.assertAllClose(c(-3.0), grad_vals[0])
self.assertAllEqual(c([[2.0, 3.0]]), grad_vals[1])
def _testTensorArrayGradientWriteRead(self, use_gpu):
for dtype in (np.float32, np.float64, np.int32, np.int64, np.complex64):
self._testTensorArrayGradientWriteReadType(use_gpu, dtype)
def testTensorArrayGradientWriteRead(self):
self._testTensorArrayGradientWriteRead(False)
self._testTensorArrayGradientWriteRead(True)
def _testTensorArrayGradientWritePackAndRead(self, use_gpu):
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=2)
value_0 = tf.constant([-1.0, 1.0])
value_1 = tf.constant([-10.0, 10.0])
w0 = h.write(0, value_0)
w1 = w0.write(1, value_1)
p0 = w1.pack()
r0 = w1.read(0)
# Test gradient accumulation between read(0) and pack()
grad_r = tf.gradients(
ys=[p0, r0], xs=[value_0, value_1],
grad_ys=[
[[2.0, 3.0], [4.0, 5.0]],
[-0.5, 1.5]])
grad_vals = sess.run(grad_r) # 2 + 2 entries
self.assertAllClose([2.0 - 0.5, 3.0 + 1.5], grad_vals[0])
self.assertAllEqual([4.0, 5.0], grad_vals[1])
def testTensorArrayGradientWritePackAndRead(self):
self._testTensorArrayGradientWritePackAndRead(False)
self._testTensorArrayGradientWritePackAndRead(True)
def _testTensorArrayGradientUnpackRead(self, use_gpu):
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=2)
value = tf.constant([[1.0, -1.0], [10.0, -10.0]])
w = h.unpack(value)
r0 = w.read(0)
r0_1 = w.read(0)
r1 = w.read(1)
# Test combined gradients + aggregation of read(0)
grad = tf.gradients(
ys=[r0, r0_1, r1], xs=[value], grad_ys=
[[2.0, 3.0], [-1.5, 1.5], [4.0, 5.0]])
grad_vals = sess.run(grad)
self.assertEqual(len(grad_vals), 1)
self.assertAllClose([[2.0 - 1.5, 3.0 + 1.5], [4.0, 5.0]], grad_vals[0])
def testTensorArrayGradientUnpackRead(self):
self._testTensorArrayGradientUnpackRead(False)
self._testTensorArrayGradientUnpackRead(True)
def _testCloseTensorArray(self, use_gpu):
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
c1 = h.close()
sess.run(c1)
def testCloseTensorArray(self):
self._testCloseTensorArray(use_gpu=False)
self._testCloseTensorArray(use_gpu=True)
def _testWriteCloseTensorArray(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
w0 = h.write(0, [[4.0, 5.0]])
w1 = w0.write(1, [3.0])
w1.close().run() # Expected to run without problems
with self.assertRaisesOpError(r"Tensor foo has already been closed."):
with tf.control_dependencies([w1.close()]):
w1.write(2, 3.0).flow.eval()
def testWriteCloseTensorArray(self):
self._testWriteCloseTensorArray(use_gpu=False)
self._testWriteCloseTensorArray(use_gpu=True)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 | 7,498,672,109,148,403,000 | 35.327623 | 80 | 0.641379 | false |
mozilla/bztools | auto_nag/history.py | 1 | 16781 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from pprint import pprint
from libmozdata.bugzilla import Bugzilla
from auto_nag import logger
class History(object):
BOT = "[email protected]"
def __init__(self):
super(History, self).__init__()
def get_bugs(self):
logger.info("History: get bugs: start...")
def bug_handler(bug, data):
data.add(bug["id"])
fields = {
"changedby": [
"keywords",
"product",
"component",
"assigned_to",
"cf_crash_signature",
"everconfirmed",
"cf_has_regression_range",
"cf_has_str",
"priority",
"bug_severity",
"resolution",
"bug_status",
"bug_type",
"cf_status_firefox68",
"cf_status_firefox67",
"cf_status_firefox66",
"cf_status_firefox65",
"cf_status_firefox64",
"cf_status_firefox63",
"cf_status_firefox62",
],
"equals": ["commenter", "setters.login_name"],
}
queries = []
bugids = set()
for op, fs in fields.items():
for f in fs:
params = {"include_fields": "id", "f1": f, "o1": op, "v1": History.BOT}
queries.append(
Bugzilla(params, bughandler=bug_handler, bugdata=bugids, timeout=20)
)
for q in queries:
q.get_data().wait()
logger.info("History: get bugs: end.")
return bugids
def get_bug_info(self, bugids):
logger.info("History: get bugs info: start...")
def history_handler(bug, data):
bugid = str(bug["id"])
for h in bug["history"]:
if h["who"] == History.BOT:
del h["who"]
data[bugid].append(h)
def comment_handler(bug, bugid, data):
bugid = str(bugid)
for comment in bug["comments"]:
if comment["author"] == History.BOT:
text = comment["text"]
data[bugid].append(
{"comment": text, "date": comment["creation_time"]}
)
data = {str(bugid): [] for bugid in bugids}
Bugzilla(
list(data.keys()),
historyhandler=history_handler,
historydata=data,
commenthandler=comment_handler,
commentdata=data,
timeout=960,
).get_data().wait()
logger.info("History: get bugs info: end.")
return data
def cleanup(self, data):
# res is a dictionary: change_date_time => change or comment
res = {}
for bugid, info in data.items():
res[bugid] = x = {}
for c in info:
if "changes" in c:
when = c["when"]
del c["when"]
if when not in x:
x[when] = {"changes": c["changes"]}
else:
x[when]["changes"] += c["changes"]
if "comment" in c:
when = c["date"]
del c["date"]
if when not in x:
x[when] = {"comment": c["comment"]}
else:
x[when]["comment"] = c["comment"]
return res
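    # Example of the reshaping done above (a sketch): input entries like
    #   [{"when": "2019-01-01", "changes": [...]},
    #    {"date": "2019-01-01", "comment": "..."}]
    # collapse into {"2019-01-01": {"changes": [...], "comment": "..."}}.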
def get_pc(self, changes):
p = ""
c = ""
for change in changes:
if change.get("field_name") == "component" and "added" in change:
c = change["added"]
if change.get("field_name") == "product" and "added" in change:
p = change["added"]
return "{}::{}".format(p, c)
def get_ni(self, changes):
for change in changes:
if change.get("field_name") == "flagtypes.name" and "added" in change:
c = change["added"]
ni = "needinfo?("
if c.startswith(ni):
return c[len(ni) : -1]
return ""
def guess_tool(self, data):
res = []
no_tool = []
for bugid, info in data.items():
for date, i in info.items():
if "comment" in i:
c = i["comment"]
if c.startswith("Crash volume for signature"):
continue
tool = None
if c.startswith(
"The leave-open keyword is there and there is no activity for"
):
tool = "leave_open_no_activity"
elif c.startswith("Closing because no crashes reported for"):
tool = "no_crashes"
elif c.startswith("Moving to p3 because no activity for at least"):
tool = "old_p2_bug"
elif c.startswith("Moving to p2 because no activity for at least"):
tool = "old_p1_bug"
elif c.startswith(
"There's a r+ patch which didn't land and no activity in this bug"
) or c.startswith(
"There are some r+ patches which didn't land and no activity in this bug for"
):
tool = "not_landed"
elif c.startswith(
"The meta keyword is there, the bug doesn't depend on other bugs and there is no activity for"
):
tool = "meta_no_deps_no_activity"
elif (
"[mozregression](https://wiki.mozilla.org/Auto-tools/Projects/Mozregression)"
in c
):
tool = "has_str_no_range"
elif (
"as the bug is tracked by a release manager for the current nightly"
in c
):
tool = "mismatch_priority_tracking_nightly"
elif (
"as the bug is tracked by a release manager for the current beta"
in c
):
tool = "mismatch_priority_tracking_beta"
elif (
"as the bug is tracked by a release manager for the current release"
in c
):
tool = "mismatch_priority_tracking_release"
elif c.startswith("The priority flag is not set for this bug.\n:"):
tool = "no_priority"
elif c.startswith(
"The priority flag is not set for this bug and there is no activity for"
):
tool = "ni_triage_owner"
if tool is None:
no_tool.append((bugid, info))
else:
extra = self.get_ni(i.get("changes", []))
res.append(
{"tool": tool, "date": date, "bugid": bugid, "extra": extra}
)
else:
changes = i["changes"]
N = len(res)
for change in changes:
if change.get("added") == "meta":
res.append(
{
"tool": "summary_meta_missing",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
elif change.get("field_name") in {"component", "product"}:
res.append(
{
"tool": "component",
"date": date,
"bugid": bugid,
"extra": self.get_pc(changes),
}
)
break
elif change.get("field_name") == "cf_has_str":
res.append(
{
"tool": "has_str_no_hasstr",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
elif change.get("removed") == "leave-open":
res.append(
{
"tool": "leave_open",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
elif change.get("field_name") == "assigned_to":
res.append(
{
"tool": "no_assignee",
"date": date,
"bugid": bugid,
"extra": change["added"],
}
)
break
elif (
change.get("field_name", "").startswith("cf_status_firefox")
and change.get("added") == "affected"
):
res.append(
{
"tool": "nighty_reopened",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
elif (
change.get("field_name") == "status"
and change.get("added") == "ASSIGNED"
):
res.append(
{
"tool": "assignee_but_unconfirmed",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
elif (
change.get("field_name") == "keywords"
and change.get("added") == "regression"
):
res.append(
{
"tool": "regression",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
elif (
change.get("field_name") == "severity"
and change.get("added") == "major"
):
res.append(
{
"tool": "tracked_bad_severity",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
elif change.get("field_name") == "cf_crash_signature":
res.append(
{
"tool": "copy_duplicate_info",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
elif (
change.get("field_name") == "keywords"
and change.get("removed") == "stalled"
):
res.append(
{
"tool": "regression",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
elif (
change.get("field_name") == "type"
and change.get("added") == "defect"
):
res.append(
{
"tool": "regression_but_type_enhancement_task",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
elif (
change.get("field_name") == "keywords"
and change.get("removed") == "dupeme"
):
res.append(
{
"tool": "closed_dupeme",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
elif (
change.get("field_name") == "keywords"
and change.get("added") == "dupeme"
):
res.append(
{
"tool": "dupeme_whiteboard_keyword",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
elif change.get("field_name") == "summary" and change.get(
"added"
).startswith("[meta]"):
res.append(
{
"tool": "meta_summary_missing",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
elif change.get("field_name", "").startswith(
"cf_status_firefox"
) and change.get("added") in {
"?",
"fixed",
"verified",
"unaffected",
}:
res.append(
{
"tool": "missing_beta_status",
"date": date,
"bugid": bugid,
"extra": "",
}
)
break
if len(res) == N:
no_tool.append((bugid, info))
if no_tool:
pprint(no_tool)
return res
def get(self):
bugids = self.get_bugs()
bugs = self.get_bug_info(bugids)
bugs = self.cleanup(bugs)
history = self.guess_tool(bugs)
return history
| bsd-3-clause | 1,119,390,109,281,556,700 | 38.859857 | 118 | 0.311722 | false |
econ-ark/HARK | HARK/ConsumptionSaving/tests/test_SmallOpenEconomy.py | 1 | 1397 | import copy
from HARK import distribute_params
from HARK.ConsumptionSaving.ConsAggShockModel import (
AggShockConsumerType,
SmallOpenEconomy,
init_cobb_douglas,
)
from HARK.distribution import Uniform
import numpy as np
import unittest
class testSmallOpenEconomy(unittest.TestCase):
def test_small_open(self):
agent = AggShockConsumerType()
agent.AgentCount = 100 # Very low number of agents for the sake of speed
agent.cycles = 0
# Make agents heterogeneous in their discount factor
agents = distribute_params(
agent, "DiscFac", 3, Uniform(bot=0.90, top=0.94) # Impatient agents
)
# Make an economy with those agents living in it
small_economy = SmallOpenEconomy(
agents=agents,
Rfree=1.03,
wRte=1.0,
KtoLnow=1.0,
**copy.copy(init_cobb_douglas)
)
small_economy.act_T = 400 # Short simulation history
small_economy.max_loops = 3 # Give up quickly for the sake of time
small_economy.make_AggShkHist() # Simulate a history of aggregate shocks
small_economy.verbose = False # Turn off printed messages
# Give data about the economy to all the agents in it
for this_type in small_economy.agents:
this_type.get_economy_data(small_economy)
small_economy.solve()
| apache-2.0 | -2,018,076,852,372,516,600 | 32.261905 | 81 | 0.652112 | false |
power12317/weblate | weblate/trans/tests/__init__.py | 1 | 1330 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2013 Michal Čihař <[email protected]>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from weblate.trans.tests.test_diff import *
from weblate.trans.tests.test_checks import *
from weblate.trans.tests.test_format_checks import *
from weblate.trans.tests.test_source_checks import *
from weblate.trans.tests.test_chars_checks import *
from weblate.trans.tests.test_same_checks import *
from weblate.trans.tests.test_consistency_checks import *
from weblate.trans.tests.test_markup_checks import *
from weblate.trans.tests.test_models import *
from weblate.trans.tests.test_views import *
from weblate.trans.tests.test_commands import *
| gpl-3.0 | 5,367,127,302,535,231,000 | 41.806452 | 71 | 0.767898 | false |
janekg89/flutype_webapp | flutype/urls.py | 1 | 4288 | from django.conf.urls import url
from django.contrib import admin
from django.conf.urls import url, include
from . import views
urlpatterns = [
url(r'^$', views.studies_view, name='index'),
url(r'^admin/', include(admin.site.urls), name='admin'),
url(r'^mystudies/$', views.my_studies_view, name='my_studies'),
url(r'^uploadfile_study/(?P<sid>.*)/$', views.upload_file_study, name='upload_file_study'),
url(r'^study/(?P<sid>.*)/$', views.study_view, name='study'),
url(r'^study/(?P<pk>.*)/edit$', views.study_edit, name='study_edit'),
url(r'^study/(?P<sid>.*)/import_measurement$', views.import_measurement_view, name='import_measurement'),
url(r'^studies/new/$', views.study_new, name='study_new'),
url(r'^study_ligands/(?P<sid>.*)/$', views.study_ligands_view, name='study_ligands'),
url(r'^tutorial_db/$', views.tutorial_db_view, name='tutorial_db'),
url(r'^glossary/$', views.glossary_view, name='glossary'),
url(r'^measurements/$', views.measurements_view, name='measurements'),
url(r'^mymeasurements/$', views.my_measurements_view, name='my_measurements'),
url(r'^measurement/(?P<sid>.*)/$',views.measurement_view, name='rawspotcollectionview'),
url(r'^measurement_ligands/(?P<sid>.*)/$', views.measurement_ligands_view, name='measurement_ligands'),
url(r'^m/(?P<measurement_sid>.*)/result/(?P<sid>.*)/$', views.measurement_result_view, name='qspotcollectionview'),
url(r'^m/(?P<measurement_sid>.*)/result/(?P<sid>.*)/data$', views.barplot_data_view, name='barplot_plotly1'),
url(r'^m/(?P<measurement_sid>.*)/result/(?P<sid>.*)/data2$', views.barplot2_data_view, name='barplot_plotly2'),
url(r'^m/(?P<measurement_sid>.*)/result/(?P<sid>.*)/barplot_p$', views.highcharts_view, name='heatmap_highchart1'),
url(r'^uploadfile_measurement/(?P<sid>.*)/$', views.upload_file_measurement, name='upload_file_measurement'),
url(r'^users/$', views.users_view, name='users'),
url(r'^about/$', views.about_en_view, name='about'),
url(r'^about_de/$', views.about_de_view, name='about_de'),
url(r'^database_scheme/$', views.database_scheme_en_view, name='database_scheme'),
url(r'^database_scheme_de/$', views.database_scheme_de_view, name='database_scheme_de'),
url(r'^gal_file/$', views.gal_file_view, name='gal_file'),
url(r'^raw_gal_file/$', views.raw_gal_file_view, name='raw_gal_file'),
url(r'^tutorial/$', views.tutorial_en_view, name='tutorial'),
url(r'^tree/$', views.tutorial_tree_view, name='tutorial_tree'),
url(r'^tutorial_de/$', views.tutorial_de_view, name='tutorial_de'),
url(r'^steps/$', views.steps_view, name='steps'),
url(r'^processes/$', views.processes_view, name='processes'),
url(r'^process/(?P<sid>.*)/$', views.process_view, name='processview'),
url(r'^image/processtep/(?P<id>.*)/$', views.image_process_view, name='imageviewprocess'),
url(r'^g/(?P<model_name>.*)/new/$', views.new_view, name='new'),
url(r'^ligandbatch/(?P<model_name>.*)/new/$', views.ligandbatch_new, name='new_ligandbatch'),
url(r'^studies/new/$', views.study_new, name='study_new'),
url(r'^g/(?P<model_name>.*)/(?P<pk>.*)/delete$', views.delete_view, name='delete'),
url(r'^g/(?P<model_name>.*)/(?P<pk>.*)/edit$', views.edit_view, name='edit'),
url(r'^buffers/$', views.buffer_view, name='buffers'),
url(r'^peptides/$', views.peptide_view, name='peptides'),
url(r'^complexes/$', views.complex_view, name='complexes'),
url(r'^viruses/$', views.virus_view, name='viruses'),
url(r'^antibodies/$', views.antibody_view, name='antibodies'),
url(r'^bufferbatches/$', views.buffer_batch_view, name='bufferbatches'),
url(r'^peptidebatches/$', views.peptide_batch_view, name='peptidebatches'),
url(r'^complexbatches/$', views.complex_batch_view, name='complexbatches'),
url(r'^virusbatches/$', views.virus_batch_view, name='virusbatches'),
url(r'^antibodybatches/$', views.antibody_batch_view, name='antibodybatches'),
url(r'^password/$', views.change_password_view, name='change_password'),
url(r'^qspotcollection/(?P<sid>.*)/data$', views.barplot_data_view, name='barplot_plotly'),
url(r'^qspotcollection/(?P<sid>.*)/barplot_p$', views.highcharts_view, name='heatmap_highchart'),
] | lgpl-3.0 | -968,179,063,116,697,900 | 55.434211 | 119 | 0.652052 | false |
reeshupatel/demo | keystone/openstack/common/lockutils.py | 1 | 12121 | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import errno
import functools
import os
import shutil
import subprocess
import sys
import tempfile
import threading
import time
import weakref
from oslo.config import cfg
from keystone.openstack.common import fileutils
from keystone.openstack.common.gettextutils import _, _LE, _LI
from keystone.openstack.common import log as logging
LOG = logging.getLogger(__name__)
util_opts = [
cfg.BoolOpt('disable_process_locking', default=False,
help='Enables or disables inter-process locks.'),
cfg.StrOpt('lock_path',
default=os.environ.get("KEYSTONE_LOCK_PATH"),
help='Directory to use for lock files.')
]
CONF = cfg.CONF
CONF.register_opts(util_opts)
def set_defaults(lock_path):
cfg.set_defaults(util_opts, lock_path=lock_path)
class _FileLock(object):
"""Lock implementation which allows multiple locks, working around
issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does
not require any cleanup. Since the lock is always held on a file
descriptor rather than outside of the process, the lock gets dropped
automatically if the process crashes, even if __exit__ is not executed.
There are no guarantees regarding usage by multiple green threads in a
single process here. This lock works only between processes. Exclusive
access between local threads should be achieved using the semaphores
in the @synchronized decorator.
Note these locks are released when the descriptor is closed, so it's not
safe to close the file descriptor while another green thread holds the
lock. Just opening and closing the lock file can break synchronisation,
so lock files must be accessed only using this abstraction.
"""
def __init__(self, name):
self.lockfile = None
self.fname = name
def acquire(self):
basedir = os.path.dirname(self.fname)
if not os.path.exists(basedir):
fileutils.ensure_tree(basedir)
LOG.info(_LI('Created lock path: %s'), basedir)
self.lockfile = open(self.fname, 'w')
while True:
try:
# Using non-blocking locks since green threads are not
# patched to deal with blocking locking calls.
# Also upon reading the MSDN docs for locking(), it seems
# to have a laughable 10 attempts "blocking" mechanism.
self.trylock()
LOG.debug('Got file lock "%s"', self.fname)
return True
except IOError as e:
if e.errno in (errno.EACCES, errno.EAGAIN):
# external locks synchronise things like iptables
# updates - give it some time to prevent busy spinning
time.sleep(0.01)
else:
raise threading.ThreadError(_("Unable to acquire lock on"
" `%(filename)s` due to"
" %(exception)s") %
{
'filename': self.fname,
'exception': e,
})
def __enter__(self):
self.acquire()
return self
def release(self):
try:
self.unlock()
self.lockfile.close()
LOG.debug('Released file lock "%s"', self.fname)
except IOError:
LOG.exception(_LE("Could not release the acquired lock `%s`"),
self.fname)
def __exit__(self, exc_type, exc_val, exc_tb):
self.release()
def exists(self):
return os.path.exists(self.fname)
def trylock(self):
raise NotImplementedError()
def unlock(self):
raise NotImplementedError()
class _WindowsLock(_FileLock):
def trylock(self):
msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)
def unlock(self):
msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)
class _FcntlLock(_FileLock):
def trylock(self):
fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
def unlock(self):
fcntl.lockf(self.lockfile, fcntl.LOCK_UN)
class _PosixLock(object):
def __init__(self, name):
# Hash the name because it's not valid to have POSIX semaphore
# names with things like / in them. Then use base64 to encode
        # the digest() instead of taking the hexdigest() because the
        # result is shorter and most systems can't have shm semaphore
# names longer than 31 characters.
h = hashlib.sha1()
h.update(name.encode('ascii'))
self.name = str((b'/' + base64.urlsafe_b64encode(
h.digest())).decode('ascii'))
def acquire(self, timeout=None):
self.semaphore = posix_ipc.Semaphore(self.name,
flags=posix_ipc.O_CREAT,
initial_value=1)
self.semaphore.acquire(timeout)
return self
def __enter__(self):
self.acquire()
return self
def release(self):
self.semaphore.release()
self.semaphore.close()
def __exit__(self, exc_type, exc_val, exc_tb):
self.release()
def exists(self):
try:
semaphore = posix_ipc.Semaphore(self.name)
except posix_ipc.ExistentialError:
return False
else:
semaphore.close()
return True
if os.name == 'nt':
import msvcrt
InterProcessLock = _WindowsLock
FileLock = _WindowsLock
else:
import base64
import fcntl
import hashlib
import posix_ipc
InterProcessLock = _PosixLock
FileLock = _FcntlLock
_semaphores = weakref.WeakValueDictionary()
_semaphores_lock = threading.Lock()
def _get_lock_path(name, lock_file_prefix, lock_path=None):
# NOTE(mikal): the lock name cannot contain directory
# separators
name = name.replace(os.sep, '_')
if lock_file_prefix:
sep = '' if lock_file_prefix.endswith('-') else '-'
name = '%s%s%s' % (lock_file_prefix, sep, name)
local_lock_path = lock_path or CONF.lock_path
if not local_lock_path:
# NOTE(bnemec): Create a fake lock path for posix locks so we don't
# unnecessarily raise the RequiredOptError below.
if InterProcessLock is not _PosixLock:
raise cfg.RequiredOptError('lock_path')
local_lock_path = 'posixlock:/'
return os.path.join(local_lock_path, name)
def external_lock(name, lock_file_prefix=None, lock_path=None):
LOG.debug('Attempting to grab external lock "%(lock)s"',
{'lock': name})
lock_file_path = _get_lock_path(name, lock_file_prefix, lock_path)
# NOTE(bnemec): If an explicit lock_path was passed to us then it
# means the caller is relying on file-based locking behavior, so
# we can't use posix locks for those calls.
if lock_path:
return FileLock(lock_file_path)
return InterProcessLock(lock_file_path)
def remove_external_lock_file(name, lock_file_prefix=None):
"""Remove an external lock file when it's not used anymore
This will be helpful when we have a lot of lock files
"""
with internal_lock(name):
lock_file_path = _get_lock_path(name, lock_file_prefix)
try:
os.remove(lock_file_path)
except OSError:
LOG.info(_LI('Failed to remove file %(file)s'),
{'file': lock_file_path})
def internal_lock(name):
with _semaphores_lock:
try:
sem = _semaphores[name]
except KeyError:
sem = threading.Semaphore()
_semaphores[name] = sem
LOG.debug('Got semaphore "%(lock)s"', {'lock': name})
return sem
@contextlib.contextmanager
def lock(name, lock_file_prefix=None, external=False, lock_path=None):
"""Context based lock
This function yields a `threading.Semaphore` instance (if we don't use
eventlet.monkey_patch(), else `semaphore.Semaphore`) unless external is
True, in which case, it'll yield an InterProcessLock instance.
:param lock_file_prefix: The lock_file_prefix argument is used to provide
lock files on disk with a meaningful prefix.
:param external: The external keyword argument denotes whether this lock
should work across multiple processes. This means that if two different
workers both run a method decorated with @synchronized('mylock',
external=True), only one of them will execute at a time.
"""
int_lock = internal_lock(name)
with int_lock:
if external and not CONF.disable_process_locking:
ext_lock = external_lock(name, lock_file_prefix, lock_path)
with ext_lock:
yield ext_lock
else:
yield int_lock
LOG.debug('Released semaphore "%(lock)s"', {'lock': name})
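# A minimal usage sketch for the context manager above (the lock name and
# prefix here are illustrative):
#   with lock('state-file', lock_file_prefix='service-', external=True):
#       ...  # only one process/thread at a time executes this block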
def synchronized(name, lock_file_prefix=None, external=False, lock_path=None):
"""Synchronization decorator.
Decorating a method like so::
@synchronized('mylock')
def foo(self, *args):
...
ensures that only one thread will execute the foo method at a time.
Different methods can share the same lock::
@synchronized('mylock')
def foo(self, *args):
...
@synchronized('mylock')
def bar(self, *args):
...
This way only one of either foo or bar can be executing at a time.
"""
def wrap(f):
@functools.wraps(f)
def inner(*args, **kwargs):
try:
with lock(name, lock_file_prefix, external, lock_path):
LOG.debug('Got semaphore / lock "%(function)s"',
{'function': f.__name__})
return f(*args, **kwargs)
finally:
LOG.debug('Semaphore / lock released "%(function)s"',
{'function': f.__name__})
return inner
return wrap
def synchronized_with_prefix(lock_file_prefix):
"""Partial object generator for the synchronization decorator.
Redefine @synchronized in each project like so::
(in nova/utils.py)
from nova.openstack.common import lockutils
synchronized = lockutils.synchronized_with_prefix('nova-')
(in nova/foo.py)
from nova import utils
@utils.synchronized('mylock')
def bar(self, *args):
...
The lock_file_prefix argument is used to provide lock files on disk with a
meaningful prefix.
"""
return functools.partial(synchronized, lock_file_prefix=lock_file_prefix)
def main(argv):
"""Create a dir for locks and pass it to command from arguments
If you run this:
python -m openstack.common.lockutils python setup.py testr <etc>
a temporary directory will be created for all your locks and passed to all
your tests in an environment variable. The temporary dir will be deleted
afterwards and the return value will be preserved.
"""
lock_dir = tempfile.mkdtemp()
os.environ["KEYSTONE_LOCK_PATH"] = lock_dir
try:
ret_val = subprocess.call(argv[1:])
finally:
shutil.rmtree(lock_dir, ignore_errors=True)
return ret_val
if __name__ == '__main__':
sys.exit(main(sys.argv))
| apache-2.0 | 1,740,347,212,759,408,400 | 30.98153 | 78 | 0.613646 | false |
vpelletier/neoppod | neo/tests/testHandler.py | 1 | 2979 | #
# Copyright (C) 2009-2016 Nexedi SA
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from mock import Mock
from . import NeoUnitTestBase
from neo.lib.handler import EventHandler
from neo.lib.protocol import PacketMalformedError, UnexpectedPacketError, \
BrokenNodeDisallowedError, NotReadyError, ProtocolError
class HandlerTests(NeoUnitTestBase):
def setUp(self):
NeoUnitTestBase.setUp(self)
app = Mock()
self.handler = EventHandler(app)
def setFakeMethod(self, method):
self.handler.fake_method = method
def getFakePacket(self):
p = Mock({
'decode': (),
'__repr__': 'Fake Packet',
})
p.handler_method_name = 'fake_method'
return p
def test_dispatch(self):
conn = self.getFakeConnection()
packet = self.getFakePacket()
# all is ok
self.setFakeMethod(lambda c: None)
self.handler.dispatch(conn, packet)
# raise UnexpectedPacketError
conn.mockCalledMethods = {}
def fake(c):
raise UnexpectedPacketError('fake packet')
self.setFakeMethod(fake)
self.handler.dispatch(conn, packet)
self.checkErrorPacket(conn)
self.checkAborted(conn)
# raise PacketMalformedError
conn.mockCalledMethods = {}
def fake(c):
raise PacketMalformedError('message')
self.setFakeMethod(fake)
self.handler.dispatch(conn, packet)
self.checkClosed(conn)
# raise BrokenNodeDisallowedError
conn.mockCalledMethods = {}
def fake(c):
raise BrokenNodeDisallowedError
self.setFakeMethod(fake)
self.handler.dispatch(conn, packet)
self.checkErrorPacket(conn)
self.checkAborted(conn)
# raise NotReadyError
conn.mockCalledMethods = {}
def fake(c):
raise NotReadyError
self.setFakeMethod(fake)
self.handler.dispatch(conn, packet)
self.checkErrorPacket(conn)
self.checkAborted(conn)
# raise ProtocolError
conn.mockCalledMethods = {}
def fake(c):
raise ProtocolError
self.setFakeMethod(fake)
self.handler.dispatch(conn, packet)
self.checkErrorPacket(conn)
self.checkAborted(conn)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -6,620,514,194,886,175,000 | 31.736264 | 75 | 0.653911 | false |
morgenst/PyAnalysisTools | tests/unit/TestUtilities.py | 1 | 2542 | import os
import unittest
from PyAnalysisTools.base import Utilities, InvalidInputError
# from pyfakefs.fake_filesystem_unittest import TestCase
cwd = os.path.dirname(__file__)
class TestUtilities(unittest.TestCase):
def setUp(self):
pass
# self.setUpPyfakefs()
def tearDown(self):
pass
# self.tearDownPyfakefs()
def test_merge_dicts(self):
d1 = {'foo': 1}
d2 = {'bar': 2}
self.assertEqual({'foo': 1, 'bar': 2}, Utilities.merge_dictionaries(d1, d2))
def test_merge_dicts_single(self):
d1 = {'foo': 1}
self.assertEqual(d1, Utilities.merge_dictionaries(d1))
def test_merge_dicts_fail(self):
d1 = {'foo': 1}
d2 = ['bar', 2]
self.assertEqual({'foo': 1}, Utilities.merge_dictionaries(d1, d2))
def test_check_required_args_found(self):
self.assertIsNone(Utilities.check_required_args('arg', arg=1))
def test_check_required_args_missing(self):
self.assertEqual('arg', Utilities.check_required_args('arg', foo=1))
@unittest.skip("Requires fake fs")
def test_cleaner_check_lifetime(self):
self.fs.create_file('/foo/bar.txt')
self.assertTrue(Utilities.Cleaner.check_lifetime(100, 'foo', ['bar.txt']))
def test_flatten_single_element(self):
self.assertEqual(['foo/bar/1'], Utilities.flatten({'foo': {'bar': ["1"]}}))
def test_flatten_more_elements(self):
self.assertEqual(['foo/bar/1', 'foo/bar/2'], Utilities.flatten({'foo': {'bar': ["1", "2"]}}))
@unittest.skip("Requires fake fs")
def test_cleaner_default_ctor(self):
cleaner = Utilities.Cleaner(base_path='foo')
self.assertTrue(cleaner.safe)
self.assertEqual('/foo', cleaner.base_path)
self.assertEqual([".git", ".keep", ".svn", "InstallArea", "RootCoreBin", "WorkArea"], cleaner.keep_pattern)
self.assertEqual([], cleaner.deletion_list)
self.assertEqual(14., cleaner.touch_threshold_days)
self.assertEqual(None, cleaner.trash_path)
@unittest.skip("Requires fake fs")
def test_cleaner_default_ctor_trash(self):
cleaner = Utilities.Cleaner(base_path='foo', trash_path='bar')
self.assertEqual('bar', cleaner.trash_path)
def test_cleaner_default_ctor_missing_arg(self):
self.assertRaises(InvalidInputError, Utilities.Cleaner)
def test_cleaner_default_setup_trash(self):
cleaner = Utilities.Cleaner(base_path='foo', safe=False)
self.assertIsNone(cleaner.setup_temporary_trash())
| mit | -6,940,602,139,215,756,000 | 35.84058 | 115 | 0.646735 | false |
Namax0r/resistor-calculator | resistor_calculator.py | 1 | 9566 | #!/usr/bin/env python
# Basic version handling
try:
# Python2
import Tkinter as tk
except ImportError:
# Python3
import tkinter as tk
from tkinter.ttk import Combobox
from tkinter import messagebox
# Small utility that adds dot notation access to dictionary attributes
class dotdict(dict):
__getattr__ = dict.get
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
# Main view window
root = tk.Tk()
# Store width and height in variable for ease of change
window_width = 300
window_height = 380
# Set min and max size of a GUI window
root.minsize(window_width, window_height)
root.maxsize(window_width, window_height)
# Var is used to store our result
var_result = tk.StringVar()
var_max = tk.StringVar()
var_min = tk.StringVar()
# Create dictionary of colors and values
d = {
    # Values of the band are stored as strings to allow concatenation of the digits.
'band':{
'black': "0", 'brown': "1", 'red': "2", 'orange': "3",
'yellow': "4", 'green': "5", 'blue': "6", 'violet': "7",
'gray': "8", 'white': "9"
},
'multiplier':{
'black': 1, 'brown': 10, 'red': 100, 'orange': 1000,
'yellow': 10000, 'green': 100000, 'blue': 1000000,
'violet': 10000000
},
'tolerance':{
'brown': 0.01, 'red': 0.02, 'green': 0.005, 'blue': 0.025,
'violet': 0.010, 'gray': 0.005, 'gold': 0.05, 'silver': 0.10
}
}
# Enable dot notation on the dictionary
d = dotdict(d)
class ResistorCalculator:
def __init__(self, parent, title):
self.parent = parent
self.parent.title(title)
self.parent.protocol("WM_DELETE_WINDOW", self.close_program)
# Define variables to store values of comboboxes
self.band1_var_result = 0
self.band2_var_result = 0
self.band3_var_result = 0
self.multiplier_var_result = 0
self.tolerance_var_result = 0
self.build_window()
# Function to destroy the window when [X] is pressed
def close_program(self, event=None):
self.parent.destroy()
# Function called when '<<ComboboxSelected>>' event is triggered
def combobox_handler(self, event):
        # Store the values of the comboboxes in instance variables
self.band1_var_result = self.band1_var.get()
self.band2_var_result = self.band2_var.get()
self.band3_var_result = self.band3_var.get()
self.multiplier_var_result = self.multiplier_var.get()
self.tolerance_var_result = self.tolerance_var.get()
# Function to handle error, when there are not enough arguments for formula to calculate properly.
def error_not_enough_args(self):
tk.messagebox.showinfo("Error", "Not enough arguments to calculate. Please select more values.")
    # Function to append a unit mark to the end of a result
    def add_mark(self, val, mark):
        return "{} {}".format(val, mark)
# Function to calculate the resistors
def calculate_resistor(self):
try:
# If there are only 2 bands to add, change the formula to skip the band3
if self.band3_var_result == " ":
bands = d.band[self.band1_var_result] + d.band[self.band2_var_result]
else:
bands = d.band[self.band1_var_result] + d.band[self.band2_var_result] + d.band[self.band3_var_result]
# Convert string into int so we can do mathematical operations on it
int_bands = int(bands)
# Set multiplier and tolerance
multiplier = d.multiplier[self.multiplier_var_result]
tolerance = d.tolerance[self.tolerance_var_result]
# Calculate the resistance based on the formula
formula = (int_bands * multiplier)
max_resistance = formula + (formula * tolerance)
min_resistance = formula - (formula * tolerance)
            # Scale the results into ohms, kilo-ohms or mega-ohms
            if formula < 1000:
                result_max = self.add_mark(max_resistance, "Ω")
                result_min = self.add_mark(min_resistance, "Ω")
                result_normal = self.add_mark(formula, "Ω")
            elif formula < 1000000:
                result_max = self.add_mark(max_resistance / 1000.0, "kΩ")
                result_min = self.add_mark(min_resistance / 1000.0, "kΩ")
                result_normal = self.add_mark(formula / 1000.0, "kΩ")
            else:
                result_max = self.add_mark(max_resistance / 1000000.0, "MΩ")
                result_min = self.add_mark(min_resistance / 1000000.0, "MΩ")
                result_normal = self.add_mark(formula / 1000000.0, "MΩ")
# Set the variables that display result in the GUI
var_result.set(result_normal)
var_max.set(result_max)
var_min.set(result_min)
# KeyError exception when there are not enough values to calculate
except KeyError:
self.error_not_enough_args()
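    # Worked example for the formula above (values illustrative): bands
    # red(2) and violet(7) with a red multiplier (x100) give 27 * 100 = 2700
    # ohms; a gold tolerance (5%) yields a 2565-2835 ohm range, displayed as
    # 2.7 kΩ with min/max of 2.565/2.835 kΩ.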
# Function to build a GUI window and all of it's widgets.
def build_window(self):
# Band 1
band1_label = tk.Label(self.parent, text="Band 1" )
band1_label.grid(row=0, column=0, ipadx=30, pady=5)
self.band1_var = tk.StringVar()
band1_combo = Combobox(self.parent, state='readonly', height = '10', justify = 'center', textvariable=self.band1_var)
band1_combo['values']=('black', 'brown', 'red', 'orange',
'yellow', 'green', 'blue', 'violet',
'gray', 'white')
band1_combo.bind('<<ComboboxSelected>>', self.combobox_handler)
band1_combo.grid(row=0, column=1, padx=10)
# Band 2
band2_label = tk.Label( self.parent, text="Band 2")
band2_label.grid(row=2, column=0, pady=5)
self.band2_var = tk.StringVar()
band2_combo = Combobox(self.parent, state='readonly', height = '10', justify = 'center', textvariable=self.band2_var)
band2_combo['values']=('black', 'brown', 'red', 'orange',
'yellow', 'green', 'blue', 'violet',
'gray', 'white')
band2_combo.bind('<<ComboboxSelected>>', self.combobox_handler)
band2_combo.grid(row=2, column=1)
# Band 3
band3_label = tk.Label( self.parent, text="Band 3" )
band3_label.grid(row=4, column=0, pady=5)
self.band3_var = tk.StringVar()
        # band3 defaults to " " so calculate_resistor can skip it (4-band resistors)
self.band3_var.set(" ")
band3_combo = Combobox(self.parent, state='readonly', height = '10', justify = 'center', textvariable=self.band3_var)
band3_combo['values']=('black', 'brown', 'red', 'orange',
'yellow', 'green', 'blue', 'violet',
'gray', 'white')
band3_combo.bind('<<ComboboxSelected>>', self.combobox_handler)
band3_combo.grid(row=4, column=1)
# Multiplier
multiplier_label = tk.Label( self.parent, text="Multiplier" )
multiplier_label.grid(row=6, column=0, pady=5)
self.multiplier_var = tk.StringVar()
multiplier_combo = Combobox(self.parent, state='readonly', height = '10', justify = 'center', textvariable=self.multiplier_var)
multiplier_combo['values']=('black', 'brown', 'red', 'orange',
'yellow', 'green', 'blue', 'violet')
multiplier_combo.bind('<<ComboboxSelected>>', self.combobox_handler)
multiplier_combo.grid(row=6, column=1)
# Tolerance
tolerance_label = tk.Label( self.parent, text="Tolerance" )
tolerance_label.grid(row=8, column=0, pady=5)
self.tolerance_var = tk.StringVar()
tolerance_combo = Combobox(self.parent, state='readonly', height = '10', justify = 'center', textvariable=self.tolerance_var)
tolerance_combo['values']=('brown', 'red', 'green', 'blue',
'violet', 'gray', 'gold', 'silver')
tolerance_combo.bind('<<ComboboxSelected>>', self.combobox_handler)
tolerance_combo.grid(row=8, column=1)
# Calculate button
self.calculate_button = tk.Button(self.parent, text ="Calculate", command = self.calculate_resistor)
self.calculate_button.grid(row=9, column=1, pady=5, ipadx=40)
# Results section
result_label = tk.Message( self.parent, text="Result:")
result_label.grid(row=12, column=0, pady=10)
result_value = tk.Message( self.parent, textvariable=var_result, relief=tk.RAISED )
result_value.grid(row=12, column=1)
max_result_label = tk.Message( self.parent, text="Max:")
max_result_label.grid(row=13, column=0, pady=10, ipadx=20)
max_result_value = tk.Message( self.parent, textvariable=var_max, relief=tk.RAISED)
max_result_value.grid(row=13, column=1)
min_result_label = tk.Message( self.parent, text="Min:")
min_result_label.grid(row=14, column=0, pady=10)
min_result_value = tk.Message( self.parent, textvariable=var_min, relief=tk.RAISED )
min_result_value.grid(row=14, column=1)
# Author name, displayed at the bottom of a program
author_name = tk.Label(self.parent, text="by Namax0r", relief=tk.SUNKEN, bd=1)
author_name.place(x=window_width - 70, y=window_height - 20)
if __name__ == '__main__':
app = ResistorCalculator(root, "Resistor Calculator")
root.mainloop()
| mit | -1,463,508,674,641,083,400 | 43.259259 | 135 | 0.601255 | false |
mjasher/gac | GAC/flopy/modflow/mfdrn.py | 1 | 7133 | """
mfdrn module. Contains the ModflowDrn class. Note that the user can access
the ModflowDrn class as `flopy.modflow.ModflowDrn`.
Additional information for this MODFLOW package can be found at the `Online
MODFLOW Guide
<http://water.usgs.gov/ogw/modflow/MODFLOW-2005-Guide/index.html?drn.htm>`_.
"""
import sys
import numpy as np
from flopy.mbase import Package
from flopy.utils.util_list import mflist
class ModflowDrn(Package):
"""
MODFLOW Drain Package Class.
Parameters
----------
model : model object
The model object (of type :class:`flopy.modflow.mf.Modflow`) to which
this package will be added.
ipakcb : int
is a flag and a unit number. (default is 0).
stress_period_data : list of boundaries or
recarray of boundaries or
dictionary of boundaries
Each drain cell is defined through definition of
layer(int), row(int), column(int), elevation(float), conductance(float)
The simplest form is a dictionary with a lists of boundaries for each
stress period, where each list of boundaries itself is a list of
boundaries. Indices of the dictionary are the numbers of the stress
period. This gives the form of
stress_period_data =
{0: [
[lay, row, col, stage, cond],
[lay, row, col, stage, cond],
[lay, row, col, stage, cond],
],
1: [
[lay, row, col, stage, cond],
[lay, row, col, stage, cond],
[lay, row, col, stage, cond],
], ...
kper:
[
[lay, row, col, stage, cond],
[lay, row, col, stage, cond],
[lay, row, col, stage, cond],
]
}
Note that if no values are specified for a certain stress period, then
the list of boundaries for the previous stress period for which values
were defined is used. Full details of all options to specify
stress_period_data can be found in the flopy3boundaries Notebook in
the basic subdirectory of the examples directory
dtype : dtype definition
if data type is different from default
options : list of strings
Package options. (default is None).
extension : string
Filename extension (default is 'drn')
unitnumber : int
File unit number (default is 21).
Attributes
----------
Methods
-------
See Also
--------
Notes
-----
Parameters are not supported in FloPy.
Examples
--------
>>> import flopy
>>> ml = flopy.modflow.Modflow()
>>> lrcec = {0:[2, 3, 4, 10., 100.]} #this drain will be applied to all
>>> #stress periods
>>> drn = flopy.modflow.ModflowDrn(ml, stress_period_data=lrcec)
"""
def __init__(self, model, ipakcb=0, stress_period_data=None, dtype=None,
extension='drn', unitnumber=21, options=None, **kwargs):
"""
Package constructor
"""
Package.__init__(self, model, extension, 'DRN',
unitnumber) # Call ancestor's init to set self.parent, extension, name and unit number
self.heading = '# DRN for MODFLOW, generated by Flopy.'
self.url = 'drn.htm'
self.ipakcb = ipakcb # 0: no cell by cell terms are written
self.np = 0
if options is None:
options = []
self.options = options
if dtype is not None:
self.dtype = dtype
else:
self.dtype = self.get_default_dtype(structured=self.parent.structured)
self.stress_period_data = mflist(self, stress_period_data)
self.parent.add_package(self)
def __repr__(self):
return 'Drain class'
@staticmethod
def get_default_dtype(structured=True):
if structured:
dtype = np.dtype([("k", np.int), ("i", np.int),
("j", np.int), ("elev", np.float32),
("cond", np.float32)])
else:
dtype = np.dtype([("node", np.int), ("elev", np.float32),
("cond", np.float32)])
return dtype
def ncells(self):
# Returns the maximum number of cells that have drains (developed for MT3DMS SSM package)
# print 'Function must be implemented properly for drn package'
return self.stress_period_data.mxact
def write_file(self):
"""
Write the file.
"""
f_drn = open(self.fn_path, 'w')
f_drn.write('{0}\n'.format(self.heading))
# f_drn.write('%10i%10i\n' % (self.mxactd, self.idrncb))
line = '{0:10d}{1:10d}'.format(self.stress_period_data.mxact, self.ipakcb)
for opt in self.options:
line += ' ' + str(opt)
line += '\n'
f_drn.write(line)
self.stress_period_data.write_transient(f_drn)
f_drn.close()
def add_record(self, kper, index, values):
try:
self.stress_period_data.add_record(kper, index, values)
except Exception as e:
raise Exception("mfdrn error adding record to list: " + str(e))
@staticmethod
def get_empty(ncells=0, aux_names=None, structured=True):
        # get an empty recarray that corresponds to dtype
dtype = ModflowDrn.get_default_dtype(structured=structured)
if aux_names is not None:
dtype = Package.add_to_dtype(dtype, aux_names, np.float32)
d = np.zeros((ncells, len(dtype)), dtype=dtype)
d[:, :] = -1.0E+10
return np.core.records.fromarrays(d.transpose(), dtype=dtype)
@staticmethod
def load(f, model, nper=None, ext_unit_dict=None):
"""
Load an existing package.
Parameters
----------
f : filename or file handle
File to load.
model : model object
The model object (of type :class:`flopy.modflow.mf.Modflow`) to
which this package will be added.
ext_unit_dict : dictionary, optional
If the arrays in the file are specified using EXTERNAL,
or older style array control records, then `f` should be a file
handle. In this case ext_unit_dict is required, which can be
constructed using the function
:class:`flopy.utils.mfreadnam.parsenamefile`.
Returns
-------
drn : ModflowDrn object
ModflowDrn object.
Examples
--------
>>> import flopy
>>> m = flopy.modflow.Modflow()
>>> drn = flopy.modflow.ModflowDrn.load('test.drn', m)
"""
if model.verbose:
sys.stdout.write('loading drn package file...\n')
return Package.load(model, ModflowDrn, f, nper)
| gpl-2.0 | -3,510,670,181,342,770,700 | 33.311881 | 112 | 0.549418 | false |
mosen/salt-osx | _modules/deprecated/mac_shadow.py | 1 | 10388 | # -*- coding: utf-8 -*-
'''
Manage Mac OSX local directory passwords and policies.
Note that it is usually better to apply password policies through the creation of a configuration profile.
Tech Notes:
Usually when a password is changed by the system, the system is expected to
check the user's hash list and generate a fresh hash for each entry. Many
OS X password changing scripts/modules only deal with the SHA-512 PBKDF2
hash when working with the local node.
'''
# Authentication concepts reference:
# https://developer.apple.com/library/mac/documentation/Networking/Conceptual/Open_Directory/openDirectoryConcepts/openDirectoryConcepts.html#//apple_ref/doc/uid/TP40000917-CH3-CIFCAIBB
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__) # Start logging
import os
import base64
import salt.utils
import string
import binascii
import salt.exceptions
try:
from passlib.utils import pbkdf2, ab64_encode, ab64_decode
HAS_PASSLIB = True
except ImportError:
HAS_PASSLIB = False
def __virtual__():
if HAS_PASSLIB and salt.utils.platform.is_darwin():
return True
else:
return False
def _pl_salted_sha512_pbkdf2_from_string(strvalue, salt_bin=None, iterations=1000):
'''
Create a PBKDF2-SHA512 hash with a 128 byte key length.
The standard passlib.hash.pbkdf2_sha512 functions assume a 64 byte key length which does not match OSX's
implementation.
:param strvalue: The string to derive the hash from
:param salt: The (randomly generated) salt
:param iterations: The number of iterations, for Mac OS X it's normally between 23000-25000? need to confirm.
:return: (binary digest, binary salt, number of iterations used)
'''
if salt_bin is None:
salt_bin = os.urandom(32)
key_length = 128
hmac_sha512, dsize = pbkdf2.get_prf("hmac-sha512")
digest_bin = pbkdf2.pbkdf2(strvalue, salt_bin, iterations, key_length, hmac_sha512)
return digest_bin, salt_bin, iterations
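# Illustrative sketch only (hypothetical values, not part of this module's
# API): the triple returned above is what gen_password() below packs into
# the ShadowHashData structure, roughly:
#
#   digest, salt_bin, iters = _pl_salted_sha512_pbkdf2_from_string('hunter2')
#   shadow = {'SALTED-SHA512-PBKDF2': {
#       'entropy': base64.b64encode(digest, '+/'),
#       'salt': base64.b64encode(salt_bin, '+/'),
#       'iterations': iters,
#   }}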
def _extract_authdata(item):
'''
Extract version, authority tag, and authority data from a single array item of AuthenticationAuthority
item
The NSString instance representing the authority string
returns
version (default 1.0.0), tag, data as a tuple
'''
parts = string.split(item, ';', 2)
if not parts[0]:
parts[0] = '1.0.0'
return {
'version': parts[0],
'tag': parts[1],
'data': parts[2]
}
def authorities(name):
'''
Read the list of authentication authorities for the given user.
name
Short username of the local user.
'''
authorities_plist = __salt__['cmd.run']('/usr/bin/dscl -plist . read /Users/{0} AuthenticationAuthority'.format(name))
plist = __salt__['plist.parse_string'](authorities_plist)
authorities_list = [_extract_authdata(item) for item in plist.objectForKey_('dsAttrTypeStandard:AuthenticationAuthority')]
return authorities_list
def user_shadowhash(name):
'''
Read the existing hash for the named user.
Returns a dict with the ShadowHash content for the named user in the form:
{ 'HASH_TYPE': { 'entropy': <base64 hash>, 'salt': <base64 salt>, 'iterations': <n iterations> }}
Hash types are hard coded to SALTED-SHA-PBKDF2, CRAM-MD5, NT, RECOVERABLE.
In future releases the AuthenticationAuthority property should be checked for the hash list
name
The username associated with the local directory user.
'''
# We have to strip the output string, convert hex back to binary data, read that plist and get our specific
    # key/value property to find the hash. I.e. there's a lot of unwrapping to do.
log.debug('Reading ShadowHashData')
data = __salt__['dscl.read']('.', '/Users/{0}'.format(name), 'ShadowHashData')
log.debug('Got ShadowHashData')
log.debug(data)
if data is None:
log.debug('No such record/attribute found, returning None')
return None
if 'dsAttrTypeNative:ShadowHashData' not in data:
raise salt.exceptions.SaltInvocationError(
'Expected to find ShadowHashData in user record: {0}'.format(name)
)
plist_hex = string.replace(data['dsAttrTypeNative:ShadowHashData'], ' ', '')
plist_bin = binascii.unhexlify(plist_hex)
# plistlib is not used, because mavericks ships without binary plist support from plistlib.
plist = __salt__['plist.parse_string'](plist_bin)
log.debug(plist)
pbkdf = plist.objectForKey_('SALTED-SHA512-PBKDF2')
cram_md5 = plist.objectForKey_('CRAM-MD5')
nt = plist.objectForKey_('NT')
recoverable = plist.objectForKey_('RECOVERABLE')
hashes = {}
if pbkdf is not None:
hashes['SALTED-SHA512-PBKDF2'] = {
'entropy': pbkdf.objectForKey_('entropy').base64EncodedStringWithOptions_(0),
'salt': pbkdf.objectForKey_('salt').base64EncodedStringWithOptions_(0),
'iterations': pbkdf.objectForKey_('iterations')
}
if cram_md5 is not None:
hashes['CRAM-MD5'] = cram_md5.base64EncodedStringWithOptions_(0)
if nt is not None:
hashes['NT'] = nt.base64EncodedStringWithOptions_(0)
if recoverable is not None:
hashes['RECOVERABLE'] = recoverable.base64EncodedStringWithOptions_(0)
return hashes
def info(name):
'''
Return information for the specified user
CLI Example:
.. code-block:: bash
salt '*' mac_shadow.info admin
'''
# dscl -plist . -read /Users/<User> ShadowHashData
# Read out name from dscl
# Read out passwd hash from decrypted ShadowHashData in dslocal
# Read out lstchg/min/max/warn/inact/expire from PasswordPolicy
pass
def gen_password(password, salt=None, iterations=None):
'''
Generate hashed (PBKDF2-SHA512) password
Returns a dict containing values for 'entropy', 'salt' and 'iterations'.
password
Plaintext password to be hashed.
salt
Cryptographic salt (base64 encoded). If not given, a random 32-character salt will be
generated. (32 bytes is the standard salt length for OSX)
iterations
Number of iterations for the key derivation function, default is 1000
CLI Example:
.. code-block:: bash
salt '*' mac_shadow.gen_password 'I_am_password'
salt '*' mac_shadow.gen_password 'I_am_password' 'Ausrbk5COuB9V4ata6muoj+HPjA92pefPfbW9QPnv9M=' 23000
'''
if iterations is None:
iterations = 1000
if salt is None:
salt_bin = os.urandom(32)
else:
salt_bin = base64.b64decode(salt, '+/')
entropy, used_salt, used_iterations = _pl_salted_sha512_pbkdf2_from_string(password, salt_bin, iterations)
result = {
'entropy': base64.b64encode(entropy, '+/'),
'salt': base64.b64encode(used_salt, '+/'),
'iterations': used_iterations
}
return {'SALTED-SHA512-PBKDF2': result}
def set_password_hash(name, hashtype, hash, salt=None, iterations=None):
'''
Set the given hash as the shadow hash data for the named user.
name
The name of the local user, which is assumed to be in the local directory service.
hashtype
A valid hash type, one of: PBKDF2, CRAM-MD5, NT, RECOVERABLE
hash
The computed hash
salt (optional)
The salt to use, if applicable.
iterations
The number of iterations to use, if applicable.
'''
# current_hashes = user_shadowhash(name)
# current_pbkdf2 = current_hashes['SALTED-SHA512-PBKDF2']
#
# log.debug('Current ShadowHashdata follows')
# log.debug(current_hashes)
shd = {'SALTED-SHA512-PBKDF2': {'entropy': hash, 'salt': salt, 'iterations': iterations}}
log.debug('Encoding following dict as bplist')
log.debug(shd)
# if shd['SALTED-SHA512-PBKDF2']['entropy'] == current_pbkdf2['entropy']:
# log.debug('Entropy IS EQUAL!')
shd_bplist = __salt__['plist.gen_string'](shd, 'binary')
shd_bplist_b64 = base64.b64encode(shd_bplist, '+/')
log.debug('Flushing directory services cache')
__salt__['dscl.flushcache']()
log.debug('Writing directly to dslocal')
__salt__['plist.append_key']('/var/db/dslocal/nodes/Default/users/{0}.plist'.format(name),
'ShadowHashData',
'data',
shd_bplist_b64)
log.debug('Flushing directory services cache')
__salt__['dscl.flushcache']()
return True
def set_password(name, password, salt=None, iterations=None):
'''
Set the password for a named user (insecure).
Use mac_shadow.set_password_hash to supply pre-computed hash values.
For the moment this sets only the PBKDF2-SHA512 salted hash.
To be a good citizen we should set every hash in the authority list.
name
The name of the local user, which is assumed to be in the local directory service.
password
The plaintext password to set (warning: insecure, used for testing)
salt
The salt to use, defaults to automatically generated.
iterations
        The number of iterations to use; defaults to 1000 (the gen_password default).
CLI Example:
.. code-block:: bash
salt '*' mac_shadow.set_password macuser macpassword
'''
#current_hashes = user_shadowhash(name)
#current_pbkdf2 = current_hashes['SALTED-SHA512-PBKDF2']
# hash = gen_password(password, current_pbkdf2['salt'], current_pbkdf2['iterations'])
hash = gen_password(password, salt, iterations)
#
# log.debug('Current ShadowHashData follows')
# if current_hashes:
# log.debug(current_hashes)
#
# if hash['SALTED-SHA512-PBKDF2']['entropy'] == current_pbkdf2['entropy']:
# return False # No change required
# else:
# log.debug('No Shadow Hash Data exists for User: {0}'.format(name))
set_password_hash(
name,
'PBKDF2',
hash['SALTED-SHA512-PBKDF2']['entropy'],
hash['SALTED-SHA512-PBKDF2']['salt'],
hash['SALTED-SHA512-PBKDF2']['iterations']
)
return True
def del_password(name):
'''
Delete the password from name user
CLI Example:
.. code-block:: bash
salt '*' shadow.del_password username
'''
pass # Re-order authentication authority and remove ShadowHashData
| mit | -2,658,986,317,874,695,000 | 30.383686 | 185 | 0.663939 | false |
rodrigosurita/GDAd | sdaps/model/questionnaire.py | 1 | 9008 | # -*- coding: utf8 -*-
# SDAPS - Scripts for data acquisition with paper based surveys
# Copyright(C) 2008, Christoph Simon <[email protected]>
# Copyright(C) 2008, Benjamin Berg <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
u'''
Note on the diamond structures
For classes with multiple base classes, at most one base class defines
its own __init__ function. The other classes are "only" mixin classes.
This sidesteps the problems of the diamond structure.
'''
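# A minimal illustration of the convention described above (hypothetical
# classes, not part of this module):
#
#   class Base(object):
#       def __init__(self):      # the only __init__ in the hierarchy
#           self.state = None
#
#   class Mixin(object):         # contributes behaviour only, no __init__
#       def helper(self):
#           return self.state
#
#   class Concrete(Base, Mixin): # the diamond stays harmless
#       pass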
import buddy
import data
import struct
class DataObject(object):
u'''Mixin
'''
def get_data(self):
if not self.id in self.sheet.data:
self.sheet.data[self.id] = getattr(data, self.__class__.__name__)(self)
return self.sheet.data[self.id]
data = property(get_data)
class Questionnaire(buddy.Object):
'''
Identification: There is only one.
Reference: survey.questionnaire
Parent: self.survey
'''
def __init__(self):
self.survey = None
self.qobjects = list()
self.last_id = (0, 0)
self.init_attributes()
def init_attributes(self):
self.page_count = 0
def add_qobject(self, qobject, new_id=None):
qobject.questionnaire = self
# XXX: Is this any good?
if new_id is not None:
assert new_id > self.last_id
self.last_id = new_id
qobject.id = new_id
else:
self.last_id = qobject.init_id(self.last_id)
self.qobjects.append(qobject)
def get_sheet(self):
return self.survey.sheet
sheet = property(get_sheet)
def __unicode__(self):
return unicode().join(
[u'%s\n' % self.__class__.__name__] +
[unicode(qobject) for qobject in self.qobjects]
)
class QObject(buddy.Object):
'''
    Identification: id == (major, minor)
Reference: survey.questionnaire.qobjects[i](i != id)
Parent: self.questionnaire
'''
def __init__(self):
self.questionnaire = None
self.boxes = list()
self.last_id = -1
self.init_attributes()
def init_attributes(self):
pass
def init_id(self, id):
self.id = (id[0], id[1] + 1)
return self.id
def add_box(self, box):
box.question = self
self.last_id = box.init_id(self.last_id)
self.boxes.append(box)
def get_sheet(self):
return self.questionnaire.sheet
sheet = property(get_sheet)
def calculate_survey_id(self, md5):
pass
def id_str(self):
ids = [str(x) for x in self.id]
return u'.'.join(ids)
def id_csv(self, theid=None):
if theid is None:
theid = self.id
ids = [str(x) for x in theid]
return u'_'.join(ids)
def id_filter(self):
ids = [str(x) for x in self.id]
return u'_' + u'_'.join(ids)
def __unicode__(self):
return u'(%s)\n' % (
self.__class__.__name__,
)
class Head(QObject):
def init_attributes(self):
QObject.init_attributes(self)
self.title = unicode()
def init_id(self, id):
self.id = (id[0] + 1, 0)
return self.id
def __unicode__(self):
return u'%s(%s) %s\n' % (
self.id_str(),
self.__class__.__name__,
self.title,
)
class Question(QObject):
def init_attributes(self):
QObject.init_attributes(self)
self.page_number = 0
self.question = unicode()
def calculate_survey_id(self, md5):
for box in self.boxes:
box.calculate_survey_id(md5)
def __unicode__(self):
return u'%s(%s) %s {%i}\n' % (
self.id_str(),
self.__class__.__name__,
self.question,
self.page_number
)
class Choice(Question):
def __unicode__(self):
return unicode().join(
[Question.__unicode__(self)] +
[unicode(box) for box in self.boxes]
)
def get_answer(self):
'''it's a list containing all selected values
'''
answer = list()
for box in self.boxes:
if box.data.state:
answer.append(box.value)
return answer
class Mark(Question):
def init_attributes(self):
Question.init_attributes(self)
self.answers = list()
def __unicode__(self):
if len(self.answers) == 2:
return unicode().join(
[Question.__unicode__(self)] +
[u'\t%s - %s\n' % tuple(self.answers)] +
[unicode(box) for box in self.boxes]
)
else:
return unicode().join(
[Question.__unicode__(self)] +
[u'\t? - ?\n'] +
[unicode(box) for box in self.boxes]
)
def get_answer(self):
'''it's an integer between 0 and 5
        1 to 5 are valid marks; 0 is returned if there's something wrong
'''
        # box.value is zero-based, a mark is 1-based
answer = list()
for box in self.boxes:
if box.data.state:
answer.append(box.value)
if len(answer) == 1:
return answer[0] + 1
else:
return 0
def set_answer(self, answer):
for box in self.boxes:
box.data.state = box.value == answer - 1
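    # For illustration: with five boxes (box.value 0..4), set_answer(3)
    # ticks the box whose value is 2, and get_answer() then returns 3 again.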
class Text(Question):
def __unicode__(self):
return unicode().join(
[Question.__unicode__(self)] +
[unicode(box) for box in self.boxes]
)
def get_answer(self):
        '''it's a bool, whether there is content in the textbox
'''
assert len(self.boxes) == 1
return self.boxes[0].data.state
class Additional_Head(Head):
pass
class Additional_Mark(Question, DataObject):
def init_attributes(self):
Question.init_attributes(self)
self.answers = list()
def __unicode__(self):
return unicode().join(
[Question.__unicode__(self)] +
[u'\t%s - %s\n' % tuple(self.answers)]
)
def get_answer(self):
return self.data.value
def set_answer(self, answer):
self.data.value = answer
class Additional_FilterHistogram(Question, DataObject):
def init_attributes(self):
Question.init_attributes(self)
self.answers = list()
self.filters = list()
def __unicode__(self):
result = []
result.append(Question.__unicode__(self))
for i in xrange(len(self.answers)):
result.append(u'\t%s - %s\n' % (self.answers[i], self.filters[i]))
return unicode().join(result)
def get_answer(self):
return self.data.value
def set_answer(self, answer):
raise NotImplemented()
class Box(buddy.Object, DataObject):
'''
Identification: id of the parent and value of the box ::
id == (major, minor, value)
Reference: survey.questionnaire.qobjects[i].boxes[j]
Parent: self.question
'''
def __init__(self):
self.question = None
self.init_attributes()
def init_attributes(self):
self.page_number = 0
self.x = 0
self.y = 0
self.width = 0
self.height = 0
self.text = unicode()
def init_id(self, id):
self.value = id + 1
self.id = self.question.id + (self.value,)
return self.value
def id_str(self):
ids = [str(x) for x in self.id]
return u'.'.join(ids)
def get_sheet(self):
return self.question.sheet
sheet = property(get_sheet)
def calculate_survey_id(self, md5):
tmp = struct.pack('!ffff', self.x, self.y, self.width, self.height)
md5.update(tmp)
def __unicode__(self):
return u'\t%i(%s) %s %s %s %s %s\n' % (
self.value,
(self.__class__.__name__).ljust(8),
(u'%.1f' % self.x).rjust(5),
(u'%.1f' % self.y).rjust(5),
(u'%.1f' % self.width).rjust(5),
(u'%.1f' % self.height).rjust(5),
self.text
)
class Checkbox(Box):
def init_attributes(self):
Box.init_attributes(self)
self.form = "box"
def calculate_survey_id(self, md5):
Box.calculate_survey_id(self, md5)
md5.update(self.form)
class Textbox(Box):
pass
| gpl-3.0 | 2,867,286,513,674,983,400 | 24.232493 | 83 | 0.559614 | false |
mornsun/javascratch | src/topcoder.py/LC_330_Patching_Array.py | 1 | 1807 | #!/usr/bin/env python
#coding=utf8
'''
Given a sorted positive integer array nums and an integer n, add/patch elements to the array such that any number in range [1, n] inclusive can be formed by the sum of some elements in the array. Return the minimum number of patches required.
Example 1:
nums = [1, 3], n = 6
Return 1.
Combinations of nums are [1], [3], [1,3], which form possible sums of: 1, 3, 4.
Now if we add/patch 2 to nums, the combinations are: [1], [2], [3], [1,3], [2,3], [1,2,3].
Possible sums are 1, 2, 3, 4, 5, 6, which now covers the range [1, 6].
So we only need 1 patch.
Example 2:
nums = [1, 5, 10], n = 20
Return 2.
The two patches can be [2, 4].
Example 3:
nums = [1, 2, 2], n = 5
Return 0.
@author: Chauncey
beat 92.56%
'''
import heapq
import datetime
import time
import sys
class Solution(object):
def minPatches(self, nums, n):
"""
:type nums: List[int]
:type n: int
:rtype: int
"""
if n<=0:
return 0
if nums is None:
nums = []
miss = 1
index = 0
patch = 0
while miss<=n:
if index>=len(nums) or miss<nums[index]:
miss <<= 1
patch += 1
continue
if miss>=nums[index]:
miss += nums[index]
index += 1
continue
return patch
if __name__ == '__main__':
solution = Solution()
start_time = datetime.datetime.now()
print solution.minPatches([1, 3], 6) #1
print solution.minPatches([1, 5, 10], 20) #2
print solution.minPatches([1, 2, 2], 5) #0
print solution.minPatches([], 7) #3
elapsed = datetime.datetime.now() - start_time
print 'elapsed: ', elapsed.total_seconds()
| gpl-2.0 | 8,682,041,023,751,641,000 | 24.111111 | 242 | 0.570559 | false |
SaltusVita/ReoGrab | Spiders.py | 1 | 6942 | '''
Created on 2 сент. 2016 г.
@author: garet
'''
import urllib.request
import queue
import sqlite3
import re
import json
from urllib.parse import urlparse
from Parser import HtmlPage
import lxml.etree
class BaseSpider:
def __init__(self):
self.urls = QueueUrls()
self.cache = SqliteCache('some_db')
def add_urls(self, urls):
self.urls.add_urls(urls)
def add_urls_routed(self, urls):
result = []
for url in urls:
if self.fetch_route(url) is not None:
result.append(url)
self.add_urls(result)
def add_route(self, route):
self.routes.append(route)
def add_routes(self, routes):
pass
def fetch_route(self, url):
if not hasattr(self, 'routes'):
return
for route in self.routes:
part_url = re.match(route['re'], url)
if part_url is not None and part_url.group(0) == url:
if 'skip' in route and route['skip'] is True:
break
return route
return None
def save_cache(self, url, data=None):
pass
def get_cache(self, url):
pass
def run(self):
self.init()
self.work()
# self.clear()
def init(self):
if hasattr(self, 'start_urls'):
self.add_urls(self.start_urls)
if hasattr(self, 'routes'):
self.add_routes(self.routes)
def work(self):
while not self.urls.empty():
url = self.urls.get_url()
response = self.get_page(url)
route = self.fetch_route(url)
if route is None:
continue
if 'type' in route and route['type'] == 'sitemap':
urls = self.sitemap(response)
self.add_urls_routed(urls)
continue
if 'name' in route and hasattr(self, route['name']):
getattr(self, route['name'])(response)
pass
def sitemap(self, data):
sitemap_text = data.text.replace('<?xml version="1.0" encoding="UTF-8"?>', '')
doc = lxml.etree.XML(sitemap_text)
ns = {"d": "http://www.sitemaps.org/schemas/sitemap/0.9"}
return doc.xpath("//d:loc/text()", namespaces=ns)
def charset(self, headers):
        # headers may arrive as a JSON string (from get_data) or as a decoded
        # structure (from the cache); normalise before searching
        encode = 'UTF-8'
        if not isinstance(headers, str):
            headers = json.dumps(headers)
        m = re.search('charset=([a-z 0-9\-\_]+)', headers, re.IGNORECASE)
        if m:
            encode = m.group(1)
        return encode
def get_page(self, url):
r = self.cache.get(url)
if r is not None:
print(r['url'])
return Response(r)
r = self.get_data(url)
self.cache.set(r)
print('{0} --- {1}'.format(url, r['url']))
return Response(r)
@staticmethod
def get_data(url):
try:
r = urllib.request.urlopen(url)
out = {
'url': r.geturl(),
'code': r.getcode(),
'headers': json.dumps(r.getheaders()),
'data': r.read()
}
return out
except urllib.error.HTTPError as e:
out = {
'url': e.geturl(),
'code': e.getcode(),
'headers': json.dumps(e.getheaders()),
'data': e.read()
}
return out
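# Illustrative subclass wiring (hypothetical site and handler names):
#
#   class MySpider(BaseSpider):
#       start_urls = ['http://example.com/sitemap.xml']
#       routes = [
#           {'re': r'http://example\.com/sitemap\.xml', 'type': 'sitemap'},
#           {'re': r'http://example\.com/item/\d+', 'name': 'parse_item'},
#       ]
#
#       def parse_item(self, response):
#           page = response.parser()  # lxml-backed HtmlPage
#
#   MySpider().run()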
class QueueUrls:
def __init__(self):
self._urls_queue = queue.Queue()
self._urls_set = set()
def add_url(self, url):
u = urlparse(url)
url = u[0] + '://' + u[1] + u[2] + u[3]
if u[4] != '':
url += '?' + u[4]
if url not in self._urls_set:
self._urls_queue.put(url)
self._urls_set.add(url)
def add_urls(self, urls):
urls_type = type(urls)
if urls_type is str:
self.add_url(urls)
return
for url in urls:
self.add_url(url)
def exist_url(self, url):
if url in self._urls_set:
return True
return False
def get_url(self):
return self._urls_queue.get()
def empty(self):
return self._urls_queue.empty()
class SqliteCache:
def __init__(self, db_name):
self.db_name = db_name
self.init_db()
def init_db(self):
file = self.db_name + '.sqlite'
self._db = sqlite3.connect(file)
self._cursor = self._db.cursor()
# Create table
sql = """
CREATE TABLE IF NOT EXISTS tbl_urls(
url TEXT primary key not null,
code INTEGER,
headers TEXT,
data BLOB,
time TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);"""
self._cursor.execute(sql)
def get(self, url):
if self._cursor is None:
            self.init_db()
sql = "SELECT * FROM tbl_urls WHERE url=?;"
self._cursor.execute(sql, (url,))
row = self._cursor.fetchone()
if row is not None:
out = {
'url': row[0],
'code': row[1],
'headers': json.loads(row[2]),
'data': row[3]
}
return out
return None
def set(self, dat):
if self._cursor is None:
self.init_db()
sql = "INSERT OR REPLACE INTO tbl_urls(url,code,headers,data) VALUES (?,?,?,?);"
self._cursor.execute(sql, (dat['url'], dat['code'], dat['headers'], dat['data']))
self._db.commit()
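# Illustrative round-trip through the cache (hypothetical values; note that
# `headers` is stored as a JSON string, matching get_data() above):
#
#   cache = SqliteCache('some_db')
#   cache.set({'url': 'http://example.com/', 'code': 200,
#              'headers': '[]', 'data': b'<html></html>'})
#   row = cache.get('http://example.com/')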
class Download:
def __init__(self):
self.method = 'GET'
self.user_agent = self.random_user_agent()
@staticmethod
    def random_user_agent(browser=None, os=None):
return 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 8.0; WOW64; Trident/5.0; .NET CLR 2.7.40781; .NET4.0E; en-SG)'
@staticmethod
def get_page(url):
r = urllib.request.urlopen(url)
code = r.getcode()
headers = r.getheaders()
data = r.read()
url = r.geturl()
# return Response(r)
class Response:
def __init__(self, res):
self.code = res['code']
self.headers = res['headers']
self.data = res['data']
self.url = res['url']
def charset(self):
        encode = 'UTF-8'
        headers = self.headers if isinstance(self.headers, str) else json.dumps(self.headers)
        m = re.search('charset=([a-z 0-9\-\_]+)', headers, re.IGNORECASE)
        if m:
            encode = m.group(1)
        return encode
@property
def text(self):
encode = self.charset()
return self.data.decode(encode)
def parser(self):
        return HtmlPage(self.text, self.url)
| bsd-3-clause | 2,515,193,081,959,107,000 | 26.430328 | 124 | 0.486666 | false |
rven/odoo | addons/l10n_ch/models/res_bank.py | 1 | 16379 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import re
from odoo import api, fields, models, _
from odoo.exceptions import ValidationError
from odoo.tools.misc import mod10r
from odoo.exceptions import UserError
import werkzeug.urls
ISR_SUBSCRIPTION_CODE = {'CHF': '01', 'EUR': '03'}
CLEARING = "09000"
_re_postal = re.compile('^[0-9]{2}-[0-9]{1,6}-[0-9]$')
def _is_l10n_ch_postal(account_ref):
""" Returns True if the string account_ref is a valid postal account number,
    i.e. it only contains digits and its last digit is the result of a recursive
    modulo 10 operation run over the rest of it. The shortened form with - is also accepted.
"""
if _re_postal.match(account_ref or ''):
ref_subparts = account_ref.split('-')
account_ref = ref_subparts[0] + ref_subparts[1].rjust(6, '0') + ref_subparts[2]
if re.match('\d+$', account_ref or ''):
account_ref_without_check = account_ref[:-1]
return mod10r(account_ref_without_check) == account_ref
return False
def _is_l10n_ch_isr_issuer(account_ref, currency_code):
""" Returns True if the string account_ref is a valid a valid ISR issuer
An ISR issuer is postal account number that starts by 01 (CHF) or 03 (EUR),
"""
if (account_ref or '').startswith(ISR_SUBSCRIPTION_CODE[currency_code]):
return _is_l10n_ch_postal(account_ref)
return False
class ResPartnerBank(models.Model):
_inherit = 'res.partner.bank'
l10n_ch_postal = fields.Char(
string="Swiss Postal Account",
readonly=False, store=True,
compute='_compute_l10n_ch_postal',
help="This field is used for the Swiss postal account number on a vendor account and for the client number on "
"your own account. The client number is mostly 6 numbers without -, while the postal account number can "
"be e.g. 01-162-8")
# fields to configure ISR payment slip generation
l10n_ch_isr_subscription_chf = fields.Char(string='CHF ISR Subscription Number', help='The subscription number provided by the bank or Postfinance to identify the bank, used to generate ISR in CHF. eg. 01-162-8')
l10n_ch_isr_subscription_eur = fields.Char(string='EUR ISR Subscription Number', help='The subscription number provided by the bank or Postfinance to identify the bank, used to generate ISR in EUR. eg. 03-162-5')
l10n_ch_show_subscription = fields.Boolean(compute='_compute_l10n_ch_show_subscription', default=lambda self: self.env.company.country_id.code == 'CH')
def _is_isr_issuer(self):
return (_is_l10n_ch_isr_issuer(self.l10n_ch_postal, 'CHF')
or _is_l10n_ch_isr_issuer(self.l10n_ch_postal, 'EUR'))
@api.constrains("l10n_ch_postal", "partner_id")
def _check_postal_num(self):
"""Validate postal number format"""
for rec in self:
if rec.l10n_ch_postal and not _is_l10n_ch_postal(rec.l10n_ch_postal):
# l10n_ch_postal is used for the purpose of Client Number on your own accounts, so don't do the check there
if rec.partner_id and not rec.partner_id.ref_company_ids:
raise ValidationError(
_("The postal number {} is not valid.\n"
"It must be a valid postal number format. eg. 10-8060-7").format(rec.l10n_ch_postal))
return True
@api.constrains("l10n_ch_isr_subscription_chf", "l10n_ch_isr_subscription_eur")
def _check_subscription_num(self):
"""Validate ISR subscription number format
        A subscription number can only start with 01 or 03.
"""
for rec in self:
for currency in ["CHF", "EUR"]:
subscrip = rec.l10n_ch_isr_subscription_chf if currency == "CHF" else rec.l10n_ch_isr_subscription_eur
if subscrip and not _is_l10n_ch_isr_issuer(subscrip, currency):
example = "01-162-8" if currency == "CHF" else "03-162-5"
raise ValidationError(
_("The ISR subcription {} for {} number is not valid.\n"
"It must starts with {} and we a valid postal number format. eg. {}"
).format(subscrip, currency, ISR_SUBSCRIPTION_CODE[currency], example))
return True
@api.depends('partner_id', 'company_id')
def _compute_l10n_ch_show_subscription(self):
for bank in self:
if bank.partner_id:
bank.l10n_ch_show_subscription = bank.partner_id.ref_company_ids.country_id.code =='CH'
elif bank.company_id:
bank.l10n_ch_show_subscription = bank.company_id.country_id.code == 'CH'
else:
bank.l10n_ch_show_subscription = self.env.company.country_id.code == 'CH'
@api.depends('acc_number', 'acc_type')
def _compute_sanitized_acc_number(self):
#Only remove spaces in case it is not postal
postal_banks = self.filtered(lambda b: b.acc_type == "postal")
for bank in postal_banks:
bank.sanitized_acc_number = bank.acc_number
super(ResPartnerBank, self - postal_banks)._compute_sanitized_acc_number()
@api.model
def _get_supported_account_types(self):
rslt = super(ResPartnerBank, self)._get_supported_account_types()
rslt.append(('postal', _('Postal')))
return rslt
@api.model
def retrieve_acc_type(self, acc_number):
""" Overridden method enabling the recognition of swiss postal bank
account numbers.
"""
acc_number_split = ""
# acc_number_split is needed to continue to recognize the account
        # as a postal account even if extra text (e.g. the partner name) was appended to it
if acc_number and " " in acc_number:
acc_number_split = acc_number.split(" ")[0]
if _is_l10n_ch_postal(acc_number) or (acc_number_split and _is_l10n_ch_postal(acc_number_split)):
return 'postal'
else:
return super(ResPartnerBank, self).retrieve_acc_type(acc_number)
@api.depends('acc_number', 'partner_id', 'acc_type')
def _compute_l10n_ch_postal(self):
for record in self:
if record.acc_type == 'iban':
record.l10n_ch_postal = self._retrieve_l10n_ch_postal(record.sanitized_acc_number)
elif record.acc_type == 'postal':
if record.acc_number and " " in record.acc_number:
record.l10n_ch_postal = record.acc_number.split(" ")[0]
else:
record.l10n_ch_postal = record.acc_number
# In case of ISR issuer, this number is not
# unique and we fill acc_number with partner
# name to give proper information to the user
if record.partner_id and record.acc_number[:2] in ["01", "03"]:
record.acc_number = ("{} {}").format(record.acc_number, record.partner_id.name)
@api.model
def _is_postfinance_iban(self, iban):
"""Postfinance IBAN have format
CHXX 0900 0XXX XXXX XXXX K
Where 09000 is the clearing number
"""
return iban.startswith('CH') and iban[4:9] == CLEARING
@api.model
def _pretty_postal_num(self, number):
"""format a postal account number or an ISR subscription number
as per specifications with '-' separators.
eg. 010001628 -> 01-162-8
"""
if re.match('^[0-9]{2}-[0-9]{1,6}-[0-9]$', number or ''):
return number
currency_code = number[:2]
middle_part = number[2:-1]
trailing_cipher = number[-1]
middle_part = middle_part.lstrip("0")
return currency_code + '-' + middle_part + '-' + trailing_cipher
@api.model
def _retrieve_l10n_ch_postal(self, iban):
"""Reads a swiss postal account number from a an IBAN and returns it as
a string. Returns None if no valid postal account number was found, or
the given iban was not from Swiss Postfinance.
CH09 0900 0000 1000 8060 7 -> 10-8060-7
"""
if self._is_postfinance_iban(iban):
# the IBAN corresponds to a swiss account
return self._pretty_postal_num(iban[-9:])
return None
def _get_qr_code_url(self, qr_method, amount, currency, debtor_partner, free_communication, structured_communication):
if qr_method == 'ch_qr':
qr_code_vals = self._l10n_ch_get_qr_vals(amount, currency, debtor_partner, free_communication, structured_communication)
return '/report/barcode/?type=%s&value=%s&width=%s&height=%s&quiet=1&mask=ch_cross' % ('QR', werkzeug.urls.url_quote_plus('\n'.join(qr_code_vals)), 256, 256)
return super()._get_qr_code_url(qr_method, amount, currency, debtor_partner, free_communication, structured_communication)
def _l10n_ch_get_qr_vals(self, amount, currency, debtor_partner, free_communication, structured_communication):
comment = ""
if free_communication:
comment = (free_communication[:137] + '...') if len(free_communication) > 140 else free_communication
creditor_addr_1, creditor_addr_2 = self._get_partner_address_lines(self.partner_id)
debtor_addr_1, debtor_addr_2 = self._get_partner_address_lines(debtor_partner)
# Compute reference type (empty by default, only mandatory for QR-IBAN,
# and must then be 27 characters-long, with mod10r check digit as the 27th one,
# just like ISR number for invoices)
reference_type = 'NON'
reference = ''
if self._is_qr_iban():
# _check_for_qr_code_errors ensures we can't have a QR-IBAN without a QR-reference here
reference_type = 'QRR'
reference = structured_communication
currency = currency or self.currency_id or self.company_id.currency_id
return [
'SPC', # QR Type
'0200', # Version
'1', # Coding Type
self.sanitized_acc_number, # IBAN
'K', # Creditor Address Type
(self.acc_holder_name or self.partner_id.name)[:70], # Creditor Name
creditor_addr_1, # Creditor Address Line 1
creditor_addr_2, # Creditor Address Line 2
            '',                                 # Creditor Postal Code (empty, since we're using combined address elements)
            '',                                 # Creditor Town (empty, since we're using combined address elements)
self.partner_id.country_id.code, # Creditor Country
'', # Ultimate Creditor Address Type
'', # Name
'', # Ultimate Creditor Address Line 1
'', # Ultimate Creditor Address Line 2
'', # Ultimate Creditor Postal Code
'', # Ultimate Creditor Town
'', # Ultimate Creditor Country
'{:.2f}'.format(amount), # Amount
currency.name, # Currency
'K', # Ultimate Debtor Address Type
debtor_partner.commercial_partner_id.name[:70], # Ultimate Debtor Name
debtor_addr_1, # Ultimate Debtor Address Line 1
debtor_addr_2, # Ultimate Debtor Address Line 2
'', # Ultimate Debtor Postal Code (not to be provided for address type K)
'', # Ultimate Debtor Postal City (not to be provided for address type K)
debtor_partner.country_id.code, # Ultimate Debtor Postal Country
reference_type, # Reference Type
reference, # Reference
comment, # Unstructured Message
'EPD', # Mandatory trailer part
]
def _get_partner_address_lines(self, partner):
""" Returns a tuple of two elements containing the address lines to use
for this partner. Line 1 contains the street and number, line 2 contains
zip and city. Those two lines are limited to 70 characters
"""
streets = [partner.street, partner.street2]
line_1 = ' '.join(filter(None, streets))
line_2 = partner.zip + ' ' + partner.city
return line_1[:70], line_2[:70]
def _check_qr_iban_range(self, iban):
if not iban or len(iban) < 9:
return False
iid_start_index = 4
iid_end_index = 8
iid = iban[iid_start_index : iid_end_index+1]
return re.match('\d+', iid) \
and 30000 <= int(iid) <= 31999 # Those values for iid are reserved for QR-IBANs only
def _is_qr_iban(self):
""" Tells whether or not this bank account has a QR-IBAN account number.
QR-IBANs are specific identifiers used in Switzerland as references in
QR-codes. They are formed like regular IBANs, but are actually something
different.
"""
self.ensure_one()
return self.acc_type == 'iban' \
and self._check_qr_iban_range(self.sanitized_acc_number)
@api.model
def _is_qr_reference(self, reference):
""" Checks whether the given reference is a QR-reference, i.e. it is
made of 27 digits, the 27th being a mod10r check on the 26 previous ones.
"""
return reference \
and len(reference) == 27 \
and re.match('\d+$', reference) \
and reference == mod10r(reference[:-1])
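    # Illustration with the sample reference from the Swiss QR-bill spec
    # (assumed here, not taken from this codebase):
    #   _is_qr_reference('210000000003139471430009017') -> True
    # because mod10r('21000000000313947143000901') re-appends the check
    # digit 7 as the 27th character.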
def _eligible_for_qr_code(self, qr_method, debtor_partner, currency):
if qr_method == 'ch_qr':
return self.acc_type == 'iban' and \
self.partner_id.country_id.code == 'CH' and \
(not debtor_partner or debtor_partner.country_id.code == 'CH') \
and currency.name in ('EUR', 'CHF')
return super()._eligible_for_qr_code(qr_method, debtor_partner, currency)
def _check_for_qr_code_errors(self, qr_method, amount, currency, debtor_partner, free_communication, structured_communication):
def _partner_fields_set(partner):
return partner.zip and \
partner.city and \
partner.country_id.code and \
(partner.street or partner.street2)
if qr_method == 'ch_qr':
if not _partner_fields_set(self.partner_id):
return _("The partner set on the bank account meant to receive the payment (%s) must have a complete postal address (street, zip, city and country).", self.acc_number)
if debtor_partner and not _partner_fields_set(debtor_partner):
return _("The partner the QR-code must have a complete postal address (street, zip, city and country).")
if self._is_qr_iban() and not self._is_qr_reference(structured_communication):
return _("When using a QR-IBAN as the destination account of a QR-code, the payment reference must be a QR-reference.")
return super()._check_for_qr_code_errors(qr_method, amount, currency, debtor_partner, free_communication, structured_communication)
@api.model
def _get_available_qr_methods(self):
rslt = super()._get_available_qr_methods()
rslt.append(('ch_qr', _("Swiss QR bill"), 10))
return rslt
| agpl-3.0 | 1,126,025,373,065,044,900 | 51.16242 | 216 | 0.567251 | false |
googleapis/googleapis-gen | google/cloud/talent/v4beta1/talent-v4beta1-py/google/cloud/talent_v4beta1/services/completion/transports/grpc.py | 1 | 11561 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.talent_v4beta1.types import completion_service
from .base import CompletionTransport, DEFAULT_CLIENT_INFO
class CompletionGrpcTransport(CompletionTransport):
"""gRPC backend transport for Completion.
    A service that handles auto completion.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(self, *,
host: str = 'jobs.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(cls,
host: str = 'jobs.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def complete_query(self) -> Callable[
[completion_service.CompleteQueryRequest],
completion_service.CompleteQueryResponse]:
r"""Return a callable for the complete query method over gRPC.
Completes the specified prefix with keyword
suggestions. Intended for use by a job search auto-
complete search box.
Returns:
Callable[[~.CompleteQueryRequest],
~.CompleteQueryResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'complete_query' not in self._stubs:
self._stubs['complete_query'] = self.grpc_channel.unary_unary(
'/google.cloud.talent.v4beta1.Completion/CompleteQuery',
request_serializer=completion_service.CompleteQueryRequest.serialize,
response_deserializer=completion_service.CompleteQueryResponse.deserialize,
)
return self._stubs['complete_query']
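# A minimal usage sketch (illustrative; the parent/tenant path and query
# values are hypothetical, and real code normally goes through the
# CompletionClient rather than instantiating the transport directly):
#
#   transport = CompletionGrpcTransport()
#   request = completion_service.CompleteQueryRequest(
#       parent='projects/my-project/tenants/my-tenant',
#       query='softw',
#       page_size=5,
#   )
#   response = transport.complete_query(request)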
__all__ = (
'CompletionGrpcTransport',
)
| apache-2.0 | 3,560,645,474,204,908,500 | 44.515748 | 91 | 0.607992 | false |
rldleblanc/ceph-tools | osd_hunter.py | 1 | 6255 | #!/usr/bin/python
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
import argparse
import re
import datetime
import operator
import pprint
import glob
import gzip
import sys
slow_threshold = 10 #seconds
# Nothing to change past here
verbose = None
re_slow = re.compile(r'^(\d+-\d+-\d+\s+\d+:\d+:\d+\.\d+)\s+\w+\s+0.*slow.*(client\.\d+\.\d+:\d+).*from\s+(\d+(,\d+)*)')
re_io = re.compile(r'^(\d+-\d+-\d+\s+\d+:\d+:\d+\.\d+)\s+\w+\s+1.*<==.*(osd\.\d+|client).*(client\.\d+\.\d+:\d+).*')
def get_date(datestring):
nofrag, frag = datestring.split(".")
date = datetime.datetime.strptime(nofrag, "%Y-%m-%d %H:%M:%S")
frag = frag[:6] #truncate to microseconds
frag += (6 - len(frag)) * '0'
date = date.replace(microsecond=int(frag))
return date
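# e.g. (illustrative): get_date('2016-09-02 12:34:56.789123')
#      -> datetime.datetime(2016, 9, 2, 12, 34, 56, 789123)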
def get_log_files(args):
if args.all is True:
if args.zip is True:
return glob.glob(args.logdir + "ceph-osd.*.log*")
else:
return glob.glob(args.logdir + "ceph-osd.*.log")
else:
if args.zip is True:
return glob.glob(args.logdir + "ceph-osd." + str(args.osd) + ".log*")
else:
return glob.glob(args.logdir + "ceph-osd." + str(args.osd) + ".log")
def find_blocked(args):
slow_osds = {}
if args.all is True:
if verbose >= 1:
print "Searching all OSDs."
for file in get_log_files(args):
result = search_logs(file)
if result:
slow_osds.update(result)
pass
else:
if verbose >= 1:
print "Going to search OSD " + str(args.osd) + "."
slow_osds = search_logs(get_log_files(args)[0])
if verbose >=3:
pprint.pprint(slow_osds)
if len(slow_osds) > 0:
print_output(slow_osds)
else:
print "Could not find any slow OSDs."
def print_output(slow_osds):
# Tally up the slow OSDs
    # go through all arrays and create a new array of slow OSDs
# with the OSD ID as the key and increment the value for each
# Sort the list asending and print out the OSDs.
osd_report = {}
for key in slow_osds.keys():
if slow_osds[key].get('start', None):
if slow_osds[key].get('slow', None):
for i in slow_osds[key]['slow']:
if i not in osd_report.keys():
osd_report[i] = 1
else:
osd_report[i] += 1
osd_report = sorted(osd_report.items(), key=operator.itemgetter(1))
if len(osd_report) > 0:
for i in osd_report:
print "OSD " + str(i[0]) + ": " + str(i[1])
else:
print "Could not find any slow OSDs."
def search_logs(logfile):
if verbose >= 1:
print "Searching through " + logfile + "..."
try:
# Iterate through the file looking for slow messages so we know
# which I/O are problematic
if 'gz' in logfile:
with gzip.open(logfile, 'rb') as f:
return scan_file(f)
else:
with open(logfile, 'rb') as f:
return scan_file(f)
return None
except OSError, e:
print "Could not open " + logfile + " for reading."
sys.exit(1)
def scan_file(fd):
slow_osds = {}
# If the line has slow, capture the date/time, the client id
# and the secondary OSDs as slow clients
for line in fd:
matches = re_slow.match(line)
if matches and not matches.group(1) in slow_osds.keys():
slow_osds[matches.group(2)] = {}
#slow_osds[matches.group(2)]['start'] = get_date(matches.group(1))
slow_osds[matches.group(2)]['slow'] = matches.group(3).split(",")
# On the second iteration, look for lines that have the client id
# 1. Get the data/time stamp from the request from the client,
# set as the start time for the I/O
# 2. If it has ondisk status. Get the date/time. Compare with the
# start time and if less than 30 seconds, move osd to the
# fast list.
if len(slow_osds) > 0:
# Jump back to the start of the file
fd.seek(0)
for line in fd:
matches = re_io.match(line)
if matches and matches.group(3) in slow_osds.keys():
if 'client' in matches.group(2):
slow_osds[matches.group(3)]['start'] = get_date(matches.group(1))
elif 'osd' in matches.group(2) and slow_osds[matches.group(3)].get('start', None):
latency = get_date(matches.group(1)) - slow_osds[matches.group(3)]['start']
osd = matches.group(2).split(".")[1]
if latency < datetime.timedelta(seconds=slow_threshold):
if osd in slow_osds[matches.group(3)]['slow']:
slow_osds[matches.group(3)]['slow'].remove(osd)
if not slow_osds[matches.group(3)].get('fast', None):
slow_osds[matches.group(3)]['fast'] = [osd]
elif osd not in slow_osds[matches.group(3)]['fast']:
slow_osds[matches.group(3)]['fast'] += [osd]
return slow_osds
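# The mapping returned by scan_file() is keyed by client request id, e.g.
# (illustrative values):
#   {'client.4267.0:126': {'start': datetime(...), 'slow': ['12'], 'fast': ['3']}}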
def main():
# Main execution
global verbose
parser = argparse.ArgumentParser(description="Hunts for slow OSDs by looking thorugh OSD logs.")
osdgroup = parser.add_mutually_exclusive_group(required=True)
osdgroup.add_argument('-o', '--osd', type=int, help="an OSD on this host that is reporting slow I/O.")
    osdgroup.add_argument('-a', '--all', action="store_true", default=False, help="Search logs of all OSDs in logdir.")
    parser.add_argument('-z', '--zip', action="store_true", default=False, help="Also search through compressed logfiles.")
parser.add_argument('-l', '--logdir', default="/var/log/ceph/", help="Location of log files. Defaults to /var/log/ceph/.")
parser.add_argument('-v', '--verbose', action="count", default=0, help="Increase verbosity, more flags means more output.")
args = parser.parse_args()
verbose = args.verbose
if verbose >= 3:
pprint.pprint(args)
if args.all or args.osd:
find_blocked(args)
if __name__ == "__main__":
main()
| lgpl-3.0 | -2,655,373,338,628,918,300 | 37.850932 | 127 | 0.561311 | false |
santisiri/popego | envs/ALPHA-POPEGO/lib/python2.5/site-packages/numpy-1.0.4-py2.5-linux-x86_64.egg/numpy/distutils/cpuinfo.py | 1 | 22466 | #!/usr/bin/env python
"""
cpuinfo
Copyright 2002 Pearu Peterson all rights reserved,
Pearu Peterson <[email protected]>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy (BSD style) license. See LICENSE.txt that came with
this distribution for specifics.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
Pearu Peterson
"""
__all__ = ['cpu']
import sys, re, types
import os
import commands
import warnings
def getoutput(cmd, successful_status=(0,), stacklevel=1):
try:
status, output = commands.getstatusoutput(cmd)
except EnvironmentError, e:
warnings.warn(str(e), UserWarning, stacklevel=stacklevel)
return False, output
if os.WIFEXITED(status) and os.WEXITSTATUS(status) in successful_status:
return True, output
return False, output
def command_info(successful_status=(0,), stacklevel=1, **kw):
info = {}
for key in kw:
ok, output = getoutput(kw[key], successful_status=successful_status,
stacklevel=stacklevel+1)
if ok:
info[key] = output.strip()
return info
def command_by_line(cmd, successful_status=(0,), stacklevel=1):
ok, output = getoutput(cmd, successful_status=successful_status,
stacklevel=stacklevel+1)
if not ok:
return
for line in output.splitlines():
yield line.strip()
def key_value_from_command(cmd, sep, successful_status=(0,),
stacklevel=1):
d = {}
for line in command_by_line(cmd, successful_status=successful_status,
stacklevel=stacklevel+1):
l = [s.strip() for s in line.split(sep, 1)]
if len(l) == 2:
d[l[0]] = l[1]
return d
class CPUInfoBase(object):
"""Holds CPU information and provides methods for requiring
the availability of various CPU features.
"""
def _try_call(self,func):
try:
return func()
except:
pass
def __getattr__(self,name):
if not name.startswith('_'):
if hasattr(self,'_'+name):
attr = getattr(self,'_'+name)
if type(attr) is types.MethodType:
return lambda func=self._try_call,attr=attr : func(attr)
else:
return lambda : None
raise AttributeError,name
def _getNCPUs(self):
return 1
def _is_32bit(self):
return not self.is_64bit()
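# Because of CPUInfoBase.__getattr__ above, every private `_is_*`/`_has_*`
# helper is exposed as a safe public callable that returns None instead of
# raising when the probe fails, e.g. (illustrative):
#
#   cpu.is_Intel()   # -> True / False
#   cpu.has_sse2()   # -> True / False / None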
class LinuxCPUInfo(CPUInfoBase):
info = None
def __init__(self):
if self.info is not None:
return
info = [ {} ]
ok, output = getoutput('uname -m')
if ok:
info[0]['uname_m'] = output.strip()
try:
fo = open('/proc/cpuinfo')
except EnvironmentError, e:
warnings.warn(str(e), UserWarning)
else:
for line in fo:
name_value = [s.strip() for s in line.split(':', 1)]
if len(name_value) != 2:
continue
name, value = name_value
if not info or info[-1].has_key(name): # next processor
info.append({})
info[-1][name] = value
fo.close()
self.__class__.info = info
def _not_impl(self): pass
# Athlon
def _is_AMD(self):
return self.info[0]['vendor_id']=='AuthenticAMD'
def _is_AthlonK6_2(self):
return self._is_AMD() and self.info[0]['model'] == '2'
def _is_AthlonK6_3(self):
return self._is_AMD() and self.info[0]['model'] == '3'
def _is_AthlonK6(self):
return re.match(r'.*?AMD-K6',self.info[0]['model name']) is not None
def _is_AthlonK7(self):
return re.match(r'.*?AMD-K7',self.info[0]['model name']) is not None
def _is_AthlonMP(self):
return re.match(r'.*?Athlon\(tm\) MP\b',
self.info[0]['model name']) is not None
def _is_AMD64(self):
return self.is_AMD() and self.info[0]['family'] == '15'
def _is_Athlon64(self):
return re.match(r'.*?Athlon\(tm\) 64\b',
self.info[0]['model name']) is not None
def _is_AthlonHX(self):
return re.match(r'.*?Athlon HX\b',
self.info[0]['model name']) is not None
def _is_Opteron(self):
return re.match(r'.*?Opteron\b',
self.info[0]['model name']) is not None
def _is_Hammer(self):
return re.match(r'.*?Hammer\b',
self.info[0]['model name']) is not None
# Alpha
def _is_Alpha(self):
return self.info[0]['cpu']=='Alpha'
def _is_EV4(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV4'
def _is_EV5(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV5'
def _is_EV56(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV56'
def _is_PCA56(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'PCA56'
# Intel
#XXX
_is_i386 = _not_impl
def _is_Intel(self):
return self.info[0]['vendor_id']=='GenuineIntel'
def _is_i486(self):
return self.info[0]['cpu']=='i486'
def _is_i586(self):
return self.is_Intel() and self.info[0]['cpu family'] == '5'
def _is_i686(self):
return self.is_Intel() and self.info[0]['cpu family'] == '6'
def _is_Celeron(self):
return re.match(r'.*?Celeron',
self.info[0]['model name']) is not None
def _is_Pentium(self):
return re.match(r'.*?Pentium',
self.info[0]['model name']) is not None
def _is_PentiumII(self):
return re.match(r'.*?Pentium.*?II\b',
self.info[0]['model name']) is not None
def _is_PentiumPro(self):
return re.match(r'.*?PentiumPro\b',
self.info[0]['model name']) is not None
def _is_PentiumMMX(self):
return re.match(r'.*?Pentium.*?MMX\b',
self.info[0]['model name']) is not None
def _is_PentiumIII(self):
return re.match(r'.*?Pentium.*?III\b',
self.info[0]['model name']) is not None
def _is_PentiumIV(self):
return re.match(r'.*?Pentium.*?(IV|4)\b',
self.info[0]['model name']) is not None
def _is_PentiumM(self):
return re.match(r'.*?Pentium.*?M\b',
self.info[0]['model name']) is not None
def _is_Prescott(self):
return self.is_PentiumIV() and self.has_sse3()
def _is_Nocona(self):
return self.is_64bit() and self.is_PentiumIV()
def _is_Core2(self):
return self.is_64bit() and self.is_Intel() and \
re.match(r'.*?Core\(TM\)2\b', \
self.info[0]['model name']) is not None
def _is_Itanium(self):
return re.match(r'.*?Itanium\b',
self.info[0]['family']) is not None
def _is_XEON(self):
return re.match(r'.*?XEON\b',
self.info[0]['model name'],re.IGNORECASE) is not None
_is_Xeon = _is_XEON
# Varia
def _is_singleCPU(self):
return len(self.info) == 1
def _getNCPUs(self):
return len(self.info)
def _has_fdiv_bug(self):
return self.info[0]['fdiv_bug']=='yes'
def _has_f00f_bug(self):
return self.info[0]['f00f_bug']=='yes'
def _has_mmx(self):
return re.match(r'.*?\bmmx\b',self.info[0]['flags']) is not None
def _has_sse(self):
return re.match(r'.*?\bsse\b',self.info[0]['flags']) is not None
def _has_sse2(self):
return re.match(r'.*?\bsse2\b',self.info[0]['flags']) is not None
def _has_sse3(self):
return re.match(r'.*?\bsse3\b',self.info[0]['flags']) is not None
def _has_3dnow(self):
return re.match(r'.*?\b3dnow\b',self.info[0]['flags']) is not None
def _has_3dnowext(self):
return re.match(r'.*?\b3dnowext\b',self.info[0]['flags']) is not None
def _is_64bit(self):
if self.is_Alpha():
return True
if self.info[0].get('clflush size','')=='64':
return True
if self.info[0].get('uname_m','')=='x86_64':
return True
if self.info[0].get('arch','')=='IA-64':
return True
return False
def _is_32bit(self):
return not self.is_64bit()
class IRIXCPUInfo(CPUInfoBase):
info = None
def __init__(self):
if self.info is not None:
return
info = key_value_from_command('sysconf', sep=' ',
successful_status=(0,1))
self.__class__.info = info
def _not_impl(self): pass
def _is_singleCPU(self):
return self.info.get('NUM_PROCESSORS') == '1'
def _getNCPUs(self):
return int(self.info.get('NUM_PROCESSORS', 1))
def __cputype(self,n):
return self.info.get('PROCESSORS').split()[0].lower() == 'r%s' % (n)
def _is_r2000(self): return self.__cputype(2000)
def _is_r3000(self): return self.__cputype(3000)
def _is_r3900(self): return self.__cputype(3900)
def _is_r4000(self): return self.__cputype(4000)
def _is_r4100(self): return self.__cputype(4100)
def _is_r4300(self): return self.__cputype(4300)
def _is_r4400(self): return self.__cputype(4400)
def _is_r4600(self): return self.__cputype(4600)
def _is_r4650(self): return self.__cputype(4650)
def _is_r5000(self): return self.__cputype(5000)
def _is_r6000(self): return self.__cputype(6000)
def _is_r8000(self): return self.__cputype(8000)
def _is_r10000(self): return self.__cputype(10000)
def _is_r12000(self): return self.__cputype(12000)
def _is_rorion(self): return self.__cputype('orion')
def get_ip(self):
try: return self.info.get('MACHINE')
except: pass
def __machine(self,n):
return self.info.get('MACHINE').lower() == 'ip%s' % (n)
def _is_IP19(self): return self.__machine(19)
def _is_IP20(self): return self.__machine(20)
def _is_IP21(self): return self.__machine(21)
def _is_IP22(self): return self.__machine(22)
def _is_IP22_4k(self): return self.__machine(22) and self._is_r4000()
def _is_IP22_5k(self): return self.__machine(22) and self._is_r5000()
def _is_IP24(self): return self.__machine(24)
def _is_IP25(self): return self.__machine(25)
def _is_IP26(self): return self.__machine(26)
def _is_IP27(self): return self.__machine(27)
def _is_IP28(self): return self.__machine(28)
def _is_IP30(self): return self.__machine(30)
def _is_IP32(self): return self.__machine(32)
def _is_IP32_5k(self): return self.__machine(32) and self._is_r5000()
def _is_IP32_10k(self): return self.__machine(32) and self._is_r10000()
class DarwinCPUInfo(CPUInfoBase):
info = None
def __init__(self):
if self.info is not None:
return
info = command_info(arch='arch',
machine='machine')
info['sysctl_hw'] = key_value_from_command('sysctl hw', sep='=')
self.__class__.info = info
def _not_impl(self): pass
def _getNCPUs(self):
return int(self.info['sysctl_hw'].get('hw.ncpu', 1))
def _is_Power_Macintosh(self):
return self.info['sysctl_hw']['hw.machine']=='Power Macintosh'
def _is_i386(self):
return self.info['arch']=='i386'
def _is_ppc(self):
return self.info['arch']=='ppc'
def __machine(self,n):
return self.info['machine'] == 'ppc%s'%n
def _is_ppc601(self): return self.__machine(601)
def _is_ppc602(self): return self.__machine(602)
def _is_ppc603(self): return self.__machine(603)
def _is_ppc603e(self): return self.__machine('603e')
def _is_ppc604(self): return self.__machine(604)
def _is_ppc604e(self): return self.__machine('604e')
def _is_ppc620(self): return self.__machine(620)
def _is_ppc630(self): return self.__machine(630)
def _is_ppc740(self): return self.__machine(740)
def _is_ppc7400(self): return self.__machine(7400)
def _is_ppc7450(self): return self.__machine(7450)
def _is_ppc750(self): return self.__machine(750)
def _is_ppc403(self): return self.__machine(403)
def _is_ppc505(self): return self.__machine(505)
def _is_ppc801(self): return self.__machine(801)
def _is_ppc821(self): return self.__machine(821)
def _is_ppc823(self): return self.__machine(823)
def _is_ppc860(self): return self.__machine(860)
class SunOSCPUInfo(CPUInfoBase):
info = None
def __init__(self):
if self.info is not None:
return
info = command_info(arch='arch',
mach='mach',
uname_i='uname_i',
isainfo_b='isainfo -b',
isainfo_n='isainfo -n',
)
info['uname_X'] = key_value_from_command('uname -X', sep='=')
for line in command_by_line('psrinfo -v 0'):
m = re.match(r'\s*The (?P<p>[\w\d]+) processor operates at', line)
if m:
info['processor'] = m.group('p')
break
self.__class__.info = info
def _not_impl(self): pass
def _is_32bit(self):
return self.info['isainfo_b']=='32'
def _is_64bit(self):
return self.info['isainfo_b']=='64'
def _is_i386(self):
return self.info['isainfo_n']=='i386'
def _is_sparc(self):
return self.info['isainfo_n']=='sparc'
def _is_sparcv9(self):
return self.info['isainfo_n']=='sparcv9'
def _getNCPUs(self):
return int(self.info['uname_X'].get('NumCPU', 1))
def _is_sun4(self):
return self.info['arch']=='sun4'
def _is_SUNW(self):
return re.match(r'SUNW',self.info['uname_i']) is not None
def _is_sparcstation5(self):
return re.match(r'.*SPARCstation-5',self.info['uname_i']) is not None
def _is_ultra1(self):
return re.match(r'.*Ultra-1',self.info['uname_i']) is not None
def _is_ultra250(self):
return re.match(r'.*Ultra-250',self.info['uname_i']) is not None
def _is_ultra2(self):
return re.match(r'.*Ultra-2',self.info['uname_i']) is not None
def _is_ultra30(self):
return re.match(r'.*Ultra-30',self.info['uname_i']) is not None
def _is_ultra4(self):
return re.match(r'.*Ultra-4',self.info['uname_i']) is not None
def _is_ultra5_10(self):
return re.match(r'.*Ultra-5_10',self.info['uname_i']) is not None
def _is_ultra5(self):
return re.match(r'.*Ultra-5',self.info['uname_i']) is not None
def _is_ultra60(self):
return re.match(r'.*Ultra-60',self.info['uname_i']) is not None
def _is_ultra80(self):
return re.match(r'.*Ultra-80',self.info['uname_i']) is not None
    def _is_ultraenterprise(self):
        return re.match(r'.*Ultra-Enterprise',self.info['uname_i']) is not None
    def _is_ultraenterprise10k(self):
        return re.match(r'.*Ultra-Enterprise-10000',self.info['uname_i']) is not None
def _is_sunfire(self):
return re.match(r'.*Sun-Fire',self.info['uname_i']) is not None
def _is_ultra(self):
return re.match(r'.*Ultra',self.info['uname_i']) is not None
def _is_cpusparcv7(self):
return self.info['processor']=='sparcv7'
def _is_cpusparcv8(self):
return self.info['processor']=='sparcv8'
def _is_cpusparcv9(self):
return self.info['processor']=='sparcv9'
class Win32CPUInfo(CPUInfoBase):
info = None
pkey = r"HARDWARE\DESCRIPTION\System\CentralProcessor"
# XXX: what does the value of
# HKEY_LOCAL_MACHINE\HARDWARE\DESCRIPTION\System\CentralProcessor\0
# mean?
def __init__(self):
if self.info is not None:
return
info = []
try:
#XXX: Bad style to use so long `try:...except:...`. Fix it!
import _winreg
            prgx = re.compile(r"family\s+(?P<FML>\d+)\s+model\s+(?P<MDL>\d+)"
                              r"\s+stepping\s+(?P<STP>\d+)",re.IGNORECASE)
chnd=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, self.pkey)
pnum=0
while 1:
try:
proc=_winreg.EnumKey(chnd,pnum)
except _winreg.error:
break
else:
pnum+=1
info.append({"Processor":proc})
phnd=_winreg.OpenKey(chnd,proc)
pidx=0
while True:
try:
name,value,vtpe=_winreg.EnumValue(phnd,pidx)
except _winreg.error:
break
else:
pidx=pidx+1
info[-1][name]=value
if name=="Identifier":
srch=prgx.search(value)
if srch:
info[-1]["Family"]=int(srch.group("FML"))
info[-1]["Model"]=int(srch.group("MDL"))
info[-1]["Stepping"]=int(srch.group("STP"))
except:
print sys.exc_value,'(ignoring)'
self.__class__.info = info
def _not_impl(self): pass
# Athlon
def _is_AMD(self):
return self.info[0]['VendorIdentifier']=='AuthenticAMD'
def _is_Am486(self):
return self.is_AMD() and self.info[0]['Family']==4
def _is_Am5x86(self):
return self.is_AMD() and self.info[0]['Family']==4
def _is_AMDK5(self):
return self.is_AMD() and self.info[0]['Family']==5 \
and self.info[0]['Model'] in [0,1,2,3]
def _is_AMDK6(self):
return self.is_AMD() and self.info[0]['Family']==5 \
and self.info[0]['Model'] in [6,7]
def _is_AMDK6_2(self):
return self.is_AMD() and self.info[0]['Family']==5 \
and self.info[0]['Model']==8
def _is_AMDK6_3(self):
return self.is_AMD() and self.info[0]['Family']==5 \
and self.info[0]['Model']==9
def _is_AMDK7(self):
return self.is_AMD() and self.info[0]['Family'] == 6
# To reliably distinguish between the different types of AMD64 chips
    # (Athlon64, Opteron, Athlon64 X2, Sempron, Turion 64, etc.) would
# require looking at the 'brand' from cpuid
def _is_AMD64(self):
return self.is_AMD() and self.info[0]['Family'] == 15
# Intel
def _is_Intel(self):
return self.info[0]['VendorIdentifier']=='GenuineIntel'
def _is_i386(self):
return self.info[0]['Family']==3
def _is_i486(self):
return self.info[0]['Family']==4
def _is_i586(self):
return self.is_Intel() and self.info[0]['Family']==5
def _is_i686(self):
return self.is_Intel() and self.info[0]['Family']==6
def _is_Pentium(self):
return self.is_Intel() and self.info[0]['Family']==5
def _is_PentiumMMX(self):
return self.is_Intel() and self.info[0]['Family']==5 \
and self.info[0]['Model']==4
def _is_PentiumPro(self):
return self.is_Intel() and self.info[0]['Family']==6 \
and self.info[0]['Model']==1
def _is_PentiumII(self):
return self.is_Intel() and self.info[0]['Family']==6 \
and self.info[0]['Model'] in [3,5,6]
def _is_PentiumIII(self):
return self.is_Intel() and self.info[0]['Family']==6 \
and self.info[0]['Model'] in [7,8,9,10,11]
def _is_PentiumIV(self):
return self.is_Intel() and self.info[0]['Family']==15
def _is_PentiumM(self):
return self.is_Intel() and self.info[0]['Family'] == 6 \
and self.info[0]['Model'] in [9, 13, 14]
def _is_Core2(self):
return self.is_Intel() and self.info[0]['Family'] == 6 \
and self.info[0]['Model'] in [15, 16, 17]
# Varia
def _is_singleCPU(self):
return len(self.info) == 1
def _getNCPUs(self):
return len(self.info)
def _has_mmx(self):
if self.is_Intel():
return (self.info[0]['Family']==5 and self.info[0]['Model']==4) \
or (self.info[0]['Family'] in [6,15])
elif self.is_AMD():
return self.info[0]['Family'] in [5,6,15]
else:
return False
def _has_sse(self):
if self.is_Intel():
return (self.info[0]['Family']==6 and \
self.info[0]['Model'] in [7,8,9,10,11]) \
or self.info[0]['Family']==15
elif self.is_AMD():
return (self.info[0]['Family']==6 and \
self.info[0]['Model'] in [6,7,8,10]) \
or self.info[0]['Family']==15
else:
return False
def _has_sse2(self):
if self.is_Intel():
            return self.is_PentiumIV() or self.is_PentiumM() \
or self.is_Core2()
elif self.is_AMD():
return self.is_AMD64()
else:
return False
def _has_3dnow(self):
return self.is_AMD() and self.info[0]['Family'] in [5,6,15]
def _has_3dnowext(self):
return self.is_AMD() and self.info[0]['Family'] in [6,15]
if sys.platform.startswith('linux'): # variations: linux2,linux-i386 (any others?)
cpuinfo = LinuxCPUInfo
elif sys.platform.startswith('irix'):
cpuinfo = IRIXCPUInfo
elif sys.platform == 'darwin':
cpuinfo = DarwinCPUInfo
elif sys.platform.startswith('sunos'):
cpuinfo = SunOSCPUInfo
elif sys.platform.startswith('win32'):
cpuinfo = Win32CPUInfo
elif sys.platform.startswith('cygwin'):
cpuinfo = LinuxCPUInfo
#XXX: other OS's. Eg. use _winreg on Win32. Or os.uname on unices.
else:
cpuinfo = CPUInfoBase
cpu = cpuinfo()
if __name__ == "__main__":
cpu.is_blaa()
cpu.is_Intel()
cpu.is_Alpha()
print 'CPU information:',
for name in dir(cpuinfo):
if name[0]=='_' and name[1]!='_':
r = getattr(cpu,name[1:])()
if r:
if r!=1:
print '%s=%s' %(name[1:],r),
else:
print name[1:],
print
| bsd-3-clause | -3,900,664,180,430,769,700 | 31.989721 | 85 | 0.54509 | false |
skim1420/spinnaker | spinbot/event/release_branch_pull_request_handler.py | 1 | 2049 | from .handler import Handler
from .pull_request_event import GetBaseBranch, GetPullRequest, GetTitle, GetRepo
from gh import ReleaseBranchFor, ParseCommitMessage
format_message = ('Features cannot be merged into release branches. The following commits ' +
'are not tagged as one of "{}":\n\n{}\n\n' +
'Read more about [commit conventions](https://www.spinnaker.io/community/contributing/submitting/#commit-message-conventions) ' +
'and [patch releases](https://www.spinnaker.io/community/releases/release-cadence/#patching-the-release-candidate) here.')
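# The two {} placeholders receive the comma-separated allowed commit types and
# the offending "sha: message" lines, respectively (see handle() below).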
class ReleaseBranchPullRequestHandler(Handler):
def __init__(self):
super().__init__()
self.omit_repos = self.config.get('omit_repos', [])
self.allowed_types = self.config.get(
'allowed_types',
['fix', 'chore', 'docs', 'test']
)
def handles(self, event):
return (event.type == 'PullRequestEvent'
and event.payload.get('action') == 'opened'
            and ReleaseBranchFor(GetBaseBranch(event)) is not None)
def handle(self, g, event):
repo = GetRepo(event)
if repo in self.omit_repos:
self.logging.info('Skipping {} because it\'s in omitted repo {}'.format(event, repo))
return
pull_request = GetPullRequest(g, event)
if pull_request is None:
self.logging.warn('Unable to determine PR that created {}'.format(event))
return
commits = pull_request.get_commits()
bad_commits = []
for commit in commits:
message = ParseCommitMessage(commit.commit.message)
if message is None or message.get('type') not in self.allowed_types:
bad_commits.append(commit.commit)
if len(bad_commits) > 0:
pull_request.create_issue_comment(format_message.format(
', '.join(self.allowed_types),
'\n\n'.join(map(lambda c: '{}: {}'.format(c.sha, c.message), bad_commits))
))
ReleaseBranchPullRequestHandler()
| apache-2.0 | -3,984,927,411,745,407,000 | 40.816327 | 133 | 0.627135 | false |
wjwwood/open-robotics-platform | template.py | 1 | 1949 | #!/usr/bin/env python -OO
# encoding: utf-8
###########
# ORP - Open Robotics Platform
#
# Copyright (c) 2010 John Harrison, William Woodall
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
##########
"""
${TM_NEW_FILE_BASENAME}.py - <PURPOSE>
Created by ${TM_FULLNAME} on ${TM_DATE}.
"""
__author__ = "William Woodall"
__copyright__ = "Copyright (c) 2010 John Harrison, William Woodall"
### Imports ###
# Standard Python Libraries
import sys
import os
try: # try to catch any missing dependencies
# <PKG> for <PURPOSE>
PKGNAME = '<EASY_INSTALL NAME>'
import <LIBRARY NAME>
del PKGNAME
except ImportError as PKG_ERROR: # We are missing something, let them know...
sys.stderr.write(str(PKG_ERROR)+"\nYou might not have the "+PKGNAME+" \
module, try 'easy_install "+PKGNAME+"', else consult google.")
### Class ###
### Functions ###
def main():
pass
### IfMain ###
if __name__ == '__main__':
main()
| mit | 97,687,108,796,476,430 | 29.453125 | 79 | 0.709595 | false |
Ebag333/Pyfa | eos/effects/subsystembonusgallentedefensivearmoredwarfare.py | 1 | 1528 | # subSystemBonusGallenteDefensiveArmoredWarfare
#
# Used by:
# Subsystem: Proteus Defensive - Warfare Processor
type = "passive"
def handler(fit, src, context):
fit.modules.filteredItemBoost(lambda mod: mod.item.requiresSkill("Armored Command"), "buffDuration",
src.getModifiedItemAttr("subsystemBonusGallenteDefensive"),
skill="Gallente Defensive Systems")
fit.modules.filteredItemBoost(lambda mod: mod.item.requiresSkill("Armored Command"), "warfareBuff3Value",
src.getModifiedItemAttr("subsystemBonusGallenteDefensive"),
skill="Gallente Defensive Systems")
fit.modules.filteredItemBoost(lambda mod: mod.item.requiresSkill("Armored Command"), "warfareBuff4Value",
src.getModifiedItemAttr("subsystemBonusGallenteDefensive"),
skill="Gallente Defensive Systems")
fit.modules.filteredItemBoost(lambda mod: mod.item.requiresSkill("Armored Command"), "warfareBuff2Value",
src.getModifiedItemAttr("subsystemBonusGallenteDefensive"),
skill="Gallente Defensive Systems")
fit.modules.filteredItemBoost(lambda mod: mod.item.requiresSkill("Armored Command"), "warfareBuff1Value",
src.getModifiedItemAttr("subsystemBonusGallenteDefensive"),
skill="Gallente Defensive Systems")
| gpl-3.0 | -8,301,749,523,908,657,000 | 65.434783 | 109 | 0.632199 | false |
seanbell/opensurfaces | server/normals/views.py | 1 | 9087 | import json
from django.shortcuts import render, get_object_or_404
from django.db.models import F
from django.http import HttpResponse
from django.views.decorators.http import require_POST
from django.core.urlresolvers import reverse
from django.contrib.admin.views.decorators import staff_member_required
from django.views.decorators.csrf import ensure_csrf_cookie
from endless_pagination.decorators import page_template
from common.utils import dict_union, prepare_votes_bar, \
json_success_response, json_error_response
from normals.models import ShapeRectifiedNormalLabel
def rectified_normal_detail(request, pk):
entry = get_object_or_404(ShapeRectifiedNormalLabel, pk=pk)
votes = [
prepare_votes_bar(entry, 'qualities', 'correct', 'correct', 'Quality'),
]
data = {
'nav': 'browse/rectified-normal',
'entry': entry,
'votes': votes,
}
return render(request, 'rectified_normal_detail.html', data)
@page_template('grid3_page.html')
def rectified_normal_all(request, template='endless_list.html',
extra_context=None):
entries = ShapeRectifiedNormalLabel.objects.all().order_by('-id')
if 'publishable' in request.GET:
entries = entries.filter(shape__photo__license__publishable=True)
context = dict_union({
'nav': 'browse/rectified-normal', 'subnav': 'all',
'entries': entries,
'base_template': 'rectified_normal_base.html',
'thumb_template': 'rectified_normal_thumb.html',
'header': 'All submissions',
'header_small': 'sorted by submission time',
#'enable_voting': False,
}, extra_context)
return render(request, template, context)
@page_template('grid3_page.html')
def rectified_normal_good(request, template='endless_list.html',
extra_context=None):
entries = ShapeRectifiedNormalLabel.objects \
.filter(shape__planar=True, correct=True, correct_score__isnull=False) \
.order_by('-correct_score')
#.filter(admin_score__gt=0, shape__synthetic=False) \
#.order_by('-admin_score', '-shape__pixel_area')
context = dict_union({
'nav': 'browse/rectified-normal', 'subnav': 'good',
'entries': entries,
'base_template': 'rectified_normal_base.html',
'thumb_template': 'rectified_normal_thumb.html',
'header': 'High quality submissions'
#'header_sub': 'These submissions were voted as high quality.'
#'enable_voting': False,
}, extra_context)
return render(request, template, context)
@page_template('grid3_page.html')
def rectified_normal_bad(request, template='endless_list.html',
extra_context=None):
entries = ShapeRectifiedNormalLabel.objects \
.filter(shape__planar=True, correct=False, correct_score__isnull=False) \
.order_by('correct_score')
#.filter(admin_score__lt=0, shape__synthetic=False) \
#.order_by('admin_score', 'shape__num_vertices')
if 'publishable' in request.GET:
entries = entries.filter(shape__photo__license__publishable=True)
context = dict_union({
'nav': 'browse/rectified-normal', 'subnav': 'bad',
'entries': entries,
'base_template': 'rectified_normal_base.html',
'thumb_template': 'rectified_normal_thumb.html',
'header': 'Low quality submissions',
'header_small': 'sorted by quality',
#'enable_voting': False,
}, extra_context)
return render(request, template, context)
@page_template('grid3_page.html')
def rectified_normal_auto(request, template='endless_list.html',
extra_context=None):
entries = ShapeRectifiedNormalLabel.objects \
.filter(shape__planar=True, shape__correct=True, automatic=True) \
.order_by('-shape__num_vertices')
if 'publishable' in request.GET:
entries = entries.filter(shape__photo__license__publishable=True)
context = dict_union({
'nav': 'browse/rectified-normal', 'subnav': 'auto',
'entries': entries,
'base_template': 'rectified_normal_base.html',
'thumb_template': 'rectified_normal_thumb.html',
'header': 'Automatically rectified shapes',
'header_small': 'using vanishing points',
}, extra_context)
return render(request, template, context)
@page_template('grid3_page.html')
def rectified_normal_best(request, template='endless_list.html',
extra_context=None):
entries = ShapeRectifiedNormalLabel.objects \
.filter(shape__photo__inappropriate=False,
shape__correct=True, shape__planar=True,
                shape__rectified_normal_id=F('id'))
if 'by-id' in request.GET:
header_small = 'sorted by id'
entries = entries.order_by('-id')
else:
header_small = 'sorted by complexity'
entries = entries.order_by('-shape__num_vertices')
if 'publishable' in request.GET:
entries = entries.filter(shape__photo__license__publishable=True)
context = dict_union({
'nav': 'browse/rectified-normal', 'subnav': 'best',
'entries': entries,
'base_template': 'rectified_normal_base.html',
'thumb_template': 'rectified_normal_thumb.html',
'header': 'High quality submissions',
'header_small': header_small,
}, extra_context)
return render(request, template, context)
@staff_member_required
@page_template('grid3_page.html')
def rectified_normal_curate(
request, template='endless_list_curate.html', extra_context=None):
entries = ShapeRectifiedNormalLabel.objects \
.filter(shape__planar=True, correct=True) \
.order_by('-shape__num_vertices')
if 'publishable' in request.GET:
entries = entries.filter(shape__photo__license__publishable=True)
context = dict_union({
'nav': 'browse/rectified-normal', 'subnav': 'curate',
'entries': entries,
'base_template': 'rectified_normal_base.html',
'thumb_template': 'rectified_normal_thumb.html',
'header': 'Curate rectified textures',
'curate_post_url': reverse('rectified-normal-curate-post'),
'curate': True
}, extra_context)
return render(request, template, context)
@require_POST
@staff_member_required
def rectified_normal_curate_post(request):
if request.POST['model'] != "shapes/shaperectifiednormallabel":
return json_error_response("invalid model")
normal = ShapeRectifiedNormalLabel.objects.get(id=request.POST['id'])
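    # Flip the curated correctness flag; quality_method 'A' presumably marks
    # the label as admin-curated rather than crowd-voted.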
normal.quality_method = 'A'
normal.correct = not normal.correct
normal.save()
normal.shape.update_entropy(save=True)
return HttpResponse(
json.dumps({'selected': not normal.correct}),
mimetype='application/json')
@ensure_csrf_cookie
@page_template('grid3_page.html')
def rectified_normal_voted_none(request, template='endless_list.html',
extra_context=None):
entries = ShapeRectifiedNormalLabel.objects \
.filter(admin_score=0, time_ms__gt=500, shape__dominant_delta__isnull=False) \
.order_by('-shape__synthetic', '?')
context = dict_union({
'nav': 'browse/rectified-normal', 'subnav': 'vote',
'entries': entries,
'base_template': 'rectified_normal_base.html',
'thumb_template': 'rectified_normal_thumb_vote.html',
'enable_voting': True,
}, extra_context)
return render(request, template, context)
@ensure_csrf_cookie
@page_template('grid3_page.html')
def rectified_normal_voted_yes(request, template='endless_list.html',
extra_context=None):
entries = ShapeRectifiedNormalLabel.objects \
.filter(admin_score__gt=0) \
.order_by('-admin_score', '-shape__pixel_area')
context = dict_union({
'nav': 'browse/rectified-normal', 'subnav': 'voted-yes',
'entries': entries,
'base_template': 'rectified_normal_base.html',
'thumb_template': 'rectified_normal_thumb_vote.html',
'enable_voting': True,
}, extra_context)
return render(request, template, context)
@ensure_csrf_cookie
@page_template('grid3_page.html')
def rectified_normal_voted_no(request, template='endless_list.html',
extra_context=None):
entries = ShapeRectifiedNormalLabel.objects \
.filter(admin_score__lt=0) \
.order_by('admin_score', '-shape__pixel_area')
context = dict_union({
'nav': 'browse/rectified-normal', 'subnav': 'voted-no',
'entries': entries,
'base_template': 'rectified_normal_base.html',
'thumb_template': 'rectified_normal_thumb_vote.html',
'enable_voting': True,
}, extra_context)
return render(request, template, context)
@require_POST
def rectified_normal_vote(request):
id = request.POST['id']
score = request.POST['score']
ShapeRectifiedNormalLabel.objects.filter(id=id).update(admin_score=score)
return json_success_response()
| mit | -7,393,847,818,757,390,000 | 33.683206 | 86 | 0.646748 | false |
ericpp/hippyvm | testing/test_var_funcs.py | 1 | 1346 | import py.test
from hippy.objects.floatobject import W_FloatObject
from testing.test_interpreter import BaseTestInterpreter
class TestVarFuncs(BaseTestInterpreter):
def test_print_r(self):
output = self.run('''
class A {
private $y = 5;
}
$a = new A;
$a->x = array($a);
$a->zzz = array($a);
$result = print_r($a, TRUE);
echo str_replace("\\n", '\\n', $result);
''')
expected = """\
A Object
(
[y:A:private] => 5
[x] => Array
(
[0] => A Object
*RECURSION*
)
[zzz] => Array
(
[0] => A Object
*RECURSION*
)
)
"""
assert self.space.str_w(output[0]) == '\\n'.join(expected.split('\n'))
@py.test.mark.parametrize(['input', 'expected'],
[["'xxx'", 0.], ["'3.4bcd'", 3.4], ['2e1', 20.],
['5', 5.], ['1.3', 1.3], ["array()", 0.]])
def test_floatval(self, input, expected):
output, = self.run('echo floatval(%s);' % input)
assert output == W_FloatObject(expected)
def test_floatval_object(self):
with self.warnings(['Notice: Object of class stdClass '
'could not be converted to double']):
output, = self.run('echo floatval(new stdClass);')
assert output == W_FloatObject(1.)
| mit | 8,298,484,580,307,833,000 | 27.041667 | 78 | 0.506686 | false |
a25kk/bfa | src/bfa.sitecontent/bfa/sitecontent/widgets/content/video.py | 1 | 4222 | # -*- coding: utf-8 -*-
"""Module providing event filter widget"""
import uuid as uuid_tool
from Acquisition import aq_inner
from Products.Five import BrowserView
from plone import api
from plone.i18n.normalizer import IIDNormalizer
from wildcard.media.behavior import IVideo
from zope.component import queryUtility
class WidgetContentVideoCard(BrowserView):
""" Basic context content card """
def __call__(self, widget_data=None, widget_mode="view", **kw):
self.params = {"widget_mode": widget_mode, "widget_data": widget_data}
return self.render()
def render(self):
return self.index()
@staticmethod
def can_edit():
return not api.user.is_anonymous()
@property
def record(self):
return self.params['widget_data']
def has_content(self):
if self.widget_content():
return True
return False
def widget_uid(self):
try:
widget_id = self.record['id']
except (KeyError, TypeError):
widget_id = str(uuid_tool.uuid4())
return widget_id
@staticmethod
def normalizer():
return queryUtility(IIDNormalizer)
def card_subject_classes(self, item):
context = item
subjects = context.Subject()
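        # One CSS modifier per keyword, e.g. "Flood Safety" -> "c-card-tag--flood-safety"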
class_list = [
"c-card-tag--{0}".format(self.normalizer().normalize(keyword))
for keyword in subjects
]
return class_list
def card_css_classes(self, item):
class_list = self.card_subject_classes(item)
if class_list:
return " ".join(class_list)
else:
return "c-card-tag--all"
@staticmethod
def has_image(context):
try:
lead_img = context.image
except AttributeError:
lead_img = None
if lead_img is not None:
return True
return False
@staticmethod
def has_animated_cover(context):
try:
animated_lead_img = context.image_animated
except AttributeError:
animated_lead_img = None
if animated_lead_img is not None:
return True
return False
@staticmethod
def get_standalone_image_caption(context):
try:
caption = context.image_caption
except AttributeError:
caption = None
return caption
def get_embed_url(self):
"""
        Try to guess the video id from the various possible forms of YouTube
        URL and return the correct embed URL.
        For example:
        - 'https://youtu.be/VIDEO_ID'
        - 'https://www.youtube.com/watch?v=VIDEO_ID'
        - 'https://www.youtube.com/embed/VIDEO_ID'
"""
video_behavior = IVideo(self.context)
if not video_behavior:
return ""
video_id = video_behavior.get_youtube_id_from_url()
if not video_id:
return ""
return "https://www.youtube.com/embed/" + video_id
def get_edit_url(self):
"""
If the user can edit the video, returns the edit url.
"""
if not api.user.has_permission(
'Modify portal content',
obj=self.context):
return ""
from plone.protect.utils import addTokenToUrl
url = "%s/@@edit" % self.context.absolute_url()
return addTokenToUrl(url)
def widget_content(self):
context = aq_inner(self.context)
widget_data = self.params["widget_data"]
if widget_data and "uuid" in widget_data:
context = api.content.get(UID=widget_data["uuid"])
details = {
"title": context.Title(),
"description": context.Description(),
"url": context.absolute_url(),
"timestamp": context.Date,
"uuid": context.UID(),
"has_image": self.has_image(context),
"has_animated_cover": self.has_animated_cover(context),
"image_caption": self.get_standalone_image_caption(context),
"css_classes": "c-card--{0} {1}".format(
context.UID(), self.card_css_classes(context)
),
"content_item": context,
}
return details
| mit | 7,906,045,721,442,587,000 | 29.594203 | 78 | 0.578399 | false |
lgarren/spack | var/spack/repos/builtin/packages/r-affycomp/package.py | 1 | 1773 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RAffycomp(RPackage):
"""The package contains functions that can be used to compare
expression measures for Affymetrix Oligonucleotide Arrays."""
homepage = "https://www.bioconductor.org/packages/affycomp/"
url = "https://git.bioconductor.org/packages/affycomp"
version('1.52.0', git='https://git.bioconductor.org/packages/affycomp', commit='1b97a1cb21ec93bf1e5c88d5d55b988059612790')
depends_on('[email protected]:3.4.9', when='@1.52.0')
depends_on('r-biobase', type=('build', 'run'))
| lgpl-2.1 | -6,339,694,925,005,970,000 | 45.657895 | 126 | 0.681331 | false |
redshiftzero/pgpbuddy | tests/test_crypto.py | 1 | 10432 | from unittest.mock import patch
from unittest import TestCase
from nose.tools import assert_list_equal
from pgpbuddy.crypto import *
from tests.mock_gpg import *
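# tests.mock_gpg supplies factory helpers (mock_decrypt, mock_verify,
# mock_import_keys, ...) that fabricate gnupg-style result objects carrying
# the requested encryption/signature statuses for the patched GPG methods.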
class TestCheckEncryptionAndSignature(TestCase):
@patch('gnupg.GPG', decrypt=mock_decrypt(Encryption.missing, Signature.missing))
def test_plain(self, gpg):
encryption_status, signature_status, reason = check_encryption_and_signature(gpg, "blabla")
assert encryption_status == Encryption.missing
assert signature_status == Signature.missing
assert not reason
@patch('gnupg.GPG', decrypt=mock_decrypt(Encryption.missing, Signature.incorrect))
def test_not_encrypted_incorrect_signature(self, gpg):
encryption_status, signature_status, reason = check_encryption_and_signature(gpg, "blabla")
assert encryption_status == Encryption.missing
assert signature_status == Signature.incorrect
assert reason
@patch('gnupg.GPG', decrypt=mock_decrypt(Encryption.missing, Signature.correct))
def test_not_encrypted_correct_signature(self, gpg):
encryption_status, signature_status, reason = check_encryption_and_signature(gpg, "blabla")
assert encryption_status == Encryption.missing
assert signature_status == Signature.correct
assert not reason
@patch('gnupg.GPG', decrypt=mock_decrypt(Encryption.correct, Signature.missing))
def test_correct_encrypted_no_sig(self, gpg):
encryption_status, signature_status, reason = check_encryption_and_signature(gpg, "blabla")
assert encryption_status == Encryption.correct
assert signature_status == Signature.missing
assert not reason
@patch('gnupg.GPG', decrypt=mock_decrypt(Encryption.correct, Signature.incorrect))
def test_correct_encrypted_incorrect_sig(self, gpg):
encryption_status, signature_status, reason = check_encryption_and_signature(gpg, "blabla")
assert encryption_status == Encryption.correct
assert signature_status == Signature.incorrect
assert reason
@patch('gnupg.GPG', decrypt=mock_decrypt(Encryption.correct, Signature.correct))
def test_correct_encrypted_correct_sig(self, gpg):
encryption_status, signature_status, reason = check_encryption_and_signature(gpg, "blabla")
assert encryption_status == Encryption.correct
assert signature_status == Signature.correct
assert not reason
@patch('gnupg.GPG', decrypt=mock_decrypt(Encryption.incorrect, Signature.correct))
def test_incorrect_encrypted_sig_correct(self, gpg):
encryption_status, signature_status, reason = check_encryption_and_signature(gpg, "blabla")
assert encryption_status == Encryption.incorrect
assert signature_status == Signature.missing # with incorrect encryption can not check the sig
assert reason
@patch('gnupg.GPG', decrypt=mock_decrypt(Encryption.incorrect, Signature.missing))
def test_incorrect_encrypted_sig_missing(self, gpg):
encryption_status, signature_status, reason = check_encryption_and_signature(gpg, "blabla")
assert encryption_status == Encryption.incorrect
assert signature_status == Signature.missing # with incorrect encryption can not check the sig
assert reason
@patch('gnupg.GPG', decrypt=mock_decrypt(Encryption.incorrect, Signature.incorrect))
def test_incorrect_encrypted_sig_incorrect(self, gpg):
encryption_status, signature_status, reason = check_encryption_and_signature(gpg, "blabla")
assert encryption_status == Encryption.incorrect
assert signature_status == Signature.missing # with incorrect encryption can not check the sig
assert reason
@patch('gnupg.GPG', decrypt=mock_decrypt_unexpected_output())
def test_fallback(self, gpg):
encryption_status, signature_status, reason = check_encryption_and_signature(gpg, "blabla")
assert encryption_status == Encryption.incorrect
assert signature_status == Signature.incorrect
assert reason
class TestImportKeysFromAttachments(TestCase):
def _mock_key(self, content):
return "-----BEGIN PGP PUBLIC KEY BLOCK-----\n{}\n-----END PGP PUBLIC KEY BLOCK-----\n".format(content)
@patch('gnupg.GPG')
def test_no_attachments(self, gpg):
attachments = []
remaining_attachments = import_public_keys_from_attachments(gpg, attachments)
assert remaining_attachments == []
assert not gpg.import_keys.called
@patch('gnupg.GPG')
def test_plain_attachment(self, gpg):
attachments = [("blabla", None)]
remaining_attachments = import_public_keys_from_attachments(gpg, attachments)
assert_list_equal(attachments, remaining_attachments)
assert not gpg.import_keys.called
@patch('gnupg.GPG', import_keys=mock_import_keys(True))
def test_key_attachment(self, gpg):
key = self._mock_key("PRETEND THIS IS A KEY")
attachments = [(key, None)]
remaining_attachments = import_public_keys_from_attachments(gpg, attachments)
expected = []
assert_list_equal(expected, remaining_attachments)
gpg.import_keys.assert_called_once_with(self.__format_key(key))
@patch('gnupg.GPG', import_keys=mock_import_keys(False))
def test_key_attachment_import_fails(self, gpg):
key = self._mock_key("PRETEND THIS IS A KEY")
attachments = [(key, None)]
remaining_attachments = import_public_keys_from_attachments(gpg, attachments)
expected = attachments
assert_list_equal(expected, remaining_attachments)
gpg.import_keys.assert_called_once_with(self.__format_key(key))
@patch('gnupg.GPG')
def test_binary_attachment(self, gpg):
attachments = [(self._mock_key("This will be binary so not considered a key").encode(), None)]
remaining_attachments = import_public_keys_from_attachments(gpg, attachments)
expected = attachments
assert_list_equal(expected, remaining_attachments)
assert not gpg.import_keys.called
@patch('gnupg.GPG', import_keys=mock_import_keys([False, True, True]))
def test_mixture_of_everything(self, gpg):
key1 = self._mock_key("Failing key")
key2 = self._mock_key("Succeeding key")
key3 = self._mock_key("Another succeeding key")
attachments = [("blabla", None), (key1, None), (b"binary", None), (key2, None), ("ladida", None), (key3, None)]
remaining_attachments = import_public_keys_from_attachments(gpg, attachments)
expected = [attachments[0], attachments[1], attachments[2], attachments[4]]
assert_list_equal(expected, remaining_attachments)
gpg.import_keys.assert_any_call(self.__format_key(key1))
gpg.import_keys.assert_any_call(self.__format_key(key2))
gpg.import_keys.assert_any_call(self.__format_key(key3))
@patch('gnupg.GPG')
def test_preserve_encryption_status(self, gpg):
attachments = [("bla", Encryption.missing), ("blu", Encryption.correct), ("ble", Encryption.incorrect)]
remaining_attachments = import_public_keys_from_attachments(gpg, attachments)
expected = attachments
assert_list_equal(expected, remaining_attachments)
assert not gpg.import_keys.called
@staticmethod
def __format_key(key):
return key.strip().split("\n")
class TestImportFromKeyServer:
server = 'pgp.mit.edu'
@patch('gnupg.GPG', search_keys=mock_search_keys([]), recv_keys=mock_recv_keys())
def test_no_key_found(self, gpg):
sender = "[email protected]"
import_public_keys_from_server(gpg, sender)
gpg.search_keys.assert_called_once_with(sender, self.server)
assert not gpg.recv_keys.called
@patch('gnupg.GPG', search_keys=mock_search_keys(["key1"]), recv_keys=mock_recv_keys())
def test_one_key_found(self, gpg):
sender = "[email protected]"
import_public_keys_from_server(gpg, sender)
gpg.search_keys.assert_called_once_with(sender, self.server)
gpg.recv_keys.assert_called_once_with(self.server, "key1")
@patch('gnupg.GPG', search_keys=mock_search_keys(["key1", "key2"]), recv_keys=mock_recv_keys())
def test_two_keys_found(self, gpg):
sender = "[email protected]"
import_public_keys_from_server(gpg, sender)
gpg.search_keys.assert_called_once_with(sender, self.server)
gpg.recv_keys.assert_any_call(self.server, "key1")
gpg.recv_keys.assert_any_call(self.server, "key2")
class TestPublicKeyAvailable(TestCase):
@patch('gnupg.GPG', encrypt=mock_encrypt(success=True))
def test_available(self, gpg):
sender = "[email protected]"
result = check_public_key_available(gpg, sender)
assert result == PublicKey.available
@patch('gnupg.GPG', encrypt=mock_encrypt(success=False))
def test_not_available(self, gpg):
sender = "[email protected]"
result = check_public_key_available(gpg, sender)
assert result == PublicKey.not_available
class TestVerifyExternalSig(TestCase):
@patch('gnupg.GPG', verify_data=mock_verify(Signature.correct))
def test_good_sig(self, gpg):
sig = b"good sig"
data = "to be signed"
signature_status, reason = verify_external_sig(gpg, data, sig)
assert signature_status == Signature.correct
assert not reason
@patch('gnupg.GPG', verify_data=mock_verify(Signature.incorrect, PublicKey.not_available))
def test_no_public_key(self, gpg):
sig = b"bad sig"
data = "to be signed"
signature_status, reason = verify_external_sig(gpg, data, sig)
assert signature_status == Signature.incorrect
assert reason
@patch('gnupg.GPG', verify_data=mock_verify(Signature.incorrect, PublicKey.available))
def test_bad_sig(self, gpg):
sig = b"bad sig"
data = "to be signed"
signature_status, reason = verify_external_sig(gpg, data, sig)
assert signature_status == Signature.incorrect
assert reason
@patch('gnupg.GPG', verify_data=mock_verify(Signature.missing))
def test_no_sig(self, gpg):
sig = b"bad sig"
data = "to be signed"
signature_status, reason = verify_external_sig(gpg, data, sig)
assert signature_status == Signature.missing
assert not reason
| gpl-2.0 | -8,515,414,340,223,326,000 | 40.233202 | 120 | 0.681557 | false |
CoderDuan/mantaflow | scenes/simpleplume.py | 2 | 1414 | #
# Simple example scene (hello world)
# Simulation of a buoyant smoke density plume (with noise texture as smoke source)
#
#import pdb; pdb.set_trace()
from manta import *
# solver params
res = 64
gs = vec3(res, int(1.5*res), res)
s = FluidSolver(name='main', gridSize = gs)
# prepare grids
flags = s.create(FlagGrid)
vel = s.create(MACGrid)
density = s.create(RealGrid)
pressure = s.create(RealGrid)
# noise field, tweak a bit for smoke source
noise = s.create(NoiseField, loadFromFile=True)
noise.posScale = vec3(45)
noise.clamp = True
noise.clampNeg = 0
noise.clampPos = 1
noise.valOffset = 0.75
noise.timeAnim = 0.2
source = s.create(Cylinder, center=gs*vec3(0.5,0.1,0.5), radius=res*0.14, z=gs*vec3(0, 0.02, 0))
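# smoke inflow region: a squat cylinder just above the domain floor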
flags.initDomain()
flags.fillGrid()
if (GUI):
gui = Gui()
gui.show()
#main loop
for t in range(250):
mantaMsg('\nFrame %i' % (s.frame))
if t<100:
densityInflow(flags=flags, density=density, noise=noise, shape=source, scale=1, sigma=0.5)
# optionally, enforce inflow velocity
#source.applyToGrid(grid=vel, value=vec3(0.1,0,0))
advectSemiLagrange(flags=flags, vel=vel, grid=density, order=2)
advectSemiLagrange(flags=flags, vel=vel, grid=vel , order=2, strength=1.0)
setWallBcs(flags=flags, vel=vel)
addBuoyancy(density=density, vel=vel, gravity=vec3(0,-6e-4,0), flags=flags)
solvePressure( flags=flags, vel=vel, pressure=pressure )
s.step()
| gpl-3.0 | -5,257,533,783,658,418,000 | 24.25 | 96 | 0.701556 | false |
BaseBot/Triangula | src/python/setup.py | 1 | 1035 | __author__ = 'tom'
from setuptools import setup
# Makes use of the sphinx and sphinx-pypi-upload packages. To build for local development
# use 'python setup.py develop'. To upload a version to pypi use 'python setup.py clean sdist upload'.
# To build docs use 'python setup.py build_sphinx' and to upload docs to pythonhosted.org use
# 'python setup.py upload_sphinx'. Both uploads require 'python setup.py register' to be run, and will
# only work for Tom as they need the pypi account credentials.
setup(
name='triangula',
version='0.3.1',
description='Code for Triangula',
classifiers=['Programming Language :: Python :: 2.7'],
url='https://github.com/tomoinn/triangula/',
author='Tom Oinn',
author_email='[email protected]',
license='ASL2.0',
packages=['triangula'],
install_requires=['evdev==0.5.0', 'euclid==0.1', 'pyserial==2.7', 'numpy==1.10.1'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
dependency_links=[],
zip_safe=False)
| apache-2.0 | 2,400,253,366,044,438,500 | 40.4 | 102 | 0.689855 | false |
libAtoms/matscipy | scripts/fracture_mechanics/run_crack_thin_strip.py | 1 | 4618 | #! /usr/bin/env python
# ======================================================================
# matscipy - Python materials science tools
# https://github.com/libAtoms/matscipy
#
# Copyright (2014) James Kermode, King's College London
# Lars Pastewka, Karlsruhe Institute of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ======================================================================
"""
Script to run classical molecular dynamics for a crack slab,
incrementing the load in small steps until fracture starts.
James Kermode <[email protected]>
August 2013
"""
import numpy as np
import ase.io
import ase.units as units
from ase.constraints import FixAtoms
from ase.md.verlet import VelocityVerlet
from ase.md.velocitydistribution import MaxwellBoltzmannDistribution
from ase.io.netcdftrajectory import NetCDFTrajectory
from matscipy.fracture_mechanics.crack import (get_strain,
get_energy_release_rate,
ConstantStrainRate,
find_tip_stress_field)
import sys
sys.path.insert(0, '.')
import params
# ********** Read input file ************
print 'Loading atoms from file "crack.xyz"'
atoms = ase.io.read('crack.xyz')
orig_height = atoms.info['OrigHeight']
orig_crack_pos = atoms.info['CrackPos'].copy()
# ***** Setup constraints *******
top = atoms.positions[:, 1].max()
bottom = atoms.positions[:, 1].min()
left = atoms.positions[:, 0].min()
right = atoms.positions[:, 0].max()
# fix atoms in the top and bottom rows
fixed_mask = ((abs(atoms.positions[:, 1] - top) < 1.0) |
(abs(atoms.positions[:, 1] - bottom) < 1.0))
fix_atoms = FixAtoms(mask=fixed_mask)
print('Fixed %d atoms\n' % fixed_mask.sum())
# Increase epsilon_yy applied to all atoms at constant strain rate
strain_atoms = ConstantStrainRate(orig_height,
params.strain_rate*params.timestep)
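# strain increment applied per timestep; see check_if_crack_advanced() below,
# which keeps straining until the crack tip has moved by tip_move_tol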
atoms.set_constraint(fix_atoms)
atoms.set_calculator(params.calc)
# ********* Setup and run MD ***********
# Set the initial temperature to 2*simT: it will then equilibriate to
# simT, by the virial theorem
MaxwellBoltzmannDistribution(atoms, 2.0*params.sim_T)
# Initialise the dynamical system
dynamics = VelocityVerlet(atoms, params.timestep)
# Print some information every time step
def printstatus():
if dynamics.nsteps == 1:
print """
State Time/fs Temp/K Strain G/(J/m^2) CrackPos/A D(CrackPos)/A
---------------------------------------------------------------------------------"""
log_format = ('%(label)-4s%(time)12.1f%(temperature)12.6f'+
'%(strain)12.5f%(G)12.4f%(crack_pos_x)12.2f (%(d_crack_pos_x)+5.2f)')
atoms.info['label'] = 'D' # Label for the status line
atoms.info['time'] = dynamics.get_time()/units.fs
atoms.info['temperature'] = (atoms.get_kinetic_energy() /
(1.5*units.kB*len(atoms)))
atoms.info['strain'] = get_strain(atoms)
atoms.info['G'] = get_energy_release_rate(atoms)/(units.J/units.m**2)
crack_pos = find_tip_stress_field(atoms)
atoms.info['crack_pos_x'] = crack_pos[0]
atoms.info['d_crack_pos_x'] = crack_pos[0] - orig_crack_pos[0]
print log_format % atoms.info
dynamics.attach(printstatus)
# Check if the crack has advanced enough and apply strain if it has not
def check_if_crack_advanced(atoms):
crack_pos = find_tip_stress_field(atoms)
# strain if crack has not advanced more than tip_move_tol
if crack_pos[0] - orig_crack_pos[0] < params.tip_move_tol:
strain_atoms.apply_strain(atoms)
dynamics.attach(check_if_crack_advanced, 1, atoms)
# Save frames to the trajectory every `traj_interval` time steps
trajectory = NetCDFTrajectory(params.traj_file, mode='w')
def write_frame(atoms):
trajectory.write(atoms)
dynamics.attach(write_frame, params.traj_interval, atoms)
# Start running!
dynamics.run(params.nsteps)
| gpl-2.0 | 7,947,069,792,221,883,000 | 33.721805 | 90 | 0.638372 | false |
mvaled/sentry | src/sentry/api/endpoints/group_integration_details.py | 1 | 11884 | from __future__ import absolute_import
from django.db import IntegrityError, transaction
from rest_framework.response import Response
from sentry import features
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.serializers.models.integration import IntegrationIssueConfigSerializer
from sentry.integrations import IntegrationFeatures
from sentry.integrations.exceptions import IntegrationError, IntegrationFormError
from sentry.models import Activity, ExternalIssue, GroupLink, Integration
from sentry.signals import integration_issue_created, integration_issue_linked
MISSING_FEATURE_MESSAGE = "Your organization does not have access to this feature."
class GroupIntegrationDetailsEndpoint(GroupEndpoint):
def _has_issue_feature(self, organization, user):
has_issue_basic = features.has(
"organizations:integrations-issue-basic", organization, actor=user
)
has_issue_sync = features.has(
"organizations:integrations-issue-sync", organization, actor=user
)
return has_issue_sync or has_issue_basic
def create_issue_activity(self, request, group, installation, external_issue):
issue_information = {
"title": external_issue.title,
"provider": installation.model.get_provider().name,
"location": installation.get_issue_url(external_issue.key),
"label": installation.get_issue_display_name(external_issue) or external_issue.key,
}
Activity.objects.create(
project=group.project,
group=group,
type=Activity.CREATE_ISSUE,
user=request.user,
data=issue_information,
)
def get(self, request, group, integration_id):
if not self._has_issue_feature(group.organization, request.user):
return Response({"detail": MISSING_FEATURE_MESSAGE}, status=400)
# Keep link/create separate since create will likely require
# many external API calls that aren't necessary if the user is
# just linking
action = request.GET.get("action")
if action not in {"link", "create"}:
return Response({"detail": "Action is required and should be either link or create"})
organization_id = group.project.organization_id
try:
integration = Integration.objects.get(id=integration_id, organizations=organization_id)
except Integration.DoesNotExist:
return Response(status=404)
if not (
integration.has_feature(IntegrationFeatures.ISSUE_BASIC)
or integration.has_feature(IntegrationFeatures.ISSUE_SYNC)
):
return Response(
{"detail": "This feature is not supported for this integration."}, status=400
)
try:
return Response(
serialize(
integration,
request.user,
IntegrationIssueConfigSerializer(group, action, params=request.GET),
organization_id=organization_id,
)
)
except IntegrationError as exc:
return Response({"detail": exc.message}, status=400)
    # PUT links an existing external issue; POST (below) creates a new one
def put(self, request, group, integration_id):
if not self._has_issue_feature(group.organization, request.user):
return Response({"detail": MISSING_FEATURE_MESSAGE}, status=400)
external_issue_id = request.data.get("externalIssue")
if not external_issue_id:
return Response({"externalIssue": ["Issue ID is required"]}, status=400)
organization_id = group.project.organization_id
try:
integration = Integration.objects.get(id=integration_id, organizations=organization_id)
except Integration.DoesNotExist:
return Response(status=404)
if not (
integration.has_feature(IntegrationFeatures.ISSUE_BASIC)
or integration.has_feature(IntegrationFeatures.ISSUE_SYNC)
):
return Response(
{"detail": "This feature is not supported for this integration."}, status=400
)
installation = integration.get_installation(organization_id)
try:
data = installation.get_issue(external_issue_id, data=request.data)
except IntegrationFormError as exc:
return Response(exc.field_errors, status=400)
except IntegrationError as exc:
return Response({"non_field_errors": [exc.message]}, status=400)
defaults = {
"title": data.get("title"),
"description": data.get("description"),
"metadata": data.get("metadata"),
}
external_issue_key = installation.make_external_key(data)
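        # One ExternalIssue row per (organization, integration, provider key);
        # reuse it if another group already linked this same provider issue.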
external_issue, created = ExternalIssue.objects.get_or_create(
organization_id=organization_id,
integration_id=integration.id,
key=external_issue_key,
defaults=defaults,
)
if created:
integration_issue_linked.send_robust(
integration=integration,
organization=group.project.organization,
user=request.user,
sender=self.__class__,
)
else:
external_issue.update(**defaults)
installation.store_issue_last_defaults(group.project_id, request.data)
try:
installation.after_link_issue(external_issue, data=request.data)
except IntegrationFormError as exc:
return Response(exc.field_errors, status=400)
except IntegrationError as exc:
return Response({"non_field_errors": [exc.message]}, status=400)
try:
with transaction.atomic():
GroupLink.objects.create(
group_id=group.id,
project_id=group.project_id,
linked_type=GroupLink.LinkedType.issue,
linked_id=external_issue.id,
relationship=GroupLink.Relationship.references,
)
except IntegrityError:
return Response({"non_field_errors": ["That issue is already linked"]}, status=400)
self.create_issue_activity(request, group, installation, external_issue)
# TODO(jess): would be helpful to return serialized external issue
# once we have description, title, etc
url = data.get("url") or installation.get_issue_url(external_issue.key)
context = {
"id": external_issue.id,
"key": external_issue.key,
"url": url,
"integrationId": external_issue.integration_id,
"displayName": installation.get_issue_display_name(external_issue),
}
return Response(context, status=201)
def post(self, request, group, integration_id):
if not self._has_issue_feature(group.organization, request.user):
return Response({"detail": MISSING_FEATURE_MESSAGE}, status=400)
organization_id = group.project.organization_id
try:
integration = Integration.objects.get(id=integration_id, organizations=organization_id)
except Integration.DoesNotExist:
return Response(status=404)
if not (
integration.has_feature(IntegrationFeatures.ISSUE_BASIC)
or integration.has_feature(IntegrationFeatures.ISSUE_SYNC)
):
return Response(
{"detail": "This feature is not supported for this integration."}, status=400
)
installation = integration.get_installation(organization_id)
try:
data = installation.create_issue(request.data)
except IntegrationFormError as exc:
return Response(exc.field_errors, status=400)
except IntegrationError as exc:
return Response({"non_field_errors": [exc.message]}, status=400)
external_issue_key = installation.make_external_key(data)
external_issue, created = ExternalIssue.objects.get_or_create(
organization_id=organization_id,
integration_id=integration.id,
key=external_issue_key,
defaults={
"title": data.get("title"),
"description": data.get("description"),
"metadata": data.get("metadata"),
},
)
try:
with transaction.atomic():
GroupLink.objects.create(
group_id=group.id,
project_id=group.project_id,
linked_type=GroupLink.LinkedType.issue,
linked_id=external_issue.id,
relationship=GroupLink.Relationship.references,
)
except IntegrityError:
return Response({"detail": "That issue is already linked"}, status=400)
if created:
integration_issue_created.send_robust(
integration=integration,
organization=group.project.organization,
user=request.user,
sender=self.__class__,
)
installation.store_issue_last_defaults(group.project_id, request.data)
self.create_issue_activity(request, group, installation, external_issue)
# TODO(jess): return serialized issue
url = data.get("url") or installation.get_issue_url(external_issue.key)
context = {
"id": external_issue.id,
"key": external_issue.key,
"url": url,
"integrationId": external_issue.integration_id,
"displayName": installation.get_issue_display_name(external_issue),
}
return Response(context, status=201)
def delete(self, request, group, integration_id):
if not self._has_issue_feature(group.organization, request.user):
return Response({"detail": MISSING_FEATURE_MESSAGE}, status=400)
        # note here externalIssue refers to `ExternalIssue.id` whereas above
        # it refers to the id from the provider
external_issue_id = request.GET.get("externalIssue")
if not external_issue_id:
return Response({"detail": "External ID required"}, status=400)
organization_id = group.project.organization_id
try:
integration = Integration.objects.get(id=integration_id, organizations=organization_id)
except Integration.DoesNotExist:
return Response(status=404)
if not (
integration.has_feature(IntegrationFeatures.ISSUE_BASIC)
or integration.has_feature(IntegrationFeatures.ISSUE_SYNC)
):
return Response(
{"detail": "This feature is not supported for this integration."}, status=400
)
try:
external_issue = ExternalIssue.objects.get(
organization_id=organization_id, integration_id=integration.id, id=external_issue_id
)
except ExternalIssue.DoesNotExist:
return Response(status=404)
with transaction.atomic():
GroupLink.objects.filter(
group_id=group.id,
project_id=group.project_id,
linked_type=GroupLink.LinkedType.issue,
linked_id=external_issue_id,
relationship=GroupLink.Relationship.references,
).delete()
# check if other groups reference this external issue
# and delete if not
if not GroupLink.objects.filter(
linked_type=GroupLink.LinkedType.issue, linked_id=external_issue_id
).exists():
external_issue.delete()
return Response(status=204)
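    # --- Hedged usage sketch (comments only; not part of the original code) ---
    # Unlinking an external issue via this endpoint; the URL shape and ids are
    # illustrative:
    #   DELETE /api/0/issues/<group_id>/integrations/<integration_id>/?externalIssue=<external_issue_id>
    # Returns 204 on success, 400 if the `externalIssue` parameter is missing,
    # and 404 if the integration or external issue cannot be found.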
| bsd-3-clause | -9,031,003,722,667,048,000 | 39.838488 | 100 | 0.61688 | false |
madmatah/lapurge | lapurge/types.py | 1 | 3448 | # Copyright (c) 2013 Matthieu Huguet
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from collections import OrderedDict
from datetime import datetime
import os
import sys
class Backup:
""" A Backup represents a file in the backup directory """
def __init__(self, mtime, filepath):
self.mtime = mtime
self.filepath = filepath
def remove(self, simulate=True):
if (simulate):
print ("REMOVE " + str(self))
return True
else:
try:
os.remove(self.filepath)
return True
except OSError as info:
sys.stderr.write("ERROR : %s\n" % info)
return False
def __key(self):
return (self.mtime, self.filepath)
def __eq__(x, y):
return x.__key() == y.__key()
def __hash__(self):
return hash(self.__key())
def __str__(self):
return self.filepath + " (" + str(self.mtime.date().isoformat()) + ")"
@classmethod
def from_path(cls, filepath):
stats = os.lstat(filepath)
mtime = datetime.utcfromtimestamp(stats.st_mtime)
return cls(mtime, filepath)
class BackupCollection:
""" Collection of Backup elements grouped by date """
def __init__(self, backups={}):
self.backups = dict(backups)
def add(self, backup):
""" add a backup to the collection """
date = backup.mtime.date()
if date not in self.backups:
s = set()
s.add(backup)
self.backups[date] = s
else:
self.backups[date].add(backup)
def days(self, recent_first=True):
""" returns the list of days having backups, ordered by modification
date (most recent backups first by default) """
return sorted(self.backups.keys(), reverse=recent_first)
def except_days(self, days):
""" returns a copy of the BackupCollection without the specified days """
filtered_backups = {day: self.backups[day] for day in self.days() if day not in days}
return BackupCollection(filtered_backups)
def remove_all(self, simulate=True):
""" remove every backups of this collection """
errors = False
for days in self.days(recent_first=False):
for backup in self.backups[days]:
if not backup.remove(simulate):
errors = True
return not errors
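# --- Hedged usage sketch (not part of the original module) ---
# Builds a collection from files in a hypothetical backup directory and
# removes everything except the most recent days; simulate=True only prints
# the files that would be deleted.
if __name__ == "__main__":
    import glob
    collection = BackupCollection()
    for path in glob.glob("/var/backups/*"):  # assumed backup location
        collection.add(Backup.from_path(path))
    recent_days = collection.days()[:7]  # the 7 most recent days with backups
    collection.except_days(recent_days).remove_all(simulate=True)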
| mit | 2,537,618,906,637,552,000 | 33.48 | 93 | 0.640371 | false |
ftkghost/SuperSaver | supersaver/core/decorator.py | 1 | 1206 | from functools import wraps
from .exception import UnauthorizedUser, UnsupportedHttpMethod
def login_required(func):
@wraps(func)
def check_user_login(request, *args, **kwargs):
if not request.user.is_authenticated():
raise UnauthorizedUser()
return func(request, *args, **kwargs)
return check_user_login
def allow_http_methods(method_list):
"""
    A clone of Django's require_http_methods decorator.
We want a customized Exception.
https://github.com/django/django/blob/master/django/views/decorators/http.py#L19
Note: method list should be upper case.
"""
def decorator(func):
@wraps(func)
def inner(request, *args, **kwargs):
if request.method not in method_list:
raise UnsupportedHttpMethod(request.method)
return func(request, *args, **kwargs)
return inner
return decorator
def redirect_after_signin(func):
@wraps(func)
def inner(request, *args, **kwargs):
resp = func(request, *args, **kwargs)
if not request.user.is_authenticated():
resp.set_cookie('next', request.get_full_path())
return resp
return inner
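# --- Hedged usage sketch (comments only; not part of the original module) ---
# The decorators are meant to stack on a Django view; `my_view` and its
# JsonResponse payload are hypothetical.
#
# from django.http import JsonResponse
#
# @login_required
# @allow_http_methods(['GET', 'POST'])
# def my_view(request):
#     return JsonResponse({'ok': True})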
| bsd-2-clause | 6,125,387,155,594,682,000 | 29.125 | 84 | 0.648963 | false |
silvau/Addons_Odoo | hr_bulk_period/__openerp__.py | 1 | 1643 | # -*- encoding: utf-8 -*-
############################################################################
# Module for OpenERP, Open Source Management Solution
#
# Copyright (c) 2013 Zenpar - http://www.zeval.com.mx/
# All Rights Reserved.
############################################################################
# Coded by: [email protected]
# Manager: Orlando Zentella [email protected]
############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Bulk period on payslip',
'version' : '1.0',
'author' : 'silvau',
'website' : 'http://www.zeval.com.mx',
'category' : 'HR',
'depends' : ['hr_payroll'],
'data': [
'wizard/hr_bulk_period.xml',
'hr_payroll_view.xml',
],
'demo': [],
'test': [],
'installable': True,
'auto_install': False,
'images': [],
}
| gpl-2.0 | 7,758,319,804,437,166,000 | 37.209302 | 78 | 0.52465 | false |
NMGRL/pychron | pychron/ml/tasks/actions.py | 1 | 1114 | # ===============================================================================
# Copyright 2019 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traits.api import List, Int, HasTraits, Str, Bool
from traitsui.api import View, UItem, Item, HGroup, VGroup
# ============= standard library imports ========================
# ============= local library imports ==========================
# ============= EOF =============================================
| apache-2.0 | 6,036,008,776,714,166,000 | 45.416667 | 81 | 0.531418 | false |
rlutz/xorn | src/backend/gnet_bae.py | 1 | 1626 | # gaf.netlist - gEDA Netlist Extraction and Generation
# Copyright (C) 1998-2010 Ales Hvezda
# Copyright (C) 1998-2010 gEDA Contributors (see ChangeLog for details)
# Copyright (C) 2013-2019 Roland Lutz
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# Bartels Format
# Layout board;
# PARTS
# part : footprint;
# CONNECT
# /net1/ uref.pin=uref.pin=uref.pin=...uref.pin;
# /net2/ PRIORITY(1..100) MINDIST(mm) ROUTWIDTH(mm) uref.pin(width_mm)=...;
# END.
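# Sample output of run() below (illustrative, for a hypothetical two-part board):
#   LAYOUT board;
#   PARTS
#    U1 : DIP8;
#    R1 : 0805;
#   CONNECT
#    /'GND'/ U1.4=R1.2;
#   END.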
def run(f, netlist):
f.write('LAYOUT board;\n')
f.write('PARTS\n')
for package in reversed(netlist.packages):
f.write(' %s : %s;\n' % (
package.refdes, package.get_attribute('footprint', 'unknown')))
f.write('CONNECT\n')
for net in reversed(netlist.nets):
f.write(" /'%s'/ %s;\n" % (
net.name, '='.join('%s.%s' % (pin.package.refdes, pin.number)
for pin in reversed(net.connections))))
f.write('END.\n')
| gpl-2.0 | -7,487,250,061,490,648,000 | 39.65 | 77 | 0.674662 | false |
EvilCult/Video-Downloader | Library/toolClass.py | 1 | 3025 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pycurl
import StringIO
import random
class Tools :
def __init__ (self) :
pass
	def getPage(self, url, requestHeader=None):
		# use None as the default to avoid the shared mutable-default-argument
		# pitfall: the old `requestHeader = []` list grew across calls
		if requestHeader is None:
			requestHeader = []
		resultFormate = StringIO.StringIO()
		fakeIp = self.fakeIp()
		requestHeader.append('CLIENT-IP:' + fakeIp)
		requestHeader.append('X-FORWARDED-FOR:' + fakeIp)
try:
curl = pycurl.Curl()
curl.setopt(pycurl.URL, url.strip())
curl.setopt(pycurl.ENCODING, 'gzip,deflate')
curl.setopt(pycurl.HEADER, 1)
curl.setopt(pycurl.TIMEOUT, 120)
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
curl.setopt(pycurl.SSL_VERIFYHOST, 0)
curl.setopt(pycurl.HTTPHEADER, requestHeader)
curl.setopt(pycurl.WRITEFUNCTION, resultFormate.write)
curl.perform()
headerSize = curl.getinfo(pycurl.HEADER_SIZE)
curl.close()
header = resultFormate.getvalue()[0 : headerSize].split('\r\n')
body = resultFormate.getvalue()[headerSize : ]
except Exception, e:
header = ''
body = ''
return header, body
def fakeIp (self) :
fakeIpList = []
for x in xrange(0, 4):
fakeIpList.append(str(int(random.uniform(0, 255))))
fakeIp = '.'.join(fakeIpList)
return fakeIp
def xor (self, x, y, base = 32) :
stat = True
if x >= 0 :
x = str(bin(int(str(x), 10)))[2:]
for i in xrange(0, base - len(x)):
x = '0' + x
else :
x = str(bin(int(str(x + 1), 10)))[3:]
for i in xrange(0, base - len(x)):
x = '0' + x
t = ''
for i in xrange(0,len(x)):
if x[i] == '1' :
t = t + '0'
else :
t = t + '1'
x = t
if y >= 0 :
y = str(bin(int(str(y), 10)))[2:]
for i in xrange(0, base - len(y)):
y = '0' + y
else :
y = str(bin(int(str(y + 1), 10)))[3:]
for i in xrange(0, base - len(y)):
y = '0' + y
t = ''
for i in xrange(0,len(y)):
if y[i] == '1' :
t = t + '0'
else :
t = t + '1'
y = t
t = ''
for i in xrange(0, base):
if x[i] == y[i] :
t = t + '0'
else :
t = t + '1'
x = t
if x[0] == '1' :
stat = False
t = ''
for i in xrange(0,len(x)):
if x[i] == '1' :
t = t + '0'
else :
t = t + '1'
x = t
r = int(str(x), 2)
if stat == False :
r = 0 - r - 1
return r
def rotate (self, x, y, w, base = 32) :
stat = True
if x >= 0 :
x = str(bin(int(str(x), 10)))[2:]
for i in xrange(0, base - len(x)):
x = '0' + x
else :
x = str(bin(int(str(x + 1), 10)))[3:]
for i in xrange(0, base - len(x)):
x = '0' + x
t = ''
for i in xrange(0,len(x)):
if x[i] == '1' :
t = t + '0'
else :
t = t + '1'
x = t
if y >= base :
y = y % base
for i in xrange (0, y) :
if w != 'r+' :
x = x[0] + x + '0'
else :
x = '0' + x + '0'
if w == 'r' or w == 'r+' :
x = x[0 : base]
else :
x = x[(len(x) - base) : ]
if x[0] == '1' :
stat = False
t = ''
for i in xrange(0,len(x)):
if x[i] == '1' :
t = t + '0'
else :
t = t + '1'
x = t
r = int(str(x), 2)
if stat == False :
r = 0 - r - 1
return r | gpl-2.0 | -4,087,712,925,453,522,000 | 19.585034 | 66 | 0.495207 | false |
bpetering/python-pattern-recognition | pattern_recognition.py | 1 | 2300 | def constant(diffs):
val = diffs.pop()
for d in diffs:
if d != val:
return False
return val
def pat1(seq): # consider two elements at a time
diffs = []
for i in xrange(1, len(seq)):
diffs.append( seq[i] - seq[i-1] ) # implicit directionality - factor out
return constant(diffs)
# representation of the pattern for pat1 was easy. how can we represent
# more complex patterns?
class Pattern(object):
(PAT_INT_ADD, PAT_INT_MULT, PAT_INT_POW) = range(3)
# TODO how does panda3d get constants?
def __init__(self, pat_type, pat_vals, prev_data, over=2, *args, **kwargs):
self.pat_type = pat_type
self.over = over
self.prev_data = prev_data
self.pat_vals = pat_vals
def next(self):
if self.pat_type == Pattern.PAT_INT_ADD:
tmp = self.prev_data[-1] + self.pat_vals[0] # TODO how much prev_data to keep?
self.prev_data.append(tmp)
return tmp
class PatternSeq(object):
def __init__(self, *args, **kwargs):
self.pattern = None
def have_pattern(self):
return self.pattern is not None
def infer(self, seq):
v = pat1(seq)
if v is not False:
self.pattern = Pattern(pat_type=Pattern.PAT_INT_ADD, pat_vals=[v], prev_data=seq) # TODO generalize
else:
raise Exception("NYI")
def extend(self, n):
if self.have_pattern():
x = []
for i in xrange(n):
x.append(self.pattern.next())
return x
else:
raise Exception("ALSDKJLASKJD")
# def pat2(seq): # consider three elements at a time
# diffs = []
# for i in xrange(1, len(seq)):
# diffs.append( seq[i] - seq[i-1] ) # implicit directionality - factor out
# val = constant(diffs)
# if val is False:
# print 'no pattern'
# else:
# print val
# TODO look at sympy interface, requests interface
# TODO detect pattern with certain number of anomalous values:
# e.g. 2,4,6,8,11
ps = PatternSeq()
ps.infer([2,4,6,8,10])
print "have pattern:", ps.have_pattern()
print "next 10 vals:", ps.extend(10)
| mit | 4,782,913,297,461,526,000 | 28.263158 | 118 | 0.553478 | false |
cogstat/cogstat | cogstat/test/test_stat.py | 1 | 22429 | # -*- coding: utf-8 -*-
import unittest
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
print(sys.path)
from pathlib import Path
import numpy as np
import pandas as pd
from cogstat import cogstat as cs
print(cs.__file__)
print(cs.__version__)
print(os.path.abspath(cs.__file__))
"""
- All statistical values should be tested at least once.
- All leaves of the decision tree should be tested once.
- Tests shouldn't give p<0.001 results, because exact values cannot be tested.
- No need to test the details of the statistical methods imported from other modules,
because that is the job of that specific module.
- All variables should be used with 3-digit decimal precision, to ensure that no
additional rounding happens when the data are copied to other software for validation.
"""
#cs.output_type = 'do not format'
np.random.seed(555)
# https://docs.scipy.org/doc/numpy/reference/routines.random.html
# Make sure to use round function to have the same precision of the data when copied to other software
data_np = np.vstack((
np.round(np.random.normal(loc=3, scale=3, size=30), 3),
np.round(np.random.lognormal(mean=3, sigma=3, size=30), 3),
np.random.randint(3, size=30),
np.random.randint(3, size=30),
np.round(np.random.normal(loc=3, scale=3, size=30), 3),
np.round(np.random.lognormal(mean=1.4, sigma=0.6, size=30), 3),
np.round(np.random.normal(loc=6, scale=3, size=30), 3),
np.round(np.random.normal(loc=7, scale=6, size=30), 3),
np.random.randint(2, size=30),
np.random.randint(2, size=30),
np.random.randint(2, size=30),
np.concatenate((np.round(np.random.normal(loc=3, scale=3, size=15), 3),
np.round(np.random.normal(loc=4, scale=3, size=15), 3))),
np.array([1]*15+[2]*15),
np.array([1]+[2]*29),
np.concatenate((np.round(np.random.normal(loc=3, scale=3, size=15), 3),
np.round(np.random.lognormal(mean=1.5, sigma=2.0, size=15), 3))),
np.concatenate((np.round(np.random.normal(loc=3, scale=3, size=15), 3),
np.round(np.random.normal(loc=3, scale=7, size=15), 3))),
np.array([1]*10+[2]*8+[3]*12),
np.concatenate((np.round(np.random.normal(loc=3, scale=3, size=10), 3),
np.round(np.random.normal(loc=3, scale=3, size=8), 3),
np.round(np.random.normal(loc=6, scale=3, size=12), 3)))
))
data_pd = pd.DataFrame(data_np.T, columns=
['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r'])
data = cs.CogStatData(data=data_pd, measurement_levels=['int', 'int', 'nom', 'nom', 'int', 'int', 'int', 'int', 'nom',
'nom', 'nom', 'int', 'nom', 'nom', 'int', 'int', 'int', 'int'])
#pd.set_option('display.expand_frame_repr', False)
#print (data_pd)
class CogStatTestCase(unittest.TestCase):
"""Unit tests for CogStat."""
def test_explore_variables(self):
"""Test explore variables"""
# Int variable
result = data.explore_variable('a', 1, 2.0)
#for i, res in enumerate(result): print(i, res)
self.assertTrue('N of valid cases: 30' in result[2])
self.assertTrue('N of missing cases: 0' in result[2])
self.assertTrue('<td>Mean</td> <td>3.1438</td>' in result[4])
self.assertTrue('<td>Standard deviation</td> <td>3.2152</td>' in result[4])
self.assertTrue('<td>Skewness</td> <td>0.3586</td>' in result[4])
self.assertTrue('<td>Kurtosis</td> <td>0.0446</td>' in result[4])
self.assertTrue('<td>Range</td> <td>12.7840</td>' in result[4])
self.assertTrue('<td>Maximum</td> <td>9.9810</td>' in result[4])
self.assertTrue('<td>Upper quartile</td> <td>4.3875</td>' in result[4])
self.assertTrue('<td>Median</td> <td>2.8545</td>' in result[4])
self.assertTrue('<td>Lower quartile</td> <td>1.4190</td>' in result[4])
self.assertTrue('<td>Minimum</td> <td>-2.8030</td>' in result[4])
# Shapiro–Wilk normality
self.assertTrue('<i>W</i> = 0.96' in result[6]) # <i>W</i> = 0.959
self.assertTrue('<i>p</i> = .287' in result[6])
# Population estimation and one sample t-test
self.assertTrue('<td>Mean</td> <td>3.1438</td> <td>1.9227</td> <td>4.3649</td>' in result[9])
self.assertTrue('<td>Standard deviation</td> <td>3.2702</td> <td>2.6044</td> <td>4.3961</td>' in result[9])
# Sensitivity power analysis
# G*Power 3.1.9.6: 0.6811825
# jamovi v1.2.19.0, jpower 0.1.2: 0.681
self.assertTrue('(effect size is in d): 0.68' in result[11])
self.assertTrue('t</i>(29) = 1.92' in result[11])
self.assertTrue('p</i> = .065' in result[11])
# Wilcoxon signed-rank test for non-normal interval variable
result = data.explore_variable('b', 0, 20.0)
self.assertTrue('T</i> = 203' in result[11])
self.assertTrue('p</i> = .551' in result[11])
# Ord variable
data.data_measlevs['a'] = 'ord'
result = data.explore_variable('a', 1, 2.0)
self.assertTrue('N of valid cases: 30' in result[2])
self.assertTrue('N of missing cases: 0' in result[2])
self.assertTrue('<td>Maximum</td> <td>9.9810</td>' in result[4])
self.assertTrue('<td>Upper quartile</td> <td>4.3875</td>' in result[4])
self.assertTrue('<td>Median</td> <td>2.8545</td>' in result[4])
self.assertTrue('<td>Lower quartile</td> <td>1.4190</td>' in result[4])
self.assertTrue('<td>Minimum</td> <td>-2.8030</td>' in result[4])
# TODO median CI
# Wilcoxon signed-rank test
self.assertTrue('T</i> = 145' in result[9])
self.assertTrue('p</i> = .074' in result[9])
data.data_measlevs['a'] = 'int'
# Nominal variable
#result = data.explore_variable('c')
# TODO variation ratio
# TODO multinomial proportion CI
def test_explore_variable_pairs(self):
"""Test explore variable pairs"""
# Int variables
result = data.explore_variable_pair('a', 'b')
self.assertTrue('N of valid pairs: 30' in result[1])
self.assertTrue('N of missing pairs: 0' in result[1])
self.assertTrue('-0.141' in result[4])
self.assertTrue('[-0.477, 0.231]' in result[6])
self.assertTrue("Pearson's correlation: <i>r</i>(28) = -0.14, <i>p</i> = .456" in result[7]) # <i>r</i>(28) = -0.141
self.assertTrue('y = -21.811x + 300.505' in result[3])
self.assertTrue('-0.363' in result[4])
self.assertTrue('[-0.640, -0.003]' in result[6])
self.assertTrue("Spearman's rank-order correlation: <i>r<sub>s</sub></i>(28) = -0.36, <i>p</i> = .048" in result[7]) # <i>r<sub>s</sub></i>(28) = -0.363
# Ord variables
data.data_measlevs['a'] = 'ord'
data.data_measlevs['b'] = 'ord'
result = data.explore_variable_pair('a', 'b')
self.assertTrue('-0.363' in result[4])
self.assertTrue('[-0.640, -0.003]' in result[5])
self.assertTrue("Spearman's rank-order correlation: <i>r<sub>s</sub></i>(28) = -0.36, <i>p</i> = .048" in result[6]) # <i>r<sub>s</sub></i>(28) = -0.363
data.data_measlevs['a'] = 'int'
data.data_measlevs['b'] = 'int'
# Nom variables
result = data.explore_variable_pair('c', 'd')
self.assertTrue('N of valid pairs: 30' in result[1])
self.assertTrue('N of missing pairs: 0' in result[1])
# Cramer's V
self.assertTrue('<sub>c</sub></i> = 0.372' in result[4])
# Sensitivity power analysis
# G*Power 3.1.9.6, Goodness of fit test, df=4: Contingency tables: 0.7868005
# TODO GPower gives 0.8707028 with df of 8; Seems like statsmodels GofChisquarePower calculates power
# with df=8; should we use 4 or 8 df? https://github.com/cogstat/cogstat/issues/134
self.assertTrue('(effect size is in w): 0.87' in result[6])
# Chi-squared
# jamovi v1.2.19.0: X2, df, p, N: 8.31, 4, 0.081, 30
self.assertTrue('(4, <i>N</i> = 30) = 8.31' in result[6]) # (4, <i>N</i> = 30) = 8.312
self.assertTrue('<i>p</i> = .081' in result[6])
def test_diffusion(self):
"""Test diffusion analysis"""
data_diffusion = cs.CogStatData(data=str(Path('data/diffusion.csv')))
result = data_diffusion.diffusion(error_name=['Error'], RT_name=['RT_sec'], participant_name=['Name'], condition_names=['Num1', 'Num2'])
# Drift rate
self.assertTrue('<td>zsiraf</td> <td>0.190</td> <td>0.276</td> <td>0.197</td> <td>0.235</td> <td>0.213</td>' in result[1])
# Threshold
self.assertTrue('<td>zsiraf</td> <td>0.178</td> <td>0.096</td> <td>0.171</td> <td>0.112</td> <td>0.088</td>' in result[1])
# Nondecision time
self.assertTrue('<td>zsiraf</td> <td>0.481</td> <td>0.590</td> <td>0.483</td> <td>0.561</td> <td>0.522</td>' in result[1])
def test_compare_variables(self):
"""Test compare variables"""
# 2 Int variables
result = data.compare_variables(['a', 'e'])
self.assertTrue('N of valid cases: 30' in result[1])
self.assertTrue('N of missing cases: 0' in result[1])
# Cohen's d
# CS formula: https://pingouin-stats.org/generated/pingouin.compute_effsize.html
# Based on the formula, calculated in LO Calc 6.4: 0.030004573510063
# jamovi v1.2.19.0: 0.0202; formula: https://github.com/jamovi/jmv/blob/master/R/ttestps.b.R#L54-L66
self.assertTrue("<td>Cohen's d</td> <td>0.030</td>" in result[3])
# eta-squared
# CS formula: https://pingouin-stats.org/generated/pingouin.convert_effsize.html
# Based on the formula, calculated in LO Calc 6.4: 0.0002250179634
# jamovi v1.2.19.0: 0.000
self.assertTrue('<td>Eta-squared</td> <td>0.000</td>' in result[3])
# Sample means
self.assertTrue('<td>3.1438</td> <td>3.0502</td>' in result[3])
# Hedges'g (with CI)
# CS formula: https://pingouin-stats.org/generated/pingouin.compute_effsize.html
# https://pingouin-stats.org/generated/pingouin.compute_esci.html
# Note that the latter (CI) method has changed in v0.3.5 https://pingouin-stats.org/changelog.html
# Based on the formula, calculated in LO Calc 7.0: 0.029614903724218, -0.34445335392457, 0.403683161373007
# Note that the last value is 0.404 in LO, not .403 as in pingouin
self.assertTrue("<td>Hedges' g</td> <td>0.030</td> <td>-0.344</td> <td>0.403</td>" in result[5])
self.assertTrue('<i>W</i> = 0.95, <i>p</i> = .215' in result[7]) # <i>W</i> = 0.954
# Sensitivity power analysis
# G*Power 3.1.9.6: 0.6811825
# jamovi v1.2.19.0, jpower 0.1.2: 0.681
self.assertTrue('(effect size is in d): 0.68' in result[7])
# Paired samples t-test
# jamovi v1.2.19.0: t, df, p: 0.110, 29.0, 0.913
self.assertTrue('<i>t</i>(29) = 0.11, <i>p</i> = .913' in result[7])
# 2 Int variables - non-normal
result = data.compare_variables(['e', 'f'])
self.assertTrue('<i>W</i> = 0.91, <i>p</i> = .019' in result[7]) # <i>W</i> = 0.915
self.assertTrue('<i>T</i> = 110.00, <i>p</i> = .012' in result[7])
# 3 Int variables
result = data.compare_variables(['a', 'e', 'g'])
self.assertTrue('<td>3.1438</td> <td>3.0502</td> <td>5.7295</td>' in result[3])
self.assertTrue('a: <i>W</i> = 0.96, <i>p</i> = .287' in result[7]) # <i>W</i> = 0.959
self.assertTrue('e: <i>W</i> = 0.97, <i>p</i> = .435' in result[7]) # <i>W</i> = 0.966
        self.assertTrue('g: <i>W</i> = 0.95, <i>p</i> = .133' in result[7])  # <i>W</i> = 0.946
self.assertTrue('sphericity: <i>W</i> = 0.98, <i>p</i> = .703' in result[7]) # <i>W</i> = 0.975
self.assertTrue('<i>F</i>(2, 58) = 6.17, <i>p</i> = .004' in result[7])
self.assertTrue('0.11, <i>p</i> = .913' in result[7]) # TODO keep the order of the variables, and have a fixed sign
self.assertTrue('3.17, <i>p</i> = .011' in result[7])
self.assertTrue('2.88, <i>p</i> = .015' in result[7])
# 3 Int variables, sphericity violated
result = data.compare_variables(['a', 'e', 'h'])
self.assertTrue('<td>3.1438</td> <td>3.0502</td> <td>6.5786</td>' in result[3])
self.assertTrue('a: <i>W</i> = 0.96, <i>p</i> = .287' in result[7]) # <i>W</i> = 0.959
self.assertTrue('e: <i>W</i> = 0.97, <i>p</i> = .435' in result[7]) # <i>W</i> = 0.966
self.assertTrue('h: <i>W</i> = 0.98, <i>p</i> = .824' in result[7])
self.assertTrue('sphericity: <i>W</i> = 0.79, <i>p</i> = .039' in result[7]) # <i>W</i> = 0.793
self.assertTrue('<i>F</i>(1.66, 48) = 6.16, <i>p</i> = .007' in result[7])
self.assertTrue('0.11, <i>p</i> = .913' in result[7]) # TODO keep the order of the variables, and have a fixed sign
self.assertTrue('2.68, <i>p</i> = .024' in result[7])
self.assertTrue('2.81, <i>p</i> = .026' in result[7])
# 3 Int variables, non-normal
result = data.compare_variables(['a', 'e', 'f'])
self.assertTrue('<td>3.1438</td> <td>3.0502</td> <td>5.3681</td>' in result[3])
self.assertTrue('a: <i>W</i> = 0.96, <i>p</i> = .287' in result[7]) # <i>W</i> = 0.959
self.assertTrue('e: <i>W</i> = 0.97, <i>p</i> = .435' in result[7]) # <i>W</i> = 0.966
self.assertTrue('f: <i>W</i> = 0.82, <i>p</i> < .001' in result[7]) # <i>W</i> = 0.818
self.assertTrue('χ<sup>2</sup>(2, <i>N</i> = 30) = 6.47, <i>p</i> = .039' in result[7])
# 2 × 2 Int variables
result = data.compare_variables(['a', 'b', 'e', 'f'], factors=[['first', 2], ['second', 2]])
self.assertTrue('Main effect of first: <i>F</i>(1, 29) = 6.06, <i>p</i> = .020' in result[7])
self.assertTrue('Main effect of second: <i>F</i>(1, 29) = 6.29, <i>p</i> = .018' in result[7])
self.assertTrue('Interaction of factors first, second: <i>F</i>(1, 29) = 6.04, <i>p</i> = .020' in result[7])
# 2 Ord variables
data.data_measlevs['a'] = 'ord'
data.data_measlevs['e'] = 'ord'
data.data_measlevs['f'] = 'ord'
result = data.compare_variables(['e', 'f'])
self.assertTrue('<td>2.3895</td> <td>4.2275</td>' in result[3])
self.assertTrue('<i>T</i> = 110.00, <i>p</i> = .012' in result[6])
# 3 Ord variables
result = data.compare_variables(['a', 'e', 'f'])
self.assertTrue('<td>2.8545</td> <td>2.3895</td> <td>4.2275</td>' in result[3])
self.assertTrue('χ<sup>2</sup>(2, <i>N</i> = 30) = 6.47, <i>p</i> = .039' in result[6])
data.data_measlevs['a'] = 'int'
data.data_measlevs['e'] = 'int'
data.data_measlevs['f'] = 'int'
# 2 Nom variables
result = data.compare_variables(['i', 'j'])
# TODO on Linux the row labels are 0.0 and 1.0 instead of 0 and 1
self.assertTrue('<td>0.0</td> <td>4</td> <td>9</td> <td>13</td> </tr> <tr> <td>1.0</td> <td>9</td>' in result[3])
self.assertTrue('χ<sup>2</sup>(1, <i>N</i> = 30) = 0.06, <i>p</i> = .814' in result[5]) # χ<sup>2</sup>(1, <i>N</i> = 30) = 0.0556
# 3 Nom variables
result = data.compare_variables(['i', 'j', 'k'])
self.assertTrue('<i>Q</i>(2, <i>N</i> = 30) = 0.78, <i>p</i> = .676' in result[7]) # <i>Q</i>(2, <i>N</i> = 30) = 0.783
def test_compare_groups(self):
"""Test compare groups"""
# 2 Int groups
result = data.compare_groups('l', ['m'])
self.assertTrue('<td>2.5316</td> <td>4.5759</td>' in result[3])
# Cohen's d
# CS formula: https://pingouin-stats.org/generated/pingouin.compute_effsize.html
# Based on the formula, calculated in LO Calc 6.4: -0.704171924382848
# jamovi v1.2.19.0: 0.0704
self.assertTrue("<td>Cohen's d</td> <td>-0.704</td>" in result[3])
# eta-squared
# CS formula: https://pingouin-stats.org/generated/pingouin.convert_effsize.html
# Based on the formula, calculated in LO Calc 6.4: 0.110292204104377
# jamovi v1.2.19.0: 0.117 # TODO why the difference?
self.assertTrue('<td>Eta-squared</td> <td>0.110</td>' in result[3])
# Hedges'g (with CI)
# CS formula: https://pingouin-stats.org/generated/pingouin.compute_effsize.html
# https://pingouin-stats.org/generated/pingouin.compute_esci.html
# Note that the latter (CI) method has changed in v0.3.5 https://pingouin-stats.org/changelog.html
# Based on the formula, calculated in LO Calc 7.0: -0.685140250750879, -1.45474443187683, 0.084463930375068
self.assertTrue('<td>Difference between the two groups:</td> <td>-2.0443</td> <td>-4.2157</td> <td>0.1272</td>' in result[5])
self.assertTrue("<td>Hedges' g</td> <td>-0.685</td> <td>-1.455</td> <td>0.084</td>" in result[6])
self.assertTrue('(m: 1.0): <i>W</i> = 0.96, <i>p</i> = .683' in result[8]) # <i>W</i> = 0.959
self.assertTrue('(m: 2.0): <i>W</i> = 0.98, <i>p</i> = .991' in result[8]) # <i>W</i> = 0.984
self.assertTrue('<i>W</i> = 0.30, <i>p</i> = .585' in result[8]) # <i>W</i> = 0.305
# Sensitivity power analysis
# G*Power 3.1.9.6: 1.3641059
# jamovi v1.2.19.0, jpower 0.1.2: 1.36
self.assertTrue('(effect size is in d): 1.36' in result[8])
# independent samples t-test
# jamovi v1.2.19.0: t, df, p: -1.93, 28.0, 0.064
self.assertTrue('<i>t</i>(28) = -1.93, <i>p</i> = .064' in result[8])
# Non-normal group
result = data.compare_groups('o', ['m'])
self.assertTrue('(m: 2.0): <i>W</i> = 0.81, <i>p</i> = .005' in result[8]) # <i>W</i> = 0.808
self.assertTrue('<i>U</i> = 51.00, <i>p</i> = .011' in result[8])
# Heteroscedastic groups
result = data.compare_groups('p', ['m'])
self.assertTrue('<i>t</i>(25.3) = 0.12, <i>p</i> = .907' in result[8]) # <i>t</i>(25.3) = 0.119
# TODO single case vs. group
# 3 Int groups
result = data.compare_groups('r', ['q'])
self.assertTrue('<td>3.2869</td> <td>5.0400</td> <td>7.2412</td>' in result[3])
self.assertTrue('<i>W</i> = 0.68, <i>p</i> = .517' in result[8]) # TODO this might be incorrect # <i>W</i> = 0.675
# Sensitivity power analysis
# G*Power 3.1.9.6: 0.7597473
self.assertTrue('(effect size is in f): 0.76' in result[8])
self.assertTrue('<i>F</i>(2, 27) = 4.00, <i>p</i> = .030' in result[8])
self.assertTrue('ω<sup>2</sup> = 0.167' in result[6])
# TODO post-hoc
# 3 Int groups with assumption violation
result = data.compare_groups('o', ['q'])
self.assertTrue('χ<sup>2</sup>(2, <i>N</i> = 30) = 8.37, <i>p</i> = .015' in result[8])
# 2 Ord groups
data.data_measlevs['o'] = 'ord'
result = data.compare_groups('o', ['m'])
self.assertTrue('<i>U</i> = 51.00, <i>p</i> = .011' in result[6])
# 3 Ord groups
data.data_measlevs['o'] = 'ord'
result = data.compare_groups('o', ['q'])
self.assertTrue('χ<sup>2</sup>(2, <i>N</i> = 30) = 8.37, <i>p</i> = .015' in result[6])
data.data_measlevs['o'] = 'int'
# 2 Nom groups
result = data.compare_groups('i', ['j'])
self.assertTrue('φ<i><sub>c</sub></i> = 0.154' in result[3]) # TODO validate
self.assertTrue('χ<sup>2</sup></i>(1, <i>N</i> = 30) = 0.71, <i>p</i> = .399' in result[5]) # TODO validate # χ<sup>2</sup></i>(1, <i>N</i> = 30) = 0.710
# 3 Nom groups
result = data.compare_groups('i', ['c'])
self.assertTrue('φ<i><sub>c</sub></i> = 0.009' in result[3]) # TODO validate
self.assertTrue('χ<sup>2</sup></i>(2, <i>N</i> = 30) = 0.00, <i>p</i> = .999' in result[5]) # TODO validate # χ<sup>2</sup></i>(2, <i>N</i> = 30) = 0.002
# 3 × 3 Int groups
result = data.compare_groups('a', ['c', 'd'])
self.assertTrue('<td>Mean</td> <td>1.0695</td> <td>1.8439</td> <td>2.3693</td>' in result[3])
self.assertTrue('<td>Standard deviation</td> <td>2.7005</td> <td>2.0891</td> <td>4.2610</td>' in result[3])
self.assertTrue('<td>Maximum</td> <td>4.4130</td> <td>4.7890</td> <td>9.1600</td>' in result[3])
self.assertTrue('<td>Upper quartile</td> <td>3.0000</td> <td>3.0213</td> <td>4.4028</td>' in result[3])
self.assertTrue('<td>Median</td> <td>1.3340</td> <td>2.4590</td> <td>0.9015</td>' in result[3])
self.assertTrue('<td>Lower quartile</td> <td>-0.5965</td> <td>0.8870</td> <td>-1.1320</td>' in result[3])
self.assertTrue('<td>Minimum</td> <td>-2.8030</td> <td>-2.2890</td> <td>-1.4860</td>' in result[3])
# TODO the two main effects differ from the SPSS result, see issue #91
self.assertTrue('<i>F</i>(2, 21) = 2.35, <i>p</i> = .120' in result[7])
self.assertTrue('<i>F</i>(2, 21) = 0.19, <i>p</i> = .832' in result[7]) # <i>F</i>(2, 21) = 0.185
self.assertTrue('<i>F</i>(4, 21) = 1.15, <i>p</i> = .363' in result[7])
def test_single_case(self):
# Test for the slope stat
data = cs.CogStatData(data='''group slope slope_SE
Patient 0.247 0.069
Control 0.492 0.106
Control 0.559 0.108
Control 0.63 0.116
Control 0.627 0.065
Control 0.674 0.105
Control 0.538 0.107''')
result = data.compare_groups('slope', ['group'], 'slope_SE', 25)
self.assertTrue('Test d.2: <i>t</i>(42.1) = -4.21, <i>p</i> < .001' in result[8])
result = data.compare_groups('slope', ['group'])
self.assertTrue('<i>t</i>(5) = -5.05, <i>p</i> = .004' in result[8])
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | 2,875,072,844,080,069,600 | 55.772152 | 171 | 0.550056 | false |
hnakamur/saklient.python | saklient/cloud/models/model_iface.py | 1 | 3605 | # -*- coding:utf-8 -*-
from ..client import Client
from .model import Model
from ..resources.resource import Resource
from ..resources.iface import Iface
from ...util import Util
import saklient
# module saklient.cloud.models.model_iface
class Model_Iface(Model):
    ## A class that provides functionality for searching and creating interfaces.
## @private
# @return {str}
def _api_path(self):
return "/interface"
## @private
# @return {str}
def _root_key(self):
return "Interface"
## @private
# @return {str}
def _root_key_m(self):
return "Interfaces"
## @private
# @return {str}
def _class_name(self):
return "Iface"
## @private
# @param {any} obj
# @param {bool} wrapped=False
# @return {saklient.cloud.resources.resource.Resource}
def _create_resource_impl(self, obj, wrapped=False):
Util.validate_type(wrapped, "bool")
return Iface(self._client, obj, wrapped)
    ## Specifies the starting offset of the list to fetch next.
    #
    # @param {int} offset offset
    # @return {saklient.cloud.models.model_iface.Model_Iface} this
def offset(self, offset):
Util.validate_type(offset, "int")
return self._offset(offset)
    ## Specifies the maximum number of records for the list to fetch next.
    #
    # @param {int} count maximum number of records
    # @return {saklient.cloud.models.model_iface.Model_Iface} this
def limit(self, count):
Util.validate_type(count, "int")
return self._limit(count)
    ## Directly specifies the filtering settings for the Web API.
    #
    # @param {str} key key
    # @param {any} value value
    # @param {bool} multiple=False Pass true to supply an array as value and search for exact matches combined with OR. Normally value is a scalar and a fuzzy search is performed.
# @return {saklient.cloud.models.model_iface.Model_Iface}
def filter_by(self, key, value, multiple=False):
Util.validate_type(key, "str")
Util.validate_type(multiple, "bool")
return self._filter_by(key, value, multiple)
    ## Discards all state that has been set up for the next request.
#
# @return {saklient.cloud.models.model_iface.Model_Iface} this
def reset(self):
return self._reset()
    ## Prepares an object for creating a new resource.
    #
    # Set parameters on the returned object and call save() to create the actual resource.
    #
    # @return {saklient.cloud.resources.iface.Iface} resource object
def create(self):
return self._create()
    ## Fetches the unique resource that has the specified ID.
    #
    # @param {str} id
    # @return {saklient.cloud.resources.iface.Iface} resource object
def get_by_id(self, id):
Util.validate_type(id, "str")
return self._get_by_id(id)
    ## Performs a resource search request and retrieves the results as a list.
    #
    # @return {saklient.cloud.resources.iface.Iface[]} array of resource objects
def find(self):
return self._find()
## @ignore
# @param {saklient.cloud.client.Client} client
def __init__(self, client):
super(Model_Iface, self).__init__(client)
Util.validate_type(client, "saklient.cloud.client.Client")
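# --- Hedged usage sketch (comments only; not part of the original class) ---
# Typical chained lookup, assuming an authenticated saklient API object
# that exposes this model as `api.iface`:
#
#   ifaces = api.iface.offset(0).limit(10).find()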
| mit | 5,133,975,804,251,336,000 | 27.417476 | 99 | 0.622822 | false |
okuraoy/mywork | mtlearn/datasets.py | 1 | 2037 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
from sklearn.datasets.base import Bunch
from os.path import join
PATH = "d:\\data"
# class Bunch(dict):
# """Container object for datasets
# Dictionary-like object that exposes its keys as attributes.
#
# See: sklearn.datasets.base.py Bunch
# """
#
# def __init__(self, **kwargs):
# super(Bunch, self).__init__(kwargs)
#
# def __setattr__(self, key, value):
# self[key] = value
#
# def __dir__(self):
# return self.keys()
#
# def __getattr__(self, key):
# try:
# return self[key]
# except KeyError:
# raise AttributeError(key)
#
# def __setstate__(self, state):
# # Bunch pickles generated with scikit-learn 0.16.* have an non
# # empty __dict__. This causes a surprising behaviour when
# # loading these pickles scikit-learn 0.17: reading bunch.key
# # uses __dict__ but assigning to bunch.key use __setattr__ and
# # only changes bunch['key']. More details can be found at:
# # https://github.com/scikit-learn/scikit-learn/issues/6196.
# # Overriding __setstate__ to be a noop has the effect of
# # ignoring the pickled __dict__
# pass
def parse_date(x):
return pd.datetime.strptime(x, '%Y-%m-%d')
def load_pcs_data():
# column: date,pcs,f1,f2,...
# sep='\001',
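    # Illustrative first rows of the assumed input file ('\001'-separated):
    #   date\001pcs\001f1\001f2\001...
    #   2017-07-01\00112\0010.3\0011.7\001...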
df = pd.read_csv(join(PATH, 'spu_pcs_20170721.csv'), sep='\001', parse_dates=['date'], date_parser=parse_date)
    df = df.sort_values(by='date')  # sort_values returns a copy; reassign to keep the ordering
columns = np.array(df.columns.values)
feature_name = columns[2:]
tmp_data = np.array(df)
inx_data = tmp_data[:, 0]
target = tmp_data[:, 1]
data = tmp_data[:, 2:]
# print shape
print data.shape
print feature_name
return Bunch(data=data, target=target, feature_names=feature_name, inx=inx_data)
if __name__ == '__main__':
load_pcs_data()
| apache-2.0 | 4,101,028,020,952,745,000 | 27.1 | 114 | 0.569956 | false |
Azure/azure-sdk-for-python | sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_09_01/operations/_domain_registration_provider_operations.py | 1 | 5142 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DomainRegistrationProviderOperations(object):
"""DomainRegistrationProviderOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.web.v2020_09_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_operations(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.CsmOperationCollection"]
"""Implements Csm operations Api to exposes the list of available Csm Apis under the resource provider.
Description for Implements Csm operations Api to exposes the list of available Csm Apis under
the resource provider.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CsmOperationCollection or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.web.v2020_09_01.models.CsmOperationCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CsmOperationCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_operations.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('CsmOperationCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_operations.metadata = {'url': '/providers/Microsoft.DomainRegistration/operations'} # type: ignore
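    # --- Hedged usage sketch (comments only; not part of the generated client) ---
    # Iterating the pager returned by list_operations; the credential and
    # subscription id below are assumptions for illustration.
    #
    # from azure.identity import DefaultAzureCredential
    # from azure.mgmt.web import WebSiteManagementClient
    #
    # client = WebSiteManagementClient(DefaultAzureCredential(), "<subscription-id>")
    # for operation in client.domain_registration_provider.list_operations():
    #     print(operation.name)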
| mit | 2,799,316,774,956,120,600 | 44.504425 | 133 | 0.652859 | false |
lum4chi/mygensim | models/qlmodel.py | 1 | 1822 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016 Francesco Lumachi <[email protected]>
from __future__ import division
from gensim import models, utils
import math
class QLModel(models.TfidfModel):
""" Use of models.TfidfModel as base to build Query Likelihood Model (12.9) appeared in
"An introduction to Information Retrieval" by Manning, Raghavan and Schütze
"""
def __init__(self, *args, **kwargs):
super(QLModel, self).__init__(*args, normalize=False, **kwargs)
def __str__(self):
return "QueryLikelihoodModel(num_docs=%s, num_nnz=%s)" % (self.num_docs, self.num_nnz)
def __getitem__(self, bog, eps=1e-12):
""" Overwrite weight calculus with estimation of a Model of d, based on its own "gram"
(we can see bag-of-word as bag-of-gram based upon what tokenize policy to adopt):
P(q|d) ≈ prod( P(g|d) for g in q ) # product of only the gram present in query
P(g|d) ≈ tf(g,d) / len(d) # compute prob of every gram
"""
# if the input vector is in fact a corpus, return a transformed corpus as a result
is_corpus, bog = utils.is_corpus(bog)
if is_corpus:
return self._apply(bog)
# --- only vector component calculation has changed from original method ---
# unknown (new) terms will be given zero weight
# 0 < P(g|d) <= 1, then -1 * log() to avoid negative
vector = [(gramid, -math.log(tf / len(bog)))
for gramid, tf in bog if self.idfs.get(gramid, 0.0) != 0.0]
# --- no need to normalize ---
# make sure there are no explicit zeroes in the vector (must be sparse)
vector = [(termid, weight) for termid, weight in vector if abs(weight) > eps]
return vector | gpl-3.0 | 2,304,769,948,544,169,700 | 43.341463 | 94 | 0.614199 | false |
lightbase/LBConverter | lbconverter/config.py | 1 | 4423 |
def set_config():
import ConfigParser
config = ConfigParser.ConfigParser()
config.read('development.ini')
global REST_URL
global OUTPATH
global DEFAULT_OPENOFFICE_PORT
global PIDFILE_PATH
global LOGFILE_PATH
global SUPPORTED_FILES
#---------------------#
# Configuration Start #
#---------------------#
REST_URL = config.get('LBConverter', 'rest_url')
OUTPATH = config.get('LBConverter', 'outpath')
DEFAULT_OPENOFFICE_PORT = int(config.get('LBConverter', 'default_openoffice_port'))
PIDFILE_PATH = config.get('Daemon', 'pidfile_path')
LOGFILE_PATH = config.get('Daemon', 'logfile_path')
SUPPORTED_FILES = [
'doc',
'docx',
'odt',
'rtf',
'txt',
'html',
'pdf',
'xml',
#'ods',
#'xls',
#'xlsx',
#'ppt',
#'pptx',
#'pps',
#'ppsx',
#'odp'
]
#-------------------#
# Configuration End #
#-------------------#
global FAMILY_TEXT
global FAMILY_WEB
global FAMILY_SPREADSHEET
global FAMILY_PRESENTATION
global FAMILY_DRAWING
FAMILY_TEXT = "Text"
FAMILY_WEB = "Web"
FAMILY_SPREADSHEET = "Spreadsheet"
FAMILY_PRESENTATION = "Presentation"
FAMILY_DRAWING = "Drawing"
# see http://wiki.services.openoffice.org/wiki/Framework/Article/Filter
# most formats are auto-detected; only those requiring options are defined here
global IMPORT_FILTER_MAP
IMPORT_FILTER_MAP = {
"txt": {
"FilterName": "Text (encoded)",
"FilterOptions": "utf8"
},
"csv": {
"FilterName": "Text - txt - csv (StarCalc)",
"FilterOptions": "44,34,0"
},
'default':{
'Hidden': True,
'RepairPackage': True,
'Silent': True,
}
}
global EXPORT_FILTER_MAP
EXPORT_FILTER_MAP = {
"pdf": {
FAMILY_TEXT: { "FilterName": "writer_pdf_Export" },
FAMILY_WEB: { "FilterName": "writer_web_pdf_Export" },
FAMILY_SPREADSHEET: { "FilterName": "calc_pdf_Export" },
FAMILY_PRESENTATION: { "FilterName": "impress_pdf_Export" },
FAMILY_DRAWING: { "FilterName": "draw_pdf_Export" }
},
"html": {
FAMILY_TEXT: { "FilterName": "HTML (StarWriter)" },
FAMILY_SPREADSHEET: { "FilterName": "HTML (StarCalc)" },
FAMILY_PRESENTATION: { "FilterName": "impress_html_Export" }
},
"odt": {
FAMILY_TEXT: { "FilterName": "writer8" },
FAMILY_WEB: { "FilterName": "writerweb8_writer" }
},
"doc": {
FAMILY_TEXT: { "FilterName": "MS Word 97" }
},
"docx": {
FAMILY_TEXT: { "FilterName": "MS Word 2007 XML" }
},
"rtf": {
FAMILY_TEXT: { "FilterName": "Rich Text Format" }
},
"txt": {
FAMILY_TEXT: {
"FilterName": "Text",
"FilterOptions": "utf8"
}
},
"ods": {
FAMILY_SPREADSHEET: { "FilterName": "calc8" }
},
"xls": {
FAMILY_SPREADSHEET: { "FilterName": "MS Excel 97" }
},
"csv": {
FAMILY_SPREADSHEET: {
"FilterName": "Text - txt - csv (StarCalc)",
"FilterOptions": "44,34,0"
}
},
"odp": {
FAMILY_PRESENTATION: { "FilterName": "impress8" }
},
"ppt": {
FAMILY_PRESENTATION: { "FilterName": "MS PowerPoint 97" }
},
"swf": {
FAMILY_DRAWING: { "FilterName": "draw_flash_Export" },
FAMILY_PRESENTATION: { "FilterName": "impress_flash_Export" }
}
}
global PAGE_STYLE_OVERRIDE_PROPERTIES
PAGE_STYLE_OVERRIDE_PROPERTIES = {
FAMILY_SPREADSHEET: {
#--- Scale options: uncomment 1 of the 3 ---
# a) 'Reduce / enlarge printout': 'Scaling factor'
"PageScale": 100,
# b) 'Fit print range(s) to width / height': 'Width in pages' and 'Height in pages'
#"ScaleToPagesX": 1, "ScaleToPagesY": 1000,
# c) 'Fit print range(s) on number of pages': 'Fit print range(s) on number of pages'
#"ScaleToPages": 1,
"PrintGrid": False
}
}
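# --- Hedged usage sketch (comments only; not part of the original module) ---
# After set_config() has populated the module globals (it reads a
# development.ini that is assumed to exist), filter options are looked up as:
#
#   set_config()
#   opts = EXPORT_FILTER_MAP['pdf'][FAMILY_TEXT]  # {'FilterName': 'writer_pdf_Export'}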
| gpl-2.0 | -7,174,447,507,429,265,000 | 28.098684 | 97 | 0.496496 | false |
ftrain/django-ftrain | kcal/migrations/0002_to_energy_model.py | 1 | 3214 |
from south.db import db
from django.db import models
from ftrain.ohlih.models import *
class Migration:
def forwards(self, orm):
# Adding model 'Energy'
db.create_table('ohlih_energy', (
('kcal_is_est', orm['ohlih.energy:kcal_is_est']),
('kcal', orm['ohlih.energy:kcal']),
('id', orm['ohlih.energy:id']),
('name', orm['ohlih.energy:name']),
))
db.send_create_signal('ohlih', ['Energy'])
# Adding model 'Consumption'
db.create_table('ohlih_consumption', (
('in_event', orm['ohlih.consumption:in_event']),
('order', orm['ohlih.consumption:order']),
('id', orm['ohlih.consumption:id']),
('quantity', orm['ohlih.consumption:quantity']),
('of_energy', orm['ohlih.consumption:of_energy']),
))
db.send_create_signal('ohlih', ['Consumption'])
def backwards(self, orm):
# Deleting model 'Energy'
db.delete_table('ohlih_energy')
# Deleting model 'Consumption'
db.delete_table('ohlih_consumption')
models = {
'ohlih.event': {
'commentary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'event_type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'time': ('django.db.models.fields.DateTimeField', [], {})
},
'ohlih.energy': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kcal': ('django.db.models.fields.IntegerField', [], {}),
'kcal_is_est': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'ohlih.food': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ohlih.Event']"}),
'kcal': ('django.db.models.fields.IntegerField', [], {}),
'kcal_is_est': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'quantity': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'ohlih.consumption': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ohlih.Event']"}),
'of_energy': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ohlih.Energy']"}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'quantity': ('django.db.models.fields.CharField', [], {'max_length': '10'})
}
}
complete_apps = ['ohlih']
| bsd-3-clause | 1,700,160,144,269,069,800 | 43.638889 | 111 | 0.520846 | false |
jlengrand/Ivolution | ivolution/util/Notifier.py | 1 | 2002 | """
.. module:: Notifier
:platform: Unix, Windows
   :synopsis: Implements a simple Observer/Observable pattern for communication between the Facemovie thread and the Ivolution GUI
.. moduleauthor:: Julien Lengrand-Lambert <[email protected]>
"""
class Observer():
"""
Implements a simple Observer from the Observer pattern
"""
def __init__(self, name="Observer"):
"""
"""
self.name = name
def update(self, message):
"""
"""
if message is not None:
#print "%s received %s" %(self.name, message)
pass
def __str__(self):
return self.name
class Observable():
"""
Implements a simple Observable from the Observer pattern
"""
def __init__(self):
"""
"""
self.val = 1
self.obs_collection = []
def subscribe(self, observer):
"""
"""
try:
if not(observer in self.obs_collection):
self.obs_collection.append(observer)
#print "%s added to collection" %(str(observer))
else:
#print "%s already in collection" %(str(observer))
pass
except TypeError:
#print "Failed to add %s" %(str(observer))
pass
def unsubscribe(self, observer):
"""
"""
try:
if observer in self.obs_collection:
self.obs_collection.remove(observer)
#print "%s removed from collection" %(str(observer))
else:
#print "%s not in collection" %(str(observer))
pass
except TypeError:
#print "Failed to remove %s" %(str(observer))
pass
def notify(self, message):
"""
"""
for observer in self.obs_collection:
#print "sent %s to %s" %(message, str(observer))
if message[0] == observer.name:
observer.update(message[1])
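# --- Hedged usage sketch (not part of the original module) ---
# Demonstrates the (name, payload) message convention used by notify():
# only observers whose name matches message[0] receive the update.
if __name__ == "__main__":
    gui = Observer("gui")
    worker = Observable()
    worker.subscribe(gui)
    worker.notify(("gui", "FACE_DETECTED"))  # routed to the "gui" observer
    worker.notify(("log", "ignored"))        # no observer named "log"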
| bsd-3-clause | -6,127,289,947,069,954,000 | 24.341772 | 130 | 0.51998 | false |
IronLanguages/ironpython3 | Tests/test_bool.py | 1 | 4185 | # Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
import unittest
from iptest import is_cli, long, run_test
class BoolTest(unittest.TestCase):
def test_types(self):
for x in [str, int, long, float, bool]:
if not x:
self.fail("should be true: %r", x)
def test_bool_dir(self):
bool_dir = ['__abs__', '__add__', '__and__', '__class__',
'__eq__', '__ne__', '__gt__', '__ge__', '__le__', '__lt__',
'__delattr__', '__divmod__', '__doc__',
'__float__', '__floordiv__', '__getattribute__', '__getnewargs__',
'__hash__', '__index__', '__init__', '__int__',
'__invert__', '__lshift__', '__mod__', '__mul__',
'__neg__', '__new__', '__bool__', '__or__', '__pos__',
'__pow__', '__radd__', '__rand__', '__rdivmod__', '__reduce__',
'__reduce_ex__', '__repr__', '__rfloordiv__', '__rlshift__', '__rmod__',
'__rmul__', '__ror__', '__rpow__', '__rrshift__', '__rshift__',
'__rsub__', '__rtruediv__', '__rxor__', '__setattr__', '__str__',
'__sub__', '__truediv__', '__xor__']
for t_list in [dir(bool), dir(True), dir(False)]:
for stuff in bool_dir:
self.assertTrue(stuff in t_list, "%s should be in dir(bool), but is not" % (stuff))
def test__float__(self):
self.assertEqual(float(True), 1.0)
self.assertEqual(float(False), 0.0)
def test__index__(self):
self.assertEqual(True.__index__(), 1)
self.assertEqual(False.__index__(), 0)
def test__long__(self):
self.assertEqual(long(True), long(1))
self.assertEqual(long(False), long(0))
def test__rdivmod__(self):
self.assertEqual(divmod(True, True), (1, 0))
self.assertEqual(divmod(False, True), (0, 0))
self.assertRaises(ZeroDivisionError, divmod, True, False)
self.assertRaises(ZeroDivisionError, divmod, False, False)
@unittest.skipUnless(is_cli, 'IronPython specific test')
def test_decimal(self):
import System
if not System.Decimal:
Fail("should be true: %r", System.Decimal)
self.assertEqual(bool(System.Decimal(0)), False)
self.assertEqual(bool(System.Decimal(1)), True)
self.assertEqual(System.Decimal(True), System.Decimal(1))
self.assertEqual(System.Decimal(False), System.Decimal(0))
def test__bool__(self):
class ClassWithBool:
def __init__(self, val):
self.val = val
def __bool__(self):
return self.val
class ClassWithLen:
def __init__(self, val):
self.val = val
def __len__(self):
return self.val
class MyIndex:
def __init__(self, val):
self.val = val
def __index__(self):
return self.val
class MyLong(long): pass
bool_cases = [
(True, True), (False, False), (MyIndex(0), TypeError),
]
len_cases = [
(1, True), (0, False), (0.0, TypeError), (-1, ValueError), (1<<64, OverflowError),
]
cases = []
cases += [(ClassWithBool(x), y) for x, y in bool_cases]
cases += [(ClassWithLen(x), y) for x, y in len_cases]
cases += [(ClassWithLen(long(x)), y) for x, y in len_cases if isinstance(x, int)]
cases += [(ClassWithLen(MyLong(x)), y) for x, y in len_cases if isinstance(x, int)]
cases += [(ClassWithLen(MyIndex(x)), y) for x, y in len_cases]
for val, res in cases:
if type(res) == type:
with self.assertRaises(res):
bool(val)
with self.assertRaises(res):
not val
else:
self.assertEqual(bool(val), res)
self.assertEqual(not val, not res)
run_test(__name__)
| apache-2.0 | -3,756,831,943,541,605,000 | 38.11215 | 99 | 0.493429 | false |
zcoinofficial/zcoin | src/tor/scripts/codegen/makedesc.py | 1 | 10850 | #!/usr/bin/python
# Copyright 2014-2019, The Tor Project, Inc.
# See LICENSE for license information
# This is a kludgey python script that uses ctypes and openssl to sign
# router descriptors and extrainfo documents and put all the keys in
# the right places. There are examples at the end of the file.
# I've used this to make inputs for unit tests. I wouldn't suggest
# using it for anything else.
import base64
import binascii
import ctypes
import ctypes.util
import hashlib
import optparse
import os
import re
import struct
import time
import UserDict
import slow_ed25519
import slownacl_curve25519
import ed25519_exts_ref
# Pull in the openssl stuff we need.
crypt = ctypes.CDLL(ctypes.util.find_library('crypto'))
BIO_s_mem = crypt.BIO_s_mem
BIO_s_mem.argtypes = []
BIO_s_mem.restype = ctypes.c_void_p
BIO_new = crypt.BIO_new
BIO_new.argtypes = [ctypes.c_void_p]
BIO_new.restype = ctypes.c_void_p
crypt.BIO_free.argtypes = [ctypes.c_void_p]
crypt.BIO_free.restype = ctypes.c_int
crypt.BIO_ctrl.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_long, ctypes.c_void_p ]
crypt.BIO_ctrl.restype = ctypes.c_long
crypt.PEM_write_bio_RSAPublicKey.argtypes = [ ctypes.c_void_p, ctypes.c_void_p ]
crypt.PEM_write_bio_RSAPublicKey.restype = ctypes.c_int
RSA_generate_key = crypt.RSA_generate_key
RSA_generate_key.argtypes = [ctypes.c_int, ctypes.c_ulong, ctypes.c_void_p, ctypes.c_void_p]
RSA_generate_key.restype = ctypes.c_void_p
RSA_private_encrypt = crypt.RSA_private_encrypt
RSA_private_encrypt.argtypes = [
ctypes.c_int, ctypes.c_char_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int ]
RSA_private_encrypt.restype = ctypes.c_int
i2d_RSAPublicKey = crypt.i2d_RSAPublicKey
i2d_RSAPublicKey.argtypes = [
ctypes.c_void_p, ctypes.POINTER(ctypes.c_char_p)
]
i2d_RSAPublicKey.restype = ctypes.c_int
def rsa_sign(msg, rsa):
buf = ctypes.create_string_buffer(1024)
n = RSA_private_encrypt(len(msg), msg, buf, rsa, 1)
if n <= 0:
raise Exception()
return buf.raw[:n]
def b64(x):
x = base64.b64encode(x)
res = []
for i in xrange(0, len(x), 64):
res.append(x[i:i+64]+"\n")
return "".join(res)
def bio_extract(bio):
buf = ctypes.c_char_p()
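    # 3 == BIO_CTRL_INFO: ask the memory BIO for a pointer to its data and
    # for the data's length.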
length = crypt.BIO_ctrl(bio, 3, 0, ctypes.byref(buf))
return ctypes.string_at(buf, length)
def make_rsa_key(e=65537):
rsa = crypt.RSA_generate_key(1024, e, None, None)
bio = BIO_new(BIO_s_mem())
crypt.PEM_write_bio_RSAPublicKey(bio, rsa)
pem = bio_extract(bio).rstrip()
crypt.BIO_free(bio)
buf = ctypes.create_string_buffer(1024)
pBuf = ctypes.c_char_p(ctypes.addressof(buf))
n = crypt.i2d_RSAPublicKey(rsa, ctypes.byref(pBuf))
s = buf.raw[:n]
digest = hashlib.sha1(s).digest()
return (rsa,pem,digest)
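# Illustrative usage only (not exercised below): generate a key, then sign a
# SHA-1 digest with it.
#   rsa, pem, digest = make_rsa_key()
#   sig = rsa_sign(hashlib.sha1("example input").digest(), rsa)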
def makeEdSigningKeyCert(sk_master, pk_master, pk_signing, date,
includeSigning=False, certType=1):
assert len(pk_signing) == len(pk_master) == 32
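    # Certificate layout built below (tor cert-spec / prop220):
    #   VERSION (0x01) | CERT_TYPE | EXPIRATION (hours since epoch, u32) |
    #   KEY_TYPE (0x01 = ed25519) | CERTIFIED_KEY (32 bytes) |
    #   N_EXTENSIONS + EXTENSIONS | SIGNATURE (64 bytes)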
expiration = struct.pack("!L", date//3600)
if includeSigning:
extensions = "\x01\x00\x20\x04\x00%s"%(pk_master)
else:
extensions = "\x00"
signed = "\x01%s%s\x01%s%s" % (
chr(certType), expiration, pk_signing, extensions)
signature = ed25519_exts_ref.signatureWithESK(signed, sk_master, pk_master)
assert len(signature) == 64
return signed+signature
def objwrap(identifier, body):
return ("-----BEGIN {0}-----\n"
"{1}"
"-----END {0}-----").format(identifier, body)
MAGIC1 = "<<<<<<MAGIC>>>>>>"
MAGIC2 = "<<<<<!#!#!#XYZZY#!#!#!>>>>>"
class OnDemandKeys(object):
def __init__(self, certDate=None):
if certDate is None:
certDate = time.time() + 86400
self.certDate = certDate
self.rsa_id = None
self.rsa_onion_key = None
self.ed_id_sk = None
self.ntor_sk = None
self.ntor_crosscert = None
self.rsa_crosscert_ed = None
self.rsa_crosscert_noed = None
@property
def RSA_IDENTITY(self):
if self.rsa_id is None:
self.rsa_id, self.rsa_ident_pem, self.rsa_id_digest = make_rsa_key()
return self.rsa_ident_pem
@property
def RSA_ID_DIGEST(self):
self.RSA_IDENTITY
return self.rsa_id_digest
@property
def RSA_FINGERPRINT_NOSPACE(self):
return binascii.b2a_hex(self.RSA_ID_DIGEST).upper()
@property
def RSA_ONION_KEY(self):
if self.rsa_onion_key is None:
self.rsa_onion_key, self.rsa_onion_pem, _ = make_rsa_key()
return self.rsa_onion_pem
@property
def RSA_FINGERPRINT(self):
        hexdigest = self.RSA_FINGERPRINT_NOSPACE
return " ".join(hexdigest[i:i+4] for i in range(0,len(hexdigest),4))
@property
def RSA_SIGNATURE(self):
return MAGIC1
@property
def ED_SIGNATURE(self):
return MAGIC2
@property
def NTOR_ONION_KEY(self):
if self.ntor_sk is None:
self.ntor_sk = slownacl_curve25519.Private()
self.ntor_pk = self.ntor_sk.get_public()
return base64.b64encode(self.ntor_pk.serialize())
@property
def ED_CERT(self):
if self.ed_id_sk is None:
self.ed_id_sk = ed25519_exts_ref.expandSK(os.urandom(32))
self.ed_signing_sk = ed25519_exts_ref.expandSK(os.urandom(32))
self.ed_id_pk = ed25519_exts_ref.publickeyFromESK(self.ed_id_sk)
self.ed_signing_pk = ed25519_exts_ref.publickeyFromESK(self.ed_signing_sk)
self.ed_cert = makeEdSigningKeyCert(self.ed_id_sk, self.ed_id_pk, self.ed_signing_pk, self.certDate, includeSigning=True, certType=4)
return objwrap('ED25519 CERT', b64(self.ed_cert))
@property
def NTOR_CROSSCERT(self):
if self.ntor_crosscert is None:
self.ED_CERT
self.NTOR_ONION_KEY
ed_privkey = self.ntor_sk.serialize() + os.urandom(32)
ed_pub0 = ed25519_exts_ref.publickeyFromESK(ed_privkey)
sign = (ord(ed_pub0[31]) & 255) >> 7
            # Sign with the same expanded key whose public half was derived
            # above; drawing fresh random bytes here would desynchronize the
            # secret and public keys.
            self.ntor_crosscert = makeEdSigningKeyCert(ed_privkey, ed_pub0, self.ed_id_pk, self.certDate, certType=10)
self.ntor_crosscert_sign = sign
return objwrap('ED25519 CERT', b64(self.ntor_crosscert))
@property
def NTOR_CROSSCERT_SIGN(self):
self.NTOR_CROSSCERT
return self.ntor_crosscert_sign
@property
def RSA_CROSSCERT_NOED(self):
if self.rsa_crosscert_noed is None:
self.RSA_ONION_KEY
signed = self.RSA_ID_DIGEST
self.rsa_crosscert_noed = rsa_sign(signed, self.rsa_onion_key)
return objwrap("CROSSCERT",b64(self.rsa_crosscert_noed))
@property
def RSA_CROSSCERT_ED(self):
if self.rsa_crosscert_ed is None:
self.RSA_ONION_KEY
self.ED_CERT
signed = self.RSA_ID_DIGEST + self.ed_id_pk
self.rsa_crosscert_ed = rsa_sign(signed, self.rsa_onion_key)
return objwrap("CROSSCERT",b64(self.rsa_crosscert_ed))
def sign_desc(self, body):
idx = body.rfind("\nrouter-sig-ed25519 ")
if idx >= 0:
self.ED_CERT
signed_part = body[:idx+len("\nrouter-sig-ed25519 ")]
signed_part = "Tor router descriptor signature v1" + signed_part
digest = hashlib.sha256(signed_part).digest()
ed_sig = ed25519_exts_ref.signatureWithESK(digest,
self.ed_signing_sk, self.ed_signing_pk)
body = body.replace(MAGIC2, base64.b64encode(ed_sig).replace("=",""))
idx = body.rindex("\nrouter-signature")
end_of_sig = body.index("\n", idx+1)
signed_part = body[:end_of_sig+1]
digest = hashlib.sha1(signed_part).digest()
assert len(digest) == 20
rsasig = rsa_sign(digest, self.rsa_id)
body = body.replace(MAGIC1, objwrap("SIGNATURE", b64(rsasig)))
return body
def signdesc(body, args_out=None):
    rsa, ident_pem, id_digest = make_rsa_key()
    _, onion_pem, _ = make_rsa_key()
need_ed = '{ED25519-CERT}' in body or '{ED25519-SIGNATURE}' in body
if need_ed:
sk_master = os.urandom(32)
sk_signing = os.urandom(32)
pk_master = slow_ed25519.pubkey(sk_master)
pk_signing = slow_ed25519.pubkey(sk_signing)
hexdigest = binascii.b2a_hex(id_digest).upper()
fingerprint = " ".join(hexdigest[i:i+4] for i in range(0,len(hexdigest),4))
MAGIC = "<<<<<<MAGIC>>>>>>"
MORE_MAGIC = "<<<<<!#!#!#XYZZY#!#!#!>>>>>"
args = {
"RSA-IDENTITY" : ident_pem,
"ONION-KEY" : onion_pem,
"FINGERPRINT" : fingerprint,
"FINGERPRINT-NOSPACE" : hexdigest,
"RSA-SIGNATURE" : MAGIC
}
if need_ed:
        # makeEdSigningKeyCert requires an expiration date; a one-day
        # validity is assumed here, mirroring OnDemandKeys' default certDate.
        args['ED25519-CERT'] = makeEdSigningKeyCert(
            sk_master, pk_master, pk_signing, int(time.time()) + 86400)
args['ED25519-SIGNATURE'] = MORE_MAGIC
if args_out:
args_out.update(args)
body = body.format(**args)
idx = body.rindex("\nrouter-signature")
end_of_sig = body.index("\n", idx+1)
signed_part = body[:end_of_sig+1]
digest = hashlib.sha1(signed_part).digest()
assert len(digest) == 20
buf = ctypes.create_string_buffer(1024)
n = RSA_private_encrypt(20, digest, buf, rsa, 1)
sig = buf.raw[:n]
sig = """-----BEGIN SIGNATURE-----
%s
-----END SIGNATURE-----""" % b64(sig).rstrip()
body = body.replace(MAGIC, sig)
return body.rstrip()
def print_c_string(ident, body):
print "static const char %s[] =" % ident
for line in body.split("\n"):
print ' "%s\\n"' %(line)
print " ;"
def emit_ri(name, body):
info = OnDemandKeys()
body = body.format(d=info)
body = info.sign_desc(body)
print_c_string("EX_RI_%s"%name.upper(), body)
def emit_ei(name, body):
info = OnDemandKeys()
body = body.format(d=info)
body = info.sign_desc(body)
print_c_string("EX_EI_%s"%name.upper(), body)
print 'const char EX_EI_{NAME}_FP[] = "{d.RSA_FINGERPRINT_NOSPACE}";'.format(
d=info, NAME=name.upper())
print_c_string("EX_EI_%s_KEY"%name.upper(), info.RSA_IDENTITY)
def analyze(s):
fields = {}
while s.startswith(":::"):
first,s=s.split("\n", 1)
m = re.match(r'^:::(\w+)=(.*)',first)
if not m:
raise ValueError(first)
k,v = m.groups()
fields[k] = v
return fields, s
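# Input files begin with ":::key=value" header lines consumed by analyze(),
# for example (hypothetical values):
#   :::name=minimal
#   :::type=ri
# followed by a descriptor template containing {d.FIELD} placeholders.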
def process_file(s):
fields, s = analyze(s)
try:
name = fields['name']
tp = fields['type']
except KeyError:
raise ValueError("missing required field")
if tp == 'ei':
emit_ei(name, s)
elif tp == 'ri':
emit_ri(name, s)
else:
raise ValueError("unrecognized type")
if __name__ == '__main__':
import sys
for fn in sys.argv[1:]:
process_file(open(fn).read())
| mit | 6,792,913,120,685,563,000 | 29.911681 | 149 | 0.614194 | false |
zackdever/kafka-python | test/test_consumer_group.py | 1 | 4801 | import collections
import logging
import threading
import time
import pytest
import six
from kafka import SimpleClient
from kafka.conn import ConnectionStates
from kafka.consumer.group import KafkaConsumer
from kafka.structs import TopicPartition
from test.conftest import version
from test.testutil import random_string
def get_connect_str(kafka_broker):
return 'localhost:' + str(kafka_broker.port)
@pytest.fixture
def simple_client(kafka_broker):
return SimpleClient(get_connect_str(kafka_broker))
@pytest.fixture
def topic(simple_client):
topic = random_string(5)
simple_client.ensure_topic_exists(topic)
return topic
@pytest.mark.skipif(not version(), reason="No KAFKA_VERSION set")
def test_consumer(kafka_broker, version):
# 0.8.2 brokers need a topic to function well
if version >= (0, 8, 2) and version < (0, 9):
topic(simple_client(kafka_broker))
consumer = KafkaConsumer(bootstrap_servers=get_connect_str(kafka_broker))
consumer.poll(500)
assert len(consumer._client._conns) > 0
node_id = list(consumer._client._conns.keys())[0]
assert consumer._client._conns[node_id].state is ConnectionStates.CONNECTED
@pytest.mark.skipif(version() < (0, 9), reason='Unsupported Kafka Version')
@pytest.mark.skipif(not version(), reason="No KAFKA_VERSION set")
def test_group(kafka_broker, topic):
num_partitions = 4
connect_str = get_connect_str(kafka_broker)
consumers = {}
stop = {}
threads = {}
    # messages[consumer index][topic-partition] -> list of consumed records
    messages = collections.defaultdict(lambda: collections.defaultdict(list))
def consumer_thread(i):
assert i not in consumers
assert i not in stop
stop[i] = threading.Event()
consumers[i] = KafkaConsumer(topic,
bootstrap_servers=connect_str,
heartbeat_interval_ms=500)
while not stop[i].is_set():
            for tp, records in six.iteritems(consumers[i].poll(100)):
messages[i][tp].extend(records)
consumers[i].close()
del consumers[i]
del stop[i]
num_consumers = 4
for i in range(num_consumers):
t = threading.Thread(target=consumer_thread, args=(i,))
t.start()
threads[i] = t
try:
timeout = time.time() + 35
while True:
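            # Poll until every consumer exists, has a partition assignment,
            # and all consumers agree on one group generation (i.e. the
            # rebalance has settled); otherwise fail via the timeout assert.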
for c in range(num_consumers):
# Verify all consumers have been created
if c not in consumers:
break
# Verify all consumers have an assignment
elif not consumers[c].assignment():
break
# Verify all consumers are in the same generation
generations = set()
for consumer in six.itervalues(consumers):
generations.add(consumer._coordinator.generation)
if len(generations) != 1:
break
# If all checks passed, log state and break while loop
else:
for c in range(num_consumers):
logging.info("[%s] %s %s: %s", c,
consumers[c]._coordinator.generation,
consumers[c]._coordinator.member_id,
consumers[c].assignment())
break
assert time.time() < timeout, "timeout waiting for assignments"
group_assignment = set()
for c in range(num_consumers):
assert len(consumers[c].assignment()) != 0
assert set.isdisjoint(consumers[c].assignment(), group_assignment)
group_assignment.update(consumers[c].assignment())
assert group_assignment == set([
TopicPartition(topic, partition)
for partition in range(num_partitions)])
finally:
for c in range(num_consumers):
stop[c].set()
threads[c].join()
@pytest.mark.skipif(not version(), reason="No KAFKA_VERSION set")
def test_paused(kafka_broker, topic):
consumer = KafkaConsumer(bootstrap_servers=get_connect_str(kafka_broker))
topics = [TopicPartition(topic, 1)]
consumer.assign(topics)
assert set(topics) == consumer.assignment()
assert set() == consumer.paused()
consumer.pause(topics[0])
assert set([topics[0]]) == consumer.paused()
consumer.resume(topics[0])
assert set() == consumer.paused()
consumer.unsubscribe()
assert set() == consumer.paused()
def test_heartbeat_timeout(conn, mocker):
mocker.patch('kafka.client_async.KafkaClient.check_version', return_value = '0.9')
mocker.patch('time.time', return_value = 1234)
consumer = KafkaConsumer('foobar')
mocker.patch.object(consumer._coordinator.heartbeat, 'ttl', return_value = 0)
assert consumer._next_timeout() == 1234
| apache-2.0 | -7,511,210,647,016,828,000 | 31.883562 | 86 | 0.614664 | false |
cheral/orange3 | Orange/widgets/utils/plot/owplot.py | 4 | 69148 | '''
#################
Plot (``owplot``)
#################
.. autoclass:: OrangeWidgets.plot.OWPlot
'''
from AnyQt.QtWidgets import \
QGraphicsView, QGraphicsScene, QGraphicsRectItem, QGraphicsTextItem,\
QToolTip, QApplication
from AnyQt.QtGui import QPen, QBrush, QColor, QPainter, QTransform, QPolygonF
from AnyQt.QtCore import \
QPointF, QRectF, QLineF, QPoint, QRect, QPropertyAnimation, Qt, QEvent, \
pyqtProperty
from Orange.widgets.gui import OWComponent
from Orange.widgets.settings import Setting
LeftLegend = 0
RightLegend = 1
BottomLegend = 2
TopLegend = 3
ExternalLegend = 4
UNUSED_ATTRIBUTES_STR = 'unused attributes'
from .owaxis import *
from .owcurve import *
from .owlegend import *
from .owplotgui import OWPlotGUI
from .owtools import *
from ..colorpalette import ColorPaletteGenerator
## Color values copied from orngView.SchemaView for consistency
SelectionPen = QPen(QBrush(QColor(51, 153, 255, 192)),
1, Qt.SolidLine, Qt.RoundCap)
SelectionBrush = QBrush(QColor(168, 202, 236, 192))
#from OWDlgs import OWChooseImageSizeDlg
#from OWColorPalette import * # color palletes, ...
#from Orange.utils import deprecated_members, deprecated_attribute
import orangeqt
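# min/max helpers that skip None entries, so partially unknown axis bounds
# do not poison the computed range.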
def n_min(*args):
lst = args[0] if len(args) == 1 else args
a = [i for i in lst if i is not None]
return min(a) if a else None
def n_max(*args):
lst = args[0] if len(args) == 1 else args
a = [i for i in lst if i is not None]
return max(a) if a else None
name_map = {
"saveToFileDirect": "save_to_file_direct",
"saveToFile" : "save_to_file",
"addCurve" : "add_curve",
"addMarker" : "add_marker",
"updateLayout" : "update_layout",
"activateZooming" : "activate_zooming",
"activateSelection" : "activate_selection",
"activateRectangleSelection" : "activate_rectangle_selection",
"activatePolygonSelection" : "activate_polygon_selection",
"activatePanning" : "activate_panning",
"getSelectedPoints" : "get_selected_points",
"setAxisScale" : "set_axis_scale",
"setAxisLabels" : "set_axis_labels",
"setAxisAutoScale" : "set_axis_autoscale",
"setTickLength" : "set_axis_tick_length",
"updateCurves" : "update_curves",
"itemList" : "plot_items",
"setShowMainTitle" : "set_show_main_title",
"setMainTitle" : "set_main_title",
"invTransform" : "inv_transform",
"setAxisTitle" : "set_axis_title",
"setShowAxisTitle" : "set_show_axis_title"
}
#@deprecated_members(name_map, wrap_methods=list(name_map.keys()))
class OWPlot(orangeqt.Plot, OWComponent):
"""
The base class for all plots in Orange. It uses the Qt Graphics View Framework
to draw elements on a graph.
**Plot layout**
.. attribute:: show_legend
A boolean controlling whether the legend is displayed or not
.. attribute:: show_main_title
Controls whether or not the main plot title is displayed
.. attribute:: main_title
The plot title, usually show on top of the plot
.. automethod:: set_main_title
.. automethod:: set_show_main_title
.. attribute:: axis_margin
How much space (in pixels) should be left on each side for the axis, its label and its title.
.. attribute:: title_margin
How much space (in pixels) should be left at the top of the plot for the title, if the title is shown.
.. seealso:: attribute :attr:`show_main_title`
.. attribute:: plot_margin
How much space (in pixels) should be left at each side of the plot as whitespace.
**Coordinate transformation**
There are several coordinate systems used by OWPlot:
* `widget` coordinates.
This is the coordinate system of the position returned by :meth:`.QEvent.pos()`.
No calculations or positions is done with this coordinates, they must first be converted
to scene coordinates with :meth:`mapToScene`.
* `data` coordinates.
The value used internally in Orange to specify the values of attributes.
For example, this can be age in years, the number of legs, or any other numeric value.
* `plot` coordinates.
These coordinates specify where the plot items are placed on the graph, but doesn't account for zoom.
They can be retrieved for a particular plot item with :meth:`.PlotItem.pos()`.
* `scene` or `zoom` coordinates.
Like plot coordinates, except that they take the :attr:`zoom_transform` into account. They represent the
actual position of an item on the scene.
These are the coordinates returned by :meth:`.PlotItem.scenePos()` and :meth:`mapToScene`.
For example, they can be used to determine what is under the cursor.
In most cases, you will use data coordinates for interacting with the actual data, and scene coordinates for
interacting with the plot items. The other two sets are mostly used for converting.
.. automethod:: map_to_graph
.. automethod:: map_from_graph
.. automethod:: transform
.. automethod:: inv_transform
.. method:: nearest_point(pos)
Returns the point nearest to ``pos``, or ``None`` if no point is close enough.
:param pos: The position in scene coordinates
:type pos: QPointF
:rtype: :obj:`.OWPoint`
.. method:: point_at(pos)
        If there is a point with data coordinates equal to ``pos``, it is returned.
Otherwise, this function returns None.
:param pos: The position in data coordinates
:type pos: tuple of float float
:rtype: :obj:`.OWPoint`
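    A round trip between data and scene coordinates (illustrative)::
        scene_pos = plot.map_to_graph((2.0, 3.0), zoom=True)
        data_pos = plot.map_from_graph(scene_pos, zoom=True)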
**Data curves**
The preferred method for showing a series of data points is :meth:`set_main_curve_data`.
It allows you to specify point positions, colors, labels, sizes and shapes.
.. automethod:: set_main_curve_data
.. automethod:: add_curve
.. automethod:: add_custom_curve
.. automethod:: add_marker
.. method:: add_item(item)
Adds any PlotItem ``item`` to this plot.
Calling this function directly is useful for adding a :obj:`.Marker` or another object that does not have to appear in the legend.
For data curves, consider using :meth:`add_custom_curve` instead.
.. method:: plot_items()
Returns the list of all plot items added to this graph with :meth:`add_item` or :meth:`.PlotItem.attach`.
**Axes**
.. automethod:: add_axis
.. automethod:: add_custom_axis
.. automethod:: set_axis_enabled
.. automethod:: set_axis_labels
.. automethod:: set_axis_scale
**Settings**
.. attribute:: gui
An :obj:`.OWPlotGUI` object associated with this graph
**Point Selection and Marking**
There are four possible selection behaviors used for selecting or marking points in OWPlot.
They are used in :meth:`select_points` and :meth:`mark_points` and are the same for both operations.
    .. data:: AddSelection
        The points are added to the selection, without affecting the currently selected points
    .. data:: RemoveSelection
        The points are removed from the selection, without affecting the currently selected points
.. data:: ToggleSelection
The points' selection state is toggled
.. data:: ReplaceSelection
The current selection is replaced with the new one
.. note:: There are exactly the same functions for point selection and marking.
For simplicity, they are only documented once.
.. method:: select_points(area, behavior)
.. method:: mark_points(area, behavior)
Selects or marks all points inside the ``area``
:param area: The newly selected/marked area
:type area: QRectF or QPolygonF
:param behavior: :data:`AddSelection`, :data:`RemoveSelection`, :data:`ToggleSelection` or :data:`ReplaceSelection`
:type behavior: int
.. method:: unselect_all_points()
.. method:: unmark_all_points()
Unselects or unmarks all the points in the plot
.. method:: selected_points()
.. method:: marked_points()
Returns a list of all selected or marked points
:rtype: list of OWPoint
.. method:: selected_points(xData, yData)
        For each of the points specified by ``xData`` and ``yData``, the point's selection state is returned.
:param xData: The list of x coordinates
:type xData: list of float
:param yData: The list of y coordinates
:type yData: list of float
:rtype: list of int
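    For example, to add all points inside a rectangle to the current
    selection without clearing it (illustrative)::
        plot.select_points(QRectF(0, 0, 10, 10), OWPlot.AddSelection)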
**Color schemes**
By default, OWPlot uses the application's system palette for drawing everything
    except data curves and points. This way, it maintains consistency with other applications
    with regard to the user interface.
If data is plotted with no color specified, it will use a system color as well,
    so that a good contrast with the background is guaranteed.
    OWPlot uses the :meth:`.OWWidget.palette` to determine its color scheme, so it can be
changed using :meth:`.QWidget.setPalette`. There are also two predefined color schemes:
``OWPalette.Dark`` and ``OWPalette.Light``, which provides a dark and a light scheme
respectively.
.. attribute:: theme_name
A string attribute with three possible values:
============== ===========================
Value Meaning
-------------- ---------------------------
"default" The system palette is used
"dark" The dark theme is used
"light" The light theme is used
============== ===========================
To apply the settings, first set this attribute's value, and then call :meth:`update_theme`
.. automethod:: update_theme
On the other hand, curves with a specified color will use colors from Orange's palette,
which can be configured within Orange. Each plot contains two separate palettes:
one for continuous attributes, and one for discrete ones. Both are created by
:obj:`.OWColorPalette.ColorPaletteGenerator`
.. attribute:: continuous_palette
The palette used when point color represents a continuous attribute
.. attribute:: discrete_palette
The palette used when point color represents a discrete attribute
"""
point_settings = ["point_width", "alpha_value"]
plot_settings = ["show_legend", "show_grid"]
alpha_value = Setting(255)
show_legend = Setting(False)
show_grid = Setting(False)
appearance_settings = ["antialias_plot", "animate_plot", "animate_points", "disable_animations_threshold", "auto_adjust_performance"]
def settings_list(self, graph_name, settings):
return [graph_name + '.' + setting for setting in settings]
def __init__(self, parent = None, name = "None", show_legend = 1, axes = [xBottom, yLeft], widget = None):
"""
Creates a new graph
If your visualization uses axes other than ``xBottom`` and ``yLeft``, specify them in the
``axes`` parameter. To use non-cartesian axes, set ``axes`` to an empty list
and add custom axes with :meth:`add_axis` or :meth:`add_custom_axis`
"""
orangeqt.Plot.__init__(self, parent)
OWComponent.__init__(self, widget)
self.widget = widget
self.parent_name = name
self.title_item = None
self.setRenderHints(QPainter.Antialiasing | QPainter.TextAntialiasing)
self._legend = OWLegend(self, self.scene())
self._legend.setZValue(LegendZValue)
self._legend_margin = QRectF(0, 0, 100, 0)
self._legend_moved = False
self.axes = dict()
self.axis_margin = 50
self.y_axis_extra_margin = 30
self.title_margin = 40
self.graph_margin = 10
self.mainTitle = None
self.showMainTitle = False
self.XaxisTitle = None
self.YLaxisTitle = None
self.YRaxisTitle = None
# Method aliases, because there are some methods with different names but same functions
self.setCanvasBackground = self.setCanvasColor
self.map_from_widget = self.mapToScene
# OWScatterPlot needs these:
self.point_width = 5
self.show_filled_symbols = True
self.show_grid = True
self.curveSymbols = list(range(13))
self.tips = TooltipManager(self)
self.setMouseTracking(True)
self.grabGesture(Qt.PinchGesture)
self.grabGesture(Qt.PanGesture)
self.state = NOTHING
self._pressed_mouse_button = Qt.NoButton
self._pressed_point = None
self.selection_items = []
self._current_rs_item = None
self._current_ps_item = None
self.polygon_close_treshold = 10
self.sendSelectionOnUpdate = False
self.auto_send_selection_callback = None
self.data_range = {}
self.map_transform = QTransform()
self.graph_area = QRectF()
## Performance optimization
self.setViewportUpdateMode(QGraphicsView.FullViewportUpdate)
self.scene().setItemIndexMethod(QGraphicsScene.NoIndex)
self.animate_plot = True
self.animate_points = True
self.antialias_plot = True
self.antialias_points = True
self.antialias_lines = True
self.auto_adjust_performance = True
self.disable_animations_threshold = 5000
# self.setInteractive(False)
self.warn_unused_attributes = False
self._bounds_cache = {}
self._transform_cache = {}
self.block_update = False
self.use_animations = True
self._animations = []
## Mouse event handlers
self.mousePressEventHandler = None
self.mouseMoveEventHandler = None
self.mouseReleaseEventHandler = None
self.mouseStaticClickHandler = self.mouseStaticClick
self.static_click = False
self._marker_items = []
self.grid_curve = PlotGrid(self)
self._zoom_rect = None
self._zoom_transform = QTransform()
self.zoom_stack = []
self.old_legend_margin = None
self.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
## Add specified axes:
for key in axes:
if key in [yLeft, xTop]:
self.add_axis(key, title_above=1)
else:
self.add_axis(key)
self.continuous_palette = ColorPaletteGenerator(number_of_colors= -1)
self.discrete_palette = ColorPaletteGenerator()
self.gui = OWPlotGUI(self)
"""
An :obj:`.OWPlotGUI` object associated with this plot
"""
self.activate_zooming()
self.selection_behavior = self.AddSelection
self.main_curve = None
self.replot()
# selectionCurveList = deprecated_attribute("selectionCurveList", "selection_items")
# autoSendSelectionCallback = deprecated_attribute("autoSendSelectionCallback", "auto_send_selection_callback")
# showLegend = deprecated_attribute("showLegend", "show_legend")
# pointWidth = deprecated_attribute("pointWidth", "point_width")
# alphaValue = deprecated_attribute("alphaValue", "alpha_value")
# useAntialiasing = deprecated_attribute("useAntialiasing", "use_antialiasing")
# showFilledSymbols = deprecated_attribute("showFilledSymbols", "show_filled_symbols")
# mainTitle = deprecated_attribute("mainTitle", "main_title")
# showMainTitle = deprecated_attribute("showMainTitle", "show_main_title")
# gridCurve = deprecated_attribute("gridCurve", "grid_curve")
# contPalette = deprecated_attribute("contPalette", "continuous_palette")
# discPalette = deprecated_attribute("discPalette", "discrete_palette")
def scrollContentsBy(self, dx, dy):
# This is overriden here to prevent scrolling with mouse and keyboard
# Instead of moving the contents, we simply do nothing
pass
def graph_area_rect(self):
return self.graph_area
def map_to_graph(self, point, axes = None, zoom = False):
'''
        Maps ``point``, which can be either a tuple of (x,y), a QPoint or a QPointF, from data coordinates
        to plot coordinates.
        :param point: The point in data coordinates
        :type point: tuple or QPointF
        :param axes: The pair of axes along which to transform the point.
            If none are specified, (xBottom, yLeft) will be used.
        :type axes: tuple of int
        :param zoom: if ``True``, the current :attr:`zoom_transform` will be considered in the transformation, and the result will be in scene coordinates instead.
        :type zoom: bool
        :return: The transformed point in scene coordinates
        :rtype: tuple of float float
'''
if type(point) == tuple:
(x, y) = point
point = QPointF(x, y)
if axes:
x_id, y_id = axes
point = point * self.transform_for_axes(x_id, y_id)
else:
point = point * self.map_transform
if zoom:
point = point * self._zoom_transform
return (point.x(), point.y())
def map_from_graph(self, point, axes = None, zoom = False):
'''
        Maps ``point``, which can be either a tuple of (x,y), a QPoint or a QPointF, from plot coordinates
        to data coordinates.
        :param point: The point in plot coordinates (in scene coordinates if ``zoom`` is ``True``)
        :type point: tuple or QPointF
        :param axes: The pair of axes along which to transform the point. If none are specified, (xBottom, yLeft) will be used.
        :type axes: tuple of int
        :param zoom: if ``True``, the current :attr:`zoom_transform` will be considered in the transformation, and the ``point`` should be in scene coordinates instead.
        :type zoom: bool
        :returns: The transformed point in data coordinates
        :rtype: tuple of float float
'''
if type(point) == tuple:
(x, y) = point
point = QPointF(x,y)
if zoom:
t, ok = self._zoom_transform.inverted()
point = point * t
if axes:
x_id, y_id = axes
t, ok = self.transform_for_axes(x_id, y_id).inverted()
else:
t, ok = self.map_transform.inverted()
ret = point * t
return (ret.x(), ret.y())
def save_to_file(self, extraButtons = []):
sizeDlg = OWChooseImageSizeDlg(self, extraButtons, parent=self)
sizeDlg.exec_()
def save_to_file_direct(self, fileName, size = None):
sizeDlg = OWChooseImageSizeDlg(self)
sizeDlg.saveImage(fileName, size)
def activate_zooming(self):
'''
Activates the zooming mode, where the user can zoom in and out with a single mouse click
or by dragging the mouse to form a rectangular area
'''
self.state = ZOOMING
def activate_rectangle_selection(self):
'''
Activates the rectangle selection mode, where the user can select points in a rectangular area
by dragging the mouse over them
'''
self.state = SELECT_RECTANGLE
def activate_selection(self):
'''
Activates the point selection mode, where the user can select points by clicking on them
'''
self.state = SELECT
def activate_polygon_selection(self):
'''
Activates the polygon selection mode, where the user can select points by drawing a polygon around them
'''
self.state = SELECT_POLYGON
def activate_panning(self):
'''
Activates the panning mode, where the user can move the zoom projection by dragging the mouse
'''
self.state = PANNING
def set_show_main_title(self, b):
'''
Shows the main title if ``b`` is ``True``, and hides it otherwise.
'''
self.showMainTitle = b
self.replot()
def set_main_title(self, t):
'''
Sets the main title to ``t``
'''
self.mainTitle = t
self.replot()
def setShowXaxisTitle(self, b = -1):
if b == -1 and hasattr(self, 'showXaxisTitle'):
b = self.showXaxisTitle
self.set_show_axis_title(xBottom, b)
def setXaxisTitle(self, title):
self.set_axis_title(xBottom, title)
def setShowYLaxisTitle(self, b = -1):
if b == -1 and hasattr(self, 'showYLaxisTitle'):
b = self.showYLaxisTitle
self.set_show_axis_title(yLeft, b)
def setYLaxisTitle(self, title):
self.set_axis_title(yLeft, title)
def setShowYRaxisTitle(self, b = -1):
if b == -1 and hasattr(self, 'showYRaxisTitle'):
b = self.showYRaxisTitle
self.set_show_axis_title(yRight, b)
def setYRaxisTitle(self, title):
self.set_axis_title(yRight, title)
def enableGridXB(self, b):
self.grid_curve.set_x_enabled(b)
self.replot()
def enableGridYL(self, b):
self.grid_curve.set_y_enabled(b)
self.replot()
def setGridColor(self, c):
self.grid_curve.set_pen(QPen(c))
self.replot()
def setCanvasColor(self, c):
p = self.palette()
p.setColor(OWPalette.Canvas, c)
self.set_palette(p)
def setData(self, data):
self.clear()
self.replot()
def setXlabels(self, labels):
if xBottom in self.axes:
self.set_axis_labels(xBottom, labels)
elif xTop in self.axes:
self.set_axis_labels(xTop, labels)
def set_axis_autoscale(self, axis_id):
if axis_id in self.axes:
self.axes[axis_id].auto_scale = True
elif axis_id in self.data_range:
del self.data_range[axis_id]
def set_axis_labels(self, axis_id, labels, values=None):
'''
Sets the labels of axis ``axis_id`` to ``labels``. This is used for axes displaying a discrete data type.
        :param axis_id: The ID of the axis to change
        :type axis_id: int
:param labels: The list of labels to be displayed along the axis
:type labels: A list of strings
.. note:: This changes the axis scale and removes any previous scale set with :meth:`set_axis_scale`.
'''
if axis_id in self._bounds_cache:
del self._bounds_cache[axis_id]
self._transform_cache = {}
self.axes[axis_id].set_labels(labels, values)
def set_axis_scale(self, axis_id, min, max, step_size=0):
'''
Sets the scale of axis ``axis_id`` to show an interval between ``min`` and ``max``.
        If ``step_size`` is specified and non-zero, it determines the step between labels on the axis.
Otherwise, they are calculated automatically.
.. note:: This changes the axis scale and removes any previous labels set with :meth:`set_axis_labels`.
'''
if axis_id in self._bounds_cache:
del self._bounds_cache[axis_id]
self._transform_cache = {}
if axis_id in self.axes:
self.axes[axis_id].set_scale(min, max, step_size)
else:
self.data_range[axis_id] = (min, max)
def set_axis_title(self, axis_id, title):
if axis_id in self.axes:
self.axes[axis_id].set_title(title)
def set_show_axis_title(self, axis_id, b):
if axis_id in self.axes:
if b == -1:
b = not self.axes[axis_id].show_title
self.axes[axis_id].set_show_title(b)
self.replot()
def set_axis_tick_length(self, axis_id, minor, medium, major):
if axis_id in self.axes:
self.axes[axis_id].set_tick_legth(minor, medium, major)
def setYLlabels(self, labels):
self.set_axis_labels(yLeft, labels)
def setYRlabels(self, labels):
self.set_axis_labels(yRight, labels)
def add_custom_curve(self, curve, enableLegend = False):
'''
Adds a custom PlotItem ``curve`` to the plot.
If ``enableLegend`` is ``True``, a curve symbol defined by
:meth:`.OWCurve.point_item` and the ``curve``'s name
:obj:`.OWCurve.name` is added to the legend.
This function recalculates axis bounds and replots the plot if needed.
:param curve: The curve to add
:type curve: :obj:`.OWCurve`
'''
self.add_item(curve)
if enableLegend:
self.legend().add_curve(curve)
for key in [curve.axes()]:
if key in self._bounds_cache:
del self._bounds_cache[key]
self._transform_cache = {}
if hasattr(curve, 'tooltip'):
curve.setToolTip(curve.tooltip)
x,y = curve.axes()
if curve.is_auto_scale() and (self.is_axis_auto_scale(x) or self.is_axis_auto_scale(y)):
self.set_dirty()
self.replot()
else:
curve.set_graph_transform(self.transform_for_axes(x,y))
curve.update_properties()
return curve
def add_curve(self, name, brushColor = None, penColor = None, size = 5, style = Qt.NoPen,
symbol = OWPoint.Ellipse, enableLegend = False, xData = [], yData = [], showFilledSymbols = None,
lineWidth = 1, pen = None, autoScale = 0, antiAlias = None, penAlpha = 255, brushAlpha = 255,
x_axis_key = xBottom, y_axis_key = yLeft):
'''
Creates a new :obj:`.OWCurve` with the specified parameters and adds it to the graph.
If ``enableLegend`` is ``True``, a curve symbol is added to the legend.
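        A minimal call (illustrative)::
            plot.add_curve("data", brushColor=QColor(Qt.blue),
                           xData=xs, yData=ys)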
'''
c = OWCurve(xData, yData, x_axis_key, y_axis_key, tooltip=name)
c.set_zoom_transform(self._zoom_transform)
c.name = name
c.set_style(style)
if not brushColor:
brushColor = self.color(OWPalette.Data)
if not penColor:
penColor = self.color(OWPalette.Data)
c.set_color(penColor)
if pen:
p = pen
else:
p = QPen()
p.setColor(penColor)
p.setWidth(lineWidth)
c.set_pen(p)
c.set_brush(brushColor)
c.set_symbol(symbol)
c.set_point_size(size)
c.set_data(xData, yData)
c.set_auto_scale(autoScale)
return self.add_custom_curve(c, enableLegend)
def set_main_curve_data(self, x_data, y_data, color_data, label_data, size_data, shape_data, marked_data = [], valid_data = [], x_axis_key=xBottom, y_axis_key=yLeft):
"""
Creates a single curve that can have points of different colors, shapes and sizes.
This is the preferred method for visualization that show a series of different points.
:param x_data: The list of X coordinates of the points
:type x_data: list of float
:param y_data: The list of Y coordinates of the points
:type y_data: list of float
:param color_data: The list of point colors
:type color_data: list of QColor
:param label_data: The list of point labels
:type label_data: list of str
:param size_data: The list of point sizes
:type size_data: list of int
:param shape_data: The list of point symbols
:type shape_data: list of int
The number of points in the curve will be equal to min(len(x_data), len(y_data)).
        The other four lists can be empty, in which case a default value will be used.
If they contain only one element, its value will be used for all points.
.. note:: This function does not add items to the legend automatically.
You will have to add them yourself with :meth:`.OWLegend.add_item`.
.. seealso:: :obj:`.OWMultiCurve`, :obj:`.OWPoint`
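        A minimal call (illustrative), with one shared size and symbol::
            plot.set_main_curve_data(xs, ys, color_data=[], label_data=[],
                                     size_data=[5], shape_data=[OWPoint.Ellipse])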
"""
if not self.main_curve:
self.main_curve = OWMultiCurve([], [])
self.add_item(self.main_curve)
self.update_performance(len(x_data))
if len(valid_data):
import numpy
x_data = numpy.compress(valid_data, x_data)
y_data = numpy.compress(valid_data, y_data)
if len(color_data) > 1:
color_data = numpy.compress(valid_data, color_data)
if len(size_data) > 1:
size_data = numpy.compress(valid_data, size_data)
if len(shape_data) > 1:
shape_data = numpy.compress(valid_data, shape_data)
if len(label_data) > 1:
label_data = numpy.compress(valid_data, label_data)
if len(marked_data) > 1:
marked_data = numpy.compress(valid_data, marked_data).tolist()
c = self.main_curve
c.set_data(x_data, y_data)
c.set_axes(x_axis_key, y_axis_key)
c.set_point_colors(color_data)
c.set_point_labels(label_data)
c.set_point_sizes(size_data)
c.set_point_symbols(shape_data)
if len(marked_data):
c.set_points_marked(marked_data)
self.marked_points_changed.emit()
c.name = 'Main Curve'
self.replot()
def remove_curve(self, item):
'''
Removes ``item`` from the plot
'''
self.remove_item(item)
self.legend().remove_curve(item)
def plot_data(self, xData, yData, colors, labels, shapes, sizes):
pass
def add_axis(self, axis_id, title='', title_above=False, title_location=AxisMiddle,
line=None, arrows=0, zoomable=False, bounds=None):
'''
Creates an :obj:`OrangeWidgets.plot.OWAxis` with the specified ``axis_id`` and ``title``.
'''
a = OWAxis(axis_id, title, title_above, title_location, line, arrows, self, bounds=bounds)
self.scene().addItem(a)
a.zoomable = zoomable
a.update_callback = self.replot
if axis_id in self._bounds_cache:
del self._bounds_cache[axis_id]
self._transform_cache = {}
self.axes[axis_id] = a
if not axis_id in CartesianAxes:
self.set_show_axis_title(axis_id, True)
return a
def remove_all_axes(self, user_only = True):
'''
Removes all axes from the plot
'''
ids = []
for id,item in self.axes.items():
if not user_only or id >= UserAxis:
ids.append(id)
self.scene().removeItem(item)
for id in ids:
del self.axes[id]
def add_custom_axis(self, axis_id, axis):
'''
Adds a custom ``axis`` with id ``axis_id`` to the plot
'''
self.axes[axis_id] = axis
self.replot()
def add_marker(self, name, x, y, alignment = -1, bold = 0, color = None, brushColor = None, size=None, antiAlias = None,
x_axis_key = xBottom, y_axis_key = yLeft):
m = Marker(name, x, y, alignment, bold, color, brushColor)
self._marker_items.append((m, x, y, x_axis_key, y_axis_key))
self.add_custom_curve(m)
return m
def removeAllSelections(self):
## TODO
pass
def clear(self):
"""
Clears the plot, removing all curves, markers and tooltips.
Axes and the grid are not removed
"""
for i in self.plot_items():
if i is not self.grid_curve:
self.remove_item(i)
self.main_curve = None
self._bounds_cache = {}
self._transform_cache = {}
self.clear_markers()
self.tips.removeAll()
self.legend().clear()
self.old_legend_margin = None
self.update_grid()
def clear_markers(self):
"""
Removes all markers added with :meth:`add_marker` from the plot
"""
for item,x,y,x_axis,y_axis in self._marker_items:
item.detach()
self._marker_items = []
def update_layout(self):
'''
Updates the plot layout.
This function recalculates the position of titles, axes, the legend and the main plot area.
It does not update the curve or the other plot items.
'''
if not self.isVisible():
# No point in updating the graph if it's still hidden
return
graph_rect = QRectF(self.contentsRect())
self.centerOn(graph_rect.center())
m = self.graph_margin
graph_rect.adjust(m, m, -m, -m)
if self.showMainTitle and self.mainTitle:
            if self.title_item:
                self.scene().removeItem(self.title_item)
                del self.title_item
            self.title_item = QGraphicsTextItem(self.mainTitle)
            self.scene().addItem(self.title_item)
title_size = self.title_item.boundingRect().size()
## TODO: Check if the title is too big
self.title_item.setPos( graph_rect.width()/2 - title_size.width()/2, self.title_margin/2 - title_size.height()/2 )
graph_rect.setTop(graph_rect.top() + self.title_margin)
if self.show_legend:
self._legend_outside_area = QRectF(graph_rect)
self._legend.max_size = self._legend_outside_area.size()
r = self._legend_margin
graph_rect.adjust(r.left(), r.top(), -r.right(), -r.bottom())
self._legend.update_items()
axis_rects = dict()
        base_margin = min(self.axis_margin, graph_rect.height()/4, graph_rect.width()/4)
if xBottom in self.axes and self.axes[xBottom].isVisible():
margin = base_margin
if self.axes[xBottom].should_be_expanded():
margin += min(20, graph_rect.height()/8, graph_rect.width() / 8)
bottom_rect = QRectF(graph_rect)
bottom_rect.setTop( bottom_rect.bottom() - margin)
axis_rects[xBottom] = bottom_rect
graph_rect.setBottom( graph_rect.bottom() - margin)
if xTop in self.axes and self.axes[xTop].isVisible():
margin = base_margin
if self.axes[xTop].should_be_expanded():
margin += min(20, graph_rect.height()/8, graph_rect.width() / 8)
top_rect = QRectF(graph_rect)
top_rect.setBottom(top_rect.top() + margin)
axis_rects[xTop] = top_rect
graph_rect.setTop(graph_rect.top() + margin)
if yLeft in self.axes and self.axes[yLeft].isVisible():
margin = base_margin
if self.axes[yLeft].should_be_expanded():
margin += min(20, graph_rect.height()/8, graph_rect.width() / 8)
left_rect = QRectF(graph_rect)
left = graph_rect.left() + margin + self.y_axis_extra_margin
left_rect.setRight(left)
graph_rect.setLeft(left)
axis_rects[yLeft] = left_rect
if xBottom in axis_rects:
axis_rects[xBottom].setLeft(left)
if xTop in axis_rects:
axis_rects[xTop].setLeft(left)
if yRight in self.axes and self.axes[yRight].isVisible():
margin = base_margin
if self.axes[yRight].should_be_expanded():
margin += min(20, graph_rect.height()/8, graph_rect.width() / 8)
right_rect = QRectF(graph_rect)
right = graph_rect.right() - margin - self.y_axis_extra_margin
right_rect.setLeft(right)
graph_rect.setRight(right)
axis_rects[yRight] = right_rect
if xBottom in axis_rects:
axis_rects[xBottom].setRight(right)
if xTop in axis_rects:
axis_rects[xTop].setRight(right)
if self.graph_area != graph_rect:
self.graph_area = QRectF(graph_rect)
self.set_graph_rect(self.graph_area)
self._transform_cache = {}
if self._zoom_rect:
data_zoom_rect = self.map_transform.inverted()[0].mapRect(self._zoom_rect)
self.map_transform = self.transform_for_axes()
self.set_zoom_rect(self.map_transform.mapRect(data_zoom_rect))
self.map_transform = self.transform_for_axes()
for c in self.plot_items():
x,y = c.axes()
c.set_graph_transform(self.transform_for_axes(x,y))
c.update_properties()
def update_zoom(self):
'''
Updates the zoom transformation of the plot items.
'''
zt = self.zoom_transform()
self._zoom_transform = zt
self.set_zoom_transform(zt)
self.update_axes(zoom_only=True)
self.viewport().update()
def update_axes(self, zoom_only=False):
"""
Updates the axes.
If ``zoom_only`` is ``True``, only the positions of the axes and their labels are recalculated.
Otherwise, all their labels are updated.
"""
if self.warn_unused_attributes and not zoom_only:
self._legend.remove_category(UNUSED_ATTRIBUTES_STR)
for id, item in self.axes.items():
if item.scale is None and item.labels is None:
item.auto_range = self.bounds_for_axis(id)
if id in XAxes:
(x,y) = (id, yLeft)
elif id in YAxes:
(x,y) = (xBottom, id)
else:
(x,y) = (xBottom, yLeft)
if id in CartesianAxes:
## This class only sets the lines for these four axes, widgets are responsible for the rest
if x in self.axes and y in self.axes:
item.data_line = self.axis_line(self.data_rect_for_axes(x,y), id)
if id in CartesianAxes:
item.graph_line = self.axis_line(self.graph_area, id, invert_y = True)
elif item.data_line:
t = self.transform_for_axes(x, y)
item.graph_line = t.map(item.data_line)
if item.graph_line and item.zoomable:
item.graph_line = self._zoom_transform.map(item.graph_line)
if not zoom_only:
if item.graph_line:
item.show()
else:
item.hide()
if self.warn_unused_attributes:
self._legend.add_item(UNUSED_ATTRIBUTES_STR, item.title, None)
item.zoom_transform = self._zoom_transform
item.update(zoom_only)
def replot(self):
'''
Replot the entire graph.
        This function redraws everything on the graph, so it can be very slow
'''
#self.setBackgroundBrush(self.color(OWPalette.Canvas))
self._bounds_cache = {}
self._transform_cache = {}
self.set_clean()
self.update_antialiasing()
self.update_legend()
self.update_layout()
self.update_zoom()
self.update_axes()
self.update_grid()
self.update_filled_symbols()
self.setSceneRect(QRectF(self.contentsRect()))
self.viewport().update()
def update_legend(self):
if self.show_legend and not self._legend_moved:
            ## If the legend hasn't been moved, we place it outside, in the top right corner
m = self.graph_margin
r = QRectF(self.contentsRect())
r.adjust(m, m, -m, -m)
self._legend.max_size = r.size()
self._legend.update_items()
w = self._legend.boundingRect().width()
self._legend_margin = QRectF(0, 0, w, 0)
self._legend.set_floating(False)
self._legend.set_orientation(Qt.Vertical)
self._legend.setPos(QRectF(self.contentsRect()).topRight() + QPointF(-w, 0))
if (self._legend.isVisible() == self.show_legend):
return
self._legend.setVisible(self.show_legend)
if self.show_legend:
if self.old_legend_margin is not None:
self.animate(self, 'legend_margin', self.old_legend_margin, duration = 100)
else:
r = self.legend_rect()
self.ensure_inside(r, self.contentsRect())
self._legend.setPos(r.topLeft())
self.notify_legend_moved(r.topLeft())
else:
self.old_legend_margin = self.legend_margin
self.animate(self, 'legend_margin', QRectF(), duration=100)
def update_filled_symbols(self):
## TODO: Implement this in Curve.cpp
pass
def update_grid(self):
self.grid_curve.set_x_enabled(self.show_grid)
self.grid_curve.set_y_enabled(self.show_grid)
self.grid_curve.update_properties()
def legend(self):
'''
Returns the plot's legend, which is a :obj:`OrangeWidgets.plot.OWLegend`
'''
return self._legend
def legend_rect(self):
if self.show_legend:
return self._legend.mapRectToScene(self._legend.boundingRect())
else:
return QRectF()
def isLegendEvent(self, event, function):
if self.show_legend and self.legend_rect().contains(self.mapToScene(event.pos())):
function(self, event)
return True
else:
return False
def mouse_action(self, event):
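        # Resolve the button+modifier combination to an interaction state:
        # left+right together or Alt+left act as the middle button (panning);
        # a plain left click uses the current tool (self.state); Ctrl forces
        # zooming and Shift+left forces selection.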
b = event.buttons() | event.button()
m = event.modifiers()
if b == Qt.LeftButton | Qt.RightButton:
b = Qt.MidButton
if m & Qt.AltModifier and b == Qt.LeftButton:
m = m & ~Qt.AltModifier
b = Qt.MidButton
if b == Qt.LeftButton and not m:
return self.state
if b == Qt.RightButton and not m and self.state == SELECT:
return SELECT_RIGHTCLICK
if b == Qt.MidButton:
return PANNING
if b in [Qt.LeftButton, Qt.RightButton] and (self.state == ZOOMING or m == Qt.ControlModifier):
return ZOOMING
if b == Qt.LeftButton and m == Qt.ShiftModifier:
return SELECT
## Event handling
def event(self, event):
if event.type() == QEvent.Gesture:
return self.gestureEvent(event)
else:
return orangeqt.Plot.event(self, event)
def gestureEvent(self, event):
for gesture in event.gestures():
if gesture.state() == Qt.GestureStarted:
self.current_gesture_scale = 1.
event.accept(gesture)
continue
elif gesture.gestureType() == Qt.PinchGesture:
old_animate_plot = self.animate_plot
self.animate_plot = False
self.zoom(gesture.centerPoint(), gesture.scaleFactor()/self.current_gesture_scale )
self.current_gesture_scale = gesture.scaleFactor()
self.animate_plot = old_animate_plot
elif gesture.gestureType() == Qt.PanGesture:
self.pan(gesture.delta())
return True
def resizeEvent(self, event):
self.replot()
s = event.size() - event.oldSize()
if self.legend_margin.right() > 0:
self._legend.setPos(self._legend.pos() + QPointF(s.width(), 0))
if self.legend_margin.bottom() > 0:
self._legend.setPos(self._legend.pos() + QPointF(0, s.height()))
def showEvent(self, event):
self.replot()
def mousePressEvent(self, event):
self.static_click = True
self._pressed_mouse_button = event.button()
self._pressed_mouse_pos = event.pos()
if self.mousePressEventHandler and self.mousePressEventHandler(event):
event.accept()
return
if self.isLegendEvent(event, QGraphicsView.mousePressEvent):
return
point = self.mapToScene(event.pos())
a = self.mouse_action(event)
if a == SELECT and hasattr(self, 'move_selected_points'):
self._pressed_point = self.nearest_point(point)
self._pressed_point_coor = None
if self._pressed_point is not None:
self._pressed_point_coor = self._pressed_point.coordinates()
if a == PANNING:
self._last_pan_pos = point
event.accept()
else:
orangeqt.Plot.mousePressEvent(self, event)
def mouseMoveEvent(self, event):
if event.buttons() and (self._pressed_mouse_pos - event.pos()).manhattanLength() > QApplication.instance().startDragDistance():
self.static_click = False
if self.mouseMoveEventHandler and self.mouseMoveEventHandler(event):
event.accept()
return
if self.isLegendEvent(event, QGraphicsView.mouseMoveEvent):
return
point = self.mapToScene(event.pos())
if not self._pressed_mouse_button:
if self.receivers(self.point_hovered) > 0:
self.point_hovered.emit(self.nearest_point(point))
## We implement a workaround here, because sometimes mouseMoveEvents are not fast enough
## so the moving legend gets left behind while dragging, and it's left in a pressed state
if self._legend.mouse_down:
QGraphicsView.mouseMoveEvent(self, event)
return
a = self.mouse_action(event)
if a == SELECT and self._pressed_point is not None and self._pressed_point.is_selected() and hasattr(self, 'move_selected_points'):
animate_points = self.animate_points
self.animate_points = False
x1, y1 = self._pressed_point_coor
x2, y2 = self.map_from_graph(point, zoom=True)
self.move_selected_points((x2 - x1, y2 - y1))
self.replot()
if self._pressed_point is not None:
self._pressed_point_coor = self._pressed_point.coordinates()
self.animate_points = animate_points
elif a in [SELECT, ZOOMING] and self.graph_area.contains(point):
if not self._current_rs_item:
self._selection_start_point = self.mapToScene(self._pressed_mouse_pos)
                self._current_rs_item = QGraphicsRectItem()
                self.scene().addItem(self._current_rs_item)
self._current_rs_item.setPen(SelectionPen)
self._current_rs_item.setBrush(SelectionBrush)
self._current_rs_item.setZValue(SelectionZValue)
self._current_rs_item.setRect(QRectF(self._selection_start_point, point).normalized())
elif a == PANNING:
if not self._last_pan_pos:
self._last_pan_pos = self.mapToScene(self._pressed_mouse_pos)
self.pan(point - self._last_pan_pos)
self._last_pan_pos = point
else:
x, y = self.map_from_graph(point, zoom=True)
text, x, y = self.tips.maybeTip(x, y)
if type(text) == int:
text = self.buildTooltip(text)
if text and x is not None and y is not None:
tp = self.mapFromScene(QPointF(x,y) * self.map_transform * self._zoom_transform)
self.showTip(tp.x(), tp.y(), text)
else:
orangeqt.Plot.mouseMoveEvent(self, event)
def mouseReleaseEvent(self, event):
self._pressed_mouse_button = Qt.NoButton
if self.mouseReleaseEventHandler and self.mouseReleaseEventHandler(event):
event.accept()
return
if self.static_click and self.mouseStaticClickHandler and self.mouseStaticClickHandler(event):
event.accept()
return
if self.isLegendEvent(event, QGraphicsView.mouseReleaseEvent):
return
a = self.mouse_action(event)
if a == SELECT and self._pressed_point is not None:
self._pressed_point = None
if a in [ZOOMING, SELECT] and self._current_rs_item:
rect = self._current_rs_item.rect()
if a == ZOOMING:
self.zoom_to_rect(self._zoom_transform.inverted()[0].mapRect(rect))
else:
self.add_selection(rect)
self.scene().removeItem(self._current_rs_item)
self._current_rs_item = None
return
orangeqt.Plot.mouseReleaseEvent(self, event)
def mouseStaticClick(self, event):
point = self.mapToScene(event.pos())
if point not in self.graph_area:
return False
a = self.mouse_action(event)
b = event.buttons() | event.button()
if a == ZOOMING:
if event.button() == Qt.LeftButton:
self.zoom_in(point)
elif event.button() == Qt.RightButton:
self.zoom_back()
else:
return False
return True
elif a == SELECT and b == Qt.LeftButton:
point_item = self.nearest_point(point)
b = self.selection_behavior
if b == self.ReplaceSelection:
self.unselect_all_points()
b = self.AddSelection
if point_item:
point_item.set_selected(b == self.AddSelection or (b == self.ToggleSelection and not point_item.is_selected()))
self.selection_changed.emit()
elif a == SELECT and b == Qt.RightButton:
point_item = self.nearest_point(point)
if point_item:
self.point_rightclicked.emit(self.nearest_point(point))
else:
self.unselect_all_points()
else:
return False
def wheelEvent(self, event):
point = self.mapToScene(event.pos())
d = event.delta() / 120.0
self.zoom(point, pow(2,d))
@staticmethod
def transform_from_rects(r1, r2):
"""
Returns a QTransform that maps from rectangle ``r1`` to ``r2``.
"""
if r1 is None or r2 is None:
return QTransform()
if r1.width() == 0 or r1.height() == 0 or r2.width() == 0 or r2.height() == 0:
return QTransform()
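        # Compose left to right: move r1's top-left corner to the origin,
        # scale to r2's size, then translate to r2's top-left corner.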
tr1 = QTransform().translate(-r1.left(), -r1.top())
ts = QTransform().scale(r2.width()/r1.width(), r2.height()/r1.height())
tr2 = QTransform().translate(r2.left(), r2.top())
return tr1 * ts * tr2
def transform_for_zoom(self, factor, point, rect):
if factor == 1:
return QTransform()
dp = point
t = QTransform()
t.translate(dp.x(), dp.y())
t.scale(factor, factor)
t.translate(-dp.x(), -dp.y())
return t
def rect_for_zoom(self, point, old_rect, scale = 2):
r = QRectF()
r.setWidth(old_rect.width() / scale)
r.setHeight(old_rect.height() / scale)
r.moveCenter(point)
self.ensure_inside(r, self.graph_area)
return r
def set_state(self, state):
self.state = state
if state != SELECT_RECTANGLE:
self._current_rs_item = None
if state != SELECT_POLYGON:
self._current_ps_item = None
def get_selected_points(self, xData, yData, validData):
if self.main_curve:
selected = []
points = self.main_curve.points()
i = 0
for d in validData:
if d:
selected.append(points[i].is_selected())
i += 1
else:
selected.append(False)
else:
selected = self.selected_points(xData, yData)
unselected = [not i for i in selected]
return selected, unselected
def add_selection(self, reg):
"""
        Selects all points in the region ``reg`` using the current :attr:`selection_behavior`.
"""
self.select_points(reg, self.selection_behavior)
self.viewport().update()
if self.auto_send_selection_callback:
self.auto_send_selection_callback()
def points_equal(self, p1, p2):
if type(p1) == tuple:
(x, y) = p1
p1 = QPointF(x, y)
if type(p2) == tuple:
(x, y) = p2
p2 = QPointF(x, y)
return (QPointF(p1)-QPointF(p2)).manhattanLength() < self.polygon_close_treshold
def data_rect_for_axes(self, x_axis = xBottom, y_axis = yLeft):
"""
Calculates the bounding rectangle in data coordinates for the axes ``x_axis`` and ``y_axis``.
"""
if x_axis in self.axes and y_axis in self.axes:
x_min, x_max = self.bounds_for_axis(x_axis, try_auto_scale=True)
y_min, y_max = self.bounds_for_axis(y_axis, try_auto_scale=True)
if (x_min or x_max) and (y_min or y_max):
r = QRectF(x_min, y_min, x_max-x_min, y_max-y_min)
return r
r = orangeqt.Plot.data_rect_for_axes(self, x_axis, y_axis)
for id, axis in self.axes.items():
if id not in CartesianAxes and axis.data_line:
r |= QRectF(axis.data_line.p1(), axis.data_line.p2())
## We leave a 5% margin on each side so the graph doesn't look overcrowded
## TODO: Perhaps change this from a fixed percentage to always round to a round number
dx = r.width() / 20.0
dy = r.height() / 20.0
r.adjust(-dx, -dy, dx, dy)
return r
def transform_for_axes(self, x_axis = xBottom, y_axis = yLeft):
"""
Returns the graph transform that maps from data to scene coordinates using axes ``x_axis`` and ``y_axis``.
"""
if not (x_axis, y_axis) in self._transform_cache:
            # We must flip the graph area, because Qt coordinates start from the top left, while graph coordinates start from the bottom left
a = QRectF(self.graph_area)
t = a.top()
a.setTop(a.bottom())
a.setBottom(t)
self._transform_cache[(x_axis, y_axis)] = self.transform_from_rects(self.data_rect_for_axes(x_axis, y_axis), a)
return self._transform_cache[(x_axis, y_axis)]
def transform(self, axis_id, value):
"""
Transforms the ``value`` from data to plot coordinates along the axis ``axis_id``.
This function always ignores zoom. If you need to account for zooming, use :meth:`map_to_graph`.
"""
if axis_id in XAxes:
size = self.graph_area.width()
margin = self.graph_area.left()
else:
size = self.graph_area.height()
margin = self.graph_area.top()
m, M = self.bounds_for_axis(axis_id)
if m is None or M is None or M == m:
return 0
else:
return margin + (value-m)/(M-m) * size
def inv_transform(self, axis_id, value):
"""
Transforms the ``value`` from plot to data coordinates along the axis ``axis_id``.
This function always ignores zoom. If you need to account for zooming, use :meth:`map_from_graph`.
"""
if axis_id in XAxes:
size = self.graph_area.width()
margin = self.graph_area.left()
else:
size = self.graph_area.height()
margin = self.graph_area.top()
m, M = self.bounds_for_axis(axis_id)
if m is not None and M is not None:
return m + (value-margin)/size * (M-m)
else:
return 0
def bounds_for_axis(self, axis_id, try_auto_scale=True):
if axis_id in self.axes and not self.axes[axis_id].auto_scale:
return self.axes[axis_id].bounds()
if try_auto_scale:
lower, upper = orangeqt.Plot.bounds_for_axis(self, axis_id)
if lower != upper:
lower = lower - (upper-lower)/20.0
upper = upper + (upper-lower)/20.0
return lower, upper
else:
return None, None
def enableYRaxis(self, enable=1):
self.set_axis_enabled(yRight, enable)
def enableLRaxis(self, enable=1):
self.set_axis_enabled(yLeft, enable)
def enableXaxis(self, enable=1):
self.set_axis_enabled(xBottom, enable)
def set_axis_enabled(self, axis, enable):
if axis not in self.axes:
self.add_axis(axis)
self.axes[axis].setVisible(enable)
self.replot()
@staticmethod
def axis_coordinate(point, axis_id):
if axis_id in XAxes:
return point.x()
elif axis_id in YAxes:
return point.y()
else:
return None
# ####################################################################
# return string with attribute names and their values for example example
def getExampleTooltipText(self, example, indices=None, maxIndices=20):
if indices and type(indices[0]) == str:
indices = [self.attributeNameIndex[i] for i in indices]
if not indices:
indices = list(range(len(self.dataDomain.attributes)))
# don't show the class value twice
if example.domain.classVar:
classIndex = self.attributeNameIndex[example.domain.classVar.name]
while classIndex in indices:
indices.remove(classIndex)
text = "<b>Attributes:</b><br>"
for index in indices[:maxIndices]:
attr = self.attributeNames[index]
if attr not in example.domain: text += " "*4 + "%s = ?<br>" % (Qt.escape(attr))
elif example[attr].isSpecial(): text += " "*4 + "%s = ?<br>" % (Qt.escape(attr))
else: text += " "*4 + "%s = %s<br>" % (Qt.escape(attr), Qt.escape(str(example[attr])))
if len(indices) > maxIndices:
text += " "*4 + " ... <br>"
if example.domain.classVar:
text = text[:-4]
text += "<hr><b>Class:</b><br>"
if example.getclass().isSpecial(): text += " "*4 + "%s = ?<br>" % (Qt.escape(example.domain.classVar.name))
else: text += " "*4 + "%s = %s<br>" % (Qt.escape(example.domain.classVar.name), Qt.escape(str(example.getclass())))
if len(example.domain.getmetas()) != 0:
text = text[:-4]
text += "<hr><b>Meta attributes:</b><br>"
# show values of meta attributes
for key in example.domain.getmetas():
try: text += " "*4 + "%s = %s<br>" % (Qt.escape(example.domain[key].name), Qt.escape(str(example[key])))
except: pass
return text[:-4] # remove the last <br>
# show a tooltip at x,y with text. if the mouse will move for more than 2 pixels it will be removed
def showTip(self, x, y, text):
QToolTip.showText(self.mapToGlobal(QPoint(x, y)), text, self, QRect(x-3,y-3,6,6))
def notify_legend_moved(self, pos):
self._legend_moved = True
l = self.legend_rect()
g = getattr(self, '_legend_outside_area', QRectF())
p = QPointF()
rect = QRectF()
offset = 20
if pos.x() > g.right() - offset:
self._legend.set_orientation(Qt.Vertical)
rect.setRight(self._legend.boundingRect().width())
p = g.topRight() - self._legend.boundingRect().topRight()
elif pos.x() < g.left() + offset:
self._legend.set_orientation(Qt.Vertical)
rect.setLeft(self._legend.boundingRect().width())
p = g.topLeft()
elif pos.y() < g.top() + offset:
self._legend.set_orientation(Qt.Horizontal)
rect.setTop(self._legend.boundingRect().height())
p = g.topLeft()
elif pos.y() > g.bottom() - offset:
self._legend.set_orientation(Qt.Horizontal)
rect.setBottom(self._legend.boundingRect().height())
p = g.bottomLeft() - self._legend.boundingRect().bottomLeft()
if p.isNull():
self._legend.set_floating(True, pos)
else:
self._legend.set_floating(False, p)
if rect != self._legend_margin:
orientation = Qt.Horizontal if rect.top() or rect.bottom() else Qt.Vertical
self._legend.set_orientation(orientation)
self.animate(self, 'legend_margin', rect, duration=100)
def get_legend_margin(self):
return self._legend_margin
def set_legend_margin(self, value):
self._legend_margin = value
self.update_layout()
self.update_axes()
legend_margin = pyqtProperty(QRectF, get_legend_margin, set_legend_margin)
def update_curves(self):
if self.main_curve:
self.main_curve.set_alpha_value(self.alpha_value)
else:
for c in self.plot_items():
if isinstance(c, orangeqt.Curve) and not getattr(c, 'ignore_alpha', False):
au = c.auto_update()
c.set_auto_update(False)
c.set_point_size(self.point_width)
color = c.color()
color.setAlpha(self.alpha_value)
c.set_color(color)
c.set_auto_update(au)
c.update_properties()
self.viewport().update()
update_point_size = update_curves
update_alpha_value = update_curves
def update_antialiasing(self, use_antialiasing=None):
if use_antialiasing is not None:
self.antialias_plot = use_antialiasing
self.setRenderHint(QPainter.Antialiasing, self.antialias_plot)
def update_animations(self, use_animations=None):
if use_animations is not None:
self.animate_plot = use_animations
self.animate_points = use_animations
def update_performance(self, num_points = None):
if self.auto_adjust_performance:
if not num_points:
if self.main_curve:
num_points = len(self.main_curve.points())
else:
num_points = sum( len(c.points()) for c in self.curves )
if num_points > self.disable_animations_threshold:
self.disabled_animate_points = self.animate_points
self.animate_points = False
self.disabled_animate_plot = self.animate_plot
self.animate_plot = False
self.disabled_antialias_lines = self.animate_points
self.antialias_lines = True
elif hasattr(self, 'disabled_animate_points'):
self.animate_points = self.disabled_animate_points
del self.disabled_animate_points
self.animate_plot = self.disabled_animate_plot
del self.disabled_animate_plot
self.antialias_lines = True # self.disabled_antialias_lines
del self.disabled_antialias_lines
def animate(self, target, prop_name, end_val, duration = None, start_val = None):
for a in self._animations:
if a.state() == QPropertyAnimation.Stopped:
self._animations.remove(a)
if self.animate_plot:
a = QPropertyAnimation(target, prop_name)
a.setEndValue(end_val)
if start_val is not None:
a.setStartValue(start_val)
if duration:
a.setDuration(duration)
self._animations.append(a)
a.start(QPropertyAnimation.KeepWhenStopped)
else:
target.setProperty(prop_name, end_val)
def clear_selection(self):
self.unselect_all_points()
def send_selection(self):
if self.auto_send_selection_callback:
self.auto_send_selection_callback()
def pan(self, delta):
if type(delta) == tuple:
x, y = delta
else:
x, y = delta.x(), delta.y()
t = self.zoom_transform()
x = x / t.m11()
y = y / t.m22()
r = QRectF(self.zoom_rect)
r.translate(-QPointF(x,y))
self.ensure_inside(r, self.graph_area)
self.zoom_rect = r
def zoom_to_rect(self, rect):
self.ensure_inside(rect, self.graph_area)
# add to zoom_stack if zoom_rect is larger
if self.zoom_rect.width() > rect.width() or self.zoom_rect.height() > rect.height():
self.zoom_stack.append(self.zoom_rect)
self.animate(self, 'zoom_rect', rect, start_val = self.get_zoom_rect())
def zoom_back(self):
if self.zoom_stack:
rect = self.zoom_stack.pop()
self.animate(self, 'zoom_rect', rect, start_val = self.get_zoom_rect())
def reset_zoom(self):
self._zoom_rect = None
self.update_zoom()
def zoom_transform(self):
return self.transform_from_rects(self.zoom_rect, self.graph_area)
def zoom_in(self, point):
self.zoom(point, scale = 2)
def zoom_out(self, point):
self.zoom(point, scale = 0.5)
def zoom(self, point, scale):
print(len(self.zoom_stack))
t, ok = self._zoom_transform.inverted()
point = point * t
r = QRectF(self.zoom_rect)
i = 1.0/scale
r.setTopLeft(point*(1-i) + r.topLeft()*i)
r.setBottomRight(point*(1-i) + r.bottomRight()*i)
self.ensure_inside(r, self.graph_area)
# remove smaller zoom rects from stack
while len(self.zoom_stack) > 0 and r.width() >= self.zoom_stack[-1].width() and r.height() >= self.zoom_stack[-1].height():
self.zoom_stack.pop()
self.zoom_to_rect(r)
def get_zoom_rect(self):
if self._zoom_rect:
return self._zoom_rect
else:
return self.graph_area
def set_zoom_rect(self, rect):
self._zoom_rect = rect
self._zoom_transform = self.transform_from_rects(rect, self.graph_area)
self.update_zoom()
zoom_rect = pyqtProperty(QRectF, get_zoom_rect, set_zoom_rect)
@staticmethod
def ensure_inside(small_rect, big_rect):
if small_rect.width() > big_rect.width():
small_rect.setWidth(big_rect.width())
if small_rect.height() > big_rect.height():
small_rect.setHeight(big_rect.height())
if small_rect.right() > big_rect.right():
small_rect.moveRight(big_rect.right())
elif small_rect.left() < big_rect.left():
small_rect.moveLeft(big_rect.left())
if small_rect.bottom() > big_rect.bottom():
small_rect.moveBottom(big_rect.bottom())
elif small_rect.top() < big_rect.top():
small_rect.moveTop(big_rect.top())
def shuffle_points(self):
if self.main_curve:
self.main_curve.shuffle_points()
def set_progress(self, done, total):
if not self.widget:
return
if done == total:
self.widget.progressBarFinished()
else:
self.widget.progressBarSet(100.0 * done / total)
def start_progress(self):
if self.widget:
self.widget.progressBarInit()
def end_progress(self):
if self.widget:
self.widget.progressBarFinished()
def is_axis_auto_scale(self, axis_id):
if axis_id not in self.axes:
return axis_id not in self.data_range
return self.axes[axis_id].auto_scale
def axis_line(self, rect, id, invert_y = False):
if invert_y:
r = QRectF(rect)
r.setTop(rect.bottom())
r.setBottom(rect.top())
rect = r
if id == xBottom:
line = QLineF(rect.topLeft(), rect.topRight())
elif id == xTop:
line = QLineF(rect.bottomLeft(), rect.bottomRight())
elif id == yLeft:
line = QLineF(rect.topLeft(), rect.bottomLeft())
elif id == yRight:
line = QLineF(rect.topRight(), rect.bottomRight())
else:
line = None
return line
def color(self, role, group = None):
if group:
return self.palette().color(group, role)
else:
return self.palette().color(role)
def set_palette(self, p):
'''
Sets the plot palette to ``p``.
:param p: The new color palette
:type p: :obj:`.QPalette`
'''
self.setPalette(p)
self.replot()
def update_theme(self):
'''
Updates the current color theme, depending on the value of :attr:`theme_name`.
'''
if self.theme_name.lower() == 'default':
self.set_palette(OWPalette.System)
elif self.theme_name.lower() == 'light':
self.set_palette(OWPalette.Light)
elif self.theme_name.lower() == 'dark':
self.set_palette(OWPalette.Dark)
| bsd-2-clause | -1,525,150,586,387,520,000 | 35.820021 | 172 | 0.580668 | false |
lamondlab/sipify | CppHeaderParser-2.7/CppHeaderParser/CppHeaderParser.py | 1 | 114661 | #!/usr/bin/python
#
# Author: Jashua R. Cloutier (contact via https://bitbucket.org/senex)
# Project: http://senexcanis.com/open-source/cppheaderparser/
#
# Copyright (C) 2011, Jashua R. Cloutier
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of Jashua R. Cloutier nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission. Stories,
# blog entries etc making reference to this project may mention the
# name Jashua R. Cloutier in terms of project originator/creator etc.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
#
# The CppHeaderParser.py script is written in Python 2.4 and released to
# the open source community for continuous improvements under the BSD
# 2.0 new license, which can be found at:
#
# http://www.opensource.org/licenses/bsd-license.php
#
"""Parse C++ header files and generate a data structure
representing the class
"""
import ply.lex as lex
import os
import sys
import re
import inspect
def lineno():
"""Returns the current line number in our program."""
return inspect.currentframe().f_back.f_lineno
version = __version__ = "2.7"
tokens = [
'NUMBER',
'FLOAT_NUMBER',
'TEMPLATE_NAME',
'NAME',
'OPEN_PAREN',
'CLOSE_PAREN',
'OPEN_BRACE',
'CLOSE_BRACE',
'OPEN_SQUARE_BRACKET',
'CLOSE_SQUARE_BRACKET',
'COLON',
'SEMI_COLON',
'COMMA',
'TAB',
'BACKSLASH',
'PIPE',
'PERCENT',
'EXCLAMATION',
'CARET',
'COMMENT_SINGLELINE',
'COMMENT_MULTILINE',
'PRECOMP_MACRO',
'PRECOMP_MACRO_CONT',
'ASTERISK',
'AMPERSTAND',
'EQUALS',
'MINUS',
'PLUS',
'DIVIDE',
'CHAR_LITERAL',
'STRING_LITERAL',
'NEW_LINE',
'SQUOTE',
]
t_ignore = " \r.?@\f"
t_NUMBER = r'[0-9][0-9XxA-Fa-f]*'
t_FLOAT_NUMBER = r'[-+]?[0-9]*\.[0-9]+([eE][-+]?[0-9]+)?'
t_TEMPLATE_NAME = r'CppHeaderParser_template_[0-9]+'
t_NAME = r'[<>A-Za-z_~][A-Za-z0-9_]*'
t_OPEN_PAREN = r'\('
t_CLOSE_PAREN = r'\)'
t_OPEN_BRACE = r'{'
t_CLOSE_BRACE = r'}'
t_OPEN_SQUARE_BRACKET = r'\['
t_CLOSE_SQUARE_BRACKET = r'\]'
t_SEMI_COLON = r';'
t_COLON = r':'
t_COMMA = r','
t_TAB = r'\t'
t_BACKSLASH = r'\\'
t_PIPE = r'\|'
t_PERCENT = r'%'
t_CARET = r'\^'
t_EXCLAMATION = r'!'
t_PRECOMP_MACRO = r'\#.*'
t_PRECOMP_MACRO_CONT = r'.*\\\n'
def t_COMMENT_SINGLELINE(t):
r'\/\/.*\n'
global doxygenCommentCache
if t.value.startswith("///") or t.value.startswith("//!"):
if doxygenCommentCache:
doxygenCommentCache += "\n"
if t.value.endswith("\n"):
doxygenCommentCache += t.value[:-1]
else:
doxygenCommentCache += t.value
t.lexer.lineno += len([a for a in t.value if a=="\n"])
t_ASTERISK = r'\*'
t_MINUS = r'\-'
t_PLUS = r'\+'
t_DIVIDE = r'/(?!/)'
t_AMPERSTAND = r'&'
t_EQUALS = r'='
t_CHAR_LITERAL = "'.'"
t_SQUOTE = "'"
#found at http://wordaligned.org/articles/string-literals-and-regular-expressions
#TODO: This does not work with the string "bla \" bla"
t_STRING_LITERAL = r'"([^"\\]|\\.)*"'
#Found at http://ostermiller.org/findcomment.html
def t_COMMENT_MULTILINE(t):
r'/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/'
global doxygenCommentCache
if t.value.startswith("/**") or t.value.startswith("/*!"):
#not sure why, but we get double new lines here
v = t.value.replace("\n\n", "\n")
#strip leading whitespace before the continuation '*' of each comment line
v = re.sub(r"\n[\s]+\*", "\n*", v)
doxygenCommentCache += v
t.lexer.lineno += len([a for a in t.value if a=="\n"])
def t_NEWLINE(t):
r'\n+'
t.lexer.lineno += len(t.value)
def t_error(v):
print(( "Lex error: ", v ))
lex.lex()
# Controls error_print
print_errors = 1
# Controls warning_print
print_warnings = 1
# Controls debug_print
debug = 0
# Controls trace_print
debug_trace = 0
def error_print(arg):
if print_errors: print(("[%4d] %s"%(inspect.currentframe().f_back.f_lineno, arg)))
def warning_print(arg):
if print_warnings: print(("[%4d] %s"%(inspect.currentframe().f_back.f_lineno, arg)))
def debug_print(arg):
global debug
if debug: print(("[%4d] %s"%(inspect.currentframe().f_back.f_lineno, arg)))
def trace_print(*arg):
global debug_trace
if debug_trace:
sys.stdout.write("[%s] "%(inspect.currentframe().f_back.f_lineno))
for a in arg: sys.stdout.write("%s "%a)
sys.stdout.write("\n")
supportedAccessSpecifier = [
'public',
'protected',
'private',
'public slots',
'protected slots',
'private slots',
'public Q_SLOTS',
'protected Q_SLOTS',
'private Q_SLOTS',
'signals',
'Q_SIGNALS',
]
#Symbols to ignore, usually special macros
ignoreSymbols = [
'Q_OBJECT',
'Q_PROPERTY()',
'Q_DECLARE_FLAGS()',
'Q_INVOKABLE',
]
doxygenCommentCache = ""
#Track what was added in what order and at what depth
parseHistory = []
def is_namespace(nameStack):
"""Determines if a namespace is being specified"""
if len(nameStack) == 0:
return False
if nameStack[0] == "namespace":
return True
return False
def is_enum_namestack(nameStack):
"""Determines if a namestack is an enum namestack"""
if len(nameStack) == 0:
return False
if nameStack[0] == "enum":
return True
if len(nameStack) > 1 and nameStack[0] == "typedef" and nameStack[1] == "enum":
return True
return False
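# Illustrative usage (the token stacks below are hand-built examples, not
# taken from any parsed header):
#   >>> is_namespace(['namespace', 'foo', '{'])
#   True
#   >>> is_enum_namestack(['typedef', 'enum', '{'])
#   True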
def is_fundamental(s):
for a in s.split():
if a not in ["size_t", "struct", "union", "unsigned", "signed", "bool", "char", "short", "int", "float", "double", "long", "void", "*"]: return False
return True
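# Illustrative usage (example type strings chosen for demonstration only):
#   >>> is_fundamental("unsigned int *")
#   True
#   >>> is_fundamental("std::string")
#   False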
def is_function_pointer_stack(stack):
"""Count how many non-nested paranthesis are in the stack. Useful for determining if a stack is a function pointer"""
paren_depth = 0
paren_count = 0
star_after_first_paren = False
last_e = None
for e in stack:
if e == "(":
paren_depth += 1
elif e == ")" and paren_depth > 0:
paren_depth -= 1
if paren_depth == 0:
paren_count += 1
elif e == "*" and last_e == "(" and paren_count == 0 and paren_depth == 1:
star_after_first_paren = True
last_e = e
if star_after_first_paren and paren_count == 2:
return True
else:
return False
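# Illustrative usage: the stack below is how "void (*fp)(int);" tokenizes
# (a hand-built example, not parser output from a real header):
#   >>> is_function_pointer_stack(['void', '(', '*', 'fp', ')', '(', 'int', ')', ';'])
#   True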
def is_method_namestack(stack):
r = False
if '(' not in stack: r = False
elif stack[0] == 'typedef': r = False # TODO deal with typedef function prototypes
#elif '=' in stack and stack.index('=') < stack.index('(') and stack[stack.index('=')-1] != 'operator': r = False #disabled July6th - allow all operators
elif 'operator' in stack: r = True # allow all operators
elif '{' in stack and stack.index('{') < stack.index('('): r = False # struct that looks like a method/class
elif '(' in stack and ')' in stack:
if '{' in stack and '}' in stack: r = True
elif stack[-1] == ';':
if is_function_pointer_stack(stack):
r = False
else:
r = True
elif '{' in stack: r = True # ideally we catch both braces... TODO
else: r = False
#Test for case of property set to something with parens such as "static const int CONST_A = (1 << 7) - 1;"
if r and "(" in stack and "=" in stack and 'operator' not in stack:
if stack.index("=") < stack.index("("): r = False
return r
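# Illustrative usage (hand-built token stack for "int getSize();"):
#   >>> is_method_namestack(['int', 'getSize', '(', ')', ';'])
#   True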
def is_property_namestack(nameStack):
r = False
if '(' not in nameStack and ')' not in nameStack: r = True
elif "(" in nameStack and "=" in nameStack and nameStack.index("=") < nameStack.index("("): r = True
#See if we are a function pointer
if not r and is_function_pointer_stack(nameStack): r = True
return r
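# Illustrative usage (hand-built token stack for "int size;"):
#   >>> is_property_namestack(['int', 'size'])
#   True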
def detect_lineno(s):
"""Detect the line number for a given token string"""
try:
rtn = s.lineno()
if rtn != -1:
return rtn
except: pass
global curLine
return curLine
def filter_out_attribute_keyword(stack):
"""Strips __attribute__ and its parenthetical expression from the stack"""
if "__attribute__" not in stack: return stack
try:
debug_print("Stripping __attribute__ from %s"% stack)
attr_index = stack.index("__attribute__")
attr_end = attr_index + 1 #Assume it is not followed by a parenthetical expression; fixed up below if it is
#Find final paren
if stack[attr_index + 1] == '(':
paren_count = 1
for i in range(attr_index + 2, len(stack)):
elm = stack[i]
if elm == '(':
paren_count += 1
elif elm == ')':
paren_count -= 1
if paren_count == 0:
attr_end = i + 1
break
new_stack = stack[0:attr_index] + stack[attr_end:]
debug_print("stripped stack is %s"% new_stack)
return new_stack
except:
return stack
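# Illustrative usage (hypothetical stack mirroring "int x __attribute__((unused))"):
#   >>> filter_out_attribute_keyword(['int', 'x', '__attribute__', '(', '(', 'unused', ')', ')'])
#   ['int', 'x']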
class TagStr(str):
"""Wrapper for a string that allows us to store the line number associated with it"""
lineno_reg = {}
def __new__(cls,*args,**kw):
new_obj = str.__new__(cls,*args)
if "lineno" in kw:
TagStr.lineno_reg[id(new_obj)] = kw["lineno"]
return new_obj
def __del__(self):
try:
del TagStr.lineno_reg[id(self)]
except: pass
def lineno(self):
return TagStr.lineno_reg.get(id(self), -1)
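# Illustrative usage (hypothetical values):
#   >>> s = TagStr("void", lineno=42)
#   >>> s.lineno()
#   42
#   >>> TagStr("void").lineno()   # no line info recorded
#   -1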
class CppParseError(Exception): pass
class CppClass(dict):
"""Takes a name stack and turns it into a class
Contains the following Keys:
self['name'] - Name of the class
self['doxygen'] - Doxygen comments associated with the class if they exist
self['inherits'] - List of Classes that this one inherits, where the values
are of the form {"access": Anything in supportedAccessSpecifier,
"class": Name of the class}
self['methods'] - Dictionary where keys are from supportedAccessSpecifier
and values are a lists of CppMethod's
self['properties'] - Dictionary where keys are from supportedAccessSpecifier
and values are lists of CppVariable's
self['enums'] - Dictionary where keys are from supportedAccessSpecifier and
values are lists of CppEnum's
self['structs'] - Dictionary where keys are from supportedAccessSpecifier and
values are lists of nested Struct's
An example of how this could look is as follows:
#self =
{
'name': ""
'inherits':[]
'methods':
{
'public':[],
'protected':[],
'private':[]
},
'properties':
{
'public':[],
'protected':[],
'private':[]
},
'enums':
{
'public':[],
'protected':[],
'private':[]
}
}
"""
def get_all_methods(self):
r = []
for typ in supportedAccessSpecifier: r += self['methods'][typ]
return r
def get_all_method_names( self ):
r = []
for typ in supportedAccessSpecifier: r += self.get_method_names(typ) # returns list
return r
def get_all_pure_virtual_methods( self ):
r = {}
for typ in supportedAccessSpecifier: r.update(self.get_pure_virtual_methods(typ)) # returns dict
return r
def get_method_names( self, type='public' ): return [ meth['name'] for meth in self['methods'][ type ] ]
def get_pure_virtual_methods( self, type='public' ):
r = {}
for meth in self['methods'][ type ]:
if meth['pure_virtual']: r[ meth['name'] ] = meth
return r
def __init__(self, nameStack, curTemplate):
self['nested_classes'] = []
self['parent'] = None
self['abstract'] = False
self._public_enums = {}
self._public_structs = {}
self._public_typedefs = {}
self._public_forward_declares = []
self['namespace'] = ""
debug_print( "Class: %s"%nameStack )
debug_print( "Template: %s"%curTemplate)
if (len(nameStack) < 2):
nameStack.insert(1, "")#anonymous struct
global doxygenCommentCache
if len(doxygenCommentCache):
self["doxygen"] = doxygenCommentCache
doxygenCommentCache = ""
if "::" in "".join(nameStack):
#Re-join class paths (ex. ['class', 'Bar', ':', ':', 'Foo'] -> ['class', 'Bar::Foo'])
try:
new_nameStack = []
for name in nameStack:
if len(new_nameStack) == 0:
new_nameStack.append(name)
elif name == ":" and new_nameStack[-1].endswith(":"):
new_nameStack[-1] += name
elif new_nameStack[-1].endswith("::"):
new_nameStack[-2] += new_nameStack[-1] + name
del new_nameStack[-1]
else:
new_nameStack.append(name)
trace_print("Convert from namestack\n %s\nto\n%s"%(nameStack, new_nameStack))
nameStack = new_nameStack
except: pass
# Handle final specifier
self["final"] = False
try:
final_index = nameStack.index("final")
# Don't trip up the rest of the logic
del nameStack[final_index]
self["final"] = True
trace_print("final")
except: pass
self["name"] = nameStack[1]
self["line_number"] = detect_lineno(nameStack[0])
#Handle template classes
if len(nameStack) > 3 and nameStack[2].startswith("<"):
open_template_count = 0
param_separator = 0
found_first = False
i = 0
for elm in nameStack:
if '<' in elm :
open_template_count += 1
found_first = True
elif '>' in elm:
open_template_count -= 1
if found_first and open_template_count == 0:
self["name"] = "".join(nameStack[1:i + 1])
break;
i += 1
elif ":" in nameStack:
self['name'] = nameStack[ nameStack.index(':') - 1 ]
inheritList = []
if nameStack.count(':') == 1:
nameStack = nameStack[nameStack.index(":") + 1:]
while len(nameStack):
tmpStack = []
tmpInheritClass = {"access":"private", "virtual": False}
if "," in nameStack:
tmpStack = nameStack[:nameStack.index(",")]
nameStack = nameStack[nameStack.index(",") + 1:]
else:
tmpStack = nameStack
nameStack = []
# Convert template classes to one name in the last index
for i in range(0, len(tmpStack)):
if '<' in tmpStack[i]:
tmpStack2 = tmpStack[:i-1]
tmpStack2.append("".join(tmpStack[i-1:]))
tmpStack = tmpStack2
break
if len(tmpStack) == 0:
break;
elif len(tmpStack) == 1:
tmpInheritClass["class"] = tmpStack[0]
elif len(tmpStack) == 2:
tmpInheritClass["access"] = tmpStack[0]
tmpInheritClass["class"] = tmpStack[1]
elif len(tmpStack) == 3 and "virtual" in tmpStack:
tmpInheritClass["access"] = tmpStack[1] if tmpStack[1] != "virtual" else tmpStack[0]
tmpInheritClass["class"] = tmpStack[2]
tmpInheritClass["virtual"] = True
else:
warning_print( "Warning: can not parse inheriting class %s"%(" ".join(tmpStack)))
if '>' in tmpStack: pass # allow skipping templates for now
else: raise NotImplementedError
if 'class' in tmpInheritClass: inheritList.append(tmpInheritClass)
elif nameStack.count(':') == 2: self['parent'] = self['name']; self['name'] = nameStack[-1]
elif nameStack.count(':') > 2 and nameStack[0] in ("class", "struct"):
tmpStack = nameStack[nameStack.index(":") + 1:]
superTmpStack = [[]]
for tok in tmpStack:
if tok == ',':
superTmpStack.append([])
else:
superTmpStack[-1].append(tok)
for tmpStack in superTmpStack:
tmpInheritClass = {"access":"private"}
if len(tmpStack) and tmpStack[0] in supportedAccessSpecifier:
tmpInheritClass["access"] = tmpStack[0]
tmpStack = tmpStack[1:]
inheritNSStack = []
while len(tmpStack) > 3:
if tmpStack[0] == ':': break;
if tmpStack[1] != ':': break;
if tmpStack[2] != ':': break;
inheritNSStack.append(tmpStack[0])
tmpStack = tmpStack[3:]
if len(tmpStack) == 1 and tmpStack[0] != ':':
inheritNSStack.append(tmpStack[0])
tmpInheritClass["class"] = "::".join(inheritNSStack)
inheritList.append(tmpInheritClass)
self['inherits'] = inheritList
if curTemplate:
self["template"] = curTemplate
trace_print("Setting template to '%s'"%self["template"])
methodAccessSpecificList = {}
propertyAccessSpecificList = {}
enumAccessSpecificList = {}
structAccessSpecificList = {}
typedefAccessSpecificList = {}
forwardAccessSpecificList = {}
for accessSpecifier in supportedAccessSpecifier:
methodAccessSpecificList[accessSpecifier] = []
propertyAccessSpecificList[accessSpecifier] = []
enumAccessSpecificList[accessSpecifier] = []
structAccessSpecificList[accessSpecifier] = []
typedefAccessSpecificList[accessSpecifier] = []
forwardAccessSpecificList[accessSpecifier] = []
self['methods'] = methodAccessSpecificList
self['properties'] = propertyAccessSpecificList
self['enums'] = enumAccessSpecificList
self['structs'] = structAccessSpecificList
self['typedefs'] = typedefAccessSpecificList
self['forward_declares'] = forwardAccessSpecificList
def show(self):
"""Convert class to a string"""
namespace_prefix = ""
if self["namespace"]: namespace_prefix = self["namespace"] + "::"
rtn = "%s %s"%(self["declaration_method"], namespace_prefix + self["name"])
if self["final"]: rtn += " final"
if self['abstract']: rtn += ' (abstract)\n'
else: rtn += '\n'
if 'doxygen' in list(self.keys()): rtn += self["doxygen"] + '\n'
if 'parent' in list(self.keys()) and self['parent']: rtn += 'parent class: ' + self['parent'] + '\n'
if "inherits" in list(self.keys()):
rtn += " Inherits: "
for inheritClass in self["inherits"]:
if inheritClass["virtual"]: rtn += "virtual "
rtn += "%s %s, "%(inheritClass["access"], inheritClass["class"])
rtn += "\n"
rtn += " {\n"
for accessSpecifier in supportedAccessSpecifier:
rtn += " %s\n"%(accessSpecifier)
#Enums
if (len(self["enums"][accessSpecifier])):
rtn += " <Enums>\n"
for enum in self["enums"][accessSpecifier]:
rtn += " %s\n"%(repr(enum))
#Properties
if (len(self["properties"][accessSpecifier])):
rtn += " <Properties>\n"
for property in self["properties"][accessSpecifier]:
rtn += " %s\n"%(repr(property))
#Methods
if (len(self["methods"][accessSpecifier])):
rtn += " <Methods>\n"
for method in self["methods"][accessSpecifier]:
rtn += "\t\t" + method.show() + '\n'
rtn += " }\n"
print(rtn)
def __str__(self):
"""Convert class to a string"""
namespace_prefix = ""
if self["namespace"]: namespace_prefix = self["namespace"] + "::"
rtn = "%s %s"%(self["declaration_method"], namespace_prefix + self["name"])
if self["final"]: rtn += " final"
if self['abstract']: rtn += ' (abstract)\n'
else: rtn += '\n'
if 'doxygen' in list(self.keys()): rtn += self["doxygen"] + '\n'
if 'parent' in list(self.keys()) and self['parent']: rtn += 'parent class: ' + self['parent'] + '\n'
if "inherits" in list(self.keys()) and len(self["inherits"]):
rtn += "Inherits: "
for inheritClass in self["inherits"]:
if inheritClass.get("virtual", False): rtn += "virtual "
rtn += "%s %s, "%(inheritClass["access"], inheritClass["class"])
rtn += "\n"
rtn += "{\n"
for accessSpecifier in supportedAccessSpecifier:
rtn += "%s\n"%(accessSpecifier)
#Enums
if (len(self["enums"][accessSpecifier])):
rtn += " // Enums\n"
for enum in self["enums"][accessSpecifier]:
rtn += " %s\n"%(repr(enum))
#Properties
if (len(self["properties"][accessSpecifier])):
rtn += " // Properties\n"
for property in self["properties"][accessSpecifier]:
rtn += " %s\n"%(repr(property))
#Methods
if (len(self["methods"][accessSpecifier])):
rtn += " // Methods\n"
for method in self["methods"][accessSpecifier]:
rtn += " %s\n"%(repr(method))
rtn += "}\n"
return rtn
class CppUnion( CppClass ):
"""Takes a name stack and turns it into a union
Contains the following Keys:
self['name'] - Name of the union
self['doxygen'] - Doxygen comments associated with the union if they exist
self['members'] - List of members the union has
An example of how this could look is as follows:
#self =
{
'name': ""
'members': []
}
"""
def __init__(self, nameStack):
CppClass.__init__(self, nameStack, None)
self["name"] = "union " + self["name"]
self["members"] = self["properties"]["public"]
def transform_to_union_keys(self):
print("union keys: %s"%list(self.keys()))
for key in ['inherits', 'parent', 'abstract', 'namespace', 'typedefs', 'methods']:
del self[key]
def show(self):
"""Convert class to a string"""
print(self)
def __str__(self):
"""Convert class to a string"""
namespace_prefix = ""
if self["namespace"]: namespace_prefix = self["namespace"] + "::"
rtn = "%s %s"%(self["declaration_method"], namespace_prefix + self["name"])
if self['abstract']: rtn += ' (abstract)\n'
else: rtn += '\n'
if 'doxygen' in list(self.keys()): rtn += self["doxygen"] + '\n'
if 'parent' in list(self.keys()) and self['parent']: rtn += 'parent class: ' + self['parent'] + '\n'
rtn += "{\n"
for member in self["members"]:
rtn += " %s\n"%(repr(member))
rtn += "}\n"
return rtn
class _CppMethod( dict ):
def _params_helper1( self, stack ):
# deal with "throw" keyword
if 'throw' in stack: stack = stack[ : stack.index('throw') ]
## remove GCC keyword __attribute__(...) and preserve returns ##
cleaned = []
hit = False; hitOpen = 0; hitClose = 0
for a in stack:
if a == '__attribute__': hit = True
if hit:
if a == '(': hitOpen += 1
elif a == ')': hitClose += 1
if a==')' and hitOpen == hitClose:
hit = False
else:
cleaned.append( a )
stack = cleaned
# also deal with attribute((const)) function prefix #
# TODO this needs to be better #
if len(stack) > 5:
a = ''.join(stack)
if a.startswith('((__const__))'): stack = stack[ 5 : ]
elif a.startswith('__attribute__((__const__))'): stack = stack[ 6 : ]
stack = stack[stack.index('(') + 1: ]
if not stack: return []
if len(stack)>=3 and stack[0]==')' and stack[1]==':': # is this always a constructor?
self['constructor'] = True
return []
stack.reverse(); _end_ = stack.index(')'); stack.reverse()
stack = stack[ : len(stack)-(_end_+1) ]
if '(' not in stack: return stack # safe to return, no defaults that init a class
# transforms ['someclass', '(', '0', ',', '0', ')'] into ['someclass(0,0)']
r = []; hit=False
for a in stack:
if a == '(': hit=True
elif a == ')': hit=False
if hit or a == ')': r[-1] = r[-1] + a
else: r.append( a )
return r
def _params_helper2( self, params ):
for p in params:
p['method'] = self # save reference in variable to parent method
if '::' in p['type']:
ns = p['type'].split('::')[0]
if ns not in Resolver.NAMESPACES and ns in Resolver.CLASSES:
p['type'] = self['namespace'] + p['type']
else: p['namespace'] = self[ 'namespace' ]
class CppMethod( _CppMethod ):
"""Takes a name stack and turns it into a method
Contains the following Keys:
self['rtnType'] - Return type of the method (ex. "int")
self['name'] - Name of the method (ex. "getSize")
self['doxygen'] - Doxygen comments associated with the method if they exist
self['parameters'] - List of CppVariables
"""
def show(self):
r = ['method name: %s (%s)' %(self['name'],self['debug']) ]
if self['returns']: r.append( 'returns: %s'%self['returns'] )
if self['parameters']: r.append( 'number arguments: %s' %len(self['parameters']))
if self['pure_virtual']: r.append( 'pure virtual: %s'%self['pure_virtual'] )
if self['constructor']: r.append( 'constructor' )
if self['destructor']: r.append( 'destructor' )
return '\n\t\t '.join( r )
def __init__(self, nameStack, curClass, methinfo, curTemplate):
debug_print( "Method: %s"%nameStack )
debug_print( "Template: %s"%curTemplate )
global doxygenCommentCache
if len(doxygenCommentCache):
self["doxygen"] = doxygenCommentCache
doxygenCommentCache = ""
if "operator" in nameStack:
self["rtnType"] = " ".join(nameStack[:nameStack.index('operator')])
self["name"] = "".join(nameStack[nameStack.index('operator'):nameStack.index('(')])
else:
self["rtnType"] = " ".join(nameStack[:nameStack.index('(') - 1])
self["name"] = " ".join(nameStack[nameStack.index('(') - 1:nameStack.index('(')])
if self["rtnType"].startswith("virtual"):
self["rtnType"] = self["rtnType"][len("virtual"):].strip()
if len(self["rtnType"]) == 0 or self["name"] == curClass:
self["rtnType"] = "void"
self["rtnType"] = self["rtnType"].replace(' : : ', '::' )
self["rtnType"] = self["rtnType"].replace(" <","<")
self["rtnType"] = self["rtnType"].replace(" >",">").replace(">>", "> >").replace(">>", "> >")
self["rtnType"] = self["rtnType"].replace(" ,",",")
for spec in ["const", "final", "override"]:
self[spec] = False
for i in reversed(nameStack):
if i == spec:
self[spec] = True
break
elif i == ")":
break
self.update( methinfo )
self["line_number"] = detect_lineno(nameStack[0])
#Filter out initializer lists used in constructors
try:
paren_depth_counter = 0
for i in range(0, len(nameStack)):
elm = nameStack[i]
if elm == "(":
paren_depth_counter += 1
if elm == ")":
paren_depth_counter -=1
if paren_depth_counter == 0 and nameStack[i+1] == ':':
debug_print("Stripping out initializer list")
nameStack = nameStack[:i+1]
break
except: pass
paramsStack = self._params_helper1( nameStack )
debug_print( "curTemplate: %s"%curTemplate)
if curTemplate:
self["template"] = curTemplate
debug_print( "SET self['template'] to `%s`"%self["template"])
params = []
#See if there is a doxygen comment for the variable
doxyVarDesc = {}
if "doxygen" in self:
doxyLines = self["doxygen"].split("\n")
lastParamDesc = ""
for doxyLine in doxyLines:
if " @param " in doxyLine or " \param " in doxyLine:
try:
#Strip out the param
doxyLine = doxyLine[doxyLine.find("param ") + 6:]
(var, desc) = doxyLine.split(" ", 1)
doxyVarDesc[var] = desc.strip()
lastParamDesc = var
except: pass
elif " @return " in doxyLine or " \return " in doxyLine:
lastParamDesc = ""
# not handled for now
elif lastParamDesc:
try:
doxyLine = doxyLine.strip()
if " " not in doxyLine:
lastParamDesc = ""
continue
doxyLine = doxyLine[doxyLine.find(" ") + 1:]
doxyVarDesc[lastParamDesc] += " " + doxyLine
except: pass
#Create the variable now
while (len(paramsStack)):
# Find commas that are not nested in <>'s like template types
open_template_count = 0
param_separator = 0
i = 0
for elm in paramsStack:
if '<' in elm :
open_template_count += 1
elif '>' in elm:
open_template_count -= 1
elif elm == ',' and open_template_count == 0:
param_separator = i
break
i += 1
if param_separator:
param = CppVariable(paramsStack[0:param_separator], doxyVarDesc=doxyVarDesc)
if len(list(param.keys())): params.append(param)
paramsStack = paramsStack[param_separator + 1:]
else:
param = CppVariable(paramsStack, doxyVarDesc=doxyVarDesc)
if len(list(param.keys())): params.append(param)
break
self["parameters"] = params
#self._params_helper2( params ) # mods params inplace
def __str__(self):
filter_keys = ("parent", "defined", "operator", "returns_reference")
cpy = dict((k,v) for (k,v) in list(self.items()) if k not in filter_keys)
return "%s"%cpy
class _CppVariable(dict):
def _name_stack_helper( self, stack ):
stack = list(stack)
if '=' not in stack: # TODO refactor me
# check for array[n] and deal with funny array syntax: "int myvar:99"
array = []
while stack and stack[-1].isdigit(): array.append( stack.pop() )
if array: array.reverse(); self['array'] = int(''.join(array))
if stack and stack[-1].endswith(':'): stack[-1] = stack[-1][:-1]
while stack and not stack[-1]: stack.pop() # can be empty
return stack
def init(self):
#assert self['name'] # allow unnamed variables, methods like this: "void func(void);"
a = []
self['aliases'] = []; self['parent'] = None; self['typedef'] = None
for key in 'constant reference pointer static typedefs class fundamental unresolved'.split():
self[ key ] = 0
for b in self['type'].split():
if b == '__const__': b = 'const'
a.append( b )
self['type'] = ' '.join( a )
class CppVariable( _CppVariable ):
"""Takes a name stack and turns it into a method
Contains the following Keys:
self['type'] - Type for the variable (ex. "const string &")
self['name'] - Name of the variable (ex. "numItems")
self['namespace'] - Namespace containing the enum
self['desc'] - Description of the variable if part of a method (optional)
self['doxygen'] - Doxygen comments associated with the method if they exist
self['defaultValue'] - Default value of the variable, this key will only
exist if there is a default value
self['extern'] - True if its an extern, false if not
"""
Vars = []
def __init__(self, nameStack, **kwargs):
debug_print("trace %s"%nameStack)
if len(nameStack) and nameStack[0] == "extern":
self['extern'] = True
del nameStack[0]
else:
self['extern'] = False
_stack_ = nameStack
if "[" in nameStack: #strip off array informatin
arrayStack = nameStack[nameStack.index("["):]
if nameStack.count("[") > 1:
debug_print("Multi dimensional array")
debug_print("arrayStack=%s"%arrayStack)
nums = [x for x in arrayStack if x.isdigit()] # a list, not an iterator: it is read twice below
# Calculate size by multiplying all dimensions
p = 1
for n in nums:
p *= int(n)
#Multi dimensional array
self["array_size"] = p
self["multi_dimensional_array"] = 1
self["multi_dimensional_array_size"] = "x".join(nums)
else:
debug_print("Array")
if len(arrayStack) == 3:
self["array_size"] = arrayStack[1]
nameStack = nameStack[:nameStack.index("[")]
self["array"] = 1
else:
self["array"] = 0
nameStack = self._name_stack_helper( nameStack )
global doxygenCommentCache
if len(doxygenCommentCache):
self["doxygen"] = doxygenCommentCache
doxygenCommentCache = ""
debug_print( "Variable: %s"%nameStack )
self["line_number"] = detect_lineno(nameStack[0])
self["function_pointer"] = 0
if (len(nameStack) < 2): # +++
if len(nameStack) == 1: self['type'] = nameStack[0]; self['name'] = ''
else: error_print(_stack_); assert 0
elif is_function_pointer_stack(nameStack): #function pointer
self["type"] = " ".join(nameStack[:nameStack.index("(") + 2] + nameStack[nameStack.index(")") :])
self["name"] = " ".join(nameStack[nameStack.index("(") + 2 : nameStack.index(")")])
self["function_pointer"] = 1
elif ("=" in nameStack):
self["type"] = " ".join(nameStack[:nameStack.index("=") - 1])
self["name"] = nameStack[nameStack.index("=") - 1]
self["defaultValue"] = " ".join(nameStack[nameStack.index("=") + 1:]) # deprecate camelCase in dicts
self['default'] = " ".join(nameStack[nameStack.index("=") + 1:])
elif is_fundamental(nameStack[-1]) or nameStack[-1] in ['>', '<' , ':', '.']:
#Un named parameter
self["type"] = " ".join(nameStack)
self["name"] = ""
else: # common case
self["type"] = " ".join(nameStack[:-1])
self["name"] = nameStack[-1]
self["type"] = self["type"].replace(" :",":")
self["type"] = self["type"].replace(": ",":")
self["type"] = self["type"].replace(" <","<")
self["type"] = self["type"].replace(" >",">").replace(">>", "> >").replace(">>", "> >")
self["type"] = self["type"].replace(" ,",",")
#Optional doxygen description
try:
self["desc"] = kwargs["doxyVarDesc"][self["name"]]
except: pass
self.init()
CppVariable.Vars.append( self ) # save and resolve later
def __str__(self):
keys_white_list = ['constant','name','reference','type','static','pointer','desc', 'line_number', 'extern']
cpy = dict((k,v) for (k,v) in list(self.items()) if k in keys_white_list)
if "array_size" in self: cpy["array_size"] = self["array_size"]
return "%s"%cpy
class _CppEnum(dict):
def resolve_enum_values( self, values ):
"""Evaluates the values list of dictionaries passed in and figures out what the enum value
for each enum is editing in place:
Example:
From: [{'name': 'ORANGE'},
{'name': 'RED'},
{'name': 'GREEN', 'value': '8'}]
To: [{'name': 'ORANGE', 'value': 0},
{'name': 'RED', 'value': 1},
{'name': 'GREEN', 'value': 8}]
"""
t = int; i = 0
names = [ v['name'] for v in values ]
for v in values:
if 'value' in v:
a = v['value'].strip()
# Remove single quotes from single quoted chars (unless part of some expression)
if len(a) == 3 and a[0] == "'" and a[2] == "'":
a = v['value'] = a[1]
if a.lower().startswith("0x"):
try:
i = a = int(a , 16)
except:pass
elif a.isdigit():
i = a = int( a )
elif a in names:
for other in values:
if other['name'] == a:
v['value'] = other['value']
break
elif '"' in a or "'" in a: t = str # only if there are quotes it this a string enum
else:
try:
a = i = ord(a)
except: pass
#Allow access of what is in the file pre-convert if converted
if v['value'] != str(a):
v['raw_value'] = v['value']
v['value'] = a
else: v['value'] = i
try:
v['value'] = v['value'].replace(" < < ", " << ").replace(" > > ", " >> ")
except: pass
i += 1
return t
class CppEnum(_CppEnum):
"""Takes a name stack and turns it into an Enum
Contains the following Keys:
self['name'] - Name of the enum (ex. "ItemState")
self['namespace'] - Namespace containing the enum
self['values'] - List of values where the values are a dictionary of the
form {"name": name of the key (ex. "PARSING_HEADER"),
"value": Specified value of the enum, this key will only exist
if a value for a given enum value was defined
}
"""
def __init__(self, nameStack):
global doxygenCommentCache
if len(doxygenCommentCache):
self["doxygen"] = doxygenCommentCache
doxygenCommentCache = ""
if len(nameStack) == 3 and nameStack[0] == "enum":
debug_print("Created enum as just name/value")
self["name"] = nameStack[1]
self["instances"]=[nameStack[2]]
if len(nameStack) < 4 or "{" not in nameStack or "}" not in nameStack:
#Not enough stuff for an enum
debug_print("Bad enum")
return
valueList = []
self["line_number"] = detect_lineno(nameStack[0])
#Figure out what values it has
valueStack = nameStack[nameStack.index('{') + 1: nameStack.index('}')]
while len(valueStack):
tmpStack = []
if "," in valueStack:
tmpStack = valueStack[:valueStack.index(",")]
valueStack = valueStack[valueStack.index(",") + 1:]
else:
tmpStack = valueStack
valueStack = []
d = {}
if len(tmpStack) == 1: d["name"] = tmpStack[0]
elif len(tmpStack) >= 3 and tmpStack[1] == "=":
d["name"] = tmpStack[0]; d["value"] = " ".join(tmpStack[2:])
elif len(tmpStack) == 2 and tmpStack[1] == "=":
debug_print( "WARN-enum: parser missed value for %s"%tmpStack[0] )
d["name"] = tmpStack[0]
if d: valueList.append( d )
if len(valueList):
self['type'] = self.resolve_enum_values( valueList ) # returns int for standard enum
self["values"] = valueList
else:
warning_print( 'WARN-enum: empty enum %s'%nameStack )
return
#Figure out if it has a name
preBraceStack = nameStack[:nameStack.index("{")]
postBraceStack = nameStack[nameStack.index("}") + 1:]
self["typedef"] = False
if (len(preBraceStack) == 2 and "typedef" not in nameStack):
self["name"] = preBraceStack[1]
elif len(postBraceStack) and "typedef" in nameStack:
self["name"] = " ".join(postBraceStack)
self["typedef"] = True
else: warning_print( 'WARN-enum: nameless enum %s'%nameStack )
#See if there are instances of this
if "typedef" not in nameStack and len(postBraceStack):
self["instances"] = []
for var in postBraceStack:
if "," in var:
continue
self["instances"].append(var)
self["namespace"] = ""
class CppStruct(dict):
Structs = []
def __init__(self, nameStack):
if len(nameStack) >= 2: self['type'] = nameStack[1]
else: self['type'] = None
self['fields'] = []
self.Structs.append( self )
global curLine
self["line_number"] = curLine
C99_NONSTANDARD = {
'int8' : 'signed char',
'int16' : 'short int',
'int32' : 'int',
'int64' : 'int64_t', # this can be: long int (64bit), or long long int (32bit)
'uint' : 'unsigned int',
'uint8' : 'unsigned char',
'uint16' : 'unsigned short int',
'uint32' : 'unsigned int',
'uint64' : 'uint64_t', # depends on host bits
}
def standardize_fundamental( s ):
if s in C99_NONSTANDARD: return C99_NONSTANDARD[ s ]
else: return s
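# Illustrative usage (example inputs only):
#   >>> standardize_fundamental('uint8')
#   'unsigned char'
#   >>> standardize_fundamental('double')   # unchanged if already standard
#   'double'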
class Resolver(object):
C_FUNDAMENTAL = 'size_t unsigned signed bool char wchar short int float double long void'.split()
C_FUNDAMENTAL += 'struct union enum'.split()
SubTypedefs = {} # TODO deprecate?
NAMESPACES = []
CLASSES = {}
STRUCTS = {}
def initextra(self):
self.typedefs = {}
self.typedefs_order = []
self.classes_order = []
self.structs = Resolver.STRUCTS
self.structs_order = []
self.namespaces = Resolver.NAMESPACES # save all namespaces
self.curStruct = None
self.stack = [] # full name stack, good idea to keep both stacks? (simple stack and full stack)
self._classes_brace_level = {} # class name : level
self._structs_brace_level = {} # struct type : level
self._method_body = None
self._forward_decls = []
self._template_typenames = [] # template<typename XXX>
def current_namespace(self): return self.cur_namespace(True)
def cur_namespace(self, add_double_colon=False):
rtn = ""
i = 0
while i < len(self.nameSpaces):
rtn += self.nameSpaces[i]
if add_double_colon or i < len(self.nameSpaces) - 1: rtn += "::"
i+=1
return rtn
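# Illustrative usage (assumes self.nameSpaces has been filled by the parser,
# e.g. self.nameSpaces == ['Ogre', 'Math']):
#   self.cur_namespace(True) -> 'Ogre::Math::'
#   self.cur_namespace()     -> 'Ogre::Math'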
def guess_ctypes_type( self, string ):
pointers = string.count('*')
string = string.replace('*','')
a = string.split()
if 'unsigned' in a: u = 'u'
else: u = ''
if 'long' in a and 'double' in a: b = 'longdouble' # there is no ctypes.c_ulongdouble (this is a 64bit float?)
elif a.count('long') == 2 and 'int' in a: b = '%sint64' %u
elif a.count('long') == 2: b = '%slonglong' %u
elif 'long' in a: b = '%slong' %u
elif 'double' in a: b = 'double' # no udouble in ctypes
elif 'short' in a: b = '%sshort' %u
elif 'char' in a: b = '%schar' %u
elif 'wchar' in a: b = 'wchar'
elif 'bool' in a: b = 'bool'
elif 'float' in a: b = 'float'
elif 'int' in a: b = '%sint' %u
elif 'int8' in a: b = 'int8'
elif 'int16' in a: b = 'int16'
elif 'int32' in a: b = 'int32'
elif 'int64' in a: b = 'int64'
elif 'uint' in a: b = 'uint'
elif 'uint8' in a: b = 'uint8'
elif 'uint16' in a: b = 'uint16'
elif 'uint32' in a: b = 'uint32'
elif 'uint64' in a: b = 'uint64'
elif 'size_t' in a: b = 'size_t'
elif 'void' in a: b = 'void_p'
elif string in 'struct union'.split(): b = 'void_p' # what should be done here? don't trust struct, it could be a class, no need to expose via ctypes
else: b = 'void_p'
if not pointers: return 'ctypes.c_%s' %b
else:
x = ''
for i in range(pointers): x += 'ctypes.POINTER('
x += 'ctypes.c_%s' %b
x += ')' * pointers
return x
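# Illustrative usage (hedged examples; the returned strings name ctypes types):
#   self.guess_ctypes_type('unsigned int *') -> 'ctypes.POINTER(ctypes.c_uint)'
#   self.guess_ctypes_type('long double')    -> 'ctypes.c_longdouble'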
def resolve_type( self, string, result ): # recursive
'''
keeps track of useful things like: how many pointers, number of typedefs, is fundamental or a class, etc...
'''
## be careful with templates, what is inside <something*> can be a pointer but the overall type is not a pointer
## these come before a template
s = string.split('<')[0]
result[ 'constant' ] += s.split().count('const')
result[ 'static' ] += s.split().count('static')
result[ 'mutable' ] = 'mutable' in s.split()
## these come after a template
s = string.split('>')[-1]
result[ 'pointer' ] += s.count('*')
result[ 'reference' ] += s.count('&')
x = string; alias = False
for a in '* & const static mutable'.split(): x = x.replace(a,'')
for y in x.split():
if y not in self.C_FUNDAMENTAL: alias = y; break
#if alias == 'class':
# result['class'] = result['name'] # forward decl of class
# result['forward_decl'] = True
if alias == '__extension__': result['fundamental_extension'] = True
elif alias:
result['aliases'].append( alias )
if alias in C99_NONSTANDARD:
result['type'] = C99_NONSTANDARD[ alias ]
result['typedef'] = alias
result['typedefs'] += 1
elif alias in self.typedefs:
result['typedefs'] += 1
result['typedef'] = alias
self.resolve_type( self.typedefs[alias], result )
elif alias in self.classes:
klass = self.classes[alias]; result['fundamental'] = False
result['class'] = klass
result['unresolved'] = False
else: result['unresolved'] = True
else:
result['fundamental'] = True
result['unresolved'] = False
def finalize_vars(self):
for s in CppStruct.Structs: # vars within structs can be ignored if they do not resolve
for var in s['fields']: var['parent'] = s['type']
#for c in self.classes.values():
# for var in c.get_all_properties(): var['parent'] = c['name']
## RESOLVE ##
for var in CppVariable.Vars:
self.resolve_type( var['type'], var )
#if 'method' in var and var['method']['name'] == '_notifyCurrentCamera': print(var); assert 0
# then find concrete type and best guess ctypes type #
for var in CppVariable.Vars:
if not var['aliases']: #var['fundamental']:
var['ctypes_type'] = self.guess_ctypes_type( var['type'] )
else:
var['unresolved'] = False # below may test to True
if var['class']:
var['ctypes_type'] = 'ctypes.c_void_p'
else:
assert var['aliases']
tag = var['aliases'][0]
klass = None
nestedEnum = None
nestedStruct = None
nestedTypedef = None
if 'method' in var and 'parent' in list(var['method'].keys()):
klass = var['method']['parent']
if tag in var['method']['parent']._public_enums:
nestedEnum = var['method']['parent']._public_enums[ tag ]
elif tag in var['method']['parent']._public_structs:
nestedStruct = var['method']['parent']._public_structs[ tag ]
elif tag in var['method']['parent']._public_typedefs:
nestedTypedef = var['method']['parent']._public_typedefs[ tag ]
if '<' in tag: # should also contain '>'
var['template'] = tag # do not resolve templates
var['ctypes_type'] = 'ctypes.c_void_p'
var['unresolved'] = True
elif nestedEnum:
enum = nestedEnum
if enum['type'] is int:
var['ctypes_type'] = 'ctypes.c_int'
var['raw_type'] = 'int'
elif enum['type'] is str:
var['ctypes_type'] = 'ctypes.c_char_p'
var['raw_type'] = 'char*'
var['enum'] = var['method']['path'] + '::' + enum['name']
var['fundamental'] = True
elif nestedStruct:
var['ctypes_type'] = 'ctypes.c_void_p'
var['raw_type'] = var['method']['path'] + '::' + nestedStruct['type']
var['fundamental'] = False
elif nestedTypedef:
var['fundamental'] = is_fundamental( nestedTypedef )
if not var['fundamental']:
var['raw_type'] = var['method']['path'] + '::' + tag
else:
_tag = tag
if '::' in tag and tag.split('::')[0] in self.namespaces: tag = tag.split('::')[-1]
con = self.concrete_typedef( _tag )
if con:
var['concrete_type'] = con
var['ctypes_type'] = self.guess_ctypes_type( var['concrete_type'] )
elif tag in self.structs:
trace_print( 'STRUCT', var )
var['struct'] = tag
var['ctypes_type'] = 'ctypes.c_void_p'
var['raw_type'] = self.structs[tag]['namespace'] + '::' + tag
elif tag in self._forward_decls:
var['forward_declared'] = tag
var['ctypes_type'] = 'ctypes.c_void_p'
elif tag in self.global_enums:
enum = self.global_enums[ tag ]
if enum['type'] is int:
var['ctypes_type'] = 'ctypes.c_int'
var['raw_type'] = 'int'
elif enum['type'] is str:
var['ctypes_type'] = 'ctypes.c_char_p'
var['raw_type'] = 'char*'
var['enum'] = enum['namespace'] + enum['name']
var['fundamental'] = True
elif var['parent']:
warning_print( 'WARN unresolved %s'%_tag)
var['ctypes_type'] = 'ctypes.c_void_p'
var['unresolved'] = True
elif tag.count('::')==1:
trace_print( 'trying to find nested something in', tag )
a = tag.split('::')[0]
b = tag.split('::')[-1]
if a in self.classes: # a::b is most likely something nested in a class
klass = self.classes[ a ]
if b in klass._public_enums:
trace_print( '...found nested enum', b )
enum = klass._public_enums[ b ]
if enum['type'] is int:
var['ctypes_type'] = 'ctypes.c_int'
var['raw_type'] = 'int'
elif enum['type'] is str:
var['ctypes_type'] = 'ctypes.c_char_p'
var['raw_type'] = 'char*'
try:
if 'method' in var: var['enum'] = var['method']['path'] + '::' + enum['name']
else: # class property
var['unresolved'] = True
except:
var['unresolved'] = True
var['fundamental'] = True
else: var['unresolved'] = True # TODO klass._public_xxx
elif a in self.namespaces: # a::b can also be a nested namespace
if b in self.global_enums:
enum = self.global_enums[ b ]
trace_print(enum)
trace_print(var)
assert 0
elif b in self.global_enums: # falling back, this is a bit ugly
enum = self.global_enums[ b ]
assert a in enum['namespace'].split('::')
if enum['type'] is int:
var['ctypes_type'] = 'ctypes.c_int'
var['raw_type'] = 'int'
elif enum['type'] is str:
var['ctypes_type'] = 'ctypes.c_char_p'
var['raw_type'] = 'char*'
var['fundamental'] = True
else: # boost::gets::crazy
trace_print('NAMESPACES', self.namespaces)
trace_print( a, b )
trace_print( '---- boost gets crazy ----' )
var['ctypes_type'] = 'ctypes.c_void_p'
var['unresolved'] = True
elif 'namespace' in var and self.concrete_typedef(var['namespace']+tag):
#print( 'TRYING WITH NS', var['namespace'] )
con = self.concrete_typedef( var['namespace']+tag )
if con:
var['typedef'] = var['namespace']+tag
var['type'] = con
if 'struct' in con.split():
var['raw_type'] = var['typedef']
var['ctypes_type'] = 'ctypes.c_void_p'
else:
self.resolve_type( var['type'], var )
var['ctypes_type'] = self.guess_ctypes_type( var['type'] )
elif '::' in var:
var['ctypes_type'] = 'ctypes.c_void_p'
var['unresolved'] = True
elif tag in self.SubTypedefs: # TODO remove SubTypedefs
if 'property_of_class' in var or 'property_of_struct' in var:
trace_print( 'class:', self.SubTypedefs[ tag ], 'tag:', tag )
var['typedef'] = self.SubTypedefs[ tag ] # class name
var['ctypes_type'] = 'ctypes.c_void_p'
else:
trace_print( "WARN-this should almost never happen!" )
trace_print( var ); trace_print('-'*80)
var['unresolved'] = True
elif tag in self._template_typenames:
var['typename'] = tag
var['ctypes_type'] = 'ctypes.c_void_p'
var['unresolved'] = True # TODO, how to deal with templates?
elif tag.startswith('_'): # assume starting with underscore is not important for wrapping
warning_print( 'WARN unresolved %s'%_tag)
var['ctypes_type'] = 'ctypes.c_void_p'
var['unresolved'] = True
else:
trace_print( 'WARN: unknown type', var )
assert 'property_of_class' in var or 'property_of_struct' in var # only allow this case
var['unresolved'] = True
## if not resolved and is a method param, not going to wrap these methods ##
if var['unresolved'] and 'method' in var: var['method']['unresolved_parameters'] = True
# create stripped raw_type #
p = '* & const static mutable'.split() # +++ new July7: "mutable"
for var in CppVariable.Vars:
if 'raw_type' not in var:
raw = []
for x in var['type'].split():
if x not in p: raw.append( x )
var['raw_type'] = ' '.join( raw )
#if 'AutoConstantEntry' in var['raw_type']: print(var); assert 0
if var['class']:
if '::' not in var['raw_type']:
if not var['class']['parent']:
var['raw_type'] = var['class']['namespace'] + '::' + var['raw_type']
elif var['class']['parent'] in self.classes:
parent = self.classes[ var['class']['parent'] ]
var['raw_type'] = parent['namespace'] + '::' + var['class']['name'] + '::' + var['raw_type']
else:
var['unresolved'] = True
elif '::' in var['raw_type'] and var['raw_type'].split('::')[0] not in self.namespaces:
var['raw_type'] = var['class']['namespace'] + '::' + var['raw_type']
else:
var['unresolved'] = True
elif 'forward_declared' in var and 'namespace' in var:
if '::' not in var['raw_type']:
var['raw_type'] = var['namespace'] + var['raw_type']
elif '::' in var['raw_type'] and var['raw_type'].split('::')[0] in self.namespaces:
pass
else: trace_print('-'*80); trace_print(var); raise NotImplementedError
## need full name space for classes in raw type ##
if var['raw_type'].startswith( '::' ):
#print(var)
#print('NAMESPACE', var['class']['namespace'])
#print( 'PARENT NS', var['class']['parent']['namespace'] )
#assert 0
var['unresolved'] = True
if 'method' in var: var['method']['unresolved_parameters'] = True
#var['raw_type'] = var['raw_type'][2:]
# Take care of #defines and #pragmas etc
trace_print("Processing precomp_macro_buf: %s"%self._precomp_macro_buf)
for m in self._precomp_macro_buf:
macro = m.replace("<CppHeaderParser_newline_temp_replacement>\\n", "\n")
try:
if macro.lower().startswith("#define"):
trace_print("Adding #define %s"%macro)
self.defines.append(macro.split(" ", 1)[1].strip())
elif macro.lower().startswith("#if") or macro.lower().startswith("#endif") or macro.lower().startswith("#else"):
self.conditionals.append(macro)
elif macro.lower().startswith("#pragma"):
trace_print("Adding #pragma %s"%macro)
self.pragmas.append(macro.split(" ", 1)[1].strip())
elif macro.lower().startswith("#include"):
trace_print("Adding #include %s"%macro)
self.includes.append(macro.split(" ", 1)[1].strip())
else:
debug_print("Cant detect what to do with precomp macro '%s'"%macro)
except: pass
self._precomp_macro_buf = None
def concrete_typedef( self, key ):
if key not in self.typedefs:
#print( 'FAILED typedef', key )
return None
while key in self.typedefs:
prev = key
key = self.typedefs[ key ]
if '<' in key or '>' in key: return prev # stop at template
if key.startswith('std::'): return key # stop at std lib
return key
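# Example (illustrative): with self.typedefs == {'IntAlias': 'int', 'Str': 'std::string'},
# concrete_typedef('IntAlias') follows the chain and returns 'int', while
# concrete_typedef('Str') stops at the std lib type and returns 'std::string'.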
class _CppHeader( Resolver ):
def finalize(self):
self.finalize_vars()
# finalize classes and method returns types
for cls in list(self.classes.values()):
for meth in cls.get_all_methods():
if meth['pure_virtual']: cls['abstract'] = True
if not meth['returns_fundamental'] and meth['returns'] in C99_NONSTANDARD:
meth['returns'] = C99_NONSTANDARD[meth['returns']]
meth['returns_fundamental'] = True
elif not meth['returns_fundamental']: # describe the return type
con = None
if cls['namespace'] and '::' not in meth['returns']:
con = self.concrete_typedef( cls['namespace'] + '::' + meth['returns'] )
else: con = self.concrete_typedef( meth['returns'] )
if con:
meth['returns_concrete'] = con
meth['returns_fundamental'] = is_fundamental( con )
elif meth['returns'] in self.classes:
trace_print( 'meth returns class:', meth['returns'] )
meth['returns_class'] = True
elif meth['returns'] in self.SubTypedefs:
meth['returns_class'] = True
meth['returns_nested'] = self.SubTypedefs[ meth['returns'] ]
elif meth['returns'] in cls._public_enums:
enum = cls._public_enums[ meth['returns'] ]
meth['returns_enum'] = enum['type']
meth['returns_fundamental'] = True
if enum['type'] == int: meth['returns'] = 'int'
else: meth['returns'] = 'char*'
elif meth['returns'] in self.global_enums:
enum = self.global_enums[ meth['returns'] ]
meth['returns_enum'] = enum['type']
meth['returns_fundamental'] = True
if enum['type'] == int: meth['returns'] = 'int'
else: meth['returns'] = 'char*'
elif meth['returns'].count('::')==1:
trace_print( meth )
a,b = meth['returns'].split('::')
if a in self.namespaces:
if b in self.classes:
klass = self.classes[ b ]
meth['returns_class'] = a + '::' + b
elif '<' in b and '>' in b:
warning_print( 'WARN-can not return template: %s'%b )
meth['returns_unknown'] = True
elif b in self.global_enums:
enum = self.global_enums[ b ]
meth['returns_enum'] = enum['type']
meth['returns_fundamental'] = True
if enum['type'] == int: meth['returns'] = 'int'
else: meth['returns'] = 'char*'
else: trace_print( a, b); trace_print( meth); meth['returns_unknown'] = True # +++
elif a in self.classes:
klass = self.classes[ a ]
if b in klass._public_enums:
trace_print( '...found nested enum', b )
enum = klass._public_enums[ b ]
meth['returns_enum'] = enum['type']
meth['returns_fundamental'] = True
if enum['type'] == int: meth['returns'] = 'int'
else: meth['returns'] = 'char*'
elif b in klass._public_forward_declares:
meth['returns_class'] = True
elif b in klass._public_typedefs:
typedef = klass._public_typedefs[ b ]
meth['returns_fundamental'] = is_fundamental( typedef )
else:
trace_print( meth ) # should be a nested class, TODO fix me.
meth['returns_unknown'] = True
elif '::' in meth['returns']:
trace_print('TODO namespace or extra nested return:', meth)
meth['returns_unknown'] = True
else:
trace_print( 'WARN: UNKNOWN RETURN', meth['name'], meth['returns'])
meth['returns_unknown'] = True
if meth["returns"].startswith(": : "):
meth["returns"] = meth["returns"].replace(": : ", "::")
for cls in list(self.classes.values()):
methnames = cls.get_all_method_names()
pvm = cls.get_all_pure_virtual_methods()
for d in cls['inherits']:
c = d['class']
a = d['access'] # do not depend on this to be 'public'
trace_print( 'PARENT CLASS:', c )
if c not in self.classes: trace_print('WARN: parent class not found')
if c in self.classes and self.classes[c]['abstract']:
p = self.classes[ c ]
for meth in p.get_all_methods(): #p["methods"]["public"]:
trace_print( '\t\tmeth', meth['name'], 'pure virtual', meth['pure_virtual'] )
if meth['pure_virtual'] and meth['name'] not in methnames: cls['abstract'] = True; break
def evaluate_struct_stack(self):
"""Create a Struct out of the name stack (but not its parts)"""
#print( 'eval struct stack', self.nameStack )
#if self.braceDepth != len(self.nameSpaces): return
struct = CppStruct(self.nameStack)
struct["namespace"] = self.cur_namespace()
self.structs[ struct['type'] ] = struct
self.structs_order.append( struct )
if self.curClass:
struct['parent'] = self.curClass
klass = self.classes[ self.curClass ]
klass['structs'][self.curAccessSpecifier].append( struct )
if self.curAccessSpecifier == 'public': klass._public_structs[ struct['type'] ] = struct
self.curStruct = struct
self._structs_brace_level[ struct['type'] ] = self.braceDepth
def parse_method_type( self, stack ):
trace_print( 'meth type info', stack )
if stack[0] in ':;' and stack[1] != ':': stack = stack[1:]
info = {
'debug': ' '.join(stack).replace(' : : ', '::' ).replace(' < ', '<' ).replace(' > ', '> ' ).replace(" >",">").replace(">>", "> >").replace(">>", "> >"),
'class':None,
'namespace':self.cur_namespace(add_double_colon=True),
}
for tag in 'defined pure_virtual operator constructor destructor extern template virtual static explicit inline friend returns returns_pointer returns_fundamental returns_class'.split(): info[tag]=False
header = stack[ : stack.index('(') ]
header = ' '.join( header )
header = header.replace(' : : ', '::' )
header = header.replace(' < ', '<' )
header = header.replace(' > ', '> ' )
header = header.strip()
if '{' in stack:
info['defined'] = True
self._method_body = self.braceDepth + 1
trace_print( 'NEW METHOD WITH BODY', self.braceDepth )
elif stack[-1] == ';':
info['defined'] = False
self._method_body = None # not a great idea to be clearing here
else: assert 0
if len(stack) > 3 and stack[-1] == ';' and stack[-2] == '0' and stack[-3] == '=':
info['pure_virtual'] = True
r = header.split()
name = None
if 'operator' in stack: # rare case op overload defined outside of class
op = stack[ stack.index('operator')+1 : stack.index('(') ]
op = ''.join(op)
if not op:
if " ".join(['operator', '(', ')', '(']) in " ".join(stack):
op = "()"
else:
trace_print( 'Error parsing operator')
return None
info['operator'] = op
name = 'operator' + op
a = stack[ : stack.index('operator') ]
elif r:
name = r[-1]
a = r[ : -1 ] # strip name
if name is None: return None
#if name.startswith('~'): name = name[1:]
while a and a[0] == '}': # strip - can have multiple } }
a = a[1:]
if '::' in name:
#klass,name = name.split('::') # methods can be defined outside of class
klass = name[ : name.rindex('::') ]
name = name.split('::')[-1]
info['class'] = klass
if klass in self.classes and not self.curClass:
#Class function defined outside the class
return None
# info['name'] = name
#else: info['name'] = name
if name.startswith('~'):
info['destructor'] = True
name = name[1:]
elif not a or (name == self.curClass and len(self.curClass)):
info['constructor'] = True
info['name'] = name
for tag in 'extern virtual static explicit inline friend'.split():
if tag in a: info[ tag ] = True; a.remove( tag ) # inplace
if 'template' in a:
a.remove('template')
b = ' '.join( a )
if '>' in b:
info['template'] = b[ : b.index('>')+1 ]
info['returns'] = b[ b.index('>')+1 : ] # find return type, could be incorrect... TODO
if '<typename' in info['template'].split():
typname = info['template'].split()[-1]
typname = typname[ : -1 ] # strip '>'
if typname not in self._template_typenames: self._template_typenames.append( typname )
else: info['returns'] = ' '.join( a )
else: info['returns'] = ' '.join( a )
info['returns'] = info['returns'].replace(' <', '<').strip()
## be careful with templates, do not count pointers inside template
info['returns_pointer'] = info['returns'].split('>')[-1].count('*')
if info['returns_pointer']: info['returns'] = info['returns'].replace('*','').strip()
info['returns_reference'] = '&' in info['returns']
if info['returns']: info['returns'] = info['returns'].replace('&','').strip()
a = []
for b in info['returns'].split():
if b == '__const__': info['returns_const'] = True
elif b == 'const': info['returns_const'] = True
else: a.append( b )
info['returns'] = ' '.join( a )
info['returns_fundamental'] = is_fundamental( info['returns'] )
return info
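# Example (illustrative): for the token stack of "virtual int getSize();"
# the returned info includes {'name': 'getSize', 'virtual': True,
# 'returns': 'int', 'returns_fundamental': True, 'defined': False}.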
def evaluate_method_stack(self):
"""Create a method out of the name stack"""
if self.curStruct:
trace_print( 'WARN - struct contains methods - skipping' )
trace_print( self.stack )
assert 0
info = self.parse_method_type( self.stack )
if info:
if info[ 'class' ] and info['class'] in self.classes: # case where methods are defined outside of class
newMethod = CppMethod(self.nameStack, info['name'], info, self.curTemplate)
klass = self.classes[ info['class'] ]
klass[ 'methods' ][ 'public' ].append( newMethod )
newMethod['parent'] = klass
if klass['namespace']: newMethod['path'] = klass['namespace'] + '::' + klass['name']
else: newMethod['path'] = klass['name']
elif self.curClass: # normal case
newMethod = CppMethod(self.nameStack, self.curClass, info, self.curTemplate)
klass = self.classes[self.curClass]
klass['methods'][self.curAccessSpecifier].append(newMethod)
newMethod['parent'] = klass
if klass['namespace']: newMethod['path'] = klass['namespace'] + '::' + klass['name']
else: newMethod['path'] = klass['name']
else: #non class functions
debug_print("FREE FUNCTION")
newMethod = CppMethod(self.nameStack, None, info, self.curTemplate)
self.functions.append(newMethod)
global parseHistory
parseHistory.append({"braceDepth": self.braceDepth, "item_type": "method", "item": newMethod})
else:
trace_print( 'free function?', self.nameStack )
self.stack = []
def _parse_typedef( self, stack, namespace='' ):
if not stack or 'typedef' not in stack: return
stack = list( stack ) # copy just to be safe
if stack[-1] == ';': stack.pop()
while stack and stack[-1].isdigit(): stack.pop() # throw away array size for now
idx = stack.index('typedef')
if stack[-1] == "]":
try:
name = namespace + "".join(stack[-4:])
# Strip off the array part so the rest of the parsing is better
stack = stack[:-3]
except:
name = namespace + stack[-1]
else:
name = namespace + stack[-1]
s = ''
for a in stack[idx+1:-1]:
if a == '{': break
if not s or s[-1] in ':<>' or a in ':<>': s += a # keep compact
else: s += ' ' + a # spacing
r = {'name':name, 'raw':s, 'type':s}
if not is_fundamental(s):
if 'struct' in s.split(): pass # TODO is this right? "struct ns::something"
elif '::' not in s: s = namespace + s # only add the current name space if no namespace given
r['type'] = s
if s: return r
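# Example (illustrative): for the token stack of "typedef unsigned int uint32;"
# this returns {'name': 'uint32', 'raw': 'unsigned int', 'type': 'unsigned int'}.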
def evaluate_typedef(self):
ns = self.cur_namespace(add_double_colon=True)
res = self._parse_typedef( self.stack, ns )
if res:
name = res['name']
self.typedefs[ name ] = res['type']
if name not in self.typedefs_order: self.typedefs_order.append( name )
def evaluate_property_stack(self):
"""Create a Property out of the name stack"""
global parseHistory
assert self.stack[-1] == ';'
debug_print( "trace" )
if self.nameStack[0] == 'typedef':
if self.curClass:
typedef = self._parse_typedef( self.stack )
name = typedef['name']
klass = self.classes[ self.curClass ]
klass[ 'typedefs' ][ self.curAccessSpecifier ].append( name )
if self.curAccessSpecifier == 'public': klass._public_typedefs[ name ] = typedef['type']
Resolver.SubTypedefs[ name ] = self.curClass
else: assert 0
elif self.curStruct or self.curClass:
if len(self.nameStack) == 1:
#See if we can de-anonymize the type
filteredParseHistory = [h for h in parseHistory if h["braceDepth"] == self.braceDepth]
if len(filteredParseHistory) and filteredParseHistory[-1]["item_type"] == "class":
self.nameStack.insert(0, filteredParseHistory[-1]["item"]["name"])
debug_print("DEANONYMOIZING %s to type '%s'"%(self.nameStack[1], self.nameStack[0]))
if "," in self.nameStack: #Maybe we have a variable list
#Figure out which comma separates the variables, but remember templates and function pointers
#First find the left-most comma outside of any '>' and ')'
leftMostComma = 0
for i in range(0, len(self.nameStack)):
name = self.nameStack[i]
if name in (">", ")"): leftMostComma = 0
if leftMostComma == 0 and name == ",": leftMostComma = i
# Is it really a list of variables?
if leftMostComma != 0:
trace_print("Multiple variables for namestack in %s. Separating processing"%self.nameStack)
orig_nameStack = self.nameStack[:]
orig_stack = self.stack[:]
type_nameStack = orig_nameStack[:leftMostComma-1]
for name in orig_nameStack[leftMostComma - 1::2]:
self.nameStack = type_nameStack + [name]
self.stack = orig_stack[:] # Not maintained while mucking with it, but it doesn't matter on this path
self.evaluate_property_stack()
return
newVar = CppVariable(self.nameStack)
newVar['namespace'] = self.current_namespace()
if self.curStruct:
self.curStruct[ 'fields' ].append( newVar )
newVar['property_of_struct'] = self.curStruct
elif self.curClass:
klass = self.classes[self.curClass]
klass["properties"][self.curAccessSpecifier].append(newVar)
newVar['property_of_class'] = klass['name']
parseHistory.append({"braceDepth": self.braceDepth, "item_type": "variable", "item": newVar})
else:
debug_print( "Found Global variable" )
newVar = CppVariable(self.nameStack)
self.variables.append(newVar)
self.stack = [] # CLEAR STACK
def evaluate_class_stack(self):
"""Create a Class out of the name stack (but not its parts)"""
#don't support subclasses today
#print( 'eval class stack', self.nameStack )
parent = self.curClass
if self.braceDepth > len( self.nameSpaces) and parent:
trace_print( 'HIT NESTED SUBCLASS' )
self.accessSpecifierStack.append(self.curAccessSpecifier)
elif self.braceDepth != len(self.nameSpaces):
error_print( 'ERROR: WRONG BRACE DEPTH' )
return
# When dealing with typedefed structs, get rid of typedef keyword to handle later on
if self.nameStack[0] == "typedef":
del self.nameStack[0]
if len(self.nameStack) == 1:
self.anon_struct_counter += 1
# We can't handle more than 1 anonymous struct, so name them uniquely
self.nameStack.append("<anon-struct-%d>"%self.anon_struct_counter)
if self.nameStack[0] == "class":
self.curAccessSpecifier = 'private'
else:#struct
self.curAccessSpecifier = 'public'
debug_print("curAccessSpecifier changed/defaulted to %s"%self.curAccessSpecifier)
if self.nameStack[0] == "union":
newClass = CppUnion(self.nameStack)
self.anon_union_counter = [self.braceDepth, 2]
trace_print( 'NEW UNION', newClass['name'] )
else:
newClass = CppClass(self.nameStack, self.curTemplate)
trace_print( 'NEW CLASS', newClass['name'] )
newClass["declaration_method"] = self.nameStack[0]
self.classes_order.append( newClass ) # good idea to save ordering
self.stack = [] # fixes if class declared with ';' in closing brace
if parent:
newClass["namespace"] = self.classes[ parent ]['namespace'] + '::' + parent
newClass['parent'] = parent
self.classes[ parent ]['nested_classes'].append( newClass )
## supports nested classes with the same name ##
self.curClass = key = parent+'::'+newClass['name']
self._classes_brace_level[ key ] = self.braceDepth
elif newClass['parent']: # nested class defined outside of parent. A::B {...}
parent = newClass['parent']
newClass["namespace"] = self.classes[ parent ]['namespace'] + '::' + parent
self.classes[ parent ]['nested_classes'].append( newClass )
## supports nested classes with the same name ##
self.curClass = key = parent+'::'+newClass['name']
self._classes_brace_level[ key ] = self.braceDepth
else:
newClass["namespace"] = self.cur_namespace()
key = newClass['name']
self.curClass = newClass["name"]
self._classes_brace_level[ newClass['name'] ] = self.braceDepth
if not key.endswith("::") and not key.endswith(" ") and len(key) != 0:
if key in self.classes:
trace_print( 'ERROR name collision:', key )
self.classes[key].show()
trace_print('-'*80)
newClass.show()
assert key not in self.classes # namespace collision
self.classes[ key ] = newClass
global parseHistory
parseHistory.append({"braceDepth": self.braceDepth, "item_type": "class", "item": newClass})
def evaluate_forward_decl(self):
trace_print( 'FORWARD DECL', self.nameStack )
assert self.nameStack[0] in ('class', 'struct')
name = self.nameStack[-1]
if self.curClass:
klass = self.classes[ self.curClass ]
klass['forward_declares'][self.curAccessSpecifier].append( name )
if self.curAccessSpecifier == 'public': klass._public_forward_declares.append( name )
else: self._forward_decls.append( name )
class CppHeader( _CppHeader ):
"""Parsed C++ class header
Variables produced:
self.classes - Dictionary of classes found in a given header file where the
key is the name of the class
"""
IGNORE_NAMES = '__extension__'.split()
def show(self):
for className in list(self.classes.keys()):self.classes[className].show()
def __init__(self, headerFileName, argType="file", **kwargs):
"""Create the parsed C++ header file parse tree
headerFileName - Name of the file to parse OR actual file contents (depends on argType)
argType - Indicates how to interpret headerFileName as a file string or file name
kwargs - Supports the following keywords
"""
## reset global state ##
global doxygenCommentCache
doxygenCommentCache = ""
CppVariable.Vars = []
CppStruct.Structs = []
if (argType == "file"):
self.headerFileName = os.path.expandvars(headerFileName)
self.mainClass = os.path.split(self.headerFileName)[1][:-2]
headerFileStr = ""
elif argType == "string":
self.headerFileName = ""
self.mainClass = "???"
headerFileStr = headerFileName
else:
raise Exception("Arg type must be either file or string")
self.curClass = ""
# nested classes have parent::nested, but no extra namespace,
# this keeps the API compatible, TODO proper namespace for everything.
Resolver.CLASSES = {}
self.classes = Resolver.CLASSES
#Functions that are not part of a class
self.functions = []
self.pragmas = []
self.defines = []
self.includes = []
self.conditionals = []
self._precomp_macro_buf = [] #for internal purposes, will end up filling out pragmras and defines at the end
self.enums = []
self.variables = []
self.global_enums = {}
self.nameStack = []
self.nameSpaces = []
self.curAccessSpecifier = 'private' # private is default
self.curTemplate = None
self.accessSpecifierStack = []
self.accessSpecifierScratch = []
debug_print("curAccessSpecifier changed/defaulted to %s"%self.curAccessSpecifier)
self.initextra()
# Old namestacks for a given level
self.nameStackHistory = []
self.anon_struct_counter = 0
self.anon_union_counter = [-1, 0]
self.templateRegistry = []
if (len(self.headerFileName)):
fd = open(self.headerFileName)
headerFileStr = "".join(fd.readlines())
fd.close()
# Make sure supportedAccessSpecifier are sane
for i in range(0, len(supportedAccessSpecifier)):
if " " not in supportedAccessSpecifier[i]: continue
supportedAccessSpecifier[i] = re.sub("[ ]+", " ", supportedAccessSpecifier[i]).strip()
# Strip out template declarations
templateSectionsToSliceOut = []
try:
for m in re.finditer("template[\t ]*<[^>]*>", headerFileStr):
start = m.start()
# Search for the final '>' which may or may not be caught in the case of nested <>'s
for i in range(start, len(headerFileStr)):
if headerFileStr[i] == '<':
firstBracket = i
break
ltgtStackCount = 1
#Now look for the final '>'
for i in range(firstBracket + 1, len(headerFileStr)):
if headerFileStr[i] == '<':
ltgtStackCount += 1
elif headerFileStr[i] == '>':
ltgtStackCount -= 1
if ltgtStackCount == 0:
end = i
break
templateSectionsToSliceOut.append((start, end))
# Now strip out all instances of the template
templateSectionsToSliceOut.reverse()
for tslice in templateSectionsToSliceOut:
# Replace the template symbol with a single symbol
template_symbol="CppHeaderParser_template_%d"%len(self.templateRegistry)
self.templateRegistry.append(headerFileStr[tslice[0]: tslice[1]+1])
newlines = headerFileStr[tslice[0]: tslice[1]].count("\n") * "\n" #Keep line numbers the same
headerFileStr = headerFileStr[:tslice[0]] + newlines + " " + template_symbol + " " + headerFileStr[tslice[1] + 1:]
except:
pass
# Change multi-line #defines and expressions to single lines, maintaining line numbers
# Based from http://stackoverflow.com/questions/2424458/regular-expression-to-match-cs-multiline-preprocessor-statements
matches = re.findall(r'(?m)^(?:.*\\\r?\n)+.*$', headerFileStr)
is_define = re.compile(r'[ \t\v]*#[Dd][Ee][Ff][Ii][Nn][Ee]')
for m in matches:
#Keep the newlines so that the line count doesn't break
num_newlines = len([a for a in m if a=="\n"])
if is_define.match(m):
new_m = m.replace("\n", "<CppHeaderParser_newline_temp_replacement>\\n")
else:
# Just expression taking up multiple lines, make it take 1 line for easier parsing
new_m = m.replace("\\\n", " ")
if (num_newlines > 0):
new_m += "\n"*(num_newlines)
headerFileStr = headerFileStr.replace(m, new_m)
#Filter out Extern "C" statements. These are order dependent
matches = re.findall(re.compile(r'extern[\t ]+"[Cc]"[\t \n\r]*{', re.DOTALL), headerFileStr)
for m in matches:
#Keep the newlines so that the line count doesn't break
num_newlines = len([a for a in m if a=="\n"])
headerFileStr = headerFileStr.replace(m, "\n" * num_newlines)
headerFileStr = re.sub(r'extern[ ]+"[Cc]"[ ]*', "", headerFileStr)
#Filter out any ignore symbols that end with "()" to account for #define magic functions
for ignore in ignoreSymbols:
if not ignore.endswith("()"): continue
while True:
locStart = headerFileStr.find(ignore[:-1])
if locStart == -1:
break
locEnd = None
#Now walk till we find the last paren and account for sub parens
parenCount = 1
inQuotes = False
for i in range(locStart + len(ignore) - 1, len(headerFileStr)):
c = headerFileStr[i]
if not inQuotes:
if c == "(":
parenCount += 1
elif c == ")":
parenCount -= 1
elif c == '"':
inQuotes = True
if parenCount == 0:
locEnd = i + 1
break
else:
if c == '"' and headerFileStr[i-1] != '\\':
inQuotes = False
if locEnd:
#Strip it out but keep the linecount the same so line numbers are right
match_str = headerFileStr[locStart:locEnd]
debug_print("Striping out '%s'"%match_str)
num_newlines = len([a for a in match_str if a=="\n"])
headerFileStr = headerFileStr.replace(headerFileStr[locStart:locEnd], "\n"*num_newlines)
self.braceDepth = 0
lex.lex()
lex.input(headerFileStr)
global curLine
global curChar
curLine = 0
curChar = 0
try:
while True:
tok = lex.token()
if not tok: break
if self.anon_union_counter[0] == self.braceDepth and self.anon_union_counter[1]:
self.anon_union_counter[1] -= 1
tok.value = TagStr(tok.value, lineno=tok.lineno)
#debug_print("TOK: %s"%tok)
if tok.type == 'NAME' and tok.value in self.IGNORE_NAMES: continue
if tok.type != 'TEMPLATE_NAME':
self.stack.append( tok.value )
curLine = tok.lineno
curChar = tok.lexpos
if (tok.type in ('PRECOMP_MACRO', 'PRECOMP_MACRO_CONT')):
debug_print("PRECOMP: %s"%tok)
self._precomp_macro_buf.append(tok.value)
self.stack = []
self.nameStack = []
continue
if tok.type == 'TEMPLATE_NAME':
try:
templateId = int(tok.value.replace("CppHeaderParser_template_",""))
self.curTemplate = self.templateRegistry[templateId]
except: pass
if (tok.type == 'OPEN_BRACE'):
if len(self.nameStack) >= 2 and is_namespace(self.nameStack): # namespace {} with no name used in boost, this sets default?
if self.nameStack[1] == "__IGNORED_NAMESPACE__CppHeaderParser__":#Used in filtering extern "C"
self.nameStack[1] = ""
self.nameSpaces.append(self.nameStack[1])
ns = self.cur_namespace(); self.stack = []
if ns not in self.namespaces: self.namespaces.append( ns )
# Detect special condition of macro magic before class declaration so we
# can filter it out
if 'class' in self.nameStack and self.nameStack[0] != 'class':
classLocationNS = self.nameStack.index("class")
classLocationS = self.stack.index("class")
if "(" not in self.nameStack[classLocationNS:]:
debug_print("keyword 'class' found in unexpected location in nameStack, must be following #define magic. Process that before moving on")
origNameStack = self.nameStack
origStack = self.stack
#Process first part of stack which is probably #define macro magic and may cause issues
self.nameStack = self.nameStack[:classLocationNS]
self.stack = self.stack[:classLocationS]
try:
self.evaluate_stack()
except:
debug_print("Error processing #define magic... Oh well")
#Process rest of stack
self.nameStack = origNameStack[classLocationNS:]
self.stack = origStack[classLocationS:]
if len(self.nameStack) and not is_enum_namestack(self.nameStack):
self.evaluate_stack()
else:
self.nameStack.append(tok.value)
if self.stack and self.stack[0] == 'class': self.stack = []
self.braceDepth += 1
elif (tok.type == 'CLOSE_BRACE'):
if self.braceDepth == 0:
continue
if (self.braceDepth == len(self.nameSpaces)):
tmp = self.nameSpaces.pop()
self.stack = [] # clear stack when namespace ends?
if len(self.nameStack) and is_enum_namestack(self.nameStack):
self.nameStack.append(tok.value)
elif self.braceDepth < 10:
self.evaluate_stack()
else:
self.nameStack = []
self.braceDepth -= 1
#self.stack = []; print 'BRACE DEPTH', self.braceDepth, 'NS', len(self.nameSpaces)
if self.curClass: debug_print( 'CURBD %s'%self._classes_brace_level[ self.curClass ] )
if (self.braceDepth == 0) or (self.curClass and self._classes_brace_level[self.curClass]==self.braceDepth):
trace_print( 'END OF CLASS DEF' )
if self.accessSpecifierStack:
self.curAccessSpecifier = self.accessSpecifierStack[-1]
self.accessSpecifierStack = self.accessSpecifierStack[:-1]
if self.curClass and self.classes[ self.curClass ]['parent']: self.curClass = self.classes[ self.curClass ]['parent']
else: self.curClass = ""; #self.curStruct = None
self.stack = []
#if self.curStruct: self.curStruct = None
if self.braceDepth == 0 or (self.curStruct and self._structs_brace_level[self.curStruct['type']]==self.braceDepth):
trace_print( 'END OF STRUCT DEF' )
self.curStruct = None
if self._method_body and (self.braceDepth + 1) <= self._method_body:
self._method_body = None; self.stack = []; self.nameStack = []; trace_print( 'FORCE CLEAR METHBODY' )
if (tok.type == 'OPEN_PAREN'):
self.nameStack.append(tok.value)
elif (tok.type == 'CLOSE_PAREN'):
self.nameStack.append(tok.value)
elif (tok.type == 'OPEN_SQUARE_BRACKET'):
self.nameStack.append(tok.value)
elif (tok.type == 'CLOSE_SQUARE_BRACKET'):
self.nameStack.append(tok.value)
elif (tok.type == 'TAB'): pass
elif (tok.type == 'EQUALS'):
self.nameStack.append(tok.value)
elif (tok.type == 'COMMA'):
self.nameStack.append(tok.value)
elif (tok.type == 'BACKSLASH'):
self.nameStack.append(tok.value)
elif (tok.type == 'DIVIDE'):
self.nameStack.append(tok.value)
elif (tok.type == 'PIPE'):
self.nameStack.append(tok.value)
elif (tok.type == 'PERCENT'):
self.nameStack.append(tok.value)
elif (tok.type == 'CARET'):
self.nameStack.append(tok.value)
elif (tok.type == 'EXCLAMATION'):
self.nameStack.append(tok.value)
elif (tok.type == 'SQUOTE'): pass
elif (tok.type == 'NUMBER' or tok.type == 'FLOAT_NUMBER'):
self.nameStack.append(tok.value)
elif (tok.type == 'MINUS'):
self.nameStack.append(tok.value)
elif (tok.type == 'PLUS'):
self.nameStack.append(tok.value)
elif (tok.type == 'STRING_LITERAL'):
self.nameStack.append(tok.value)
elif (tok.type == 'NAME' or tok.type == 'AMPERSTAND' or tok.type == 'ASTERISK' or tok.type == 'CHAR_LITERAL'):
if tok.value in ignoreSymbols:
debug_print("Ignore symbol %s"%tok.value)
elif (tok.value == 'class'):
self.nameStack.append(tok.value)
elif tok.value in supportedAccessSpecifier:
if len(self.nameStack) and self.nameStack[0] in ("class", "struct", "union"):
self.nameStack.append(tok.value)
elif self.braceDepth == len(self.nameSpaces) + 1 or self.braceDepth == (len(self.nameSpaces) + len(self.curClass.split("::"))):
self.curAccessSpecifier = tok.value
self.accessSpecifierScratch.append(tok.value)
debug_print("curAccessSpecifier updated to %s"%self.curAccessSpecifier)
self.stack = []
else:
self.nameStack.append(tok.value)
if self.anon_union_counter[0] == self.braceDepth:
self.anon_union_counter = [-1, 0]
elif (tok.type == 'COLON'):
#Don't want colon to be first in stack
if len(self.nameStack) == 0:
self.accessSpecifierScratch = []
continue
# Handle situation where access specifiers can be multi words such as "public slots"
jns = " ".join(self.accessSpecifierScratch + self.nameStack)
if jns in supportedAccessSpecifier:
self.curAccessSpecifier = jns
debug_print("curAccessSpecifier updated to %s"%self.curAccessSpecifier)
self.stack = []
self.nameStack = []
else:
self.nameStack.append(tok.value)
self.accessSpecifierScratch = []
elif (tok.type == 'SEMI_COLON'):
if self.anon_union_counter[0] == self.braceDepth and self.anon_union_counter[1]:
debug_print("Creating anonymous union")
#Force the processing of an anonymous union
saved_namestack = self.nameStack[:]
saved_stack = self.stack[:]
self.nameStack = [""]
self.stack = self.nameStack + [";"]
self.nameStack = self.nameStack[0:1]
debug_print("pre eval anon stack")
self.evaluate_stack( tok.type )
debug_print("post eval anon stack")
self.nameStack = saved_namestack
self.stack = saved_stack
self.anon_union_counter = [-1, 0]
if (self.braceDepth < 10): self.evaluate_stack( tok.type )
self.stack = []
self.nameStack = []
except:
if (debug): raise
raise CppParseError("Not able to parse %s on line %d evaluating \"%s\"\nError around: %s"
% (self.headerFileName, tok.lineno, tok.value, " ".join(self.nameStack)))
self.finalize()
global parseHistory
parseHistory = []
# Delete some temporary variables
for key in ["_precomp_macro_buf", "nameStack", "nameSpaces", "curAccessSpecifier", "accessSpecifierStack",
"accessSpecifierScratch", "nameStackHistory", "anon_struct_counter", "anon_union_counter",
"_classes_brace_level", "_forward_decls", "stack", "mainClass", "curStruct", "_template_typenames",
"_method_body", "braceDepth", "_structs_brace_level", "typedefs_order", "curTemplate", "templateRegistry"]:
del self.__dict__[key]
def evaluate_stack(self, token=None):
"""Evaluates the current name stack"""
global doxygenCommentCache
self.nameStack = filter_out_attribute_keyword(self.nameStack)
self.stack = filter_out_attribute_keyword(self.stack)
nameStackCopy = self.nameStack[:]
debug_print( "Evaluating stack %s\n BraceDepth: %s (called from %d)" %(self.nameStack,self.braceDepth, inspect.currentframe().f_back.f_lineno))
#Handle special case of overloading operator ()
if "operator()(" in "".join(self.nameStack):
operator_index = self.nameStack.index("operator")
self.nameStack.pop(operator_index + 2)
self.nameStack.pop(operator_index + 1)
self.nameStack[operator_index] = "operator()"
if (len(self.curClass)):
debug_print( "%s (%s) "%(self.curClass, self.curAccessSpecifier))
else:
debug_print( "<anonymous> (%s) "%self.curAccessSpecifier)
#Filter special case of array with casting in it
try:
bracePos = self.nameStack.index("[")
parenPos = self.nameStack.index("(")
if bracePos == parenPos - 1:
endParen = self.nameStack.index(")")
self.nameStack = self.nameStack[:bracePos + 1] + self.nameStack[endParen + 1:]
debug_print("Filtered namestack to=%s"%self.nameStack)
except: pass
#if 'typedef' in self.nameStack: self.evaluate_typedef() # allows nested typedefs, probably a bad idea
if (not self.curClass and 'typedef' in self.nameStack and
(('struct' not in self.nameStack and 'union' not in self.nameStack) or self.stack[-1] == ";") and
not is_enum_namestack(self.nameStack)):
trace_print('STACK', self.stack)
self.evaluate_typedef()
return
elif (len(self.nameStack) == 0):
debug_print( "trace" )
debug_print( "(Empty Stack)" )
return
elif (self.nameStack[0] == "namespace"):
#Taken care of outside of here
pass
elif len(self.nameStack) == 2 and self.nameStack[0] == "friend":#friend class declaration
pass
elif len(self.nameStack) >= 2 and self.nameStack[0] == 'using' and self.nameStack[1] == 'namespace': pass # TODO
elif is_enum_namestack(self.nameStack):
debug_print( "trace" )
self.evaluate_enum_stack()
elif self._method_body and (self.braceDepth + 1) > self._method_body: trace_print( 'INSIDE METHOD DEF' )
elif is_method_namestack(self.stack) and not self.curStruct and '(' in self.nameStack:
debug_print( "trace" )
if self.braceDepth > 0:
if "{" in self.stack and self.stack[0] != '{' and self.stack[-1] == ';' and self.braceDepth == 1:
#Special case of a method defined outside a class that has a body
pass
else:
self.evaluate_method_stack()
else:
#Free function
self.evaluate_method_stack()
elif (len(self.nameStack) == 1 and len(self.nameStackHistory) > self.braceDepth
and (self.nameStackHistory[self.braceDepth][0][0:2] == ["typedef", "struct"] or
self.nameStackHistory[self.braceDepth][0][0:2] == ["typedef", "union"])):
# Look for the name of a typedef struct: typedef struct {...} StructName; or unions to get renamed
debug_print("found the naming of a union")
type_name_to_rename = self.nameStackHistory[self.braceDepth][1]
new_name = self.nameStack[0]
type_to_rename = self.classes[type_name_to_rename]
type_to_rename["name"] = self.nameStack[0]
#Now re install it in its new location
self.classes[new_name] = type_to_rename
del self.classes[type_name_to_rename]
elif is_property_namestack(self.nameStack) and self.stack[-1] == ';':
debug_print( "trace" )
if self.nameStack[0] in ('class', 'struct') and len(self.stack) == 3: self.evaluate_forward_decl()
elif len(self.nameStack) >= 2 and (self.nameStack[0]=='friend' and self.nameStack[1]=='class'): pass
else: self.evaluate_property_stack() # catches class props and structs in a namespace
elif self.nameStack[0] in ("class", "struct", "union") or self.nameStack[0] == 'typedef' and self.nameStack[1] in ('struct', 'union'):
#Parsing a union can reuse much of the class parsing
debug_print( "trace" )
self.evaluate_class_stack()
elif not self.curClass:
debug_print( "trace" )
if is_enum_namestack(self.nameStack): self.evaluate_enum_stack()
elif self.curStruct and self.stack[-1] == ';': self.evaluate_property_stack() # this catches fields of global structs
self.nameStack = []
doxygenCommentCache = ""
elif (self.braceDepth < 1):
debug_print( "trace" )
#Ignore global stuff for now
debug_print( "Global stuff: %s"%self.nameStack )
self.nameStack = []
doxygenCommentCache = ""
elif (self.braceDepth > len(self.nameSpaces) + 1):
debug_print( "trace" )
self.nameStack = []
doxygenCommentCache = ""
try:
self.nameStackHistory[self.braceDepth] = (nameStackCopy, self.curClass)
except:
self.nameStackHistory.append((nameStackCopy, self.curClass))
self.nameStack = [] # it's a little confusing to have some if/else above return and others not, and then clearing the nameStack down here
doxygenCommentCache = ""
self.curTemplate = None
def evaluate_enum_stack(self):
"""Create an Enum out of the name stack"""
debug_print( "evaluating enum" )
newEnum = CppEnum(self.nameStack)
if len(list(newEnum.keys())):
if len(self.curClass):
newEnum["namespace"] = self.cur_namespace(False)
klass = self.classes[self.curClass]
klass["enums"][self.curAccessSpecifier].append(newEnum)
if self.curAccessSpecifier == 'public' and 'name' in newEnum: klass._public_enums[ newEnum['name'] ] = newEnum
else:
newEnum["namespace"] = self.cur_namespace(True)
self.enums.append(newEnum)
if 'name' in newEnum and newEnum['name']: self.global_enums[ newEnum['name'] ] = newEnum
#This enum has instances, turn them into properties
if "instances" in newEnum:
instanceType = "enum"
if "name" in newEnum:
instanceType = newEnum["name"]
for instance in newEnum["instances"]:
self.nameStack = [instanceType, instance]
self.evaluate_property_stack()
del newEnum["instances"]
def strip_parent_keys(self):
"""Strip all parent keys to prevent loops"""
obj_queue = [self]
while len(obj_queue):
obj = obj_queue.pop()
trace_print("pop %s type %s"%(obj, type(obj)))
try:
if "parent" in obj.keys():
del obj["parent"]
trace_print("Stripped parent from %s"%obj.keys())
except: pass
# Figure out what sub types are one of ours
try:
if not hasattr(obj, 'keys'):
obj = obj.__dict__
for k in obj.keys():
trace_print("-Try key %s"%(k))
trace_print("-type %s"%(type(obj[k])))
if k in ["nameStackHistory", "parent", "_public_typedefs"]: continue
if type(obj[k]) == list:
for i in obj[k]:
trace_print("push l %s"%i)
obj_queue.append(i)
elif type(obj[k]) == dict:
if len(obj[k]):
trace_print("push d %s"%obj[k])
obj_queue.append(obj[k])
elif type(obj[k]) == type(type(0)):
if obj[k] == int:
obj[k] = "int"
elif obj[k] == str:
obj[k] = "string"
else:
obj[k] = "???"
trace_print("next key\n")
except:
trace_print("Exception")
def toJSON(self, indent=4):
"""Converts a parsed structure to JSON"""
import json
self.strip_parent_keys()
try:
del self.__dict__["classes_order"]
except: pass
return json.dumps(self.__dict__, indent=indent)
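# Example (illustrative; 'SampleClass.h' is a hypothetical header file):
#   json_str = CppHeader('SampleClass.h').toJSON(indent=2)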
def __repr__(self):
rtn = {
"classes": self.classes,
"functions": self.functions,
"enums": self.enums,
"variables": self.variables,
}
return repr(rtn)
def __str__(self):
rtn = ""
for className in list(self.classes.keys()):
rtn += "%s\n"%self.classes[className]
if self.functions:
rtn += "// functions\n"
for f in self.functions:
rtn += "%s\n"%f
if self.variables:
rtn += "// variables\n"
for f in self.variables:
rtn += "%s\n"%f
if self.enums:
rtn += "// enums\n"
for f in self.enums:
rtn += "%s\n"%f
return rtn
| apache-2.0 | -2,464,328,562,798,681,000 | 42.713687 | 210 | 0.502629 | false |
nschaetti/EchoTorch | echotorch/nn/ICACell.py | 1 | 2909 | # -*- coding: utf-8 -*-
#
# File : echotorch/nn/ICACell.py
# Description : An Independent Component Analysis (ICA) cell module.
# Date : 26th of January, 2018
#
# This file is part of EchoTorch. EchoTorch is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Nils Schaetti <[email protected]>
"""
Created on 26 January 2018
@author: Nils Schaetti
"""
# Imports
import torch.sparse
import torch
import torch.nn as nn
from torch.autograd import Variable
# Independent Component Analysis layer
class ICACell(nn.Module):
"""
Independent Component Analysis layer. It can be used to handle different batch-mode algorithms for ICA.
"""
# Constructor
def __init__(self, input_dim, output_dim):
"""
Constructor
:param input_dim: Inputs dimension.
:param output_dim: Output dimension
"""
super(ICACell, self).__init__()
self.input_dim = input_dim
self.output_dim = output_dim
# No bias column by default; forward() checks this flag before
# calling _add_constant()
self.with_bias = False
# end __init__
###############################################
# PROPERTIES
###############################################
###############################################
# PUBLIC
###############################################
# Reset learning
def reset(self):
"""
Reset learning
:return:
"""
# Training mode again
self.train(True)
# end reset
# Forward
def forward(self, x, y=None):
"""
Forward
:param x: Input signal.
:param y: Target outputs
:return: Output or hidden states
"""
# Batch size
batch_size = x.size()[0]
# Time length
time_length = x.size()[1]
# Add bias
if self.with_bias:
x = self._add_constant(x)
# end if
# The ICA transform itself is not implemented yet; return the
# (possibly bias-augmented) input unchanged so the module can act
# as a pass-through placeholder.
return x
# end forward
# Finish training
def finalize(self):
"""
Finalize training with LU factorization or Pseudo-inverse
"""
pass
# end finalize
###############################################
# PRIVATE
###############################################
# Add constant
def _add_constant(self, x):
"""
Add constant
:param x:
:return:
"""
bias = Variable(torch.ones((x.size()[0], x.size()[1], 1)), requires_grad=False)
return torch.cat((bias, x), dim=2)
# end _add_constant
# end ICACell
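# Example usage (illustrative; dimensions and sizes are arbitrary, and the
# cell currently only passes inputs through, optionally adding a bias column):
#   cell = ICACell(input_dim=10, output_dim=5)
#   out = cell(torch.randn(2, 100, 10))  # (batch, time, input_dim)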
| gpl-3.0 | 4,296,446,302,283,977,700 | 24.973214 | 104 | 0.546236 | false |
hehaichi/django-imagemanagement | imageserver/settings.py | 1 | 3326 | """
Django settings for imageserver project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'fvmacrow6pe#wtxg01(9_m01inqisms+255x%uvj0eftaft0xm'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'imagemanagement',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'imageserver.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'imageserver.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Max data upload size (10x Django's 2.5 MB default)
DATA_UPLOAD_MAX_MEMORY_SIZE = 2621440 * 10
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(BASE_DIR, 'imagemanagement/media')
MEDIA_URL = '/media/'
| mit | 2,671,746,408,004,011,500 | 25.396825 | 91 | 0.693025 | false |
openwisp/netjsonconfig | netjsonconfig/backends/openvpn/openvpn.py | 1 | 5835 | from ...schema import X509_FILE_MODE
from ..base.backend import BaseBackend
from . import converters
from .parser import OpenVpnParser, config_suffix, vpn_pattern
from .renderer import OpenVpnRenderer
from .schema import schema
class OpenVpn(BaseBackend):
"""
OpenVPN 2.x Configuration Backend
"""
schema = schema
converters = [converters.OpenVpn]
parser = OpenVpnParser
renderer = OpenVpnRenderer
list_identifiers = ['name']
def _generate_contents(self, tar):
"""
Adds configuration files to tarfile instance.
:param tar: tarfile instance
:returns: None
"""
text = self.render(files=False)
# create a list with all the VPN instances (and remove empty entries)
vpn_instances = vpn_pattern.split(text)
if '' in vpn_instances:
vpn_instances.remove('')
# create a file for each VPN
for vpn in vpn_instances:
lines = vpn.split('\n')
vpn_name = lines[0]
text_contents = '\n'.join(lines[2:])
# do not end with double new line
if text_contents.endswith('\n\n'):
text_contents = text_contents[0:-1]
self._add_file(
tar=tar,
name='{0}{1}'.format(vpn_name, config_suffix),
contents=text_contents,
)
@classmethod
def auto_client(
cls,
host,
server,
ca_path=None,
ca_contents=None,
cert_path=None,
cert_contents=None,
key_path=None,
key_contents=None,
):
"""
Returns a configuration dictionary representing an OpenVPN client configuration
that is compatible with the passed server configuration.
:param host: remote VPN server
:param server: dictionary representing a single OpenVPN server configuration
:param ca_path: optional string representing path to CA, will consequently add
a file in the resulting configuration dictionary
:param ca_contents: optional string representing contents of CA file
:param cert_path: optional string representing path to certificate, will consequently add
a file in the resulting configuration dictionary
:param cert_contents: optional string representing contents of cert file
:param key_path: optional string representing path to key, will consequently add
a file in the resulting configuration dictionary
:param key_contents: optional string representing contents of key file
:returns: dictionary representing a single OpenVPN client configuration
"""
# client defaults
client = {
"mode": "p2p",
"nobind": True,
"resolv_retry": "infinite",
"tls_client": True,
}
# remote
port = server.get('port') or 1195
client['remote'] = [{'host': host, 'port': port}]
# proto
if server.get('proto') == 'tcp-server':
client['proto'] = 'tcp-client'
else:
client['proto'] = 'udp'
# determine if pull must be True
if 'server' in server or 'server_bridge' in server:
client['pull'] = True
# tls_client
if 'tls_server' not in server or not server['tls_server']:
client['tls_client'] = False
# ns_cert_type
ns_cert_type = {None: '', '': '', 'client': 'server'}
client['ns_cert_type'] = ns_cert_type[server.get('ns_cert_type')]
# remote_cert_tls
remote_cert_tls = {None: '', '': '', 'client': 'server'}
client['remote_cert_tls'] = remote_cert_tls[server.get('remote_cert_tls')]
copy_keys = [
'name',
'dev_type',
'dev',
'comp_lzo',
'auth',
'cipher',
'ca',
'cert',
'key',
'pkcs12',
'mtu_test',
'fragment',
'mssfix',
'keepalive',
'persist_tun',
'mute',
'persist_key',
'script_security',
'user',
'group',
'log',
'mute_replay_warnings',
'secret',
'reneg_sec',
'tls_timeout',
'tls_cipher',
'float',
'fast_io',
'verb',
]
for key in copy_keys:
if key in server:
client[key] = server[key]
files = cls._auto_client_files(
client,
ca_path,
ca_contents,
cert_path,
cert_contents,
key_path,
key_contents,
)
return {'openvpn': [client], 'files': files}
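# Example call (illustrative values; the host and server dict are placeholders):
#   config = OpenVpn.auto_client('vpn1.example.com',
#                                server={'proto': 'udp', 'port': 1194})
#   # -> {'openvpn': [<client options>], 'files': []}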
@classmethod
def _auto_client_files(
cls,
client,
ca_path=None,
ca_contents=None,
cert_path=None,
cert_contents=None,
key_path=None,
key_contents=None,
):
"""
returns a list of NetJSON extra files for automatically generated clients
produces side effects in ``client`` dictionary
"""
files = []
if ca_path and ca_contents:
client['ca'] = ca_path
files.append(dict(path=ca_path, contents=ca_contents, mode=X509_FILE_MODE))
if cert_path and cert_contents:
client['cert'] = cert_path
files.append(
dict(path=cert_path, contents=cert_contents, mode=X509_FILE_MODE)
)
if key_path and key_contents:
client['key'] = key_path
files.append(
dict(path=key_path, contents=key_contents, mode=X509_FILE_MODE)
)
return files
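# Example of a generated file entry (illustrative path and contents):
#   {'path': '/etc/openvpn/ca.pem',
#    'contents': '-----BEGIN CERTIFICATE-----...',
#    'mode': X509_FILE_MODE}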
| gpl-3.0 | -5,404,192,686,488,379,000 | 32.153409 | 97 | 0.533676 | false |
beagles/neutron_hacking | neutron/plugins/vmware/plugins/service.py | 1 | 81209 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 VMware, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import netaddr
from oslo.config import cfg
from neutron.common import exceptions as n_exc
from neutron.db.firewall import firewall_db
from neutron.db import l3_db
from neutron.db.loadbalancer import loadbalancer_db
from neutron.db import routedserviceinsertion_db as rsi_db
from neutron.db.vpn import vpn_db
from neutron.extensions import firewall as fw_ext
from neutron.extensions import l3
from neutron.extensions import routedserviceinsertion as rsi
from neutron.openstack.common import excutils
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants as service_constants
from neutron.plugins.vmware.api_client import exception as api_exc
from neutron.plugins.vmware.common import config # noqa
from neutron.plugins.vmware.common import exceptions as nsx_exc
from neutron.plugins.vmware.common import utils
from neutron.plugins.vmware.dbexts import servicerouter as sr_db
from neutron.plugins.vmware.dbexts import vcns_db
from neutron.plugins.vmware.dbexts import vcns_models
from neutron.plugins.vmware.extensions import servicerouter as sr
from neutron.plugins.vmware.nsxlib import router as routerlib
from neutron.plugins.vmware.nsxlib import switch as switchlib
from neutron.plugins.vmware.plugins import base
from neutron.plugins.vmware.vshield.common import constants as vcns_const
from neutron.plugins.vmware.vshield.common.constants import RouterStatus
from neutron.plugins.vmware.vshield.common import exceptions
from neutron.plugins.vmware.vshield.tasks.constants import TaskState
from neutron.plugins.vmware.vshield.tasks.constants import TaskStatus
from neutron.plugins.vmware.vshield import vcns_driver
from sqlalchemy.orm import exc as sa_exc
LOG = logging.getLogger(__name__)
ROUTER_TYPE_BASIC = 1
ROUTER_TYPE_ADVANCED = 2
ROUTER_STATUS = [
service_constants.ACTIVE,
service_constants.DOWN,
service_constants.PENDING_CREATE,
service_constants.PENDING_DELETE,
service_constants.ERROR
]
ROUTER_STATUS_LEVEL = {
service_constants.ACTIVE: RouterStatus.ROUTER_STATUS_ACTIVE,
service_constants.DOWN: RouterStatus.ROUTER_STATUS_DOWN,
service_constants.PENDING_CREATE: (
RouterStatus.ROUTER_STATUS_PENDING_CREATE
),
service_constants.PENDING_DELETE: (
RouterStatus.ROUTER_STATUS_PENDING_DELETE
),
service_constants.ERROR: RouterStatus.ROUTER_STATUS_ERROR
}
class NsxAdvancedPlugin(sr_db.ServiceRouter_mixin,
base.NsxPluginV2,
rsi_db.RoutedServiceInsertionDbMixin,
firewall_db.Firewall_db_mixin,
loadbalancer_db.LoadBalancerPluginDb,
vpn_db.VPNPluginDb):
supported_extension_aliases = (
base.NsxPluginV2.supported_extension_aliases + [
"service-router",
"routed-service-insertion",
"fwaas",
"lbaas",
"vpnaas"
])
def __init__(self):
super(NsxAdvancedPlugin, self).__init__()
self._super_create_ext_gw_port = (
self._port_drivers['create'][l3_db.DEVICE_OWNER_ROUTER_GW])
self._super_delete_ext_gw_port = (
self._port_drivers['delete'][l3_db.DEVICE_OWNER_ROUTER_GW])
self._port_drivers['create'][l3_db.DEVICE_OWNER_ROUTER_GW] = (
self._vcns_create_ext_gw_port)
self._port_drivers['delete'][l3_db.DEVICE_OWNER_ROUTER_GW] = (
self._vcns_delete_ext_gw_port)
# cache router type based on router id
self._router_type = {}
self.callbacks = [VcnsCallbacks(self)]
# load the vCNS driver
self._load_vcns_drivers()
# switchlib's create_lswitch needs to be replaced in order to proxy
# logical switch create requests to vcns
self._set_create_lswitch_proxy()
def _set_create_lswitch_proxy(self):
base.switchlib.create_lswitch = self._proxy_create_lswitch
def _proxy_create_lswitch(self, *args, **kwargs):
name, tz_config, tags = (
_process_base_create_lswitch_args(*args, **kwargs)
)
return self.vcns_driver.create_lswitch(
name, tz_config, tags=tags,
port_isolation=None, replication_mode=None)
def _load_vcns_drivers(self):
self.vcns_driver = vcns_driver.VcnsDriver(self.callbacks)
def _set_router_type(self, router_id, router_type):
self._router_type[router_id] = router_type
def _get_router_type(self, context=None, router_id=None, router=None):
if not router:
if router_id in self._router_type:
return self._router_type[router_id]
router = self._get_router(context, router_id)
LOG.debug(_("EDGE: router = %s"), router)
if router['nsx_attributes']['service_router']:
router_type = ROUTER_TYPE_ADVANCED
else:
router_type = ROUTER_TYPE_BASIC
self._set_router_type(router['id'], router_type)
return router_type
def _find_router_type(self, router):
is_service_router = router.get(sr.SERVICE_ROUTER, False)
if is_service_router:
return ROUTER_TYPE_ADVANCED
else:
return ROUTER_TYPE_BASIC
def _is_advanced_service_router(self, context=None, router_id=None,
router=None):
if router:
router_type = self._get_router_type(router=router)
else:
router_type = self._get_router_type(context, router_id)
return (router_type == ROUTER_TYPE_ADVANCED)
def _vcns_create_ext_gw_port(self, context, port_data):
router_id = port_data['device_id']
if not self._is_advanced_service_router(context, router_id):
self._super_create_ext_gw_port(context, port_data)
return
# NOP for Edge because currently the port will be create internally
# by VSM
LOG.debug(_("EDGE: _vcns_create_ext_gw_port"))
def _vcns_delete_ext_gw_port(self, context, port_data):
router_id = port_data['device_id']
if not self._is_advanced_service_router(context, router_id):
self._super_delete_ext_gw_port(context, port_data)
return
# NOP for Edge
LOG.debug(_("EDGE: _vcns_delete_ext_gw_port"))
def _get_external_attachment_info(self, context, router):
gw_port = router.gw_port
ipaddress = None
netmask = None
nexthop = None
if gw_port:
# gw_port may have multiple IPs, only configure the first one
if gw_port.get('fixed_ips'):
ipaddress = gw_port['fixed_ips'][0]['ip_address']
network_id = gw_port.get('network_id')
if network_id:
ext_net = self._get_network(context, network_id)
if not ext_net.external:
msg = (_("Network '%s' is not a valid external "
"network") % network_id)
raise n_exc.BadRequest(resource='router', msg=msg)
if ext_net.subnets:
ext_subnet = ext_net.subnets[0]
netmask = str(netaddr.IPNetwork(ext_subnet.cidr).netmask)
nexthop = ext_subnet.gateway_ip
return (ipaddress, netmask, nexthop)
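# Example return value (illustrative addresses):
#   ('172.24.4.2', '255.255.255.0', '172.24.4.1')
# or (None, None, None) when the router has no gateway port.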
def _get_external_gateway_address(self, context, router):
ipaddress, netmask, nexthop = self._get_external_attachment_info(
context, router)
return nexthop
def _vcns_update_static_routes(self, context, **kwargs):
router = kwargs.get('router')
if router is None:
router = self._get_router(context, kwargs['router_id'])
edge_id = kwargs.get('edge_id')
if edge_id is None:
binding = vcns_db.get_vcns_router_binding(context.session,
router['id'])
edge_id = binding['edge_id']
skippable = True
if 'nexthop' in kwargs:
nexthop = kwargs['nexthop']
# The default gateway and vnic config have dependencies; if we
# explicitly specify a nexthop to change, tell the driver not to
# skip this route update
skippable = False
else:
nexthop = self._get_external_gateway_address(context,
router)
if 'subnets' in kwargs:
subnets = kwargs['subnets']
else:
subnets = self._find_router_subnets_cidrs(context.elevated(),
router['id'])
routes = []
for subnet in subnets:
routes.append({
'cidr': subnet,
'nexthop': vcns_const.INTEGRATION_LR_IPADDRESS.split('/')[0]
})
self.vcns_driver.update_routes(router['id'], edge_id, nexthop, routes,
skippable)
def _get_nat_rules(self, context, router):
fip_qry = context.session.query(l3_db.FloatingIP)
fip_db = fip_qry.filter_by(router_id=router['id']).all()
dnat = []
snat = []
for fip in fip_db:
if fip.fixed_port_id:
dnat.append({
'dst': fip.floating_ip_address,
'translated': fip.fixed_ip_address
})
gw_port = router.gw_port
if gw_port and router.enable_snat:
if gw_port.get('fixed_ips'):
snat_ip = gw_port['fixed_ips'][0]['ip_address']
subnets = self._find_router_subnets_cidrs(context.elevated(),
router['id'])
for subnet in subnets:
snat.append({
'src': subnet,
'translated': snat_ip
})
return (snat, dnat)
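# Example of the returned rule lists (illustrative addresses):
#   snat = [{'src': '10.0.0.0/24', 'translated': '172.24.4.2'}]
#   dnat = [{'dst': '172.24.4.3', 'translated': '10.0.0.5'}]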
def _update_nat_rules(self, context, router):
snat, dnat = self._get_nat_rules(context, router)
binding = vcns_db.get_vcns_router_binding(context.session,
router['id'])
self.vcns_driver.update_nat_rules(router['id'],
binding['edge_id'],
snat, dnat)
def _update_interface(self, context, router, sync=False):
addr, mask, nexthop = self._get_external_attachment_info(
context, router)
secondary = []
fip_qry = context.session.query(l3_db.FloatingIP)
fip_db = fip_qry.filter_by(router_id=router['id']).all()
for fip in fip_db:
if fip.fixed_port_id:
secondary.append(fip.floating_ip_address)
# Add all VIP addresses bound on the router
vip_addrs = self._get_all_vip_addrs_by_router_id(context,
router['id'])
secondary.extend(vip_addrs)
binding = vcns_db.get_vcns_router_binding(context.session,
router['id'])
task = self.vcns_driver.update_interface(
router['id'], binding['edge_id'],
vcns_const.EXTERNAL_VNIC_INDEX,
self.vcns_driver.external_network,
addr, mask, secondary=secondary)
if sync:
task.wait(TaskState.RESULT)
def _update_router_gw_info(self, context, router_id, info):
if not self._is_advanced_service_router(context, router_id):
super(NsxAdvancedPlugin, self)._update_router_gw_info(
context, router_id, info)
return
# get original gw_port config
router = self._get_router(context, router_id)
org_ext_net_id = router.gw_port_id and router.gw_port.network_id
org_enable_snat = router.enable_snat
orgaddr, orgmask, orgnexthop = self._get_external_attachment_info(
context, router)
super(base.NsxPluginV2, self)._update_router_gw_info(
context, router_id, info, router=router)
new_ext_net_id = router.gw_port_id and router.gw_port.network_id
new_enable_snat = router.enable_snat
newaddr, newmask, newnexthop = self._get_external_attachment_info(
context, router)
binding = vcns_db.get_vcns_router_binding(context.session, router_id)
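        # reconfigure the edge in order: clear the old default gateway if
        # the external network changed, update the vnic address, then set
        # the new default gateway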
if new_ext_net_id != org_ext_net_id and orgnexthop:
# network changed, need to remove default gateway before vnic
# can be configured
LOG.debug(_("VCNS: delete default gateway %s"), orgnexthop)
self._vcns_update_static_routes(context,
router=router,
edge_id=binding['edge_id'],
nexthop=None)
if orgaddr != newaddr or orgmask != newmask:
self.vcns_driver.update_interface(
router_id, binding['edge_id'],
vcns_const.EXTERNAL_VNIC_INDEX,
self.vcns_driver.external_network,
newaddr, newmask)
if orgnexthop != newnexthop:
self._vcns_update_static_routes(context,
router=router,
edge_id=binding['edge_id'],
nexthop=newnexthop)
if (new_ext_net_id == org_ext_net_id and
org_enable_snat == new_enable_snat):
return
self._update_nat_rules(context, router)
def _add_subnet_snat_rule(self, context, router, subnet):
# NOP for service router
if not self._is_advanced_service_router(router=router):
super(NsxAdvancedPlugin, self)._add_subnet_snat_rule(
context, router, subnet)
def _delete_subnet_snat_rule(self, context, router, subnet):
# NOP for service router
if not self._is_advanced_service_router(router=router):
super(NsxAdvancedPlugin, self)._delete_subnet_snat_rule(
context, router, subnet)
def _remove_floatingip_address(self, context, fip_db):
# NOP for service router
router_id = fip_db.router_id
if not self._is_advanced_service_router(context, router_id):
super(NsxAdvancedPlugin, self)._remove_floatingip_address(
context, fip_db)
def _create_advanced_service_router(self, context, neutron_router_id,
name, lrouter, lswitch):
# store binding
binding = vcns_db.add_vcns_router_binding(
context.session, neutron_router_id, None, lswitch['uuid'],
service_constants.PENDING_CREATE)
# deploy edge
jobdata = {
'neutron_router_id': neutron_router_id,
'lrouter': lrouter,
'lswitch': lswitch,
'context': context
}
        # deploy and wait until the deploy request has been issued
# so we will have edge_id ready. The wait here should be fine
# as we're not in a database transaction now
self.vcns_driver.deploy_edge(
lrouter['uuid'], name, lswitch['uuid'], jobdata=jobdata,
wait_for_exec=True)
return binding
def _create_integration_lswitch(self, tenant_id, name):
        # use default transport zone
transport_zone_config = [{
"zone_uuid": self.cluster.default_tz_uuid,
"transport_type": cfg.CONF.NSX.default_transport_type
}]
return self.vcns_driver.create_lswitch(name, transport_zone_config)
def _add_router_integration_interface(self, tenant_id, name,
lrouter, lswitch):
# create logic switch port
try:
ls_port = switchlib.create_lport(
self.cluster, lswitch['uuid'], tenant_id,
'', '', lrouter['uuid'], True)
except api_exc.NsxApiException:
msg = (_("An exception occurred while creating a port "
"on lswitch %s") % lswitch['uuid'])
LOG.exception(msg)
raise n_exc.NeutronException(message=msg)
# create logic router port
try:
neutron_port_id = ''
pname = name[:36] + '-lp'
admin_status_enabled = True
lr_port = routerlib.create_router_lport(
self.cluster, lrouter['uuid'], tenant_id,
neutron_port_id, pname, admin_status_enabled,
[vcns_const.INTEGRATION_LR_IPADDRESS])
except api_exc.NsxApiException:
msg = (_("Unable to create port on NSX logical router %s") % name)
LOG.exception(msg)
switchlib.delete_port(
self.cluster, lswitch['uuid'], ls_port['uuid'])
raise n_exc.NeutronException(message=msg)
# attach logic router port to switch port
try:
self._update_router_port_attachment(
self.cluster, None, lrouter['uuid'], {}, lr_port['uuid'],
'PatchAttachment', ls_port['uuid'], None)
except api_exc.NsxApiException as e:
# lr_port should have been deleted
switchlib.delete_port(
self.cluster, lswitch['uuid'], ls_port['uuid'])
raise e
def _create_lrouter(self, context, router, nexthop):
lrouter = super(NsxAdvancedPlugin, self)._create_lrouter(
context, router, vcns_const.INTEGRATION_EDGE_IPADDRESS)
router_type = self._find_router_type(router)
self._set_router_type(lrouter['uuid'], router_type)
if router_type == ROUTER_TYPE_BASIC:
return lrouter
tenant_id = self._get_tenant_id_for_create(context, router)
name = router['name']
try:
lsname = name[:36] + '-ls'
lswitch = self._create_integration_lswitch(
tenant_id, lsname)
except Exception:
msg = _("Unable to create integration logic switch "
"for router %s") % name
LOG.exception(msg)
routerlib.delete_lrouter(self.cluster, lrouter['uuid'])
raise n_exc.NeutronException(message=msg)
try:
self._add_router_integration_interface(tenant_id, name,
lrouter, lswitch)
except Exception:
msg = _("Unable to add router interface to integration lswitch "
"for router %s") % name
LOG.exception(msg)
routerlib.delete_lrouter(self.cluster, lrouter['uuid'])
raise n_exc.NeutronException(message=msg)
try:
self._create_advanced_service_router(
context, router['id'], name, lrouter, lswitch)
except Exception:
msg = (_("Unable to create advance service router for %s") % name)
LOG.exception(msg)
            self.vcns_driver.delete_lswitch(lswitch['uuid'])
routerlib.delete_lrouter(self.cluster, lrouter['uuid'])
raise n_exc.NeutronException(message=msg)
lrouter['status'] = service_constants.PENDING_CREATE
return lrouter
def _delete_lrouter(self, context, router_id, nsx_router_id):
binding = vcns_db.get_vcns_router_binding(context.session, router_id)
if not binding:
super(NsxAdvancedPlugin, self)._delete_lrouter(
context, router_id, nsx_router_id)
else:
vcns_db.update_vcns_router_binding(
context.session, router_id,
status=service_constants.PENDING_DELETE)
lswitch_id = binding['lswitch_id']
edge_id = binding['edge_id']
# delete lswitch
try:
self.vcns_driver.delete_lswitch(lswitch_id)
except exceptions.ResourceNotFound:
LOG.warning(_("Did not found lswitch %s in NSX"), lswitch_id)
# delete edge
jobdata = {
'context': context
}
self.vcns_driver.delete_edge(router_id, edge_id, jobdata=jobdata)
# delete NSX logical router
routerlib.delete_lrouter(self.cluster, nsx_router_id)
            if router_id in self._router_type:
del self._router_type[router_id]
def _update_lrouter(self, context, router_id, name, nexthop, routes=None):
if not self._is_advanced_service_router(context, router_id):
return super(NsxAdvancedPlugin, self)._update_lrouter(
context, router_id, name, nexthop, routes=routes)
previous_routes = super(NsxAdvancedPlugin, self)._update_lrouter(
context, router_id, name,
vcns_const.INTEGRATION_EDGE_IPADDRESS, routes=routes)
        # TODO(fank): Theoretically users can specify extra routes for the
        # physical network, and those routes need to be configured on the
        # Edge. This can be done by checking if the nexthop is in the
        # external network. But for now we only handle routes for the
        # logical space and leave the rest for future enhancement.
# Let _update_router_gw_info handle nexthop change
#self._vcns_update_static_routes(context, router_id=router_id)
return previous_routes
def _retrieve_and_delete_nat_rules(self, context, floating_ip_address,
internal_ip, router_id,
min_num_rules_expected=0):
# NOP for advanced service router
if not self._is_advanced_service_router(context, router_id):
super(NsxAdvancedPlugin, self)._retrieve_and_delete_nat_rules(
context, floating_ip_address, internal_ip, router_id,
min_num_rules_expected=min_num_rules_expected)
def _update_fip_assoc(self, context, fip, floatingip_db, external_port):
# Update DB model only for advanced service router
router_id = self._get_fip_assoc_data(context, fip, floatingip_db)[2]
if (router_id and
not self._is_advanced_service_router(context, router_id)):
super(NsxAdvancedPlugin, self)._update_fip_assoc(
context, fip, floatingip_db, external_port)
else:
super(base.NsxPluginV2, self)._update_fip_assoc(
context, fip, floatingip_db, external_port)
def _get_nsx_lrouter_status(self, id):
try:
lrouter = routerlib.get_lrouter(self.cluster, id)
lr_status = lrouter["_relations"]["LogicalRouterStatus"]
if lr_status["fabric_status"]:
nsx_status = RouterStatus.ROUTER_STATUS_ACTIVE
else:
nsx_status = RouterStatus.ROUTER_STATUS_DOWN
except n_exc.NotFound:
nsx_status = RouterStatus.ROUTER_STATUS_ERROR
return nsx_status
def _get_vse_status(self, context, id):
binding = vcns_db.get_vcns_router_binding(context.session, id)
edge_status_level = self.vcns_driver.get_edge_status(
binding['edge_id'])
edge_db_status_level = ROUTER_STATUS_LEVEL[binding.status]
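        # report the more severe of the live edge status and the status
        # recorded in the DB binding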
if edge_status_level > edge_db_status_level:
return edge_status_level
else:
return edge_db_status_level
def _get_all_nsx_lrouters_statuses(self, tenant_id, fields):
# get nsx lrouters status
nsx_lrouters = routerlib.get_lrouters(self.cluster,
tenant_id,
fields)
nsx_status = {}
for nsx_lrouter in nsx_lrouters:
if (nsx_lrouter["_relations"]["LogicalRouterStatus"]
["fabric_status"]):
nsx_status[nsx_lrouter['uuid']] = (
RouterStatus.ROUTER_STATUS_ACTIVE
)
else:
nsx_status[nsx_lrouter['uuid']] = (
RouterStatus.ROUTER_STATUS_DOWN
)
return nsx_status
def _get_all_vse_statuses(self, context):
bindings = self._model_query(
context, vcns_models.VcnsRouterBinding)
vse_db_status_level = {}
edge_id_to_router_id = {}
router_ids = []
for binding in bindings:
if not binding['edge_id']:
continue
router_id = binding['router_id']
router_ids.append(router_id)
edge_id_to_router_id[binding['edge_id']] = router_id
vse_db_status_level[router_id] = (
ROUTER_STATUS_LEVEL[binding['status']])
if not vse_db_status_level:
# no advanced service router, no need to query
return {}
vse_status_level = {}
edges_status_level = self.vcns_driver.get_edges_statuses()
for edge_id, status_level in edges_status_level.iteritems():
if edge_id in edge_id_to_router_id:
router_id = edge_id_to_router_id[edge_id]
db_status_level = vse_db_status_level[router_id]
if status_level > db_status_level:
vse_status_level[router_id] = status_level
else:
vse_status_level[router_id] = db_status_level
return vse_status_level
def get_router(self, context, id, fields=None):
if fields and 'status' not in fields:
return super(NsxAdvancedPlugin, self).get_router(
context, id, fields=fields)
router = super(NsxAdvancedPlugin, self).get_router(context, id)
router_type = self._find_router_type(router)
if router_type == ROUTER_TYPE_ADVANCED:
vse_status_level = self._get_vse_status(context, id)
if vse_status_level > ROUTER_STATUS_LEVEL[router['status']]:
router['status'] = ROUTER_STATUS[vse_status_level]
return self._fields(router, fields)
def get_routers(self, context, filters=None, fields=None, **kwargs):
routers = super(NsxAdvancedPlugin, self).get_routers(
context, filters=filters, **kwargs)
if fields and 'status' not in fields:
# no status checking, just return regular get_routers
return [self._fields(router, fields) for router in routers]
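        # for/else: the else clause below runs only when the loop finds no
        # advanced service router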
for router in routers:
router_type = self._find_router_type(router)
if router_type == ROUTER_TYPE_ADVANCED:
break
else:
# no advanced service router, return here
return [self._fields(router, fields) for router in routers]
vse_status_all = self._get_all_vse_statuses(context)
for router in routers:
router_type = self._find_router_type(router)
if router_type == ROUTER_TYPE_ADVANCED:
vse_status_level = vse_status_all.get(router['id'])
if vse_status_level is None:
vse_status_level = RouterStatus.ROUTER_STATUS_ERROR
if vse_status_level > ROUTER_STATUS_LEVEL[router['status']]:
router['status'] = ROUTER_STATUS[vse_status_level]
return [self._fields(router, fields) for router in routers]
def add_router_interface(self, context, router_id, interface_info):
info = super(NsxAdvancedPlugin, self).add_router_interface(
context, router_id, interface_info)
if self._is_advanced_service_router(context, router_id):
router = self._get_router(context, router_id)
if router.enable_snat:
self._update_nat_rules(context, router)
# TODO(fank): do rollback on error, or have a dedicated thread
# do sync work (rollback, re-configure, or make router down)
self._vcns_update_static_routes(context, router=router)
return info
def remove_router_interface(self, context, router_id, interface_info):
info = super(NsxAdvancedPlugin, self).remove_router_interface(
context, router_id, interface_info)
if self._is_advanced_service_router(context, router_id):
router = self._get_router(context, router_id)
if router.enable_snat:
self._update_nat_rules(context, router)
# TODO(fank): do rollback on error, or have a dedicated thread
# do sync work (rollback, re-configure, or make router down)
self._vcns_update_static_routes(context, router=router)
return info
def create_floatingip(self, context, floatingip):
fip = super(NsxAdvancedPlugin, self).create_floatingip(
context, floatingip)
router_id = fip.get('router_id')
if router_id and self._is_advanced_service_router(context, router_id):
router = self._get_router(context, router_id)
# TODO(fank): do rollback on error, or have a dedicated thread
# do sync work (rollback, re-configure, or make router down)
self._update_nat_rules(context, router)
self._update_interface(context, router)
return fip
def update_floatingip(self, context, id, floatingip):
fip = super(NsxAdvancedPlugin, self).update_floatingip(
context, id, floatingip)
router_id = fip.get('router_id')
if router_id and self._is_advanced_service_router(context, router_id):
router = self._get_router(context, router_id)
# TODO(fank): do rollback on error, or have a dedicated thread
# do sync work (rollback, re-configure, or make router down)
self._update_nat_rules(context, router)
self._update_interface(context, router)
return fip
def delete_floatingip(self, context, id):
fip_db = self._get_floatingip(context, id)
router_id = None
if fip_db.fixed_port_id:
router_id = fip_db.router_id
super(NsxAdvancedPlugin, self).delete_floatingip(context, id)
if router_id and self._is_advanced_service_router(context, router_id):
router = self._get_router(context, router_id)
# TODO(fank): do rollback on error, or have a dedicated thread
# do sync work (rollback, re-configure, or make router down)
self._update_interface(context, router)
self._update_nat_rules(context, router)
def disassociate_floatingips(self, context, port_id):
try:
fip_qry = context.session.query(l3_db.FloatingIP)
fip_db = fip_qry.filter_by(fixed_port_id=port_id).one()
router_id = fip_db.router_id
except sa_exc.NoResultFound:
router_id = None
super(NsxAdvancedPlugin, self).disassociate_floatingips(context,
port_id)
if router_id and self._is_advanced_service_router(context, router_id):
router = self._get_router(context, router_id)
# TODO(fank): do rollback on error, or have a dedicated thread
# do sync work (rollback, re-configure, or make router down)
self._update_interface(context, router)
self._update_nat_rules(context, router)
#
# FWaaS plugin implementation
#
def _firewall_set_status(
self, context, firewall_id, status, firewall=None):
with context.session.begin(subtransactions=True):
fw_db = self._get_firewall(context, firewall_id)
if status == service_constants.PENDING_UPDATE and (
fw_db.status == service_constants.PENDING_DELETE):
raise fw_ext.FirewallInPendingState(
firewall_id=firewall_id, pending_state=status)
else:
fw_db.status = status
if firewall:
firewall['status'] = status
def _ensure_firewall_update_allowed(self, context, firewall_id):
fwall = self.get_firewall(context, firewall_id)
if fwall['status'] in [service_constants.PENDING_CREATE,
service_constants.PENDING_UPDATE,
service_constants.PENDING_DELETE]:
raise fw_ext.FirewallInPendingState(firewall_id=firewall_id,
pending_state=fwall['status'])
def _ensure_firewall_policy_update_allowed(
self, context, firewall_policy_id):
firewall_policy = self.get_firewall_policy(context, firewall_policy_id)
for firewall_id in firewall_policy.get('firewall_list', []):
self._ensure_firewall_update_allowed(context, firewall_id)
def _ensure_update_or_delete_firewall_rule(
self, context, firewall_rule_id):
fw_rule = self.get_firewall_rule(context, firewall_rule_id)
if fw_rule.get('firewall_policy_id'):
self._ensure_firewall_policy_update_allowed(
context, fw_rule['firewall_policy_id'])
def _make_firewall_rule_list_by_policy_id(self, context, fw_policy_id):
if not fw_policy_id:
return []
firewall_policy_db = self._get_firewall_policy(context, fw_policy_id)
return [
self._make_firewall_rule_dict(fw_rule_db)
for fw_rule_db in firewall_policy_db['firewall_rules']
]
def _get_edge_id_by_vcns_edge_binding(self, context,
router_id):
#Get vcns_router_binding mapping between router and edge
router_binding = vcns_db.get_vcns_router_binding(
context.session, router_id)
return router_binding.edge_id
def _get_firewall_list_from_firewall_policy(self, context, policy_id):
firewall_policy_db = self._get_firewall_policy(context, policy_id)
return [
self._make_firewall_dict(fw_db)
for fw_db in firewall_policy_db['firewalls']
]
def _get_firewall_list_from_firewall_rule(self, context, rule_id):
rule = self._get_firewall_rule(context, rule_id)
if not rule.firewall_policy_id:
# The firewall rule is not associated with firewall policy yet
return None
return self._get_firewall_list_from_firewall_policy(
context, rule.firewall_policy_id)
def _vcns_update_firewall(self, context, fw, router_id=None, **kwargs):
edge_id = kwargs.get('edge_id')
if not edge_id:
edge_id = self._get_edge_id_by_vcns_edge_binding(
context, router_id)
firewall_rule_list = kwargs.get('firewall_rule_list')
if not firewall_rule_list:
firewall_rule_list = self._make_firewall_rule_list_by_policy_id(
context, fw['firewall_policy_id'])
fw_with_rules = fw
fw_with_rules['firewall_rule_list'] = firewall_rule_list
try:
self.vcns_driver.update_firewall(context, edge_id, fw_with_rules)
except exceptions.VcnsApiException as e:
self._firewall_set_status(
context, fw['id'], service_constants.ERROR)
msg = (_("Failed to create firewall on vShield Edge "
"bound on router %s") % router_id)
LOG.exception(msg)
raise e
except exceptions.VcnsBadRequest as e:
self._firewall_set_status(
context, fw['id'], service_constants.ERROR)
LOG.exception(_("Bad Firewall request Input"))
raise e
def _vcns_delete_firewall(self, context, router_id=None, **kwargs):
edge_id = kwargs.get('edge_id')
if not edge_id:
edge_id = self._get_edge_id_by_vcns_edge_binding(
context, router_id)
#TODO(linb):do rollback on error
self.vcns_driver.delete_firewall(context, edge_id)
def create_firewall(self, context, firewall):
LOG.debug(_("create_firewall() called"))
router_id = firewall['firewall'].get(vcns_const.ROUTER_ID)
if not router_id:
msg = _("router_id is not provided!")
LOG.error(msg)
raise n_exc.BadRequest(resource='router', msg=msg)
if not self._is_advanced_service_router(context, router_id):
msg = _("router_id:%s is not an advanced router!") % router_id
LOG.error(msg)
raise n_exc.BadRequest(resource='router', msg=msg)
if self._get_resource_router_id_binding(
context, firewall_db.Firewall, router_id=router_id):
msg = _("A firewall is already associated with the router")
LOG.error(msg)
raise nsx_exc.ServiceOverQuota(
overs='firewall', err_msg=msg)
fw = super(NsxAdvancedPlugin, self).create_firewall(context, firewall)
#Add router service insertion binding with firewall object
res = {
'id': fw['id'],
'router_id': router_id
}
self._process_create_resource_router_id(
context, res, firewall_db.Firewall)
        # Since there is only one firewall per edge, the firewall is
        # configured here with a bulk operation
self._vcns_update_firewall(context, fw, router_id)
self._firewall_set_status(
context, fw['id'], service_constants.ACTIVE, fw)
fw[rsi.ROUTER_ID] = router_id
return fw
def update_firewall(self, context, id, firewall):
LOG.debug(_("update_firewall() called"))
self._ensure_firewall_update_allowed(context, id)
service_router_binding = self._get_resource_router_id_binding(
context, firewall_db.Firewall, resource_id=id)
rule_list_pre = self._make_firewall_rule_list_by_policy_id(
context,
self.get_firewall(context, id)['firewall_policy_id'])
firewall['firewall']['status'] = service_constants.PENDING_UPDATE
fw = super(NsxAdvancedPlugin, self).update_firewall(
context, id, firewall)
fw[rsi.ROUTER_ID] = service_router_binding['router_id']
rule_list_new = self._make_firewall_rule_list_by_policy_id(
context, fw['firewall_policy_id'])
if rule_list_pre == rule_list_new:
self._firewall_set_status(
context, fw['id'], service_constants.ACTIVE, fw)
return fw
else:
self._vcns_update_firewall(
context, fw, service_router_binding.router_id,
firewall_rule_list=rule_list_new)
self._firewall_set_status(
context, fw['id'], service_constants.ACTIVE, fw)
return fw
def delete_firewall(self, context, id):
LOG.debug(_("delete_firewall() called"))
self._firewall_set_status(
context, id, service_constants.PENDING_DELETE)
service_router_binding = self._get_resource_router_id_binding(
context, firewall_db.Firewall, resource_id=id)
self._vcns_delete_firewall(context, service_router_binding.router_id)
super(NsxAdvancedPlugin, self).delete_firewall(context, id)
self._delete_resource_router_id_binding(
context, id, firewall_db.Firewall)
def get_firewall(self, context, id, fields=None):
fw = super(NsxAdvancedPlugin, self).get_firewall(
context, id, fields)
if fields and rsi.ROUTER_ID not in fields:
return fw
service_router_binding = self._get_resource_router_id_binding(
context, firewall_db.Firewall, resource_id=fw['id'])
fw[rsi.ROUTER_ID] = service_router_binding['router_id']
return fw
def get_firewalls(self, context, filters=None, fields=None):
fws = super(NsxAdvancedPlugin, self).get_firewalls(
context, filters, fields)
if fields and rsi.ROUTER_ID not in fields:
return fws
service_router_bindings = self._get_resource_router_id_bindings(
context, firewall_db.Firewall,
resource_ids=[fw['id'] for fw in fws])
mapping = dict([(binding['resource_id'], binding['router_id'])
for binding in service_router_bindings])
for fw in fws:
fw[rsi.ROUTER_ID] = mapping[fw['id']]
return fws
def update_firewall_rule(self, context, id, firewall_rule):
LOG.debug(_("update_firewall_rule() called"))
self._ensure_update_or_delete_firewall_rule(context, id)
fwr_pre = self.get_firewall_rule(context, id)
fwr = super(NsxAdvancedPlugin, self).update_firewall_rule(
context, id, firewall_rule)
if fwr_pre == fwr:
return fwr
# check if this rule is associated with firewall
fw_list = self._get_firewall_list_from_firewall_rule(context, id)
if not fw_list:
return fwr
for fw in fw_list:
# get router service insertion binding with firewall id
service_router_binding = self._get_resource_router_id_binding(
context, firewall_db.Firewall, resource_id=fw['id'])
edge_id = self._get_edge_id_by_vcns_edge_binding(
context, service_router_binding.router_id)
#TODO(linb): do rollback on error
self.vcns_driver.update_firewall_rule(context, id, edge_id, fwr)
return fwr
def update_firewall_policy(self, context, id, firewall_policy):
LOG.debug(_("update_firewall_policy() called"))
self._ensure_firewall_policy_update_allowed(context, id)
firewall_rules_pre = self._make_firewall_rule_list_by_policy_id(
context, id)
fwp = super(NsxAdvancedPlugin, self).update_firewall_policy(
context, id, firewall_policy)
firewall_rules = self._make_firewall_rule_list_by_policy_id(
context, id)
if firewall_rules_pre == firewall_rules:
return fwp
# check if this policy is associated with firewall
fw_list = self._get_firewall_list_from_firewall_policy(context, id)
if not fw_list:
return fwp
for fw in fw_list:
# Get the router_service insertion binding with firewall id
# TODO(fank): optimized by using _get_resource_router_id_bindings
service_router_binding = self._get_resource_router_id_binding(
context, firewall_db.Firewall, resource_id=fw['id'])
self._vcns_update_firewall(
context, fw, service_router_binding.router_id,
firewall_rule_list=firewall_rules)
return fwp
def insert_rule(self, context, id, rule_info):
LOG.debug(_("insert_rule() called"))
self._ensure_firewall_policy_update_allowed(context, id)
fwp = super(NsxAdvancedPlugin, self).insert_rule(
context, id, rule_info)
fwr = super(NsxAdvancedPlugin, self).get_firewall_rule(
context, rule_info['firewall_rule_id'])
# check if this policy is associated with firewall
fw_list = self._get_firewall_list_from_firewall_policy(context, id)
if not fw_list:
return fwp
for fw in fw_list:
# TODO(fank): optimized by using _get_resource_router_id_bindings
service_router_binding = self._get_resource_router_id_binding(
context, firewall_db.Firewall, resource_id=fw['id'])
edge_id = self._get_edge_id_by_vcns_edge_binding(
context, service_router_binding.router_id)
if rule_info.get('insert_before') or rule_info.get('insert_after'):
                # if insert_before or insert_after is set, call the
                # VCNS insert_rule API
                # TODO(linb): do rollback on error
self.vcns_driver.insert_rule(context, rule_info, edge_id, fwr)
else:
                # otherwise apply bulk configuration on the firewall
self._vcns_update_firewall(context, fw, edge_id=edge_id)
return fwp
def remove_rule(self, context, id, rule_info):
LOG.debug(_("remove_rule() called"))
self._ensure_firewall_policy_update_allowed(context, id)
fwp = super(NsxAdvancedPlugin, self).remove_rule(
context, id, rule_info)
fwr = super(NsxAdvancedPlugin, self).get_firewall_rule(
context, rule_info['firewall_rule_id'])
# check if this policy is associated with firewall
fw_list = self._get_firewall_list_from_firewall_policy(context, id)
if not fw_list:
return fwp
for fw in fw_list:
# TODO(fank): optimized by using _get_resource_router_id_bindings
service_router_binding = self._get_resource_router_id_binding(
context, firewall_db.Firewall, resource_id=fw['id'])
edge_id = self._get_edge_id_by_vcns_edge_binding(
context, service_router_binding.router_id)
#TODO(linb): do rollback on error
self.vcns_driver.delete_firewall_rule(
context, fwr['id'], edge_id)
return fwp
#
# LBAAS service plugin implementation
#
def _get_edge_id_by_vip_id(self, context, vip_id):
try:
service_router_binding = self._get_resource_router_id_binding(
context, loadbalancer_db.Vip, resource_id=vip_id)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to find the edge with "
"vip_id: %s"), vip_id)
return self._get_edge_id_by_vcns_edge_binding(
context, service_router_binding.router_id)
def _get_all_vip_addrs_by_router_id(
self, context, router_id):
vip_bindings = self._get_resource_router_id_bindings(
context, loadbalancer_db.Vip, router_ids=[router_id])
vip_addrs = []
for vip_binding in vip_bindings:
vip = self.get_vip(context, vip_binding.resource_id)
vip_addrs.append(vip.get('address'))
return vip_addrs
def _add_router_service_insertion_binding(self, context, resource_id,
router_id,
model):
res = {
'id': resource_id,
'router_id': router_id
}
self._process_create_resource_router_id(context, res,
model)
def _resource_set_status(self, context, model, id, status, obj=None,
pool_id=None):
with context.session.begin(subtransactions=True):
try:
qry = context.session.query(model)
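                # pool-monitor associations are keyed by (monitor_id,
                # pool_id) rather than a single primary key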
if issubclass(model, loadbalancer_db.PoolMonitorAssociation):
res = qry.filter_by(monitor_id=id,
pool_id=pool_id).one()
else:
res = qry.filter_by(id=id).one()
if status == service_constants.PENDING_UPDATE and (
res.get('status') == service_constants.PENDING_DELETE):
msg = (_("Operation can't be performed, Since resource "
"%(model)s : %(id)s is in DELETEing status!") %
{'model': model,
'id': id})
LOG.error(msg)
raise nsx_exc.NsxPluginException(err_msg=msg)
else:
res.status = status
except sa_exc.NoResultFound:
msg = (_("Resource %(model)s : %(id)s not found!") %
{'model': model,
'id': id})
LOG.exception(msg)
raise nsx_exc.NsxPluginException(err_msg=msg)
if obj:
obj['status'] = status
def _vcns_create_pool_and_monitors(self, context, pool_id, **kwargs):
pool = self.get_pool(context, pool_id)
edge_id = kwargs.get('edge_id')
if not edge_id:
edge_id = self._get_edge_id_by_vip_id(
context, pool['vip_id'])
        # Check whether the pool is already created on the router
        # in case of a future M:N relation between Pool and Vip.
        # Check associated HealthMonitors and then create them
for monitor_id in pool.get('health_monitors'):
hm = self.get_health_monitor(context, monitor_id)
try:
self.vcns_driver.create_health_monitor(
context, edge_id, hm)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to create healthmonitor "
"associated with pool id: %s!") % pool_id)
                    # roll back the monitors created so far in this loop
                    for created_monitor_id in pool.get('health_monitors'):
                        if created_monitor_id == monitor_id:
                            break
                        self.vcns_driver.delete_health_monitor(
                            context, created_monitor_id, edge_id)
#Create the pool on the edge
members = [
super(NsxAdvancedPlugin, self).get_member(
context, member_id)
for member_id in pool.get('members')
]
try:
self.vcns_driver.create_pool(context, edge_id, pool, members)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to create pool on vshield edge"))
self.vcns_driver.delete_pool(
context, pool_id, edge_id)
for monitor_id in pool.get('health_monitors'):
self.vcns_driver.delete_health_monitor(
context, monitor_id, edge_id)
def _vcns_update_pool(self, context, pool, **kwargs):
edge_id = self._get_edge_id_by_vip_id(context, pool['vip_id'])
members = kwargs.get('members')
if not members:
members = [
super(NsxAdvancedPlugin, self).get_member(
context, member_id)
for member_id in pool.get('members')
]
self.vcns_driver.update_pool(context, edge_id, pool, members)
def create_vip(self, context, vip):
LOG.debug(_("create_vip() called"))
router_id = vip['vip'].get(vcns_const.ROUTER_ID)
if not router_id:
msg = _("router_id is not provided!")
LOG.error(msg)
raise n_exc.BadRequest(resource='router', msg=msg)
if not self._is_advanced_service_router(context, router_id):
msg = _("router_id: %s is not an advanced router!") % router_id
LOG.error(msg)
raise nsx_exc.NsxPluginException(err_msg=msg)
#Check whether the vip port is an external port
subnet_id = vip['vip']['subnet_id']
network_id = self.get_subnet(context, subnet_id)['network_id']
ext_net = self._get_network(context, network_id)
if not ext_net.external:
msg = (_("Network '%s' is not a valid external "
"network") % network_id)
raise nsx_exc.NsxPluginException(err_msg=msg)
v = super(NsxAdvancedPlugin, self).create_vip(context, vip)
#Get edge_id for the resource
router_binding = vcns_db.get_vcns_router_binding(
context.session,
router_id)
edge_id = router_binding.edge_id
#Add vip_router binding
self._add_router_service_insertion_binding(context, v['id'],
router_id,
loadbalancer_db.Vip)
#Create the vip port on vShield Edge
router = self._get_router(context, router_id)
self._update_interface(context, router, sync=True)
#Create the vip and associated pool/monitor on the corresponding edge
try:
self._vcns_create_pool_and_monitors(
context, v['pool_id'], edge_id=edge_id)
self.vcns_driver.create_vip(context, edge_id, v)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to create vip!"))
self._delete_resource_router_id_binding(
context, v['id'], loadbalancer_db.Vip)
super(NsxAdvancedPlugin, self).delete_vip(context, v['id'])
self._resource_set_status(context, loadbalancer_db.Vip,
v['id'], service_constants.ACTIVE, v)
v[rsi.ROUTER_ID] = router_id
return v
def update_vip(self, context, id, vip):
edge_id = self._get_edge_id_by_vip_id(context, id)
old_vip = self.get_vip(context, id)
vip['vip']['status'] = service_constants.PENDING_UPDATE
v = super(NsxAdvancedPlugin, self).update_vip(context, id, vip)
v[rsi.ROUTER_ID] = self._get_resource_router_id_binding(
context, loadbalancer_db.Vip, resource_id=id)['router_id']
if old_vip['pool_id'] != v['pool_id']:
self.vcns_driver.delete_vip(context, id)
#Delete old pool/monitor on the edge
            # TODO(linb): Factor out the procedure for removing a pool and
            # its health monitors into a separate method
old_pool = self.get_pool(context, old_vip['pool_id'])
self.vcns_driver.delete_pool(
context, old_vip['pool_id'], edge_id)
for monitor_id in old_pool.get('health_monitors'):
self.vcns_driver.delete_health_monitor(
context, monitor_id, edge_id)
#Create new pool/monitor object on the edge
#TODO(linb): add exception handle if error
self._vcns_create_pool_and_monitors(
context, v['pool_id'], edge_id=edge_id)
self.vcns_driver.create_vip(context, edge_id, v)
return v
try:
self.vcns_driver.update_vip(context, v)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to update vip with id: %s!"), id)
self._resource_set_status(context, loadbalancer_db.Vip,
id, service_constants.ERROR, v)
self._resource_set_status(context, loadbalancer_db.Vip,
v['id'], service_constants.ACTIVE, v)
return v
def delete_vip(self, context, id):
v = self.get_vip(context, id)
self._resource_set_status(
context, loadbalancer_db.Vip,
id, service_constants.PENDING_DELETE)
try:
self.vcns_driver.delete_vip(context, id)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to delete vip with id: %s!"), id)
self._resource_set_status(context, loadbalancer_db.Vip,
id, service_constants.ERROR)
edge_id = self._get_edge_id_by_vip_id(context, id)
#Check associated HealthMonitors and then delete them
pool = self.get_pool(context, v['pool_id'])
self.vcns_driver.delete_pool(context, v['pool_id'], edge_id)
for monitor_id in pool.get('health_monitors'):
#TODO(linb): do exception handle if error
self.vcns_driver.delete_health_monitor(
context, monitor_id, edge_id)
router_binding = self._get_resource_router_id_binding(
context, loadbalancer_db.Vip, resource_id=id)
router = self._get_router(context, router_binding.router_id)
self._delete_resource_router_id_binding(
context, id, loadbalancer_db.Vip)
super(NsxAdvancedPlugin, self).delete_vip(context, id)
self._update_interface(context, router, sync=True)
def get_vip(self, context, id, fields=None):
vip = super(NsxAdvancedPlugin, self).get_vip(context, id, fields)
if fields and rsi.ROUTER_ID not in fields:
return vip
service_router_binding = self._get_resource_router_id_binding(
context, loadbalancer_db.Vip, resource_id=vip['id'])
vip[rsi.ROUTER_ID] = service_router_binding['router_id']
return vip
def get_vips(self, context, filters=None, fields=None):
vips = super(NsxAdvancedPlugin, self).get_vips(
context, filters, fields)
if fields and rsi.ROUTER_ID not in fields:
return vips
service_router_bindings = self._get_resource_router_id_bindings(
context, loadbalancer_db.Vip,
resource_ids=[vip['id'] for vip in vips])
mapping = dict([(binding['resource_id'], binding['router_id'])
for binding in service_router_bindings])
for vip in vips:
vip[rsi.ROUTER_ID] = mapping[vip['id']]
return vips
def update_pool(self, context, id, pool):
pool['pool']['status'] = service_constants.PENDING_UPDATE
p = super(NsxAdvancedPlugin, self).update_pool(context, id, pool)
#Check whether the pool is already associated with the vip
if not p.get('vip_id'):
self._resource_set_status(context, loadbalancer_db.Pool,
p['id'], service_constants.ACTIVE, p)
return p
try:
self._vcns_update_pool(context, p)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to update pool with id: %s!"), id)
self._resource_set_status(context, loadbalancer_db.Pool,
p['id'], service_constants.ERROR, p)
self._resource_set_status(context, loadbalancer_db.Pool,
p['id'], service_constants.ACTIVE, p)
return p
def create_member(self, context, member):
m = super(NsxAdvancedPlugin, self).create_member(context, member)
pool_id = m.get('pool_id')
pool = self.get_pool(context, pool_id)
if not pool.get('vip_id'):
self._resource_set_status(context, loadbalancer_db.Member,
m['id'], service_constants.ACTIVE, m)
return m
self._resource_set_status(context, loadbalancer_db.Pool,
pool_id,
service_constants.PENDING_UPDATE)
try:
self._vcns_update_pool(context, pool)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to update pool with the member"))
super(NsxAdvancedPlugin, self).delete_member(context, m['id'])
self._resource_set_status(context, loadbalancer_db.Pool,
pool_id, service_constants.ACTIVE)
self._resource_set_status(context, loadbalancer_db.Member,
m['id'], service_constants.ACTIVE, m)
return m
def update_member(self, context, id, member):
member['member']['status'] = service_constants.PENDING_UPDATE
old_member = self.get_member(context, id)
m = super(NsxAdvancedPlugin, self).update_member(
context, id, member)
if m['pool_id'] != old_member['pool_id']:
old_pool_id = old_member['pool_id']
old_pool = self.get_pool(context, old_pool_id)
if old_pool.get('vip_id'):
self._resource_set_status(
context, loadbalancer_db.Pool,
old_pool_id, service_constants.PENDING_UPDATE)
try:
self._vcns_update_pool(context, old_pool)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to update old pool "
"with the member"))
super(NsxAdvancedPlugin, self).delete_member(
context, m['id'])
self._resource_set_status(
context, loadbalancer_db.Pool,
old_pool_id, service_constants.ACTIVE)
pool_id = m['pool_id']
pool = self.get_pool(context, pool_id)
if not pool.get('vip_id'):
self._resource_set_status(context, loadbalancer_db.Member,
m['id'], service_constants.ACTIVE, m)
return m
self._resource_set_status(context, loadbalancer_db.Pool,
pool_id,
service_constants.PENDING_UPDATE)
try:
self._vcns_update_pool(context, pool)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to update pool with the member"))
super(NsxAdvancedPlugin, self).delete_member(
context, m['id'])
self._resource_set_status(context, loadbalancer_db.Pool,
pool_id, service_constants.ACTIVE)
self._resource_set_status(context, loadbalancer_db.Member,
m['id'], service_constants.ACTIVE, m)
return m
def delete_member(self, context, id):
m = self.get_member(context, id)
super(NsxAdvancedPlugin, self).delete_member(context, id)
pool_id = m['pool_id']
pool = self.get_pool(context, pool_id)
if not pool.get('vip_id'):
return
self._resource_set_status(context, loadbalancer_db.Pool,
pool_id, service_constants.PENDING_UPDATE)
try:
self._vcns_update_pool(context, pool)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to update pool with the member"))
self._resource_set_status(context, loadbalancer_db.Pool,
pool_id, service_constants.ACTIVE)
def update_health_monitor(self, context, id, health_monitor):
old_hm = super(NsxAdvancedPlugin, self).get_health_monitor(
context, id)
hm = super(NsxAdvancedPlugin, self).update_health_monitor(
context, id, health_monitor)
for hm_pool in hm.get('pools'):
pool_id = hm_pool['pool_id']
pool = self.get_pool(context, pool_id)
if pool.get('vip_id'):
edge_id = self._get_edge_id_by_vip_id(
context, pool['vip_id'])
try:
self.vcns_driver.update_health_monitor(
context, edge_id, old_hm, hm)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to update monitor "
"with id: %s!"), id)
return hm
def delete_health_monitor(self, context, id):
with context.session.begin(subtransactions=True):
qry = context.session.query(
loadbalancer_db.PoolMonitorAssociation
).filter_by(monitor_id=id)
for assoc in qry:
pool_id = assoc['pool_id']
super(NsxAdvancedPlugin,
self).delete_pool_health_monitor(context,
id,
pool_id)
pool = self.get_pool(context, pool_id)
if not pool.get('vip_id'):
continue
edge_id = self._get_edge_id_by_vip_id(
context, pool['vip_id'])
self._resource_set_status(
context, loadbalancer_db.Pool,
pool_id, service_constants.PENDING_UPDATE)
try:
self._vcns_update_pool(context, pool)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to update pool with monitor!"))
self._resource_set_status(
context, loadbalancer_db.Pool,
pool_id, service_constants.ACTIVE)
try:
self.vcns_driver.delete_health_monitor(
context, id, edge_id)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to delete monitor "
"with id: %s!"), id)
super(NsxAdvancedPlugin,
self).delete_health_monitor(context, id)
self._delete_resource_router_id_binding(
context, id, loadbalancer_db.HealthMonitor)
super(NsxAdvancedPlugin, self).delete_health_monitor(context, id)
self._delete_resource_router_id_binding(
context, id, loadbalancer_db.HealthMonitor)
def create_pool_health_monitor(self, context,
health_monitor, pool_id):
monitor_id = health_monitor['health_monitor']['id']
pool = self.get_pool(context, pool_id)
monitors = pool.get('health_monitors')
        if monitors:
            msg = _("VCNS currently supports only "
                    "one monitor per pool")
LOG.error(msg)
raise nsx_exc.NsxPluginException(err_msg=msg)
#Check whether the pool is already associated with the vip
if not pool.get('vip_id'):
res = super(NsxAdvancedPlugin,
self).create_pool_health_monitor(context,
health_monitor,
pool_id)
return res
#Get the edge_id
edge_id = self._get_edge_id_by_vip_id(context, pool['vip_id'])
res = super(NsxAdvancedPlugin,
self).create_pool_health_monitor(context,
health_monitor,
pool_id)
monitor = self.get_health_monitor(context, monitor_id)
#TODO(linb)Add Exception handle if error
self.vcns_driver.create_health_monitor(context, edge_id, monitor)
        # update the local pool dict with the newly associated monitor
        pool['health_monitors'].append(monitor['id'])
self._resource_set_status(
context, loadbalancer_db.Pool,
pool_id, service_constants.PENDING_UPDATE)
try:
self._vcns_update_pool(context, pool)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to associate monitor with pool!"))
self._resource_set_status(
context, loadbalancer_db.Pool,
pool_id, service_constants.ERROR)
super(NsxAdvancedPlugin, self).delete_pool_health_monitor(
context, monitor_id, pool_id)
self._resource_set_status(
context, loadbalancer_db.Pool,
pool_id, service_constants.ACTIVE)
self._resource_set_status(
context, loadbalancer_db.PoolMonitorAssociation,
monitor_id, service_constants.ACTIVE, res,
pool_id=pool_id)
return res
def delete_pool_health_monitor(self, context, id, pool_id):
super(NsxAdvancedPlugin, self).delete_pool_health_monitor(
context, id, pool_id)
pool = self.get_pool(context, pool_id)
#Check whether the pool is already associated with the vip
if pool.get('vip_id'):
#Delete the monitor on vshield edge
edge_id = self._get_edge_id_by_vip_id(context, pool['vip_id'])
self._resource_set_status(
context, loadbalancer_db.Pool,
pool_id, service_constants.PENDING_UPDATE)
try:
self._vcns_update_pool(context, pool)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(
_("Failed to update pool with pool_monitor!"))
self._resource_set_status(
context, loadbalancer_db.Pool,
pool_id, service_constants.ERROR)
#TODO(linb): Add exception handle if error
self.vcns_driver.delete_health_monitor(context, id, edge_id)
self._resource_set_status(
context, loadbalancer_db.Pool,
pool_id, service_constants.ACTIVE)
def _vcns_update_ipsec_config(
self, context, vpnservice_id, removed_ipsec_conn_id=None):
sites = []
vpn_service = self._get_vpnservice(context, vpnservice_id)
edge_id = self._get_edge_id_by_vcns_edge_binding(
context, vpn_service.router_id)
if not vpn_service.router.gw_port:
msg = _("Failed to update ipsec vpn configuration on edge, since "
"the router: %s does not have a gateway yet!"
) % vpn_service.router_id
LOG.error(msg)
raise exceptions.VcnsBadRequest(resource='router', msg=msg)
external_ip = vpn_service.router.gw_port['fixed_ips'][0]['ip_address']
subnet = self._make_subnet_dict(vpn_service.subnet)
for ipsec_site_conn in vpn_service.ipsec_site_connections:
if ipsec_site_conn.id != removed_ipsec_conn_id:
site = self._make_ipsec_site_connection_dict(ipsec_site_conn)
ikepolicy = self._make_ikepolicy_dict(
ipsec_site_conn.ikepolicy)
ipsecpolicy = self._make_ipsecpolicy_dict(
ipsec_site_conn.ipsecpolicy)
sites.append({'site': site,
'ikepolicy': ikepolicy,
'ipsecpolicy': ipsecpolicy,
'subnet': subnet,
'external_ip': external_ip})
try:
self.vcns_driver.update_ipsec_config(
edge_id, sites, enabled=vpn_service.admin_state_up)
except exceptions.VcnsBadRequest:
LOG.exception(_("Bad or unsupported Input request!"))
raise
except exceptions.VcnsApiException:
msg = (_("Failed to update ipsec VPN configuration "
"with vpnservice: %(vpnservice_id)s on vShield Edge: "
"%(edge_id)s") % {'vpnservice_id': vpnservice_id,
'edge_id': edge_id})
LOG.exception(msg)
raise
def create_vpnservice(self, context, vpnservice):
LOG.debug(_("create_vpnservice() called"))
router_id = vpnservice['vpnservice'].get('router_id')
if not self._is_advanced_service_router(context, router_id):
msg = _("router_id:%s is not an advanced router!") % router_id
LOG.warning(msg)
raise exceptions.VcnsBadRequest(resource='router', msg=msg)
if self.get_vpnservices(context, filters={'router_id': [router_id]}):
msg = _("a vpnservice is already associated with the router: %s"
) % router_id
LOG.warning(msg)
raise nsx_exc.ServiceOverQuota(
overs='vpnservice', err_msg=msg)
service = super(NsxAdvancedPlugin, self).create_vpnservice(
context, vpnservice)
self._resource_set_status(
context, vpn_db.VPNService,
service['id'], service_constants.ACTIVE, service)
return service
def update_vpnservice(self, context, vpnservice_id, vpnservice):
vpnservice['vpnservice']['status'] = service_constants.PENDING_UPDATE
service = super(NsxAdvancedPlugin, self).update_vpnservice(
context, vpnservice_id, vpnservice)
# Only admin_state_up attribute is configurable on Edge.
if vpnservice['vpnservice'].get('admin_state_up') is None:
self._resource_set_status(
context, vpn_db.VPNService,
service['id'], service_constants.ACTIVE, service)
return service
# Test whether there is one ipsec site connection attached to
# the vpnservice. If not, just return without updating ipsec
# config on edge side.
vpn_service_db = self._get_vpnservice(context, vpnservice_id)
if not vpn_service_db.ipsec_site_connections:
self._resource_set_status(
context, vpn_db.VPNService,
service['id'], service_constants.ACTIVE, service)
return service
try:
self._vcns_update_ipsec_config(context, service['id'])
except Exception:
with excutils.save_and_reraise_exception():
self._resource_set_status(
context, vpn_db.VPNService,
service['id'], service_constants.ERROR, service)
self._resource_set_status(
context, vpn_db.VPNService,
service['id'], service_constants.ACTIVE, service)
return service
def create_ipsec_site_connection(self, context, ipsec_site_connection):
ipsec_site_conn = super(
NsxAdvancedPlugin, self).create_ipsec_site_connection(
context, ipsec_site_connection)
try:
self._vcns_update_ipsec_config(
context, ipsec_site_conn['vpnservice_id'])
except Exception:
with excutils.save_and_reraise_exception():
super(NsxAdvancedPlugin, self).delete_ipsec_site_connection(
context, ipsec_site_conn['id'])
self._resource_set_status(
context, vpn_db.IPsecSiteConnection,
ipsec_site_conn['id'], service_constants.ACTIVE, ipsec_site_conn)
return ipsec_site_conn
def update_ipsec_site_connection(self, context, ipsec_site_connection_id,
ipsec_site_connection):
ipsec_site_connection['ipsec_site_connection']['status'] = (
service_constants.PENDING_UPDATE)
ipsec_site_conn = super(
NsxAdvancedPlugin, self).update_ipsec_site_connection(
context, ipsec_site_connection_id, ipsec_site_connection)
try:
self._vcns_update_ipsec_config(
context, ipsec_site_conn['vpnservice_id'])
except Exception:
with excutils.save_and_reraise_exception():
self._resource_set_status(
context, vpn_db.IPsecSiteConnection, ipsec_site_conn['id'],
service_constants.ERROR, ipsec_site_conn)
self._resource_set_status(
context, vpn_db.IPsecSiteConnection,
ipsec_site_conn['id'], service_constants.ACTIVE, ipsec_site_conn)
return ipsec_site_conn
def delete_ipsec_site_connection(self, context, ipsec_site_conn_id):
self._resource_set_status(
context, vpn_db.IPsecSiteConnection,
ipsec_site_conn_id, service_constants.PENDING_DELETE)
vpnservice_id = self.get_ipsec_site_connection(
context, ipsec_site_conn_id)['vpnservice_id']
try:
self._vcns_update_ipsec_config(
context, vpnservice_id, ipsec_site_conn_id)
except Exception:
with excutils.save_and_reraise_exception():
self._resource_set_status(
context, vpn_db.IPsecSiteConnection, ipsec_site_conn_id,
service_constants.ERROR)
super(NsxAdvancedPlugin, self).delete_ipsec_site_connection(
context, ipsec_site_conn_id)
class VcnsCallbacks(object):
"""Edge callback implementation Callback functions for
asynchronous tasks.
"""
def __init__(self, plugin):
self.plugin = plugin
def edge_deploy_started(self, task):
"""callback when deployment task started."""
jobdata = task.userdata['jobdata']
context = jobdata['context']
edge_id = task.userdata.get('edge_id')
neutron_router_id = jobdata['neutron_router_id']
name = task.userdata['router_name']
if edge_id:
LOG.debug(_("Start deploying %(edge_id)s for router %(name)s"), {
'edge_id': edge_id,
'name': name})
vcns_db.update_vcns_router_binding(
context.session, neutron_router_id, edge_id=edge_id)
else:
LOG.debug(_("Failed to deploy Edge for router %s"), name)
vcns_db.update_vcns_router_binding(
context.session, neutron_router_id,
status=service_constants.ERROR)
def edge_deploy_result(self, task):
"""callback when deployment task finished."""
jobdata = task.userdata['jobdata']
lrouter = jobdata['lrouter']
context = jobdata['context']
name = task.userdata['router_name']
neutron_router_id = jobdata['neutron_router_id']
router_db = None
try:
router_db = self.plugin._get_router(
context, neutron_router_id)
except l3.RouterNotFound:
# Router might have been deleted before deploy finished
LOG.exception(_("Router %s not found"), lrouter['uuid'])
if task.status == TaskStatus.COMPLETED:
LOG.debug(_("Successfully deployed %(edge_id)s for "
"router %(name)s"), {
'edge_id': task.userdata['edge_id'],
'name': name})
if (router_db and
router_db['status'] == service_constants.PENDING_CREATE):
router_db['status'] = service_constants.ACTIVE
binding = vcns_db.get_vcns_router_binding(
context.session, neutron_router_id)
# only update status to active if its status is pending create
if binding['status'] == service_constants.PENDING_CREATE:
vcns_db.update_vcns_router_binding(
context.session, neutron_router_id,
status=service_constants.ACTIVE)
else:
LOG.debug(_("Failed to deploy Edge for router %s"), name)
if router_db:
router_db['status'] = service_constants.ERROR
vcns_db.update_vcns_router_binding(
context.session, neutron_router_id,
status=service_constants.ERROR)
def edge_delete_result(self, task):
jobdata = task.userdata['jobdata']
router_id = task.userdata['router_id']
context = jobdata['context']
if task.status == TaskStatus.COMPLETED:
vcns_db.delete_vcns_router_binding(context.session,
router_id)
def interface_update_result(self, task):
LOG.debug(_("interface_update_result %d"), task.status)
def snat_create_result(self, task):
LOG.debug(_("snat_create_result %d"), task.status)
def snat_delete_result(self, task):
LOG.debug(_("snat_delete_result %d"), task.status)
def dnat_create_result(self, task):
LOG.debug(_("dnat_create_result %d"), task.status)
def dnat_delete_result(self, task):
LOG.debug(_("dnat_delete_result %d"), task.status)
def routes_update_result(self, task):
LOG.debug(_("routes_update_result %d"), task.status)
def nat_update_result(self, task):
LOG.debug(_("nat_update_result %d"), task.status)
def _process_base_create_lswitch_args(*args, **kwargs):
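    # positional args as used below: args[1] = neutron network id,
    # args[2] = tenant id, args[3] = switch name, args[4] = transport
    # zone config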
tags = utils.get_tags()
tags.append({"tag": args[1],
"scope": "quantum_net_id"})
if args[2]:
tags.append({"tag": args[2], "scope": "os_tid"})
switch_name = args[3]
tz_config = args[4]
if kwargs.get("shared", False) or len(args) >= 6:
tags.append({"tag": "true", "scope": "shared"})
if kwargs.get("tags"):
tags.extend(kwargs["tags"])
return switch_name, tz_config, tags
# For backward compatibility
NvpAdvancedPlugin = NsxAdvancedPlugin
| apache-2.0 | 6,044,891,143,040,677,000 | 43.279716 | 79 | 0.572732 | false |
pkariz/nnsearch | nnsearch/approx/Annoy.py | 1 | 6165 | from ..baseindex import Index
import numpy as np
import math
from annoy import AnnoyIndex
class Annoy(Index):
"""
AnnoyIndex from annoy package.
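
    Example (illustrative sketch; ``dataset`` stands for any object with a
    2d numpy array in its ``data`` attribute, as ``build`` expects):

        index = Annoy()
        index.build(data=dataset, distance="euclidean", trees=10)
        neighbors, dists = index.query(dataset.data[0], k=5)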
"""
def __init__(self):
self.algorithm = "AnnoyIndex"
self.idx_to_vector = {}
self.valid_types = [np.uint8, np.uint16, np.uint32, np.uint64,
np.int8, np.int16, np.int32, np.int64,
np.float16, np.float32, np.float64]
def build(self, data=None, dimensions=None, distance="angular", trees=-1):
"""
Builds AnnoyIndex on data or creates an empty one. If both dimensions and data are given then their dimensions
must match. At least one of those two attributes must be given to define number of dimensions which is required
to create AnnoyIndex. After the trees are built you cannot add additional vectors.
        :param data: Dataset instance representing vectors which are inserted before trees are built (optional, vectors
        can also be inserted one by one with the _insert method before the trees are built)
:param dimensions: number of dimensions
:param distance: can be "angular" (default) or "euclidean"
:param trees: number of binary trees. Default (-1) means that this parameter is determined automatically in a way,
that memory usage <= 2 * memory(vectors)
"""
#check dimensions
if data is None and dimensions is None:
raise ValueError("Number of dimensions is missing!")
if data is not None and dimensions is not None and dimensions != len(data.data[0]):
raise ValueError("Dimensions from constructor parameter 'dimensions' and derived dimensions from 'data' are different!")
#build index
if data is not None:
dimensions = len(data.data[0])
self.index = AnnoyIndex(dimensions, distance)
self.d = dimensions
self._size = 0
self.metric = 0 #angular
if distance != "angular":
self.metric = 1 #euclidean
#fill data
if data is not None:
if type(data.data) is np.ndarray and data.data.dtype not in self.valid_types:
raise ValueError("Invalid dtype of numpy array, check valid_types parameter of index!")
for v in data.data:
self._insert(v)
#build trees
self.index.build(trees)
def _insert(self, vector):
"""
Inserts vector in AnnoyIndex.
:param vector: 1d numpy array, list or tuple representing vector
"""
if type(vector) is np.ndarray:
vector = vector.tolist()
else:
vector = list(vector)
self.index.add_item(self._size, vector)
self._size += 1
def get_dist(self, v1, v2, dist=None):
"""
Calculates distance (euclidean or angular) between two vectors. By default distance is set to metric of index.
:param v1: first vector (list or numpy array)
:param v2: second vector
:param dist: distance can be 0 (angular) or 1 (euclidean)
:return: distance between given vectors
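
        Note: the "angular" value equals 2 - 2*cos(theta), i.e. the squared
        euclidean distance between the two vectors after L2-normalization.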
"""
if dist is None:
dist = self.metric
if dist == 0:
#angular
v1_sum, v2_sum, mix_sum = 0.0, 0.0, 0.0
for i in range(self.d):
v1_sum += v1[i] * v1[i]
v2_sum += v2[i] * v2[i]
mix_sum += v1[i] * v2[i]
a = v1_sum * v2_sum
if a > 0.0:
return 2.0 - (2.0 * mix_sum / (math.sqrt(a)))
else:
return 2.0
else:
#euclidean
d = 0.0
if self.d != len(v1) or self.d != len(v2):
raise ValueError("Length of vectors is not the same as d!")
for i in range(self.d):
d += (v1[i] - v2[i]) * (v1[i] - v2[i])
return math.sqrt(d)
def query(self, queries, k=1):
"""
Returns k nearest neighbors.
:param queries: 1d or 2d numpy array or list
:param k: number of nearest neighbors to return
        :return: tuple (a, b) where a is an array with the k nearest neighbors
        and b is an array of the same shape containing their distances
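
        Example (sketch; assumes build() has already been called):

            neighbors, dists = index.query([1.0] * index.d, k=3)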
"""
dists = []
        if isinstance(queries, np.ndarray) and len(queries.shape) == 1 or \
           isinstance(queries, list) and not isinstance(queries[0], list):
            # single query vector; normalize to a plain list so the same
            # code path works for both numpy arrays and lists
            if isinstance(queries, np.ndarray):
                query_vec = queries.tolist()
            else:
                query_vec = list(queries)
            neighbors = self.index.get_nns_by_vector(query_vec, k)
            # calculate distances
            dists = [self.get_dist(query_vec, self.index.get_item_vector(x))
                     for x in neighbors]
else:
#more queries
neighbors = []
for query in queries:
if isinstance(query, np.ndarray):
cur_neighbors = self.index.get_nns_by_vector(query.tolist(), k)
else:
cur_neighbors = self.index.get_nns_by_vector(query, k)
neighbors.append(cur_neighbors)
#calculate distances from cur_neighbors to query point
dists.append([self.get_dist(query, self.index.get_item_vector(x)) for x in cur_neighbors])
return np.array(neighbors), np.array(dists)
def save(self, filename):
"""Saves index to file."""
self.index.save(filename)
def load(self, filename, dimensions=None, distance=None):
"""
Loads index from file.
:param filename: path to file
:param dimensions: number of dimensions of index
:param distance: distance used
"""
if dimensions is None or distance is None:
raise ValueError("Dimensions and distance are needed!")
self.index = AnnoyIndex(dimensions, distance)
self.d = dimensions
self.metric = 0
if distance == "euclidean":
self.metric = 1
self.index.load(filename)
| gpl-3.0 | -3,607,731,220,281,790,000 | 39.827815 | 132 | 0.579238 | false |
HERA-Team/pyuvdata | pyuvdata/uvbeam/cst_beam.py | 1 | 13336 | # -*- mode: python; coding: utf-8 -*-
# Copyright (c) 2018 Radio Astronomy Software Group
# Licensed under the 2-clause BSD License
"""Class for reading beam CST files."""
import re
import warnings
import numpy as np
from .uvbeam import UVBeam
from .. import utils as uvutils
__all__ = ["CSTBeam"]
class CSTBeam(UVBeam):
"""
Defines a CST-specific subclass of UVBeam for reading CST text files.
This class should not be interacted with directly, instead use the
read_cst_beam method on the UVBeam class.
"""
def name2freq(self, fname):
"""
Extract frequency from the filename.
Assumes the file name contains a substring with the frequency channel
in MHz that the data represents.
e.g. "HERA_Sim_120.87MHz.txt" should yield 120.87e6
Parameters
----------
fname : str
Filename to parse.
Returns
-------
float
Frequency extracted from filename in Hz.
"""
fi = fname.rfind("Hz")
frequency = float(re.findall(r"\d*\.\d+|\d+", fname[:fi])[-1])
si_prefix = fname[fi - 1]
si_dict = {"k": 1e3, "M": 1e6, "G": 1e9}
if si_prefix in si_dict.keys():
frequency = frequency * si_dict[si_prefix]
return frequency
def read_cst_beam(
self,
filename,
beam_type="power",
feed_pol="x",
rotate_pol=True,
frequency=None,
telescope_name=None,
feed_name=None,
feed_version=None,
model_name=None,
model_version=None,
history="",
x_orientation=None,
reference_impedance=None,
extra_keywords=None,
run_check=True,
check_extra=True,
run_check_acceptability=True,
):
"""
Read in data from a cst file.
Parameters
----------
filename : str
The cst file to read from.
beam_type : str
What beam_type to read in ('power' or 'efield').
feed_pol : str
The feed or polarization or list of feeds or polarizations the
files correspond to.
Defaults to 'x' (meaning x for efield or xx for power beams).
rotate_pol : bool
If True, assume the structure in the simulation is symmetric under
90 degree rotations about the z-axis (so that the y polarization can be
constructed by rotating the x polarization or vice versa).
Default: True if feed_pol is a single value or a list with all
the same values in it, False if it is a list with varying values.
frequency : float or list of float
The frequency or list of frequencies corresponding to the filename(s).
This is assumed to be in the same order as the files.
If not passed, the code attempts to parse it from the filenames.
telescope_name : str
The name of the telescope corresponding to the filename(s).
feed_name : str
The name of the feed corresponding to the filename(s).
feed_version : str
The version of the feed corresponding to the filename(s).
model_name : str
The name of the model corresponding to the filename(s).
model_version : str
The version of the model corresponding to the filename(s).
history : str
A string detailing the history of the filename(s).
x_orientation : str, optional
Orientation of the physical dipole corresponding to what is
labelled as the x polarization. Options are "east" (indicating
east/west orientation) and "north" (indicating north/south orientation)
reference_impedance : float, optional
The reference impedance of the model(s).
extra_keywords : dict, optional
A dictionary containing any extra_keywords.
run_check : bool
Option to check for the existence and proper shapes of
required parameters after reading in the file.
check_extra : bool
Option to check optional parameters as well as
required ones.
run_check_acceptability : bool
Option to check acceptable range of the values of
required parameters after reading in the file.
"""
self.telescope_name = telescope_name
self.feed_name = feed_name
self.feed_version = feed_version
self.model_name = model_name
self.model_version = model_version
self.history = history
if not uvutils._check_history_version(self.history, self.pyuvdata_version_str):
self.history += self.pyuvdata_version_str
if x_orientation is not None:
self.x_orientation = x_orientation
if reference_impedance is not None:
self.reference_impedance = float(reference_impedance)
if extra_keywords is not None:
self.extra_keywords = extra_keywords
if beam_type == "power":
self.Naxes_vec = 1
if feed_pol == "x":
feed_pol = "xx"
elif feed_pol == "y":
feed_pol = "yy"
if rotate_pol:
rot_pol_dict = {"xx": "yy", "yy": "xx", "xy": "yx", "yx": "xy"}
pol2 = rot_pol_dict[feed_pol]
self.polarization_array = np.array(
[uvutils.polstr2num(feed_pol), uvutils.polstr2num(pol2)]
)
else:
self.polarization_array = np.array([uvutils.polstr2num(feed_pol)])
self.Npols = len(self.polarization_array)
self._set_power()
else:
self.Naxes_vec = 2
self.Ncomponents_vec = 2
if rotate_pol:
if feed_pol == "x":
self.feed_array = np.array(["x", "y"])
else:
self.feed_array = np.array(["y", "x"])
else:
if feed_pol == "x":
self.feed_array = np.array(["x"])
else:
self.feed_array = np.array(["y"])
self.Nfeeds = self.feed_array.size
self._set_efield()
self.data_normalization = "physical"
self.antenna_type = "simple"
self.Nfreqs = 1
self.Nspws = 1
self.freq_array = np.zeros((self.Nspws, self.Nfreqs))
self.bandpass_array = np.zeros((self.Nspws, self.Nfreqs))
self.spw_array = np.array([0])
self.pixel_coordinate_system = "az_za"
self._set_cs_params()
        with open(filename, "r") as out_file:
            line = out_file.readline().strip()  # Get the first line
raw_names = line.split("]")
raw_names = [raw_name for raw_name in raw_names if not raw_name == ""]
column_names = []
units = []
for raw_name in raw_names:
column_name, unit = tuple(raw_name.split("["))
column_names.append("".join(column_name.lower().split(" ")))
units.append(unit.lower().strip())
data = np.loadtxt(filename, skiprows=2)
theta_col = np.where(np.array(column_names) == "theta")[0][0]
phi_col = np.where(np.array(column_names) == "phi")[0][0]
if "deg" in units[theta_col]:
theta_data = np.radians(data[:, theta_col])
else:
theta_data = data[:, theta_col]
if "deg" in units[phi_col]:
phi_data = np.radians(data[:, phi_col])
else:
phi_data = data[:, phi_col]
theta_axis = np.sort(np.unique(theta_data))
phi_axis = np.sort(np.unique(phi_data))
if not theta_axis.size * phi_axis.size == theta_data.size:
raise ValueError("Data does not appear to be on a grid")
theta_data = theta_data.reshape((theta_axis.size, phi_axis.size), order="F")
phi_data = phi_data.reshape((theta_axis.size, phi_axis.size), order="F")
delta_theta = np.diff(theta_axis)
if not np.isclose(np.max(delta_theta), np.min(delta_theta)):
raise ValueError(
"Data does not appear to be regularly gridded in zenith angle"
)
delta_theta = delta_theta[0]
delta_phi = np.diff(phi_axis)
if not np.isclose(np.max(delta_phi), np.min(delta_phi)):
raise ValueError(
"Data does not appear to be regularly gridded in azimuth angle"
)
delta_phi = delta_phi[0]
self.axis1_array = phi_axis
self.Naxes1 = self.axis1_array.size
self.axis2_array = theta_axis
self.Naxes2 = self.axis2_array.size
if self.beam_type == "power":
# type depends on whether cross pols are present
# (if so, complex, else float)
self.data_array = np.zeros(
self._data_array.expected_shape(self),
dtype=self._data_array.expected_type,
)
else:
self.data_array = np.zeros(
                self._data_array.expected_shape(self), dtype=complex
)
if frequency is not None:
self.freq_array[0] = frequency
else:
self.freq_array[0] = self.name2freq(filename)
if rotate_pol:
# for second polarization, rotate by pi/2
rot_phi = phi_data + np.pi / 2
rot_phi[np.where(rot_phi >= 2 * np.pi)] -= 2 * np.pi
roll_rot_phi = np.roll(rot_phi, int((np.pi / 2) / delta_phi), axis=1)
if not np.allclose(roll_rot_phi, phi_data):
raise ValueError("Rotating by pi/2 failed")
# theta is not affected by the rotation
# get beam
if self.beam_type == "power":
data_col_enum = ["abs(e)", "abs(v)"]
data_col = []
for name in data_col_enum:
this_col = np.where(np.array(column_names) == name)[0]
if this_col.size > 0:
data_col = data_col + this_col.tolist()
if len(data_col) == 0:
raise ValueError("No power column found in file: {}".format(filename))
elif len(data_col) > 1:
raise ValueError(
"Multiple possible power columns found in file: {}".format(filename)
)
data_col = data_col[0]
power_beam1 = (
data[:, data_col].reshape((theta_axis.size, phi_axis.size), order="F")
** 2.0
)
self.data_array[0, 0, 0, 0, :, :] = power_beam1
if rotate_pol:
# rotate by pi/2 for second polarization
power_beam2 = np.roll(power_beam1, int((np.pi / 2) / delta_phi), axis=1)
self.data_array[0, 0, 1, 0, :, :] = power_beam2
else:
self.basis_vector_array = np.zeros(
(self.Naxes_vec, self.Ncomponents_vec, self.Naxes2, self.Naxes1)
)
self.basis_vector_array[0, 0, :, :] = 1.0
self.basis_vector_array[1, 1, :, :] = 1.0
theta_mag_col = np.where(np.array(column_names) == "abs(theta)")[0][0]
theta_phase_col = np.where(np.array(column_names) == "phase(theta)")[0][0]
phi_mag_col = np.where(np.array(column_names) == "abs(phi)")[0][0]
phi_phase_col = np.where(np.array(column_names) == "phase(phi)")[0][0]
theta_mag = data[:, theta_mag_col].reshape(
(theta_axis.size, phi_axis.size), order="F"
)
phi_mag = data[:, phi_mag_col].reshape(
(theta_axis.size, phi_axis.size), order="F"
)
if "deg" in units[theta_phase_col]:
theta_phase = np.radians(data[:, theta_phase_col])
else:
theta_phase = data[:, theta_phase_col]
if "deg" in units[phi_phase_col]:
phi_phase = np.radians(data[:, phi_phase_col])
else:
phi_phase = data[:, phi_phase_col]
theta_phase = theta_phase.reshape(
(theta_axis.size, phi_axis.size), order="F"
)
phi_phase = phi_phase.reshape((theta_axis.size, phi_axis.size), order="F")
theta_beam = theta_mag * np.exp(1j * theta_phase)
phi_beam = phi_mag * np.exp(1j * phi_phase)
self.data_array[0, 0, 0, 0, :, :] = phi_beam
self.data_array[1, 0, 0, 0, :, :] = theta_beam
if rotate_pol:
# rotate by pi/2 for second polarization
theta_beam2 = np.roll(theta_beam, int((np.pi / 2) / delta_phi), axis=1)
phi_beam2 = np.roll(phi_beam, int((np.pi / 2) / delta_phi), axis=1)
self.data_array[0, 0, 1, 0, :, :] = phi_beam2
self.data_array[1, 0, 1, 0, :, :] = theta_beam2
self.bandpass_array[0] = 1
if frequency is None:
warnings.warn(
"No frequency provided. Detected frequency is: "
"{freqs} Hz".format(freqs=self.freq_array)
)
if run_check:
self.check(
check_extra=check_extra, run_check_acceptability=run_check_acceptability
)
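
# Illustrative sketch (not from the original file): a standard-library
# re-check of the filename-to-frequency rule implemented by CSTBeam.name2freq
# above; the helper name is made up for the example.
if __name__ == "__main__":
    def _name2freq(fname):
        fi = fname.rfind("Hz")
        freq = float(re.findall(r"\d*\.\d+|\d+", fname[:fi])[-1])
        return freq * {"k": 1e3, "M": 1e6, "G": 1e9}.get(fname[fi - 1], 1.0)

    assert abs(_name2freq("HERA_Sim_120.87MHz.txt") - 120.87e6) < 1.0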
| bsd-2-clause | 7,236,658,128,043,490,000 | 36.886364 | 88 | 0.540492 | false |
joaquinlpereyra/ludema | ludema/abstract/actions.py | 1 | 11361 | import random
from functools import wraps
from ludema.abstract.utils import Direction
from ludema.exceptions import (PieceIsNotOnATileError,
PieceIsNotOnThisBoardError,
TileIsEmptyError,
NotGrabbableError)
class Action:
def __init__(self, piece, action_functions):
self.possible_actions = []
self.piece = piece
if action_functions is None:
action_functions = self._default_actions()
self._set_actions(action_functions)
self.history = []
def __getattribute__(self, name):
attr = object.__getattribute__(self, name)
if attr in object.__getattribute__(self, 'possible_actions'):
attr = self._history_appender(attr)
return attr
@property
def is_implemented(self):
"""Return True if action is implemented, False if it can't."""
return True if self.possible_actions else False
def _history_appender(self, func):
@wraps(func)
def history_wrapper(*args, **kwargs):
self.history.append(func.__name__)
return func(*args, **kwargs)
return history_wrapper
def _normal_default_actions(self):
"""Just a collection of four extremely normal set of default actions.
The ones who apply the action to the tile up, right, left and down
of the piece.
"""
def up():
return self.do(self.piece.surroundings[Direction.UP])
def right():
return self.do(self.piece.surroundings[Direction.RIGHT])
def down():
return self.do(self.piece.surroundings[Direction.DOWN])
def left():
return self.do(self.piece.surroundings[Direction.LEFT])
return [up, right, down, left]
def _set_actions(self, action_functions):
"""Sets the action_funcions as methods of the class
and append them to the possible_actions list.
"""
for action_function in action_functions:
self.possible_actions.append(action_function)
setattr(self, action_function.__name__, action_function)
def _default_actions(self):
"""These will be the default action functions. Every action should
implement them, but the _normal_default_actions method give you
four extremely common default function actions: the one which
applies the action to the tiles above, at right, below and at left
of the piece.
"""
return self._normal_default_actions()
def _unsafe_do(self, tile):
"""Intended to actually perform the action. Should check all
action conditions and raise an appropiate error if they are not met.
Doesn't need to return anything. Shouldn't be used for I/O, instead
use the do method for that.
Note:
Every action should implement this method.
"""
raise NotImplementedError("The Action class shouldn't be used directly!")
def do(self, tile, dont_pass_turn=False):
"""Inteded as a safe wraper for _unsafe_do. Should take a tile
indicating where the action must be performed. Should return a bolean
indicating if the action could be performed or not. Should be capable
of handling I/O without raising any exceptions.
Useful for one-use-cases for the actions, if you want to extraordinarily
perform an action to a tile. For ordinary uses, use the actions in the
possible_actions lists. For example, if a piece moves up,down,left,right
alsways, set those as actions functions. If a magician teleports the
piece somewhere, you can use this function to move it there.
All the action functions should ultimately use this method.
Note:
Every action should implement this method.
"""
raise NotImplementedError("The Action class shouldn't be used directly!")
def random(self):
"""Call a random function from the possible actions
list. Keep in mind that the action may or may not be performed,
depending on the current position of the piece and what the action
tries to do.
Returns:
bool: True if action was performed, False if not
"""
surprise_action = random.choice(self.possible_actions)
was_action_valid = surprise_action()
return was_action_valid
def random_and_valid(self):
"""Call a random function from the possible actions,
making sure that the action is actually possible for the piece.
        If no action from the list of possible actions can be performed,
        it will just return False.
Returns:
bool: True if there was a valid action to be made by the piece,
False if the piece couldn't move anywhere
"""
tries = 0
random_action_performed = self.random()
while not random_action_performed:
random_action_performed = self.random()
tries += 1
if tries >= len(self.possible_actions):
return False
return True
def all(self):
"""Call all possible actions from the list. The actions may or may
not be performed depending on the action conditions.
Returns:
dict: looks like {action_function_name, boolean} key-value pairs,
indicating which actions where actually performed (True) and which
not (False).
"""
successes = {}
for action_function in self.possible_actions:
success = action_function()
successes[action_function.__name__] = success
return successes
def until_success(self):
"""Call all possible actions from the list of possible actions,
but stop once it can perform one successfully.
Returns:
bool: True if there was a valid action performed by the piece,
False if no valid action was found.
"""
for action_function in self.possible_actions:
success = action_function()
if success:
return True
else:
return False
class Moving(Action):
def __init__(self, piece, movement_functions):
"""
Args:
piece (Piece): the movable piece to which the movements refer
movement_functions ([nullary functions]): a list of valid
functions which as a side effect move the piece.
"""
Action.__init__(self, piece, movement_functions)
self.possible_movements = self.possible_actions
def _unsafe_do(self, tile):
"""Move the object if it can.
That means: unlink the piece from its current tile and link it
        to the new tile; unless there's a piece on the destination tile already.
Args:
tile (Tile): the tile to which the piece will try to move
Returns:
bool: False if there was a piece on tile and it wasn't walkable,
True if movement could be completed
Raises:
PieceIsNotOnATileError: if the piece hasn't been put on a tile before
trying to move
PieceIsNotOnThisBoardError: if the piece you're trying to move
is in fact on another board
"""
if not self.piece.home_tile:
raise PieceIsNotOnATileError
if self.piece.home_tile.board is not tile.board:
raise PieceIsNotOnThisBoardError
if tile.piece is not None:
tile.piece.on_touch_do(touching_piece=self.piece)
            # what if tile.piece.on_touch_do actually moved the touched piece?
            # it could have, so we need to check whether the tile still holds
            # a piece...
if tile.piece and not tile.piece.walkable:
return False
self.piece.home_tile.piece = None
tile.piece = self.piece
return True
def do(self, tile):
"""Move the object, if it can.
Args:
tile (Tile): the tile to which the piece will try to move.
Returns:
bool: True if piece could be moved, False if not
"""
if tile:
try:
return self._unsafe_do(tile)
except (PieceIsNotOnATileError, PieceIsNotOnThisBoardError):
return False
else:
return False
class Attacking(Action):
def __init__(self, piece, attack_functions):
Action.__init__(self, piece, attack_functions)
self.possible_attacks = self.possible_actions
def _unsafe_do(self, tile):
"""Attack a piece on tile passed as argument. If tile
has no piece, raise a TileIsEmptyError.
Args:
tile (Tile): the tile which the piece will try to attack
"""
if tile.piece is None:
raise TileIsEmptyError(self.piece, tile)
attacked_piece = tile.piece
attacked_piece.health -= self.piece.attack_damage
def do(self, tile):
"""Attack a tile passed as argument. Safe to use for I/O, should
never raise an error.
Args:
tile (Tile): the tile which the piece will try to attack
Returns:
bool: True if attack could be performed, False if attack failed
(because the tile didn't have a piece associated or it was None)
"""
if tile:
try:
self._unsafe_do(tile)
return True
except TileIsEmptyError:
return False
else:
return False
class Grabbing(Action):
def __init__(self, piece, grab_functions):
Action.__init__(self, piece, grab_functions)
self.possible_grabs = self.possible_actions
def _unsafe_do(self, tile):
"""Grabs from the tile passed as argument.
Args:
            tile (Tile): the tile from which the piece will try to grab
Raises:
NotGrabbableError if the piece on the tile can't be grabbed
"""
if not callable(tile.piece.grab):
raise NotGrabbableError(tile.piece)
grabbable = tile.piece
grabbable.owner = self.piece
self.piece.items.append(grabbable)
tile.piece = None # POPS!
def do(self, tile):
"""Grabs from the tile passed as argument. Safe to use for I/O, should
never raise an error.
Args:
tile (Tile): the tile which the piece will try to grab from
Returns:
            bool: True if something could be grabbed, False if the grab failed
"""
if not tile:
return False
        try:
            self._unsafe_do(tile)
            return True
        except (NotGrabbableError, AttributeError):
            # NotGrabbableError: the piece on the tile can't be grabbed;
            # AttributeError: the tile is empty (tile.piece is None)
            return False
def from_surroundings(self):
"""Grabs an item from the surroundings of the Character.
Stops at first item grabbed.
Items look-up goes clockwise.
Returns:
bool: True if item found and grabbed, False otherwise.
"""
for tile in self.piece.surroundings.values():
item_grabbed = self.do(tile)
if item_grabbed:
return True
else:
return False
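
# Illustrative sketch (not from the original module): the history-recording
# pattern used by Action.__getattribute__/_history_appender, reduced to a
# runnable minimum with made-up names.
if __name__ == "__main__":
    _history = []

    def _record(func):
        @wraps(func)
        def _wrapper(*args, **kwargs):
            _history.append(func.__name__)
            return func(*args, **kwargs)
        return _wrapper

    @_record
    def _up():
        return True

    _up()
    assert _history == ["_up"]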
| gpl-3.0 | 6,046,794,451,876,565,000 | 35.066667 | 93 | 0.602588 | false |
twilio/twilio-python | tests/integration/preview/sync/service/sync_list/test_sync_list_item.py | 1 | 10556 | # coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base import serialize
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class SyncListItemTestCase(IntegrationTestCase):
def test_fetch_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.preview.sync.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists("ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items(1).fetch()
self.holodeck.assert_has_request(Request(
'get',
'https://preview.twilio.com/Sync/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Items/1',
))
def test_fetch_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"created_by": "created_by",
"data": {},
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"index": 100,
"list_sid": "ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"revision": "revision",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items/100"
}
'''
))
actual = self.client.preview.sync.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists("ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items(1).fetch()
self.assertIsNotNone(actual)
def test_delete_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.preview.sync.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists("ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items(1).delete(if_match="if_match")
headers = {'If-Match': "if_match", }
self.holodeck.assert_has_request(Request(
'delete',
'https://preview.twilio.com/Sync/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Items/1',
headers=headers,
))
def test_delete_response(self):
self.holodeck.mock(Response(
204,
None,
))
actual = self.client.preview.sync.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists("ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items(1).delete()
self.assertTrue(actual)
def test_create_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.preview.sync.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists("ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items.create(data={})
values = {'Data': serialize.object({}), }
self.holodeck.assert_has_request(Request(
'post',
'https://preview.twilio.com/Sync/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Items',
data=values,
))
def test_create_response(self):
self.holodeck.mock(Response(
201,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"created_by": "created_by",
"data": {},
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"index": 100,
"list_sid": "ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"revision": "revision",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items/100"
}
'''
))
actual = self.client.preview.sync.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists("ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items.create(data={})
self.assertIsNotNone(actual)
def test_list_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.preview.sync.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists("ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items.list()
self.holodeck.assert_has_request(Request(
'get',
'https://preview.twilio.com/Sync/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Items',
))
def test_read_empty_response(self):
self.holodeck.mock(Response(
200,
'''
{
"items": [],
"meta": {
"first_page_url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items?From=from&Bounds=inclusive&Order=asc&PageSize=50&Page=0",
"key": "items",
"next_page_url": null,
"page": 0,
"page_size": 50,
"previous_page_url": null,
"url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items?From=from&Bounds=inclusive&Order=asc&PageSize=50&Page=0"
}
}
'''
))
actual = self.client.preview.sync.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists("ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items.list()
self.assertIsNotNone(actual)
def test_read_full_response(self):
self.holodeck.mock(Response(
200,
'''
{
"items": [
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"created_by": "created_by",
"data": {},
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"index": 100,
"list_sid": "ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"revision": "revision",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items/100"
}
],
"meta": {
"first_page_url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items?From=from&Bounds=inclusive&Order=asc&PageSize=50&Page=0",
"key": "items",
"next_page_url": null,
"page": 0,
"page_size": 50,
"previous_page_url": null,
"url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items?From=from&Bounds=inclusive&Order=asc&PageSize=50&Page=0"
}
}
'''
))
actual = self.client.preview.sync.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists("ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items.list()
self.assertIsNotNone(actual)
def test_update_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.preview.sync.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists("ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items(1).update(data={}, if_match="if_match")
values = {'Data': serialize.object({}), }
headers = {'If-Match': "if_match", }
self.holodeck.assert_has_request(Request(
'post',
'https://preview.twilio.com/Sync/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Items/1',
headers=headers,
))
self.holodeck.assert_has_request(Request(
'post',
'https://preview.twilio.com/Sync/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Items/1',
data=values,
))
def test_update_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"created_by": "created_by",
"data": {},
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"index": 100,
"list_sid": "ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"revision": "revision",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items/100"
}
'''
))
actual = self.client.preview.sync.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists("ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items(1).update(data={})
self.assertIsNotNone(actual)
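
# Note: every test above follows the same generated recipe -- mock
# Response(500, '') to force a TwilioException and assert the captured
# Request, then mock the documented JSON payload and assert that the client
# parses it into a non-None result.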
| mit | -3,002,607,029,883,144,000 | 42.085714 | 219 | 0.551156 | false |
lycantropos/cetus | cetus/queries/filters.py | 1 | 2007 | from typing import Optional, Tuple, Any
from cetus.types import (FiltersType,
FilterType)
from cetus.utils import join_str
from .utils import normalize_value
LOGICAL_OPERATORS = {'AND', 'OR'}
INCLUSION_OPERATORS = {'IN', 'NOT IN'}
RANGE_OPERATORS = {'BETWEEN'}
COMPARISON_OPERATORS = {'=', '!=',
'<', '>',
'<=', '>=',
'IS', 'IS NOT',
'LIKE', 'NOT LIKE'}
PREDICATES = (INCLUSION_OPERATORS
| RANGE_OPERATORS
| COMPARISON_OPERATORS)
def add_filters(query: str, *,
                filters: Optional[FiltersType]
) -> str:
if filters:
filters = filters_to_str(filters)
query += f'WHERE {filters} '
return query
def filters_to_str(filters: FiltersType) -> str:
operator, filter_ = filters
if operator in LOGICAL_OPERATORS:
sub_filters = [filters_to_str(sub_filter)
for sub_filter in filter_]
return operator.join(f'({sub_filter})'
for sub_filter in sub_filters)
elif operator in PREDICATES:
res = predicate_to_str(predicate_name=operator,
filter_=filter_)
return res
else:
err_msg = ('Invalid filters operator: '
f'"{operator}" is not found '
f'in logical operators '
f'and predicates lists.')
raise ValueError(err_msg)
def predicate_to_str(
*,
predicate_name: str,
filter_: FilterType) -> str:
column_name, value = filter_
if predicate_name in INCLUSION_OPERATORS:
value = map(normalize_value, value)
value = f'({join_str(value)})'
elif predicate_name in RANGE_OPERATORS:
value = map(normalize_value, value)
value = ' AND '.join(value)
else:
value = normalize_value(value)
return f'{column_name} {predicate_name} {value}'
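
# Illustrative sketch (not from the original module): how a nested filter
# tree maps to SQL text. The exact quoting of values depends on
# normalize_value, so the expected shape in the comment is an assumption.
if __name__ == "__main__":
    example = ('AND', [('=', ('id', 1)),
                       ('IN', ('status', ('new', 'done')))])
    print(filters_to_str(example))
    # expected shape: (id = 1)AND(status IN (...))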
| mit | -6,637,450,050,062,195,000 | 30.857143 | 59 | 0.539113 | false |
RandallDW/Aruba_plugin | plugins/org.python.pydev/pysrc/_pydevd_bundle/pydevd_referrers.py | 1 | 8832 | from _pydevd_bundle.pydevd_constants import dict_contains
import sys
from _pydevd_bundle import pydevd_xml
from os.path import basename
import traceback
try:
from urllib import quote, quote_plus, unquote, unquote_plus
except:
from urllib.parse import quote, quote_plus, unquote, unquote_plus #@Reimport @UnresolvedImport
#===================================================================================================
# print_var_node
#===================================================================================================
def print_var_node(xml_node, stream):
name = xml_node.getAttribute('name')
value = xml_node.getAttribute('value')
val_type = xml_node.getAttribute('type')
found_as = xml_node.getAttribute('found_as')
stream.write('Name: ')
stream.write(unquote_plus(name))
stream.write(', Value: ')
stream.write(unquote_plus(value))
stream.write(', Type: ')
stream.write(unquote_plus(val_type))
if found_as:
stream.write(', Found as: %s' % (unquote_plus(found_as),))
stream.write('\n')
#===================================================================================================
# print_referrers
#===================================================================================================
def print_referrers(obj, stream=None):
if stream is None:
stream = sys.stdout
result = get_referrer_info(obj)
from xml.dom.minidom import parseString
dom = parseString(result)
xml = dom.getElementsByTagName('xml')[0]
for node in xml.childNodes:
if node.nodeType == node.TEXT_NODE:
continue
if node.localName == 'for':
stream.write('Searching references for: ')
for child in node.childNodes:
if child.nodeType == node.TEXT_NODE:
continue
print_var_node(child, stream)
elif node.localName == 'var':
stream.write('Referrer found: ')
print_var_node(node, stream)
else:
sys.stderr.write('Unhandled node: %s\n' % (node,))
return result
#===================================================================================================
# get_referrer_info
#===================================================================================================
def get_referrer_info(searched_obj):
DEBUG = 0
if DEBUG:
sys.stderr.write('Getting referrers info.\n')
try:
try:
if searched_obj is None:
ret = ['<xml>\n']
ret.append('<for>\n')
ret.append(pydevd_xml.var_to_xml(
searched_obj,
'Skipping getting referrers for None',
additional_in_xml=' id="%s"' % (id(searched_obj),)))
ret.append('</for>\n')
ret.append('</xml>')
ret = ''.join(ret)
return ret
obj_id = id(searched_obj)
try:
if DEBUG:
sys.stderr.write('Getting referrers...\n')
import gc
referrers = gc.get_referrers(searched_obj)
except:
traceback.print_exc()
ret = ['<xml>\n']
ret.append('<for>\n')
ret.append(pydevd_xml.var_to_xml(
searched_obj,
'Exception raised while trying to get_referrers.',
additional_in_xml=' id="%s"' % (id(searched_obj),)))
ret.append('</for>\n')
ret.append('</xml>')
ret = ''.join(ret)
return ret
if DEBUG:
sys.stderr.write('Found %s referrers.\n' % (len(referrers),))
curr_frame = sys._getframe()
frame_type = type(curr_frame)
#Ignore this frame and any caller frame of this frame
ignore_frames = {} #Should be a set, but it's not available on all python versions.
while curr_frame is not None:
if basename(curr_frame.f_code.co_filename).startswith('pydev'):
ignore_frames[curr_frame] = 1
curr_frame = curr_frame.f_back
ret = ['<xml>\n']
ret.append('<for>\n')
if DEBUG:
sys.stderr.write('Searching Referrers of obj with id="%s"\n' % (obj_id,))
ret.append(pydevd_xml.var_to_xml(
searched_obj,
'Referrers of obj with id="%s"' % (obj_id,)))
ret.append('</for>\n')
all_objects = None
for r in referrers:
try:
if dict_contains(ignore_frames, r):
continue #Skip the references we may add ourselves
except:
pass #Ok: unhashable type checked...
if r is referrers:
continue
r_type = type(r)
r_id = str(id(r))
representation = str(r_type)
found_as = ''
if r_type == frame_type:
if DEBUG:
sys.stderr.write('Found frame referrer: %r\n' % (r,))
for key, val in r.f_locals.items():
if val is searched_obj:
found_as = key
break
elif r_type == dict:
if DEBUG:
sys.stderr.write('Found dict referrer: %r\n' % (r,))
# Try to check if it's a value in the dict (and under which key it was found)
for key, val in r.items():
if val is searched_obj:
found_as = key
if DEBUG:
sys.stderr.write(' Found as %r in dict\n' % (found_as,))
break
                    #Ok, there's one annoying thing: many times we find it in a dict from an instance,
                    #but with this we don't directly have the class, only the dict, so, to work around that
                    #we iterate over all reachable objects and check if one of those has the given dict.
if all_objects is None:
all_objects = gc.get_objects()
for x in all_objects:
try:
if getattr(x, '__dict__', None) is r:
r = x
r_type = type(x)
r_id = str(id(r))
representation = str(r_type)
break
except:
pass #Just ignore any error here (i.e.: ReferenceError, etc.)
elif r_type in (tuple, list):
if DEBUG:
sys.stderr.write('Found tuple referrer: %r\n' % (r,))
#Don't use enumerate() because not all Python versions have it.
i = 0
for x in r:
if x is searched_obj:
found_as = '%s[%s]' % (r_type.__name__, i)
if DEBUG:
sys.stderr.write(' Found as %s in tuple: \n' % (found_as,))
break
i += 1
if found_as:
if not isinstance(found_as, str):
found_as = str(found_as)
found_as = ' found_as="%s"' % (pydevd_xml.make_valid_xml_value(found_as),)
ret.append(pydevd_xml.var_to_xml(
r,
representation,
additional_in_xml=' id="%s"%s' % (r_id, found_as)))
finally:
if DEBUG:
sys.stderr.write('Done searching for references.\n')
#If we have any exceptions, don't keep dangling references from this frame to any of our objects.
all_objects = None
referrers = None
searched_obj = None
r = None
x = None
key = None
val = None
curr_frame = None
ignore_frames = None
except:
traceback.print_exc()
ret = ['<xml>\n']
ret.append('<for>\n')
ret.append(pydevd_xml.var_to_xml(
searched_obj,
'Error getting referrers for:',
additional_in_xml=' id="%s"' % (id(searched_obj),)))
ret.append('</for>\n')
ret.append('</xml>')
ret = ''.join(ret)
return ret
ret.append('</xml>')
ret = ''.join(ret)
return ret
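
# Illustrative sketch (not from the original module): the core gc trick that
# get_referrer_info builds on, reduced to a runnable minimum.
if __name__ == "__main__":
    import gc
    _obj = []
    _holder = {'kept_here': _obj}
    for _ref in gc.get_referrers(_obj):
        if isinstance(_ref, dict) and _ref.get('kept_here') is _obj:
            sys.stdout.write('found as kept_here\n')
            break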
| epl-1.0 | 8,083,347,202,527,346,000 | 35.8 | 109 | 0.434783 | false |
adamcaudill/yawast | yawast/external/spinner.py | 1 | 1596 | # From: https://stackoverflow.com/a/39504463
# License: Creative Commons Attribution-Share Alike
# Copyright: Victor Moyseenko
import sys
import threading
import time
class Spinner:
running = False
busy = False
delay = 0.1
@staticmethod
def spinning_cursor():
while 1:
for cursor in "|/-\\":
yield cursor
def __init__(self, delay=None):
self.spinner_generator = self.spinning_cursor()
if delay and float(delay):
self.delay = delay
def spinner_task(self):
        while self.busy:
            try:
                if sys.stdout.isatty():
                    sys.stdout.write(next(self.spinner_generator))
                    sys.stdout.flush()
                    time.sleep(self.delay)
                    sys.stdout.write("\b")
                    sys.stdout.flush()
                else:
                    # avoid a hot loop when stdout is not a terminal
                    time.sleep(self.delay)
            except Exception:
                # we don't care what happens here
                pass
self.running = False
def start(self):
self.running = True
self.busy = True
threading.Thread(target=self.spinner_task).start()
def stop(self, exception=None):
self.busy = False
time.sleep(self.delay)
while self.running:
pass
sys.stdout.write(" ")
sys.stdout.flush()
sys.stdout.write("\b")
sys.stdout.flush()
if exception is not None:
return False
def __enter__(self):
self.start()
return self
def __exit__(self, exception, value, tb):
return self.stop(exception)
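
# Illustrative sketch (not from the original file): typical use as a context
# manager; the spinner animates on a terminal while the wrapped work runs.
if __name__ == "__main__":
    with Spinner(delay=0.05):
        time.sleep(0.5)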
| mit | -1,195,580,193,516,973,800 | 23.553846 | 66 | 0.537594 | false |
BackupTheBerlios/pyhttpd-svn | core/baseHTTPRequestHandler.py | 1 | 11117 | # -*- coding: utf-8 -*-
##################################################################
# pyHTTPd
# $Id$
# (c) 2006 by Tim Taubert
##################################################################
import os, sys, socket, time, mimetools
from mimetypes import MimeTypes
from baseConfig import pConfig
import baseRoutines
DEFAULT_ERROR_MESSAGE = "<head><title>Error response</title></head><body><h1>Error response</h1><p>Error code %(code)d.</p><p>Message: %(message)s.</p><p>Error code explanation: %(code)s = %(explain)s.</body>"
def _quote_html(html):
return html.replace("&", "&").replace("<", "<").replace(">", ">")
class pHTTPRequestHandler:
rbufsize = 0
wbufsize = 0
sys_version = "Python/2.4"
server_version = "BaseHTTP/"
protocol_version = "HTTP/1.0"
# message-like class used to parse headers
MessageClass = mimetools.Message
# needed for timestamp formatting
weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
monthname = [None,
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
# standard conform http response codes
responses = {
100: ('Continue', 'Request received, please continue'),
101: ('Switching Protocols', 'Switching to new protocol; obey Upgrade header'),
200: ('OK', 'Request fulfilled, document follows'),
201: ('Created', 'Document created, URL follows'),
202: ('Accepted', 'Request accepted, processing continues off-line'),
203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
204: ('No response', 'Request fulfilled, nothing follows'),
205: ('Reset Content', 'Clear input form for further input.'),
206: ('Partial Content', 'Partial content follows.'),
300: ('Multiple Choices', 'Object has several resources -- see URI list'),
301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
302: ('Found', 'Object moved temporarily -- see URI list'),
303: ('See Other', 'Object moved -- see Method and URL list'),
304: ('Not modified', 'Document has not changed since given time'),
305: ('Use Proxy', 'You must use proxy specified in Location to access this resource.'),
307: ('Temporary Redirect', 'Object moved temporarily -- see URI list'),
400: ('Bad request', 'Bad request syntax or unsupported method'),
401: ('Unauthorized', 'No permission -- see authorization schemes'),
402: ('Payment required', 'No payment -- see charging schemes'),
403: ('Forbidden', 'Request forbidden -- authorization will not help'),
404: ('Not Found', 'Nothing matches the given URI'),
405: ('Method Not Allowed', 'Specified method is invalid for this server.'),
406: ('Not Acceptable', 'URI not available in preferred format.'),
407: ('Proxy Authentication Required', 'You must authenticate with this proxy before proceeding.'),
408: ('Request Time-out', 'Request timed out; try again later.'),
409: ('Conflict', 'Request conflict.'),
410: ('Gone', 'URI no longer exists and has been permanently removed.'),
411: ('Length Required', 'Client must specify Content-Length.'),
412: ('Precondition Failed', 'Precondition in headers is false.'),
413: ('Request Entity Too Large', 'Entity is too large.'),
414: ('Request-URI Too Long', 'URI is too long.'),
415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
416: ('Requested Range Not Satisfiable', 'Cannot satisfy request range.'),
417: ('Expectation Failed', 'Expect condition could not be satisfied.'),
500: ('Internal error', 'Server got itself in trouble'),
501: ('Not Implemented', 'Server does not support this operation'),
502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
503: ('Service temporarily overloaded', 'The server cannot process the request due to a high load'),
504: ('Gateway timeout', 'The gateway server did not receive a timely response'),
505: ('HTTP Version not supported', 'Cannot fulfill request.'),
}
###################################################################################
def __init__(self, request, client_address, server):
self.request = request
self.client_address = client_address
self.server = server
try:
self.setup()
self.handle()
self.finish()
finally:
sys.exc_traceback = None # Help garbage collection
def setup(self):
self.connection = self.request
self.rfile = self.connection.makefile('rb', self.rbufsize)
self.wfile = self.connection.makefile('wb', self.wbufsize)
def finish(self):
if not self.wfile.closed:
self.wfile.flush()
self.wfile.close()
self.rfile.close()
def do_HEAD(self):
print "HEAD cmd used"
def do_PUT(self):
print "PUT cmd used"
def do_GET(self):
self.handleCommand()
def do_POST(self):
self.handleCommand()
def handleCommand(self):
self.handleFileFlag = True
baseRoutines.parsePaths(self)
# trigger the "before" hook
self.modules.triggerBefore(self, self.command)
if not os.path.isfile(self.path):
if os.path.isfile(pConfig.getValue("base.docroot")+self.path):
self.path = pConfig.getValue("base.docroot")+self.path
elif os.path.isfile(pConfig.getValue("base.docroot")+"/"+self.path):
self.path = pConfig.getValue("base.docroot")+"/"+self.path
else:
self.send_response(404)
self.end_headers()
self.handleFileFlag = False
if self.handleFileFlag:
try:
self.handleFile(self.path)
            except Exception:
                # swallow file-serving errors; headers may already be on the wire
                pass
# trigger the "after" hook
self.modules.triggerAfter(self, self.command)
def handleFile(self, filename):
fd = open(filename)
content = fd.read()
fd.close()
self.send_response(200)
mime = MimeTypes()
mimetype = mime.guess_type(filename)
self.send_header("Content-Type", mimetype[0])
if mimetype[1]:
self.send_header("Content-Encoding", mimetype[1])
self.send_header("Content-Length", str(len(content)))
self.end_headers()
self.wfile.write(content)
self.wfile.flush()
def parse_request(self):
self.command = None # set in case of error on the first line
self.request_version = version = "HTTP/0.9" # Default
self.close_connection = 1
requestline = self.raw_requestline
if requestline[-2:] == '\r\n':
requestline = requestline[:-2]
elif requestline[-1:] == '\n':
requestline = requestline[:-1]
self.requestline = requestline
words = requestline.split()
if len(words) == 3:
[command, path, version] = words
if version[:5] != 'HTTP/':
self.send_error(400, "Bad request version (%r)" % version)
return False
try:
base_version_number = version.split('/', 1)[1]
version_number = base_version_number.split(".")
if len(version_number) != 2:
raise ValueError
version_number = int(version_number[0]), int(version_number[1])
except (ValueError, IndexError):
self.send_error(400, "Bad request version (%r)" % version)
return False
if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1":
self.close_connection = 0
if version_number >= (2, 0):
self.send_error(505,
"Invalid HTTP Version (%s)" % base_version_number)
return False
elif len(words) == 2:
[command, path] = words
self.close_connection = 1
if command != 'GET':
self.send_error(400,
"Bad HTTP/0.9 request type (%r)" % command)
return False
elif not words:
return False
else:
self.send_error(400, "Bad request syntax (%r)" % requestline)
return False
self.command, self.path, self.request_version = command, path, version
# Examine the headers and look for a Connection directive
self.headers = self.MessageClass(self.rfile, 0)
conntype = self.headers.get('Connection', "")
if conntype.lower() == 'close':
self.close_connection = 1
elif (conntype.lower() == 'keep-alive' and
self.protocol_version >= "HTTP/1.1"):
self.close_connection = 0
return True
def handle_one_request(self):
self.raw_requestline = self.rfile.readline()
if not self.raw_requestline:
self.close_connection = 1
return
if not self.parse_request(): # An error code has been sent, just exit
return
mname = 'do_' + self.command
if hasattr(self, mname):
getattr(self, mname)()
else:
self.send_error(501, "Unsupported method (%r)" % self.command)
def handle(self):
self.close_connection = 1
self.handle_one_request()
while not self.close_connection:
self.handle_one_request()
def send_error(self, code, message=None):
try:
short, long = self.responses[code]
except KeyError:
short, long = '???', '???'
if message is None:
message = short
explain = long
self.log_error("code %d, message %s", code, message)
# using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201)
content = (self.error_message_format %
{'code': code, 'message': _quote_html(message), 'explain': explain})
self.send_response(code, message)
self.send_header("Content-Type", "text/html")
self.send_header('Connection', 'close')
self.end_headers()
if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
self.wfile.write(content)
error_message_format = DEFAULT_ERROR_MESSAGE
def send_response(self, code, message=None):
self.log_request(code)
if message is None:
if code in self.responses:
message = self.responses[code][0]
else:
message = ''
if self.request_version != 'HTTP/0.9':
self.wfile.write("%s %d %s\r\n" %
(self.protocol_version, code, message))
# print (self.protocol_version, code, message)
self.send_header('Server', self.version_string())
self.send_header('Date', self.date_time_string())
def send_header(self, keyword, value):
"""Send a MIME header."""
if self.request_version != 'HTTP/0.9':
self.wfile.write("%s: %s\r\n" % (keyword, value))
if keyword.lower() == 'connection':
if value.lower() == 'close':
self.close_connection = 1
elif value.lower() == 'keep-alive':
self.close_connection = 0
def end_headers(self):
if self.request_version != 'HTTP/0.9':
self.wfile.write("\r\n")
def log_request(self, code='-', size='-'):
self.log_message('"%s" %s %s', self.requestline, str(code), str(size))
def log_error(self, *args):
self.log_message(*args)
def log_message(self, format, *args):
sys.stderr.write("%s - - [%s] %s\n" % (self.address_string(), self.log_date_time_string(), format % args))
def version_string(self):
return self.server_version + ' ' + self.sys_version
# returns the current date and time formatted for a message header
def date_time_string(self):
now = time.time()
year, month, day, hh, mm, ss, wd, y, z = time.gmtime(now)
s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
self.weekdayname[wd],
day, self.monthname[month], year,
hh, mm, ss)
return s
# returns the current time formatted for logging
def log_date_time_string(self):
now = time.time()
year, month, day, hh, mm, ss, x, y, z = time.localtime(now)
s = "%02d/%3s/%04d %02d:%02d:%02d" % (
day, self.monthname[month], year,
hh, mm, ss)
return s
def address_string(self):
host, port = self.client_address[:2]
return socket.getfqdn(host)
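
# Illustrative sketch (not from the original module): what _quote_html
# produces. Running this module still needs its pyHTTPd imports to resolve.
if __name__ == "__main__":
    print(_quote_html("<b>a & b</b>"))  # -> &lt;b&gt;a &amp; b&lt;/b&gt;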
| gpl-2.0 | 305,883,743,289,710,850 | 33.206154 | 209 | 0.652784 | false |
szaydel/psutil | psutil/_pslinux.py | 1 | 40630 | #!/usr/bin/env python
# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Linux platform implementation."""
from __future__ import division
import os
import errno
import socket
import struct
import sys
import base64
import re
import warnings
import _psutil_posix
import _psutil_linux
from psutil import _psposix
from psutil._error import AccessDenied, NoSuchProcess, TimeoutExpired
from psutil._common import *
from psutil._compat import PY3, xrange, long, namedtuple, wraps
from _psutil_linux import RLIM_INFINITY
from _psutil_linux import (RLIMIT_AS, RLIMIT_CORE, RLIMIT_CPU, RLIMIT_DATA,
RLIMIT_FSIZE, RLIMIT_LOCKS, RLIMIT_MEMLOCK,
RLIMIT_MSGQUEUE, RLIMIT_NICE, RLIMIT_NOFILE,
RLIMIT_NPROC, RLIMIT_RSS, RLIMIT_RTPRIO,
RLIMIT_RTTIME, RLIMIT_SIGPENDING, RLIMIT_STACK)
__extra__all__ = [
# io prio constants
"IOPRIO_CLASS_NONE", "IOPRIO_CLASS_RT", "IOPRIO_CLASS_BE",
"IOPRIO_CLASS_IDLE",
# connection status constants
"CONN_ESTABLISHED", "CONN_SYN_SENT", "CONN_SYN_RECV", "CONN_FIN_WAIT1",
"CONN_FIN_WAIT2", "CONN_TIME_WAIT", "CONN_CLOSE", "CONN_CLOSE_WAIT",
"CONN_LAST_ACK", "CONN_LISTEN", "CONN_CLOSING",
# process resources constants
"RLIM_INFINITY",
"RLIMIT_AS", "RLIMIT_CORE", "RLIMIT_CPU", "RLIMIT_DATA", "RLIMIT_FSIZE",
"RLIMIT_LOCKS", "RLIMIT_MEMLOCK", "RLIMIT_MSGQUEUE", "RLIMIT_NICE",
"RLIMIT_NOFILE", "RLIMIT_NPROC", "RLIMIT_RSS", "RLIMIT_RTPRIO",
"RLIMIT_RTTIME", "RLIMIT_SIGPENDING", "RLIMIT_STACK",
# other
"phymem_buffers", "cached_phymem"]
def get_system_boot_time():
"""Return the system boot time expressed in seconds since the epoch."""
f = open('/proc/stat', 'r')
try:
for line in f:
if line.startswith('btime'):
return float(line.strip().split()[1])
raise RuntimeError("line 'btime' not found")
finally:
f.close()
def _get_num_cpus():
"""Return the number of CPUs on the system"""
try:
return os.sysconf("SC_NPROCESSORS_ONLN")
except ValueError:
# as a second fallback we try to parse /proc/cpuinfo
num = 0
f = open('/proc/cpuinfo', 'r')
try:
lines = f.readlines()
finally:
f.close()
for line in lines:
if line.lower().startswith('processor'):
num += 1
        # unknown format (e.g. armel/sparc architectures), see:
# http://code.google.com/p/psutil/issues/detail?id=200
# try to parse /proc/stat as a last resort
if num == 0:
f = open('/proc/stat', 'r')
try:
lines = f.readlines()
finally:
f.close()
search = re.compile('cpu\d')
for line in lines:
line = line.split(' ')[0]
if search.match(line):
num += 1
if num == 0:
raise RuntimeError("couldn't determine platform's NUM_CPUS")
return num
# Number of clock ticks per second
_CLOCK_TICKS = os.sysconf("SC_CLK_TCK")
_PAGESIZE = os.sysconf("SC_PAGE_SIZE")
# Since these constants get determined at import time we do not want to
# crash immediately; instead we'll set them to None and most likely
# we'll crash later as they're used for determining process CPU stats
# and creation_time
try:
BOOT_TIME = get_system_boot_time()
except Exception:
BOOT_TIME = None
warnings.warn("couldn't determine platform's BOOT_TIME", RuntimeWarning)
try:
NUM_CPUS = _get_num_cpus()
except Exception:
NUM_CPUS = None
warnings.warn("couldn't determine platform's NUM_CPUS", RuntimeWarning)
try:
TOTAL_PHYMEM = _psutil_linux.get_sysinfo()[0]
except Exception:
TOTAL_PHYMEM = None
warnings.warn("couldn't determine platform's TOTAL_PHYMEM", RuntimeWarning)
# ioprio_* constants http://linux.die.net/man/2/ioprio_get
IOPRIO_CLASS_NONE = 0
IOPRIO_CLASS_RT = 1
IOPRIO_CLASS_BE = 2
IOPRIO_CLASS_IDLE = 3
# http://students.mimuw.edu.pl/lxr/source/include/net/tcp_states.h
_TCP_STATES_TABLE = {"01" : CONN_ESTABLISHED,
"02" : CONN_SYN_SENT,
"03" : CONN_SYN_RECV,
"04" : CONN_FIN_WAIT1,
"05" : CONN_FIN_WAIT2,
"06" : CONN_TIME_WAIT,
"07" : CONN_CLOSE,
"08" : CONN_CLOSE_WAIT,
"09" : CONN_LAST_ACK,
"0A" : CONN_LISTEN,
"0B" : CONN_CLOSING
}
# --- system memory functions
nt_virtmem_info = namedtuple('vmem', ' '.join([
# all platforms
'total', 'available', 'percent', 'used', 'free',
# linux specific
'active',
'inactive',
'buffers',
'cached']))
def virtual_memory():
total, free, buffers, shared, _, _ = _psutil_linux.get_sysinfo()
cached = active = inactive = None
f = open('/proc/meminfo', 'r')
try:
for line in f:
if line.startswith('Cached:'):
cached = int(line.split()[1]) * 1024
elif line.startswith('Active:'):
active = int(line.split()[1]) * 1024
elif line.startswith('Inactive:'):
inactive = int(line.split()[1]) * 1024
if cached is not None \
and active is not None \
and inactive is not None:
break
else:
# we might get here when dealing with exotic Linux flavors, see:
# http://code.google.com/p/psutil/issues/detail?id=313
msg = "'cached', 'active' and 'inactive' memory stats couldn't " \
"be determined and were set to 0"
warnings.warn(msg, RuntimeWarning)
cached = active = inactive = 0
finally:
f.close()
avail = free + buffers + cached
used = total - free
percent = usage_percent((total - avail), total, _round=1)
return nt_virtmem_info(total, avail, percent, used, free,
active, inactive, buffers, cached)
def swap_memory():
_, _, _, _, total, free = _psutil_linux.get_sysinfo()
used = total - free
percent = usage_percent(used, total, _round=1)
# get pgin/pgouts
f = open("/proc/vmstat", "r")
sin = sout = None
try:
for line in f:
# values are expressed in 4 kilo bytes, we want bytes instead
if line.startswith('pswpin'):
sin = int(line.split(' ')[1]) * 4 * 1024
elif line.startswith('pswpout'):
sout = int(line.split(' ')[1]) * 4 * 1024
if sin is not None and sout is not None:
break
else:
# we might get here when dealing with exotic Linux flavors, see:
# http://code.google.com/p/psutil/issues/detail?id=313
msg = "'sin' and 'sout' swap memory stats couldn't " \
"be determined and were set to 0"
warnings.warn(msg, RuntimeWarning)
sin = sout = 0
finally:
f.close()
return nt_swapmeminfo(total, used, free, percent, sin, sout)
# --- XXX deprecated memory functions
@deprecated('psutil.virtual_memory().cached')
def cached_phymem():
return virtual_memory().cached
@deprecated('psutil.virtual_memory().buffers')
def phymem_buffers():
return virtual_memory().buffers
# --- system CPU functions
@memoize
def _get_cputimes_ntuple():
""" Return a (nt, rindex) tuple depending on the CPU times available
on this Linux kernel version which may be:
user, nice, system, idle, iowait, irq, softirq [steal, [guest, [guest_nice]]]
"""
f = open('/proc/stat', 'r')
try:
values = f.readline().split()[1:]
finally:
f.close()
fields = ['user', 'nice', 'system', 'idle', 'iowait', 'irq', 'softirq']
rindex = 8
vlen = len(values)
if vlen >= 8:
# Linux >= 2.6.11
fields.append('steal')
rindex += 1
if vlen >= 9:
# Linux >= 2.6.24
fields.append('guest')
rindex += 1
if vlen >= 10:
# Linux >= 3.2.0
fields.append('guest_nice')
rindex += 1
return (namedtuple('cputimes', ' '.join(fields)), rindex)
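
# Note on the tuple above: given a first /proc/stat line such as
#   cpu  4705 356 584 3699176 23527 0 114 0 0
# a 2.6.24+ kernel yields the fields user..guest; each raw value is later
# divided by _CLOCK_TICKS to turn clock ticks into seconds.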
def get_system_cpu_times():
"""Return a named tuple representing the following system-wide
CPU times:
user, nice, system, idle, iowait, irq, softirq [steal, [guest, [guest_nice]]]
Last 3 fields may not be available on all Linux kernel versions.
"""
f = open('/proc/stat', 'r')
try:
values = f.readline().split()
finally:
f.close()
nt, rindex = _get_cputimes_ntuple()
fields = values[1:rindex]
fields = [float(x) / _CLOCK_TICKS for x in fields]
return nt(*fields)
def get_system_per_cpu_times():
"""Return a list of namedtuple representing the CPU times
for every CPU available on the system.
"""
nt, rindex = _get_cputimes_ntuple()
cpus = []
f = open('/proc/stat', 'r')
try:
# get rid of the first line which refers to system wide CPU stats
f.readline()
for line in f:
if line.startswith('cpu'):
fields = line.split()[1:rindex]
fields = [float(x) / _CLOCK_TICKS for x in fields]
entry = nt(*fields)
cpus.append(entry)
return cpus
finally:
f.close()
# --- system disk functions
def disk_partitions(all=False):
"""Return mounted disk partitions as a list of nameduples"""
phydevs = []
f = open("/proc/filesystems", "r")
try:
for line in f:
if not line.startswith("nodev"):
phydevs.append(line.strip())
finally:
f.close()
retlist = []
partitions = _psutil_linux.get_disk_partitions()
for partition in partitions:
device, mountpoint, fstype, opts = partition
if device == 'none':
device = ''
if not all:
if device == '' or fstype not in phydevs:
continue
ntuple = nt_partition(device, mountpoint, fstype, opts)
retlist.append(ntuple)
return retlist
get_disk_usage = _psposix.get_disk_usage
# --- other system functions
def get_system_users():
"""Return currently connected users as a list of namedtuples."""
retlist = []
rawlist = _psutil_linux.get_system_users()
for item in rawlist:
user, tty, hostname, tstamp, user_process = item
# note: the underlying C function includes entries about
# system boot, run level and others. We might want
# to use them in the future.
if not user_process:
continue
if hostname == ':0.0':
hostname = 'localhost'
nt = nt_user(user, tty or None, hostname, tstamp)
retlist.append(nt)
return retlist
# --- process functions
def get_pid_list():
"""Returns a list of PIDs currently running on the system."""
pids = [int(x) for x in os.listdir('/proc') if x.isdigit()]
return pids
def pid_exists(pid):
"""Check For the existence of a unix pid."""
return _psposix.pid_exists(pid)
def net_io_counters():
"""Return network I/O statistics for every network interface
installed on the system as a dict of raw tuples.
"""
f = open("/proc/net/dev", "r")
try:
lines = f.readlines()
finally:
f.close()
retdict = {}
for line in lines[2:]:
colon = line.find(':')
assert colon > 0, line
name = line[:colon].strip()
fields = line[colon+1:].strip().split()
        # /proc/net/dev layout after the colon: receive bytes, packets, errs,
        # drop, ...; transmit bytes, packets, errs, drop start at index 8
        bytes_recv = int(fields[0])
        packets_recv = int(fields[1])
        errin = int(fields[2])
        dropin = int(fields[3])
        bytes_sent = int(fields[8])
        packets_sent = int(fields[9])
        errout = int(fields[10])
        dropout = int(fields[11])
retdict[name] = (bytes_sent, bytes_recv, packets_sent, packets_recv,
errin, errout, dropin, dropout)
return retdict
def disk_io_counters():
"""Return disk I/O statistics for every disk installed on the
system as a dict of raw tuples.
"""
# man iostat states that sectors are equivalent with blocks and
# have a size of 512 bytes since 2.4 kernels. This value is
# needed to calculate the amount of disk I/O in bytes.
SECTOR_SIZE = 512
# determine partitions we want to look for
partitions = []
f = open("/proc/partitions", "r")
try:
lines = f.readlines()[2:]
finally:
f.close()
for line in reversed(lines):
_, _, _, name = line.split()
if name[-1].isdigit():
# we're dealing with a partition (e.g. 'sda1'); 'sda' will
# also be around but we want to omit it
partitions.append(name)
else:
if not partitions or not partitions[-1].startswith(name):
# we're dealing with a disk entity for which no
# partitions have been defined (e.g. 'sda' but
# 'sda1' was not around), see:
# http://code.google.com/p/psutil/issues/detail?id=338
partitions.append(name)
#
retdict = {}
f = open("/proc/diskstats", "r")
try:
lines = f.readlines()
finally:
f.close()
for line in lines:
# http://www.mjmwired.net/kernel/Documentation/iostats.txt
_, _, name, reads, _, rbytes, rtime, writes, _, wbytes, wtime = \
line.split()[:11]
if name in partitions:
rbytes = int(rbytes) * SECTOR_SIZE
wbytes = int(wbytes) * SECTOR_SIZE
reads = int(reads)
writes = int(writes)
rtime = int(rtime)
wtime = int(wtime)
retdict[name] = (reads, writes, rbytes, wbytes, rtime, wtime)
return retdict
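# Worked example (illustrative /proc/diskstats line for a tracked name):
#   8  0 sda 9861 1212 972498 11550 2036 3041 143086 20340 ...
# reads=9861, rbytes=972498 sectors, rtime=11550, writes=2036,
# wbytes=143086 sectors, wtime=20340, giving
#   retdict['sda'] = (9861, 2036, 972498 * 512, 143086 * 512, 11550, 20340)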
# taken from /fs/proc/array.c
_status_map = {"R" : STATUS_RUNNING,
"S" : STATUS_SLEEPING,
"D" : STATUS_DISK_SLEEP,
"T" : STATUS_STOPPED,
"t" : STATUS_TRACING_STOP,
"Z" : STATUS_ZOMBIE,
"X" : STATUS_DEAD,
"x" : STATUS_DEAD,
"K" : STATUS_WAKE_KILL,
"W" : STATUS_WAKING}
# --- decorators
def wrap_exceptions(fun):
"""Decorator which translates bare OSError and IOError exceptions
into NoSuchProcess and AccessDenied.
"""
@wraps(fun)
def wrapper(self, *args, **kwargs):
try:
return fun(self, *args, **kwargs)
except EnvironmentError:
# ENOENT (no such file or directory) gets raised on open().
# ESRCH (no such process) can get raised on read() if
# process is gone in meantime.
err = sys.exc_info()[1]
if err.errno in (errno.ENOENT, errno.ESRCH):
raise NoSuchProcess(self.pid, self._process_name)
if err.errno in (errno.EPERM, errno.EACCES):
raise AccessDenied(self.pid, self._process_name)
raise
return wrapper
class Process(object):
"""Linux process implementation."""
__slots__ = ["pid", "_process_name"]
def __init__(self, pid):
self.pid = pid
self._process_name = None
@wrap_exceptions
def get_process_name(self):
f = open("/proc/%s/stat" % self.pid)
try:
name = f.read().split(' ')[1].replace('(', '').replace(')', '')
finally:
f.close()
# XXX - gets changed later and probably needs refactoring
return name
def get_process_exe(self):
try:
exe = os.readlink("/proc/%s/exe" % self.pid)
except (OSError, IOError):
err = sys.exc_info()[1]
if err.errno == errno.ENOENT:
# no such file error; might be raised also if the
# path actually exists for system processes with
# low pids (about 0-20)
if os.path.lexists("/proc/%s/exe" % self.pid):
return ""
else:
# ok, it is a process which has gone away
raise NoSuchProcess(self.pid, self._process_name)
if err.errno in (errno.EPERM, errno.EACCES):
raise AccessDenied(self.pid, self._process_name)
raise
# readlink() might return paths containing null bytes causing
# problems when used with other fs-related functions (os.*,
# open(), ...)
exe = exe.replace('\x00', '')
# Certain names have ' (deleted)' appended. Usually this is
# bogus as the file actually exists. Either way that's not
# important as we don't want to discriminate executables which
# have been deleted.
if exe.endswith(" (deleted)") and not os.path.exists(exe):
exe = exe[:-10]
return exe
@wrap_exceptions
def get_process_cmdline(self):
f = open("/proc/%s/cmdline" % self.pid)
try:
# return the args as a list
return [x for x in f.read().split('\x00') if x]
finally:
f.close()
@wrap_exceptions
def get_process_terminal(self):
tmap = _psposix._get_terminal_map()
f = open("/proc/%s/stat" % self.pid)
try:
tty_nr = int(f.read().split(' ')[6])
finally:
f.close()
try:
return tmap[tty_nr]
except KeyError:
return None
@wrap_exceptions
def get_process_io_counters(self):
f = open("/proc/%s/io" % self.pid)
try:
for line in f:
if line.startswith("rchar"):
read_count = int(line.split()[1])
elif line.startswith("wchar"):
write_count = int(line.split()[1])
elif line.startswith("read_bytes"):
read_bytes = int(line.split()[1])
elif line.startswith("write_bytes"):
write_bytes = int(line.split()[1])
return nt_io(read_count, write_count, read_bytes, write_bytes)
finally:
f.close()
if not os.path.exists('/proc/%s/io' % os.getpid()):
def get_process_io_counters(self):
raise NotImplementedError("couldn't find /proc/%s/io (kernel " \
"too old?)" % self.pid)
@wrap_exceptions
def get_cpu_times(self):
f = open("/proc/%s/stat" % self.pid)
try:
st = f.read().strip()
finally:
f.close()
        # ignore the first two values ("pid (exe)"); use rfind since the
        # process name itself may contain ')'
        st = st[st.rfind(')') + 2:]
values = st.split(' ')
utime = float(values[11]) / _CLOCK_TICKS
stime = float(values[12]) / _CLOCK_TICKS
return nt_cputimes(utime, stime)
@wrap_exceptions
def process_wait(self, timeout=None):
try:
return _psposix.wait_pid(self.pid, timeout)
except TimeoutExpired:
raise TimeoutExpired(self.pid, self._process_name)
@wrap_exceptions
def get_process_create_time(self):
f = open("/proc/%s/stat" % self.pid)
try:
st = f.read().strip()
finally:
f.close()
# ignore the first two values ("pid (exe)")
st = st[st.rfind(')') + 2:]
values = st.split(' ')
# According to documentation, starttime is in field 21 and the
# unit is jiffies (clock ticks).
# We first divide it for clock ticks and then add uptime returning
# seconds since the epoch, in UTC.
starttime = (float(values[19]) / _CLOCK_TICKS) + BOOT_TIME
return starttime
@wrap_exceptions
def get_memory_info(self):
f = open("/proc/%s/statm" % self.pid)
try:
vms, rss = f.readline().split()[:2]
return nt_meminfo(int(rss) * _PAGESIZE,
int(vms) * _PAGESIZE)
finally:
f.close()
_nt_ext_mem = namedtuple('meminfo', 'rss vms shared text lib data dirty')
@wrap_exceptions
def get_ext_memory_info(self):
# ============================================================
# | FIELD | DESCRIPTION | AKA | TOP |
# ============================================================
# | rss | resident set size | | RES |
# | vms | total program size | size | VIRT |
# | shared | shared pages (from shared mappings) | | SHR |
# | text | text ('code') | trs | CODE |
# | lib | library (unused in Linux 2.6) | lrs | |
# | data | data + stack | drs | DATA |
# | dirty | dirty pages (unused in Linux 2.6) | dt | |
# ============================================================
f = open("/proc/%s/statm" % self.pid)
try:
vms, rss, shared, text, lib, data, dirty = \
[int(x) * _PAGESIZE for x in f.readline().split()[:7]]
finally:
f.close()
return self._nt_ext_mem(rss, vms, shared, text, lib, data, dirty)
_mmap_base_fields = ['path', 'rss', 'size', 'pss', 'shared_clean',
'shared_dirty', 'private_clean', 'private_dirty',
'referenced', 'anonymous', 'swap',]
nt_mmap_grouped = namedtuple('mmap', ' '.join(_mmap_base_fields))
nt_mmap_ext = namedtuple('mmap', 'addr perms ' + ' '.join(_mmap_base_fields))
def get_memory_maps(self):
"""Return process's mapped memory regions as a list of nameduples.
Fields are explained in 'man proc'; here is an updated (Apr 2012)
version: http://goo.gl/fmebo
"""
f = None
try:
f = open("/proc/%s/smaps" % self.pid)
first_line = f.readline()
current_block = [first_line]
def get_blocks():
data = {}
for line in f:
fields = line.split(None, 5)
if not fields[0].endswith(':'):
# new block section
yield (current_block.pop(), data)
current_block.append(line)
else:
try:
data[fields[0]] = int(fields[1]) * 1024
except ValueError:
if fields[0].startswith('VmFlags:'):
# see issue #369
continue
else:
raise ValueError("don't know how to interpret" \
" line %r" % line)
yield (current_block.pop(), data)
if first_line: # smaps file can be empty
for header, data in get_blocks():
hfields = header.split(None, 5)
try:
addr, perms, offset, dev, inode, path = hfields
except ValueError:
addr, perms, offset, dev, inode, path = hfields + ['']
if not path:
path = '[anon]'
else:
path = path.strip()
yield (addr, perms, path,
data['Rss:'],
data.get('Size:', 0),
data.get('Pss:', 0),
data.get('Shared_Clean:', 0),
data.get('Shared_Dirty:', 0),
data.get('Private_Clean:', 0),
data.get('Private_Dirty:', 0),
data.get('Referenced:', 0),
data.get('Anonymous:', 0),
data.get('Swap:', 0))
f.close()
except EnvironmentError:
# XXX - Can't use wrap_exceptions decorator as we're
# returning a generator; this probably needs some
# refactoring in order to avoid this code duplication.
if f is not None:
f.close()
err = sys.exc_info()[1]
if err.errno in (errno.ENOENT, errno.ESRCH):
raise NoSuchProcess(self.pid, self._process_name)
if err.errno in (errno.EPERM, errno.EACCES):
raise AccessDenied(self.pid, self._process_name)
raise
except:
if f is not None:
f.close()
raise
f.close()
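    # Sample smaps block assumed by the parser above (values illustrative):
    #
    #     00400000-0040b000 r-xp 00000000 08:01 1309106   /bin/cat
    #     Size:                 44 kB
    #     Rss:                  20 kB
    #     ...
    # The header line starts a block; "Key: value kB" lines fill `data`.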
if not os.path.exists('/proc/%s/smaps' % os.getpid()):
def get_memory_maps(self, ext):
msg = "couldn't find /proc/%s/smaps; kernel < 2.6.14 or CONFIG_MMU " \
"kernel configuration option is not enabled" % self.pid
raise NotImplementedError(msg)
@wrap_exceptions
def get_process_cwd(self):
# readlink() might return paths containing null bytes causing
# problems when used with other fs-related functions (os.*,
# open(), ...)
path = os.readlink("/proc/%s/cwd" % self.pid)
return path.replace('\x00', '')
@wrap_exceptions
def get_num_ctx_switches(self):
vol = unvol = None
f = open("/proc/%s/status" % self.pid)
try:
for line in f:
if line.startswith("voluntary_ctxt_switches"):
vol = int(line.split()[1])
elif line.startswith("nonvoluntary_ctxt_switches"):
unvol = int(line.split()[1])
if vol is not None and unvol is not None:
return nt_ctxsw(vol, unvol)
raise NotImplementedError("the 'voluntary_ctxt_switches' and " \
"'nonvoluntary_ctxt_switches' fields were not found in " \
"/proc/%s/status; the kernel is probably older than 2.6.23" \
% self.pid)
finally:
f.close()
@wrap_exceptions
def get_process_num_threads(self):
f = open("/proc/%s/status" % self.pid)
try:
for line in f:
if line.startswith("Threads:"):
return int(line.split()[1])
raise NotImplementedError("line not found")
finally:
f.close()
@wrap_exceptions
def get_process_threads(self):
thread_ids = os.listdir("/proc/%s/task" % self.pid)
thread_ids.sort()
retlist = []
hit_enoent = False
for thread_id in thread_ids:
try:
f = open("/proc/%s/task/%s/stat" % (self.pid, thread_id))
except EnvironmentError:
err = sys.exc_info()[1]
if err.errno == errno.ENOENT:
# no such file or directory; it means thread
# disappeared on us
hit_enoent = True
continue
raise
try:
st = f.read().strip()
finally:
f.close()
            # ignore the first two values ("pid (exe)"); use rfind since the
            # thread name itself may contain ')'
            st = st[st.rfind(')') + 2:]
values = st.split(' ')
utime = float(values[11]) / _CLOCK_TICKS
stime = float(values[12]) / _CLOCK_TICKS
ntuple = nt_thread(int(thread_id), utime, stime)
retlist.append(ntuple)
if hit_enoent:
# raise NSP if the process disappeared on us
os.stat('/proc/%s' % self.pid)
return retlist
@wrap_exceptions
def get_process_nice(self):
#f = open('/proc/%s/stat' % self.pid, 'r')
#try:
# data = f.read()
# return int(data.split()[18])
#finally:
# f.close()
# Use C implementation
return _psutil_posix.getpriority(self.pid)
@wrap_exceptions
def set_process_nice(self, value):
return _psutil_posix.setpriority(self.pid, value)
@wrap_exceptions
def get_process_cpu_affinity(self):
from_bitmask = lambda x: [i for i in xrange(64) if (1 << i) & x]
bitmask = _psutil_linux.get_process_cpu_affinity(self.pid)
return from_bitmask(bitmask)
@wrap_exceptions
def set_process_cpu_affinity(self, value):
def to_bitmask(l):
if not l:
raise ValueError("invalid argument %r" % l)
out = 0
for b in l:
if not isinstance(b, (int, long)) or b < 0:
raise ValueError("invalid argument %r" % b)
out |= 2**b
return out
bitmask = to_bitmask(value)
try:
_psutil_linux.set_process_cpu_affinity(self.pid, bitmask)
except OSError:
err = sys.exc_info()[1]
if err.errno == errno.EINVAL:
allcpus = list(range(len(get_system_per_cpu_times())))
for cpu in value:
if cpu not in allcpus:
raise ValueError("invalid CPU %i" % cpu)
raise
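    # Usage sketch (hypothetical PID; assumes a machine with CPUs 0-3):
    #
    #     >>> p = Process(1234)
    #     >>> p.set_process_cpu_affinity([0, 2])   # passes bitmask 0b101 == 5
    #     >>> p.get_process_cpu_affinity()
    #     [0, 2]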
# only starting from kernel 2.6.13
if hasattr(_psutil_linux, "ioprio_get"):
@wrap_exceptions
def get_process_ionice(self):
ioclass, value = _psutil_linux.ioprio_get(self.pid)
return nt_ionice(ioclass, value)
@wrap_exceptions
def set_process_ionice(self, ioclass, value):
if ioclass in (IOPRIO_CLASS_NONE, None):
if value:
raise ValueError("can't specify value with IOPRIO_CLASS_NONE")
ioclass = IOPRIO_CLASS_NONE
value = 0
if ioclass in (IOPRIO_CLASS_RT, IOPRIO_CLASS_BE):
if value is None:
value = 4
elif ioclass == IOPRIO_CLASS_IDLE:
if value:
raise ValueError("can't specify value with IOPRIO_CLASS_IDLE")
value = 0
else:
value = 0
if not 0 <= value <= 8:
raise ValueError("value argument range expected is between 0 and 8")
return _psutil_linux.ioprio_set(self.pid, ioclass, value)
@wrap_exceptions
def process_rlimit(self, resource, limits=None):
if limits is None:
# get
return _psutil_linux.prlimit(self.pid, resource)
else:
# set
if len(limits) != 2:
raise ValueError("second argument must be a (soft, hard) tuple")
soft, hard = limits
_psutil_linux.prlimit(self.pid, resource, soft, hard)
@wrap_exceptions
def get_process_status(self):
f = open("/proc/%s/status" % self.pid)
try:
for line in f:
if line.startswith("State:"):
letter = line.split()[1]
if letter in _status_map:
return _status_map[letter]
return constant(-1, '?')
finally:
f.close()
@wrap_exceptions
def get_open_files(self):
retlist = []
files = os.listdir("/proc/%s/fd" % self.pid)
hit_enoent = False
for fd in files:
file = "/proc/%s/fd/%s" % (self.pid, fd)
if os.path.islink(file):
try:
file = os.readlink(file)
except OSError:
# ENOENT == file which is gone in the meantime
err = sys.exc_info()[1]
if err.errno == errno.ENOENT:
hit_enoent = True
continue
raise
else:
# If file is not an absolute path there's no way
# to tell whether it's a regular file or not,
# so we skip it. A regular file is always supposed
# to be absolutized though.
if file.startswith('/') and isfile_strict(file):
ntuple = nt_openfile(file, int(fd))
retlist.append(ntuple)
if hit_enoent:
# raise NSP if the process disappeared on us
os.stat('/proc/%s' % self.pid)
return retlist
@wrap_exceptions
def get_connections(self, kind='inet'):
"""Return connections opened by process as a list of namedtuples.
The kind parameter filters for connections that fit the following
criteria:
Kind Value Number of connections using
inet IPv4 and IPv6
inet4 IPv4
inet6 IPv6
tcp TCP
tcp4 TCP over IPv4
tcp6 TCP over IPv6
udp UDP
udp4 UDP over IPv4
udp6 UDP over IPv6
all the sum of all the possible families and protocols
"""
# Note: in case of UNIX sockets we're only able to determine the
# local bound path while the remote endpoint is not retrievable:
# http://goo.gl/R3GHM
inodes = {}
# os.listdir() is gonna raise a lot of access denied
# exceptions in case of unprivileged user; that's fine:
        # lsof does the same so it's unlikely that we can do better.
for fd in os.listdir("/proc/%s/fd" % self.pid):
try:
inode = os.readlink("/proc/%s/fd/%s" % (self.pid, fd))
except OSError:
continue
if inode.startswith('socket:['):
# the process is using a socket
inode = inode[8:][:-1]
inodes[inode] = fd
if not inodes:
# no connections for this process
return []
def process(file, family, type_):
retlist = []
try:
f = open(file, 'r')
except IOError:
# IPv6 not supported on this platform
err = sys.exc_info()[1]
if err.errno == errno.ENOENT and file.endswith('6'):
return []
else:
raise
try:
f.readline() # skip the first line
for line in f:
# IPv4 / IPv6
if family in (socket.AF_INET, socket.AF_INET6):
_, laddr, raddr, status, _, _, _, _, _, inode = \
line.split()[:10]
if inode in inodes:
laddr = self._decode_address(laddr, family)
raddr = self._decode_address(raddr, family)
if type_ == socket.SOCK_STREAM:
status = _TCP_STATES_TABLE[status]
else:
status = CONN_NONE
fd = int(inodes[inode])
conn = nt_connection(fd, family, type_, laddr,
raddr, status)
retlist.append(conn)
elif family == socket.AF_UNIX:
tokens = line.split()
_, _, _, _, type_, _, inode = tokens[0:7]
if inode in inodes:
if len(tokens) == 8:
path = tokens[-1]
else:
path = ""
fd = int(inodes[inode])
type_ = int(type_)
conn = nt_connection(fd, family, type_, path,
None, CONN_NONE)
retlist.append(conn)
else:
raise ValueError(family)
return retlist
finally:
f.close()
tcp4 = ("tcp" , socket.AF_INET , socket.SOCK_STREAM)
tcp6 = ("tcp6", socket.AF_INET6, socket.SOCK_STREAM)
udp4 = ("udp" , socket.AF_INET , socket.SOCK_DGRAM)
udp6 = ("udp6", socket.AF_INET6, socket.SOCK_DGRAM)
unix = ("unix", socket.AF_UNIX, None)
tmap = {
"all" : (tcp4, tcp6, udp4, udp6, unix),
"tcp" : (tcp4, tcp6),
"tcp4" : (tcp4,),
"tcp6" : (tcp6,),
"udp" : (udp4, udp6),
"udp4" : (udp4,),
"udp6" : (udp6,),
"unix" : (unix,),
"inet" : (tcp4, tcp6, udp4, udp6),
"inet4": (tcp4, udp4),
"inet6": (tcp6, udp6),
}
if kind not in tmap:
raise ValueError("invalid %r kind argument; choose between %s"
% (kind, ', '.join([repr(x) for x in tmap])))
ret = []
for f, family, type_ in tmap[kind]:
ret += process("/proc/net/%s" % f, family, type_)
# raise NSP if the process disappeared on us
os.stat('/proc/%s' % self.pid)
return ret
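    # Usage sketch (hypothetical PID): the namedtuple fields come from
    # nt_connection, defined elsewhere in the package, so plain tuple
    # unpacking is shown here:
    #
    #     >>> for conn in Process(1234).get_connections(kind='tcp'):
    #     ...     fd, family, type_, laddr, raddr, status = conn
    #     ...     print(laddr, '->', raddr, status)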
@wrap_exceptions
def get_num_fds(self):
return len(os.listdir("/proc/%s/fd" % self.pid))
@wrap_exceptions
def get_process_ppid(self):
f = open("/proc/%s/status" % self.pid)
try:
for line in f:
if line.startswith("PPid:"):
# PPid: nnnn
return int(line.split()[1])
raise NotImplementedError("line not found")
finally:
f.close()
@wrap_exceptions
def get_process_uids(self):
f = open("/proc/%s/status" % self.pid)
try:
for line in f:
if line.startswith('Uid:'):
_, real, effective, saved, fs = line.split()
return nt_uids(int(real), int(effective), int(saved))
raise NotImplementedError("line not found")
finally:
f.close()
@wrap_exceptions
def get_process_gids(self):
f = open("/proc/%s/status" % self.pid)
try:
for line in f:
if line.startswith('Gid:'):
_, real, effective, saved, fs = line.split()
return nt_gids(int(real), int(effective), int(saved))
raise NotImplementedError("line not found")
finally:
f.close()
@staticmethod
def _decode_address(addr, family):
"""Accept an "ip:port" address as displayed in /proc/net/*
and convert it into a human readable form, like:
"0500000A:0016" -> ("10.0.0.5", 22)
"0000000000000000FFFF00000100007F:9E49" -> ("::ffff:127.0.0.1", 40521)
The IP address portion is a little or big endian four-byte
hexadecimal number; that is, the least significant byte is listed
first, so we need to reverse the order of the bytes to convert it
to an IP address.
The port is represented as a two-byte hexadecimal number.
Reference:
http://linuxdevcenter.com/pub/a/linux/2000/11/16/LinuxAdmin.html
"""
ip, port = addr.split(':')
port = int(port, 16)
if PY3:
ip = ip.encode('ascii')
# this usually refers to a local socket in listen mode with
# no end-points connected
if not port:
return ()
if family == socket.AF_INET:
# see: http://code.google.com/p/psutil/issues/detail?id=201
if sys.byteorder == 'little':
ip = socket.inet_ntop(family, base64.b16decode(ip)[::-1])
else:
ip = socket.inet_ntop(family, base64.b16decode(ip))
else: # IPv6
# old version - let's keep it, just in case...
#ip = ip.decode('hex')
#return socket.inet_ntop(socket.AF_INET6,
# ''.join(ip[i:i+4][::-1] for i in xrange(0, 16, 4)))
ip = base64.b16decode(ip)
# see: http://code.google.com/p/psutil/issues/detail?id=201
if sys.byteorder == 'little':
ip = socket.inet_ntop(socket.AF_INET6,
struct.pack('>4I', *struct.unpack('<4I', ip)))
else:
ip = socket.inet_ntop(socket.AF_INET6,
struct.pack('<4I', *struct.unpack('<4I', ip)))
return (ip, port)
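    # Worked example for the little-endian IPv4 case: "0500000A" is the hex
    # dump of the four address bytes as stored by the kernel; reversing them
    # gives 0A 00 00 05 -> "10.0.0.5", and port "0016" is hex for 22.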
| bsd-3-clause | 5,321,260,057,435,817,000 | 35.53777 | 84 | 0.510066 | false |
Rdbaker/Mealbound | ceraon/utils.py | 1 | 4801 | # -*- coding: utf-8 -*-
"""Helper utilities and decorators."""
from datetime import timedelta as td
from datetime import tzinfo
from threading import Thread
import requests
from flask import Blueprint, current_app, flash, request
def get_fb_access_token():
"""Get an access token from facebook for graph API calls."""
base_url = 'https://graph.facebook.com/oauth/access_token?' \
'grant_type=client_credentials'
res = requests.get(
base_url + '&client_id={}'.format(current_app.config['FB_APP_ID']) +
'&client_secret={}'.format(current_app.config['FB_APP_SECRET']))
return res.json().get('access_token')
def friendly_arg_get(key, default=None, type_cast=None):
"""Same as request.args.get but returns default on ValueError."""
try:
return request.args.get(key, default=default, type=type_cast)
except:
return default
class FlaskThread(Thread):
"""A utility class for threading in a flask app."""
def __init__(self, *args, **kwargs):
"""Create a new thread with a flask context."""
super().__init__(*args, **kwargs)
self.app = current_app._get_current_object()
def run(self):
"""Run the thread."""
# Make this an effective no-op if we're testing.
if not self.app.config['TESTING']:
with self.app.app_context():
super().run()
def flash_errors(form, category='warning'):
"""Flash all errors for a form."""
for field, errors in form.errors.items():
for error in errors:
flash('{0} - {1}'.format(getattr(form, field).label.text, error),
category)
class RESTBlueprint(Blueprint):
"""A base class for a RESTful API's view blueprint.
This comes with helper methods that set up routes based on method/actions.
It infers the route_prefix based on the version and blueprint name in the
format: `/api/<version string>/<blueprint name string>`
then creates routes from that.
Example usage:
mod = RESTBlueprint('users', __name__, 'v2')
# route is: GET /api/v2/users/<uid>
@mod.find()
def find_user(uid):
return User.get(uid)
# route is: PATCH /api/v2/users/<uid>
@mod.update()
def update_user(uid):
return User.update(name='new name')
# route is: POST /api/v2/users
@mod.create()
def create_user():
return User.create(name='my new user')
The `find`, `update`, `replace`, and `destroy` methods will add a
parameter called `uid` to your route. Make sure to correctly resolve that
to your entity's ID.
"""
def __init__(self, blueprint_name, name, version):
return super(RESTBlueprint, self).__init__(
'api.{}.{}'.format(version, blueprint_name),
name, url_prefix='/api/{}/{}'.format(version, blueprint_name))
def flexible_route(self, *args, **kwargs):
kwargs.update({'strict_slashes': False})
return self.route(*args, **kwargs)
def create(self, *args, **kwargs):
kwargs.update({'methods': ['POST']})
return self.flexible_route('/', *args, **kwargs)
def list(self, *args, **kwargs):
kwargs.update({'methods': ['GET']})
return self.flexible_route('/', *args, **kwargs)
def find(self, converter='string', *args, **kwargs):
kwargs.update({'methods': ['GET']})
return self.flexible_route('/<{}:uid>'.format(converter), *args,
**kwargs)
def update(self, converter='string', *args, **kwargs):
kwargs.update({'methods': ['PATCH']})
return self.flexible_route('/<{}:uid>'.format(converter), *args,
**kwargs)
def replace(self, converter='string', *args, **kwargs):
kwargs.update({'methods': ['PUT']})
return self.flexible_route('/<{}:uid>'.format(converter), *args,
**kwargs)
def destroy(self, converter='string', *args, **kwargs):
kwargs.update({'methods': ['DELETE']})
return self.flexible_route('/<{}:uid>'.format(converter), *args,
**kwargs)
class UTC(tzinfo):
"""tzinfo for a UTC timezone."""
def dst(self, dt_obj):
"""Return the DST offset in minutes from UTC."""
return 0
def fromutc(self, dt_obj):
"""Return a datetime object in local time from a UTC datetime."""
return dt_obj
def tzname(self, dt_obj):
"""Return the name of the timezone from a datetime obj."""
return 'UTC/GMT'
def utcoffset(self, dt_obj):
"""Return a timedelta showing offset from UTC.
Negative values indicating West of UTC
"""
return td()
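# Quick sanity check (a sketch; the datetime value is illustrative):
# attaching UTC() to a naive datetime marks it as UTC without shifting
# the wall-clock value.
#
#     >>> from datetime import datetime
#     >>> dt = datetime(2017, 1, 1, 12, 0, tzinfo=UTC())
#     >>> dt.utcoffset(), dt.tzname()
#     (datetime.timedelta(0), 'UTC/GMT')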
| bsd-3-clause | -1,679,549,717,610,896,600 | 32.573427 | 78 | 0.586961 | false |
ActiveState/code | recipes/Python/577336_Fast_reentrant_optimistic_lock_implemented/recipe-577336.py | 1 | 4351 | from cpython cimport pythread
from cpython.exc cimport PyErr_NoMemory
cdef class FastRLock:
"""Fast, re-entrant locking.
Under uncongested conditions, the lock is never acquired but only
counted. Only when a second thread comes in and notices that the
lock is needed, it acquires the lock and notifies the first thread
to release it when it's done. This is all made possible by the
wonderful GIL.
"""
cdef pythread.PyThread_type_lock _real_lock
cdef long _owner # ID of thread owning the lock
cdef int _count # re-entry count
cdef int _pending_requests # number of pending requests for real lock
cdef bint _is_locked # whether the real lock is acquired
def __cinit__(self):
self._owner = -1
self._count = 0
self._is_locked = False
self._pending_requests = 0
self._real_lock = pythread.PyThread_allocate_lock()
if self._real_lock is NULL:
PyErr_NoMemory()
def __dealloc__(self):
if self._real_lock is not NULL:
pythread.PyThread_free_lock(self._real_lock)
self._real_lock = NULL
def acquire(self, bint blocking=True):
return lock_lock(self, pythread.PyThread_get_thread_ident(), blocking)
def release(self):
if self._owner != pythread.PyThread_get_thread_ident():
raise RuntimeError("cannot release un-acquired lock")
unlock_lock(self)
# compatibility with threading.RLock
def __enter__(self):
# self.acquire()
return lock_lock(self, pythread.PyThread_get_thread_ident(), True)
def __exit__(self, t, v, tb):
# self.release()
if self._owner != pythread.PyThread_get_thread_ident():
raise RuntimeError("cannot release un-acquired lock")
unlock_lock(self)
def _is_owned(self):
return self._owner == pythread.PyThread_get_thread_ident()
cdef inline bint lock_lock(FastRLock lock, long current_thread, bint blocking) nogil:
# Note that this function *must* hold the GIL when being called.
# We just use 'nogil' in the signature to make sure that no Python
# code execution slips in that might free the GIL
if lock._count:
# locked! - by myself?
if current_thread == lock._owner:
lock._count += 1
return 1
elif not lock._pending_requests:
# not locked, not requested - go!
lock._owner = current_thread
lock._count = 1
return 1
# need to get the real lock
return _acquire_lock(
lock, current_thread,
pythread.WAIT_LOCK if blocking else pythread.NOWAIT_LOCK)
cdef bint _acquire_lock(FastRLock lock, long current_thread, int wait) nogil:
# Note that this function *must* hold the GIL when being called.
# We just use 'nogil' in the signature to make sure that no Python
# code execution slips in that might free the GIL
if not lock._is_locked and not lock._pending_requests:
# someone owns it but didn't acquire the real lock - do that
# now and tell the owner to release it when done. Note that we
# do not release the GIL here as we must absolutely be the one
# who acquires the lock now.
if not pythread.PyThread_acquire_lock(lock._real_lock, wait):
return 0
#assert not lock._is_locked
lock._is_locked = True
lock._pending_requests += 1
with nogil:
# wait for the lock owning thread to release it
locked = pythread.PyThread_acquire_lock(lock._real_lock, wait)
lock._pending_requests -= 1
#assert not lock._is_locked
#assert lock._count == 0
if not locked:
return 0
lock._is_locked = True
lock._owner = current_thread
lock._count = 1
return 1
cdef inline void unlock_lock(FastRLock lock) nogil:
# Note that this function *must* hold the GIL when being called.
# We just use 'nogil' in the signature to make sure that no Python
# code execution slips in that might free the GIL
#assert lock._owner == pythread.PyThread_get_thread_ident()
#assert lock._count > 0
lock._count -= 1
if lock._count == 0:
lock._owner = -1
if lock._is_locked:
pythread.PyThread_release_lock(lock._real_lock)
lock._is_locked = False
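# Usage sketch (assumes this module is compiled with Cython and imported,
# e.g. as fastrlock; it is intended as a drop-in for threading.RLock):
#
#     lock = FastRLock()
#     with lock:          # uncongested case: only the counter is bumped
#         with lock:      # re-entrant acquisition by the owning thread
#             pass        # both exits go through unlock_lock()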
| mit | 3,017,226,937,884,119,000 | 36.188034 | 85 | 0.638934 | false |
Melecio/face-detection | neural_network.py | 1 | 4860 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import argparse
from os import listdir
from os.path import isfile, join
from random import shuffle
from pybrain.datasets import ClassificationDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure.modules import SoftmaxLayer, TanhLayer
from pybrain.tools.xml.networkwriter import NetworkWriter
from pybrain.tools.xml.networkreader import NetworkReader
from PIL import Image, ImageOps, ImageDraw, ImageFilter
import itertools
# Module image in 'image.py'
from image import img_features_vectors, img_features
def open_img(path):
return Image.open(path).convert('L')
def process(img):
return ImageOps.equalize(img)
"""Given list of images, create the training data set"""
def train_data_set(files):
# Because PyBrain may take the first 25% for testing
shuffle(files)
data_set = ClassificationDataSet(400, 1, nb_classes=2)
number = 0
for path, target in files:
if number % 100 == 0:
print number,
sys.stdout.flush()
number += 1
img = open_img(path)
vector = img_features(img)
img.close()
data_set.addSample(vector, target)
return data_set
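# Illustrative call (paths are hypothetical): each entry pairs an image path
# with its target class, [1] for a face and [0] for a non-face, matching the
# 400-element feature vectors produced by img_features:
#
#     files = [('faces/img1.pgm', [1]), ('scenery/img9.pgm', [0])]
#     data_set = train_data_set(files)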
"""Given list of images, test the network with the backpropagation algorithm"""
def test_network(net, images):
for img in images:
new_img = img.convert('RGB')
draw = ImageDraw.Draw(new_img)
for vector, box, window in img_features_vectors(img):
nof, yesf = net.activate(vector)
if yesf > nof:
print "found a face"
window.show()
draw.rectangle(box, outline=0xff0000)
new_img.show()
"""Opens the images of the data set"""
def open_imgs(files):
for path in files:
		yield process(open_img(path))
"""Given a directory, opens it and gets the files"""
def get_files(directory):
files = listdir(directory)
paths = map(lambda f: join(directory,f), files)
return [ p for p in paths if isfile(p) ]
"""Parsing of the command-line input"""
def read():
parser = argparse.ArgumentParser(description='Face detection using Neural Networks')
parser.add_argument('-t', '--train-faces', help='Receives a directory with files to train with', nargs='+')
parser.add_argument('-f', '--train-non-faces', help='Receives a directory with files to train with', nargs='+')
parser.add_argument('-p', '--test', help='Receives a list of images (testing set)', nargs='+')
parser.add_argument('-r', '--read', help='Read the file with the already trained network object', nargs=1)
parser.add_argument('-w', '--write', help='Write the network to the specified file (format is .xml)', nargs=1)
args = parser.parse_args()
# Read the Neural Network Object
if args.read:
net = NetworkReader.readFrom(args.read[0])
else:
net = buildNetwork(400, 5, 2, bias=True, outclass=SoftmaxLayer)
# net = buildNetwork(400, 80, 16, 1, bias=True, hiddenclass=TanhLayer)
# If there are some files to train with
if (args.train_faces or args.train_non_faces):
if args.train_faces:
faces = get_files(args.train_faces[0])
else:
faces = []
if args.train_non_faces:
non_faces = get_files(args.train_non_faces[0])
else:
non_faces = []
# Expected targets
faces = map(lambda path: (path, [1]), faces)
non_faces = map(lambda path: (path, [0]), non_faces)
training_files = faces + non_faces
else:
training_files = None
# If there are some files to test with
if args.test:
testing_imgs = open_imgs(args.test)
else:
testing_imgs = None
# If there is a writing file
if args.write:
write_file = args.write[0]
else:
write_file = None
return net, training_files, testing_imgs, write_file
"""Main function"""
def main():
net, training_files, testing_imgs, write_file = read()
if training_files:
print "creating training data set"
training_set = train_data_set(training_files)
		training_set._convertToOneOfMany()  # convert targets to one-of-many (one-hot) encoding for classification
print "training"
# print net
# print training_set, len(training_set)
# print training_set.calculateStatistics()
training_set.saveToFile('train.set')
trainer = BackpropTrainer(net, training_set, learningrate=0.05, verbose=True)
trainer.trainUntilConvergence(maxEpochs=100)
if testing_imgs:
print "testing"
test_network(net, testing_imgs)
if write_file:
NetworkWriter.writeToFile(net, write_file)
if __name__ == "__main__":
main()
| bsd-3-clause | -3,102,720,422,650,508,300 | 31.18543 | 115 | 0.641152 | false |
rodrigofaccioli/drugdesign | virtualscreening/vina/spark/hydrogen_bond_crud.py | 1 | 4480 | from pyspark.sql import SQLContext, Row
from vina_utils import get_ligand_from_receptor_ligand_model
"""
Creates data frame of residue list
sqlCtx - spark SQL context
residue_listRDD - RDD used to create the data frame; created by the load_file_select_hydrogen_bond function
"""
def create_df_residue_list(sqlCtx, residue_listRDD):
df_residue_list = sqlCtx.createDataFrame(residue_listRDD)
df_residue_list.registerTempTable("residue_list")
return df_residue_list
"""
Creates data frame of all residues for hydrogen bond
sqlCtx - spark SQL context
all_residue_split - RDD used to create the data frame; created by the load_file_all_residue_hbonds function
"""
def create_df_all_residue(sqlCtx, all_residue_split):
df_all_residue = sqlCtx.createDataFrame(all_residue_split)
df_all_residue.registerTempTable("all_residue")
return df_all_residue
"""
Creates data frame of all residues filtered by residue list
sqlCtx - spark SQL context
Important: create_df_all_residue and create_df_residue_list must be
executed before calling this function
"""
def create_df_all_residue_filtered_by_res_list(sqlCtx):
#Getting all information based on list of residues
sql = """
SELECT all_residue.*
FROM all_residue
JOIN residue_list ON residue_list.residue = all_residue.receptor_residue
"""
df_result = sqlCtx.sql(sql)
df_result.registerTempTable("residues_filtered_by_list")
return df_result
"""
Group by poses all residues filtered by residue list
sqlCtx - spark SQL context
Important: Before running this function must execute the function
create_df_all_residue_filtered_by_res_list
"""
def get_group_by_poses_all_residue_filtered_by_res_list(sqlCtx):
sql = """
SELECT pose, count(*) as num_res
FROM residues_filtered_by_list
GROUP BY pose
ORDER BY num_res DESC
"""
df_result = sqlCtx.sql(sql)
return df_result
"""
Creates a dataframe of hydrogen bonds normalized by donors and acceptors
sqlCtx - spark SQL context
df_only_poses - data frame created by get_group_by_poses_all_residue_filtered_by_res_list function
Important:
database is created by load_database function from database_io file.
This load_database function creates RDD only.
Therefore, the lines below must be executed before calling this function
#Loading database
rdd_database = load_database(sc, ligand_database)
#Creating Dataframe
database_table = sqlCtx.createDataFrame(rdd_database)
database_table.registerTempTable("database")
"""
def create_df_normalized_by_donors_acceptors(sqlCtx, df_only_poses):
normalizedRDD = df_only_poses.map(lambda p: Row(num_res=int(p.num_res), ligand=get_ligand_from_receptor_ligand_model(p.pose), pose=str(p.pose) ) ).collect()
#Creating Dataframe
normalized_residues_filtered_by_list_table = sqlCtx.createDataFrame(normalizedRDD)
normalized_residues_filtered_by_list_table.registerTempTable("normalized_residues_filtered_by_list")
# Normalized Hydrogen Bond by donors and acceptors
sql = """
SELECT pose, (b.num_res / a.hb_donors_acceptors) as normalized_hb
FROM database a
JOIN normalized_residues_filtered_by_list b ON b.ligand = a.ligand
ORDER BY normalized_hb DESC
"""
df_result = sqlCtx.sql(sql)
return df_result
"""
Creates a dataframe of hydrogen bonds normalized by heavy atoms
sqlCtx - spark SQL context
Important:
database is created by load_database function from database_io file.
This load_database function creates RDD only.
Therefore, the lines below must be executed before calling this function
#Loading database
rdd_database = load_database(sc, ligand_database)
#Creating Dataframe
database_table = sqlCtx.createDataFrame(rdd_database)
database_table.registerTempTable("database")
"""
def create_df_normalized_by_heavy_atoms(sqlCtx):
# Normalized Hydrogen Bond by heavy atoms
sql = """
SELECT pose, (b.num_res / a.heavyAtom) as normalized_hb
FROM database a
JOIN normalized_residues_filtered_by_list b ON b.ligand = a.ligand
ORDER BY normalized_hb DESC
"""
df_result = sqlCtx.sql(sql)
return df_result
"""
Creates a dataframe of hydrogen bonds
sqlCtx - spark SQL context
rdd_hydrogen_bond - RDD used to create the dataframe; created by the load_file_summary_hbonds function
"""
def create_df_hydrogen_bond(sqlCtx, rdd_hydrogen_bond):
hydrogen_bond_table = sqlCtx.createDataFrame(rdd_hydrogen_bond)
hydrogen_bond_table.registerTempTable("hydrogenbond")
return hydrogen_bond_table
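# End-to-end sketch (the RDD loaders live in other modules of this project
# and 'sc' is an existing SparkContext, so the call site below is an
# assumption based on the docstrings above):
#
#     sqlCtx = SQLContext(sc)
#     create_df_residue_list(sqlCtx, residue_listRDD)
#     create_df_all_residue(sqlCtx, all_residue_split)
#     create_df_all_residue_filtered_by_res_list(sqlCtx)
#     poses = get_group_by_poses_all_residue_filtered_by_res_list(sqlCtx)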
| apache-2.0 | 4,840,884,905,067,512,000 | 36.647059 | 157 | 0.75692 | false |
maxamillion/anaconda | pyanaconda/bootloader.py | 1 | 89455 | # bootloader.py
# Anaconda's bootloader configuration module.
#
# Copyright (C) 2011 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): David Lehman <[email protected]>
# Matthew Miller <[email protected]> (extlinux portion)
#
import collections
import os
import re
import struct
import blivet
from parted import PARTITION_BIOS_GRUB
from glob import glob
from itertools import chain
from pyanaconda import iutil
from blivet.devicelibs import raid
from pyanaconda.isys import sync
from pyanaconda.product import productName
from pyanaconda.flags import flags, can_touch_runtime_system
from blivet.errors import StorageError
from blivet.fcoe import fcoe
import pyanaconda.network
from pyanaconda.errors import errorHandler, ERROR_RAISE, ZIPLError
from pyanaconda.packaging.rpmostreepayload import RPMOSTreePayload
from pyanaconda.nm import nm_device_hwaddress
from blivet import platform
from blivet.size import Size
from pyanaconda.i18n import _, N_
from pyanaconda.orderedset import OrderedSet
import logging
log = logging.getLogger("anaconda")
def get_boot_block(device, seek_blocks=0):
status = device.status
if not status:
try:
device.setup()
except StorageError:
return ""
block_size = device.partedDevice.sectorSize
fd = iutil.eintr_retry_call(os.open, device.path, os.O_RDONLY)
if seek_blocks:
os.lseek(fd, seek_blocks * block_size, 0)
block = iutil.eintr_retry_call(os.read, fd, 512)
iutil.eintr_retry_call(os.close, fd)
if not status:
try:
device.teardown(recursive=True)
except StorageError:
pass
return block
def is_windows_boot_block(block):
try:
        # the MBR/VBR boot signature is the two bytes 0x55 0xAA at offset
        # 0x1FE, stored little-endian on disk
        windows = (len(block) >= 512 and
                   struct.unpack("<H", block[0x1fe: 0x200]) == (0xaa55,))
except struct.error:
windows = False
return windows
def has_windows_boot_block(device):
return is_windows_boot_block(get_boot_block(device))
class serial_opts(object):
def __init__(self):
self.speed = None
self.parity = None
self.word = None
self.stop = None
self.flow = None
def parse_serial_opt(arg):
"""Parse and split serial console options.
Documentation/kernel-parameters.txt says:
ttyS<n>[,options]
Use the specified serial port. The options are of
the form "bbbbpnf", where "bbbb" is the baud rate,
"p" is parity ("n", "o", or "e"), "n" is number of
bits, and "f" is flow control ("r" for RTS or
omit it). Default is "9600n8".
but note that everything after the baud rate is optional, so these are
all valid: 9600, 19200n, 38400n8, 9600e7r.
Also note that the kernel assumes 1 stop bit; this can't be changed.
"""
opts = serial_opts()
m = re.match(r'\d+', arg)
if m is None:
return opts
opts.speed = m.group()
idx = len(opts.speed)
try:
opts.parity = arg[idx+0]
opts.word = arg[idx+1]
opts.flow = arg[idx+2]
except IndexError:
pass
return opts
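# Illustrative parses (everything after the baud rate is optional and the
# stop-bit count is fixed at 1 by the kernel):
#   "9600"    -> speed='9600'
#   "38400n8" -> speed='38400', parity='n', word='8'
#   "9600e7r" -> speed='9600', parity='e', word='7', flow='r'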
def _is_on_iscsi(device):
"""Tells whether a given device is on an iSCSI disk or not."""
return all(isinstance(disk, blivet.devices.iScsiDiskDevice)
for disk in device.disks)
class BootLoaderError(Exception):
pass
class Arguments(OrderedSet):
def _merge_ip(self):
"""
        Find ip= arguments targeting the same interface and merge them.
"""
# partition the input
def partition_p(arg):
# we are only interested in ip= parameters that use some kind of
# automatic network setup:
return arg.startswith("ip=") and arg.count(":") == 1
ip_params = filter(partition_p, self)
rest = OrderedSet(filter(lambda p: not partition_p(p), self))
# split at the colon:
ip_params = map(lambda p: p.split(":"), ip_params)
# create mapping from nics to their configurations
config = collections.defaultdict(list)
for (nic, cfg) in ip_params:
config[nic].append(cfg)
# generate the new parameters:
ip_params = set()
for nic in config:
ip_params.add("%s:%s" % (nic, ",".join(sorted(config[nic]))))
# update the set
self.clear()
self.update(rest)
self.update(ip_params)
return self
def __str__(self):
self._merge_ip()
return " ".join(list(self))
def add(self, key):
self.discard(key)
super(Arguments, self).add(key)
def update(self, other):
for key in other:
self.discard(key)
self.add(key)
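    # Illustrative merge (the interface name is hypothetical): two automatic
    # ip= arguments for the same NIC collapse into a single entry with the
    # configurations sorted and comma-joined:
    #
    #     str(Arguments(["ip=ens3:dhcp", "ip=ens3:auto6", "quiet"]))
    #     == "quiet ip=ens3:auto6,dhcp"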
class BootLoaderImage(object):
""" Base class for bootloader images. Suitable for non-linux OS images. """
def __init__(self, device=None, label=None, short=None):
self.label = label
self.short_label = short
self.device = device
class LinuxBootLoaderImage(BootLoaderImage):
def __init__(self, device=None, label=None, short=None, version=None):
super(LinuxBootLoaderImage, self).__init__(device=device, label=label)
self.label = label # label string
self.short_label = short # shorter label string
self.device = device # StorageDevice instance
self.version = version # kernel version string
self._kernel = None # filename string
self._initrd = None # filename string
@property
def kernel(self):
filename = self._kernel
if self.version and not filename:
filename = "vmlinuz-%s" % self.version
return filename
@property
def initrd(self):
filename = self._initrd
if self.version and not filename:
filename = "initramfs-%s.img" % self.version
return filename
class TbootLinuxBootLoaderImage(LinuxBootLoaderImage):
_multiboot = "tboot.gz" # filename string
_mbargs = ["logging=vga,serial,memory"]
_args = ["intel_iommu=on"]
def __init__(self, device=None, label=None, short=None, version=None):
super(TbootLinuxBootLoaderImage, self).__init__(
device=device, label=label,
short=short, version=version)
@property
def multiboot(self):
return self._multiboot
@property
def mbargs(self):
return self._mbargs
@property
def args(self):
return self._args
class BootLoader(object):
name = "Generic Bootloader"
packages = []
config_file = None
config_file_mode = 0o600
can_dual_boot = False
can_update = False
image_label_attr = "label"
encryption_support = False
stage2_is_valid_stage1 = False
# requirements for stage2 devices
stage2_device = None
stage2_device_types = []
stage2_raid_levels = []
stage2_raid_metadata = []
stage2_raid_member_types = []
stage2_mountpoints = ["/boot", "/"]
stage2_bootable = False
stage2_must_be_primary = True
stage2_description = N_("/boot file system")
stage2_max_end = Size("2 TiB")
@property
def stage2_format_types(self):
return ["ext4", "ext3", "ext2"]
# this is so stupid...
global_preserve_args = ["speakup_synth", "apic", "noapic", "apm", "ide",
"noht", "acpi", "video", "pci", "nodmraid",
"nompath", "nomodeset", "noiswmd", "fips",
"selinux", "biosdevname", "ipv6.disable",
"net.ifnames"]
preserve_args = []
_trusted_boot = False
def __init__(self):
self.boot_args = Arguments()
self.dracut_args = Arguments()
self.disks = []
self._disk_order = []
# timeout in seconds
self._timeout = None
self.password = None
# console/serial stuff
self.console = ""
self.console_options = ""
self._set_console()
# list of BootLoaderImage instances representing bootable OSs
self.linux_images = []
self.chain_images = []
# default image
self._default_image = None
self._update_only = False
self.skip_bootloader = False
self.errors = []
self.warnings = []
self.reset()
def reset(self):
""" Reset stage1 and stage2 values """
# the device the bootloader will be installed on
self.stage1_device = None
# the "boot disk", meaning the disk stage1 _will_ go on
self.stage1_disk = None
self.stage2_device = None
self.stage2_is_preferred_stage1 = False
self.errors = []
self.problems = []
self.warnings = []
#
# disk list access
#
@property
def disk_order(self):
"""Potentially partial order for disks."""
return self._disk_order
@disk_order.setter
def disk_order(self, order):
log.debug("new disk order: %s", order)
self._disk_order = order
if self.disks:
self._sort_disks()
def _sort_disks(self):
"""Sort the internal disk list. """
for name in reversed(self.disk_order):
try:
idx = [d.name for d in self.disks].index(name)
except ValueError:
log.error("bios order specified unknown disk %s", name)
continue
self.disks.insert(0, self.disks.pop(idx))
def set_disk_list(self, disks):
self.disks = disks[:]
self._sort_disks()
#
# image list access
#
@property
def default(self):
"""The default image."""
if not self._default_image and self.linux_images:
self._default_image = self.linux_images[0]
return self._default_image
@default.setter
def default(self, image):
if image not in self.images:
raise ValueError("new default image not in image list")
log.debug("new default image: %s", image)
self._default_image = image
@property
def images(self):
""" List of OS images that will be included in the configuration. """
all_images = self.linux_images
all_images.extend(i for i in self.chain_images if i.label)
return all_images
def clear_images(self):
"""Empty out the image list."""
self.linux_images = []
self.chain_images = []
def add_image(self, image):
"""Add a BootLoaderImage instance to the image list."""
if isinstance(image, LinuxBootLoaderImage):
self.linux_images.append(image)
else:
self.chain_images.append(image)
def image_label(self, image):
"""Return the appropriate image label for this bootloader."""
return getattr(image, self.image_label_attr)
#
# platform-specific data access
#
@property
def disklabel_types(self):
return platform.platform._disklabel_types
@property
def device_descriptions(self):
return platform.platform.bootStage1ConstraintDict["descriptions"]
#
# constraint checking for target devices
#
def _is_valid_md(self, device, raid_levels=None,
metadata=None, member_types=None, desc=""):
ret = True
if device.type != "mdarray":
return ret
if raid_levels and device.level not in raid_levels:
levels_str = ",".join("%s" % l for l in raid_levels)
self.errors.append(_("RAID sets that contain '%(desc)s' must have one "
"of the following raid levels: %(raid_level)s.")
% {"desc" : desc, "raid_level" : levels_str})
ret = False
# new arrays will be created with an appropriate metadata format
if device.exists and \
metadata and device.metadataVersion not in metadata:
self.errors.append(_("RAID sets that contain '%(desc)s' must have one "
"of the following metadata versions: %(metadata_versions)s.")
% {"desc": desc, "metadata_versions": ",".join(metadata)})
ret = False
if member_types:
for member in device.devices:
if not self._device_type_match(member, member_types):
self.errors.append(_("RAID sets that contain '%(desc)s' must "
"have one of the following device "
"types: %(types)s.")
% {"desc" : desc, "types" : ",".join(member_types)})
ret = False
log.debug("_is_valid_md(%s) returning %s", device.name, ret)
return ret
def _is_valid_disklabel(self, device, disklabel_types=None):
ret = True
if self.disklabel_types:
for disk in device.disks:
label_type = getattr(disk.format, "labelType", None)
if not label_type or label_type not in self.disklabel_types:
types_str = ",".join(disklabel_types)
self.errors.append(_("%(name)s must have one of the following "
"disklabel types: %(types)s.")
% {"name" : device.name, "types" : types_str})
ret = False
log.debug("_is_valid_disklabel(%s) returning %s", device.name, ret)
return ret
def _is_valid_format(self, device, format_types=None, mountpoints=None,
desc=""):
ret = True
if format_types and device.format.type not in format_types:
self.errors.append(_("%(desc)s cannot be of type %(type)s.")
% {"desc" : desc, "type" : device.format.type})
ret = False
if mountpoints and hasattr(device.format, "mountpoint") \
and device.format.mountpoint not in mountpoints:
self.errors.append(_("%(desc)s must be mounted on one of %(mountpoints)s.")
% {"desc" : desc, "mountpoints" : ", ".join(mountpoints)})
ret = False
log.debug("_is_valid_format(%s) returning %s", device.name, ret)
return ret
def _is_valid_size(self, device, desc=""):
ret = True
msg = None
errors = []
if device.format.minSize and device.format.maxSize:
msg = (_("%(desc)s must be between %(min)d and %(max)d MB in size")
% {"desc" : desc, "min" : device.format.minSize,
"max" : device.format.maxSize})
if device.format.minSize and device.size < device.format.minSize:
if msg is None:
errors.append(_("%(desc)s must not be smaller than %(min)dMB.")
% {"desc" : desc, "min" : device.format.minSize})
else:
errors.append(msg)
ret = False
if device.format.maxSize and device.size > device.format.maxSize:
if msg is None:
errors.append(_("%(desc)s must not be larger than %(max)dMB.")
% {"desc" : desc, "max" : device.format.maxSize})
elif msg not in errors:
# don't add the same error string twice
errors.append(msg)
ret = False
log.debug("_is_valid_size(%s) returning %s", device.name, ret)
return ret
def _is_valid_location(self, device, max_end=None, desc=""):
ret = True
if max_end and device.type == "partition" and device.partedPartition:
end_sector = device.partedPartition.geometry.end
sector_size = device.partedPartition.disk.device.sectorSize
end = Size(sector_size * end_sector)
if end > max_end:
self.errors.append(_("%(desc)s must be within the first %(max_end)s of "
"the disk.") % {"desc": desc, "max_end": max_end})
ret = False
log.debug("_is_valid_location(%s) returning %s", device.name, ret)
return ret
def _is_valid_partition(self, device, primary=None, desc=""):
ret = True
if device.type == "partition" and primary and not device.isPrimary:
self.errors.append(_("%s must be on a primary partition.") % desc)
ret = False
log.debug("_is_valid_partition(%s) returning %s", device.name, ret)
return ret
#
# target/stage1 device access
#
def _device_type_index(self, device, types):
""" Return the index of the matching type in types to device's type.
Return None if no match is found. """
index = None
try:
index = types.index(device.type)
except ValueError:
if "disk" in types and device.isDisk:
index = types.index("disk")
return index
def _device_type_match(self, device, types):
""" Return True if device is of one of the types in the list types. """
return self._device_type_index(device, types) is not None
def device_description(self, device):
device_types = list(self.device_descriptions.keys())
idx = self._device_type_index(device, device_types)
if idx is None:
raise ValueError("No description available for %s" % device.type)
# this looks unnecessarily complicated, but it handles the various
# device types that we treat as disks
return self.device_descriptions[device_types[idx]]
def set_preferred_stage1_type(self, preferred):
""" Set a preferred type of stage1 device. """
if not self.stage2_is_valid_stage1:
# "partition" means first sector of stage2 and is only meaningful
# for bootloaders that can use stage2 as stage1
return
if preferred == "mbr":
# "mbr" is already the default
return
# partition means "use the stage2 device for a stage1 device"
self.stage2_is_preferred_stage1 = True
def is_valid_stage1_device(self, device, early=False):
""" Return True if the device is a valid stage1 target device.
Also collect lists of errors and warnings.
The criteria for being a valid stage1 target device vary from
platform to platform. On some platforms a disk with an msdos
disklabel is a valid stage1 target, while some platforms require
a special device. Some examples of these special devices are EFI
system partitions on EFI machines, PReP boot partitions on
iSeries, and Apple bootstrap partitions on Mac.
The 'early' keyword argument is a boolean flag indicating whether
or not this check is being performed at a point where the mountpoint
cannot be expected to be set for things like EFI system partitions.
"""
self.errors = []
self.warnings = []
valid = True
constraint = platform.platform.bootStage1ConstraintDict
if device is None:
return False
if not self._device_type_match(device, constraint["device_types"]):
log.debug("stage1 device cannot be of type %s", device.type)
return False
if blivet.arch.isS390() and _is_on_iscsi(device):
log.debug("stage1 device cannot be on an iSCSI disk on s390(x)")
return False
description = self.device_description(device)
if self.stage2_is_valid_stage1 and device == self.stage2_device:
# special case
valid = (self.stage2_is_preferred_stage1 and
self.is_valid_stage2_device(device))
# we'll be checking stage2 separately so don't duplicate messages
self.problems = []
self.warnings = []
return valid
if device.protected:
valid = False
if not self._is_valid_disklabel(device,
disklabel_types=self.disklabel_types):
valid = False
if not self._is_valid_size(device, desc=description):
valid = False
if not self._is_valid_location(device,
max_end=constraint["max_end"],
desc=description):
valid = False
if not self._is_valid_md(device,
raid_levels=constraint["raid_levels"],
metadata=constraint["raid_metadata"],
member_types=constraint["raid_member_types"],
desc=description):
valid = False
if not self.stage2_bootable and not getattr(device, "bootable", True):
log.warning("%s not bootable", device.name)
# XXX does this need to be here?
if getattr(device.format, "label", None) in ("ANACONDA", "LIVE"):
log.info("ignoring anaconda boot disk")
valid = False
if early:
mountpoints = []
else:
mountpoints = constraint["mountpoints"]
if not self._is_valid_format(device,
format_types=constraint["format_types"],
mountpoints=mountpoints,
desc=description):
valid = False
if not self.encryption_support and device.encrypted:
self.errors.append(_("%s cannot be on an encrypted block "
"device.") % description)
valid = False
log.debug("is_valid_stage1_device(%s) returning %s", device.name, valid)
return valid
def set_stage1_device(self, devices):
self.stage1_device = None
if not self.stage1_disk:
self.reset()
raise BootLoaderError("need stage1 disk to set stage1 device")
if self.stage2_is_preferred_stage1:
self.stage1_device = self.stage2_device
return
for device in devices:
if self.stage1_disk not in device.disks:
continue
if self.is_valid_stage1_device(device):
if flags.imageInstall and device.isDisk:
# GRUB2 will install to /dev/loop0 but not to
# /dev/mapper/<image_name>
self.stage1_device = device.parents[0]
else:
self.stage1_device = device
break
if not self.stage1_device:
self.reset()
raise BootLoaderError("failed to find a suitable stage1 device")
#
# boot/stage2 device access
#
def is_valid_stage2_device(self, device, linux=True, non_linux=False):
""" Return True if the device is suitable as a stage2 target device.
Also collect lists of errors and warnings.
"""
self.errors = []
self.warnings = []
valid = True
if device is None:
return False
if device.protected:
valid = False
if blivet.arch.isS390() and _is_on_iscsi(device):
self.errors.append(_("%s cannot be on an iSCSI disk on s390(x)") % self.stage2_description)
valid = False
if not self._device_type_match(device, self.stage2_device_types):
self.errors.append(_("%(desc)s cannot be of type %(type)s")
% {"desc" : _(self.stage2_description), "type" : device.type})
valid = False
if not self._is_valid_disklabel(device,
disklabel_types=self.disklabel_types):
valid = False
if not self._is_valid_size(device, desc=_(self.stage2_description)):
valid = False
if self.stage2_max_end and not self._is_valid_location(device,
max_end=self.stage2_max_end,
desc=_(self.stage2_description)):
valid = False
if not self._is_valid_partition(device,
primary=self.stage2_must_be_primary):
valid = False
if not self._is_valid_md(device,
raid_levels=self.stage2_raid_levels,
metadata=self.stage2_raid_metadata,
member_types=self.stage2_raid_member_types,
desc=_(self.stage2_description)):
valid = False
if linux and \
not self._is_valid_format(device,
format_types=self.stage2_format_types,
mountpoints=self.stage2_mountpoints,
desc=_(self.stage2_description)):
valid = False
non_linux_format_types = platform.platform._non_linux_format_types
if non_linux and \
not self._is_valid_format(device,
format_types=non_linux_format_types):
valid = False
if not self.encryption_support and device.encrypted:
self.errors.append(_("%s cannot be on an encrypted block "
"device.") % _(self.stage2_description))
valid = False
log.debug("is_valid_stage2_device(%s) returning %s", device.name, valid)
return valid
#
# miscellaneous
#
def has_windows(self, devices):
return False
@property
def timeout(self):
"""Bootloader timeout in seconds."""
if self._timeout is not None:
t = self._timeout
else:
t = 5
return t
def check(self):
""" Run additional bootloader checks """
return True
@timeout.setter
def timeout(self, seconds):
self._timeout = seconds
@property
def update_only(self):
return self._update_only
@update_only.setter
def update_only(self, value):
if value and not self.can_update:
raise ValueError("this boot loader does not support updates")
elif self.can_update:
self._update_only = value
def set_boot_args(self, *args, **kwargs):
""" Set up the boot command line.
Keyword Arguments:
storage - a blivet.Storage instance
All other arguments are expected to have a dracutSetupArgs()
method.
"""
storage = kwargs.pop("storage", None)
#
# FIPS
#
if flags.cmdline.get("fips") == "1":
self.boot_args.add("boot=%s" % self.stage2_device.fstabSpec)
#
# dracut
#
# storage
from blivet.devices import NetworkStorageDevice
dracut_devices = [storage.rootDevice]
if self.stage2_device != storage.rootDevice:
dracut_devices.append(self.stage2_device)
dracut_devices.extend(storage.fsset.swapDevices)
# Does /usr have its own device? If so, we need to tell dracut
usr_device = storage.mountpoints.get("/usr")
if usr_device:
dracut_devices.extend([usr_device])
netdevs = storage.devicetree.getDevicesByInstance(NetworkStorageDevice)
rootdev = storage.rootDevice
if any(rootdev.dependsOn(netdev) for netdev in netdevs):
dracut_devices = set(dracut_devices)
for dev in storage.mountpoints.values():
if any(dev.dependsOn(netdev) for netdev in netdevs):
dracut_devices.add(dev)
done = []
for device in dracut_devices:
for dep in storage.devices:
if dep in done:
continue
if device != dep and not device.dependsOn(dep):
continue
setup_args = dep.dracutSetupArgs()
if not setup_args:
continue
self.boot_args.update(setup_args)
self.dracut_args.update(setup_args)
done.append(dep)
# network storage
# XXX this is nothing to be proud of
if isinstance(dep, NetworkStorageDevice):
setup_args = pyanaconda.network.dracutSetupArgs(dep)
self.boot_args.update(setup_args)
self.dracut_args.update(setup_args)
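        # After this loop boot_args typically carries storage arguments
        # collected from each device's dracutSetupArgs(), e.g. (illustrative)
        #   rd.lvm.lv=vg/root rd.luks.uuid=luks-1234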
# passed-in objects
for cfg_obj in chain(args, kwargs.values()):
if hasattr(cfg_obj, "dracutSetupArgs"):
setup_args = cfg_obj.dracutSetupArgs()
self.boot_args.update(setup_args)
self.dracut_args.update(setup_args)
else:
setup_string = cfg_obj.dracutSetupString()
self.boot_args.add(setup_string)
self.dracut_args.add(setup_string)
# This is needed for FCoE, bug #743784. The case:
# We discover LUN on an iface which is part of multipath setup.
# If the iface is disconnected after discovery anaconda doesn't
# write dracut ifname argument for the disconnected iface path
# (in Network.dracutSetupArgs).
# Dracut needs the explicit ifname= because biosdevname
# fails to rename the iface (because of BFS booting from it).
for nic, _dcb, _auto_vlan in fcoe().nics:
try:
hwaddr = nm_device_hwaddress(nic)
except ValueError:
continue
self.boot_args.add("ifname=%s:%s" % (nic, hwaddr.lower()))
# Add iscsi_firmware to trigger dracut running iscsistart
# See rhbz#1099603 and rhbz#1185792
if len(glob("/sys/firmware/iscsi_boot*")) > 0:
self.boot_args.add("iscsi_firmware")
#
# preservation of some of our boot args
# FIXME: this is stupid.
#
for opt in self.global_preserve_args + self.preserve_args:
if opt not in flags.cmdline:
continue
arg = flags.cmdline.get(opt)
new_arg = opt
if arg:
new_arg += "=%s" % arg
self.boot_args.add(new_arg)
#
# configuration
#
@property
def boot_prefix(self):
""" Prefix, if any, to paths in /boot. """
if self.stage2_device.format.mountpoint == "/":
prefix = "/boot"
else:
prefix = ""
return prefix
def _set_console(self):
""" Set console options based on boot arguments. """
console = flags.cmdline.get("console", "")
console = os.path.basename(console)
self.console, _x, self.console_options = console.partition(",")
def write_config_console(self, config):
"""Write console-related configuration lines."""
pass
def write_config_password(self, config):
"""Write password-related configuration lines."""
pass
def write_config_header(self, config):
"""Write global configuration lines."""
self.write_config_console(config)
self.write_config_password(config)
def write_config_images(self, config):
"""Write image configuration entries."""
raise NotImplementedError()
def write_config_post(self):
try:
iutil.eintr_retry_call(os.chmod, iutil.getSysroot() + self.config_file, self.config_file_mode)
except OSError as e:
log.error("failed to set config file permissions: %s", e)
def write_config(self):
""" Write the bootloader configuration. """
if not self.config_file:
raise BootLoaderError("no config file defined for this boot loader")
config_path = os.path.normpath(iutil.getSysroot() + self.config_file)
if os.access(config_path, os.R_OK):
os.rename(config_path, config_path + ".anacbak")
config = open(config_path, "w")
self.write_config_header(config)
self.write_config_images(config)
config.close()
self.write_config_post()
@property
def trusted_boot(self):
return self._trusted_boot
@trusted_boot.setter
def trusted_boot(self, trusted_boot):
self._trusted_boot = trusted_boot
#
# installation
#
def write(self):
""" Write the bootloader configuration and install the bootloader. """
if self.skip_bootloader:
return
if self.update_only:
self.update()
return
self.write_config()
sync()
self.stage2_device.format.sync(root=iutil.getTargetPhysicalRoot())
self.install()
def install(self, args=None):
raise NotImplementedError()
def update(self):
""" Update an existing bootloader configuration. """
pass
class GRUB(BootLoader):
name = "GRUB"
_config_dir = "grub"
_config_file = "grub.conf"
_device_map_file = "device.map"
can_dual_boot = True
can_update = True
stage2_is_valid_stage1 = True
stage2_bootable = True
stage2_must_be_primary = False
# list of strings representing options for boot device types
stage2_device_types = ["partition", "mdarray"]
stage2_raid_levels = [raid.RAID1]
stage2_raid_member_types = ["partition"]
stage2_raid_metadata = ["0", "0.90", "1.0"]
packages = ["grub"]
_serial_consoles = ["ttyS"]
def __init__(self):
super(GRUB, self).__init__()
self.encrypted_password = ""
#
# grub-related conveniences
#
def grub_device_name(self, device):
""" Return a grub-friendly representation of device. """
disk = getattr(device, "disk", device)
name = "(hd%d" % self.disks.index(disk)
if hasattr(device, "disk"):
name += ",%d" % (device.partedPartition.number - 1,)
name += ")"
return name
@property
def grub_config_dir(self):
""" Config dir, adjusted for grub's view of the world. """
return self.boot_prefix + self._config_dir
#
# configuration
#
@property
def config_dir(self):
""" Full path to configuration directory. """
return "/boot/" + self._config_dir
@property
def config_file(self):
""" Full path to configuration file. """
return "%s/%s" % (self.config_dir, self._config_file)
@property
def device_map_file(self):
""" Full path to device.map file. """
return "%s/%s" % (self.config_dir, self._device_map_file)
@property
def grub_conf_device_line(self):
return ""
@property
def splash_dir(self):
""" relative path to splash image directory."""
return GRUB._config_dir
@property
def has_serial_console(self):
""" true if the console is a serial console. """
return any(self.console.startswith(sconsole) for sconsole in self._serial_consoles)
@property
def serial_command(self):
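        # A sketch of the expected result (hypothetical console= values):
        # for console=ttyS1,115200n8 this builds
        #   serial --unit=1 --speed=115200
        # defaults (unit 0, speed 9600, 8 data bits, 1 stop bit) are omitted.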
command = ""
if self.console and self.has_serial_console:
unit = self.console[-1]
command = ["serial"]
s = parse_serial_opt(self.console_options)
if unit and unit != '0':
command.append("--unit=%s" % unit)
if s.speed and s.speed != '9600':
command.append("--speed=%s" % s.speed)
if s.parity:
if s.parity == 'o':
command.append("--parity=odd")
elif s.parity == 'e':
command.append("--parity=even")
if s.word and s.word != '8':
command.append("--word=%s" % s.word)
if s.stop and s.stop != '1':
command.append("--stop=%s" % s.stop)
command = " ".join(command)
return command
def write_config_console(self, config):
""" Write console-related configuration. """
if not self.console:
return
if self.has_serial_console:
config.write("%s\n" % self.serial_command)
config.write("terminal --timeout=%s serial console\n"
% self.timeout)
console_arg = "console=%s" % self.console
if self.console_options:
console_arg += ",%s" % self.console_options
self.boot_args.add(console_arg)
def _encrypt_password(self):
""" Make sure self.encrypted_password is set up correctly. """
if self.encrypted_password:
return
if not self.password:
raise BootLoaderError("cannot encrypt empty password")
# Used for ascii_letters and digits constants
import string # pylint: disable=deprecated-module
import crypt
import random
salt = "$6$"
salt_len = 16
salt_chars = string.ascii_letters + string.digits + './'
rand_gen = random.SystemRandom()
salt += "".join(rand_gen.choice(salt_chars) for i in range(salt_len))
self.encrypted_password = crypt.crypt(self.password, salt)
def write_config_password(self, config):
""" Write password-related configuration. """
if not self.password and not self.encrypted_password:
return
self._encrypt_password()
password_line = "--encrypted " + self.encrypted_password
config.write("password %s\n" % password_line)
def write_config_header(self, config):
"""Write global configuration information. """
if self.boot_prefix:
have_boot = "do not "
else:
have_boot = ""
s = """# grub.conf generated by anaconda
# Note that you do not have to rerun grub after making changes to this file.
# NOTICE: You %(do)shave a /boot partition. This means that all kernel and
# initrd paths are relative to %(boot)s, eg.
# root %(grub_target)s
# kernel %(prefix)s/vmlinuz-version ro root=%(root_device)s
# initrd %(prefix)s/initrd-[generic-]version.img
""" % {"do": have_boot, "boot": self.stage2_device.format.mountpoint,
"root_device": self.stage2_device.path,
"grub_target": self.grub_device_name(self.stage1_device),
"prefix": self.boot_prefix}
config.write(s)
config.write("boot=%s\n" % self.stage1_device.path)
config.write(self.grub_conf_device_line)
# find the index of the default image
try:
default_index = self.images.index(self.default)
except ValueError:
e = "Failed to find default image (%s)" % self.default.label
raise BootLoaderError(e)
config.write("default=%d\n" % default_index)
config.write("timeout=%d\n" % self.timeout)
self.write_config_console(config)
if iutil.isConsoleOnVirtualTerminal(self.console):
splash = "splash.xpm.gz"
splash_path = os.path.normpath("%s/boot/%s/%s" % (iutil.getSysroot(),
self.splash_dir,
splash))
if os.access(splash_path, os.R_OK):
grub_root_grub_name = self.grub_device_name(self.stage2_device)
config.write("splashimage=%s/%s/%s\n" % (grub_root_grub_name,
self.splash_dir,
splash))
config.write("hiddenmenu\n")
self.write_config_password(config)
def write_config_images(self, config):
""" Write image entries into configuration file. """
for image in self.images:
args = Arguments()
if isinstance(image, LinuxBootLoaderImage):
grub_root = self.grub_device_name(self.stage2_device)
args.update(["ro", "root=%s" % image.device.fstabSpec])
args.update(self.boot_args)
if isinstance(image, TbootLinuxBootLoaderImage):
args.update(image.args)
snippet = ("\tkernel %(prefix)s/%(multiboot)s %(mbargs)s\n"
"\tmodule %(prefix)s/%(kernel)s %(args)s\n"
"\tmodule %(prefix)s/%(initrd)s\n"
% {"prefix": self.boot_prefix,
"multiboot": image.multiboot,
"mbargs": image.mbargs,
"kernel": image.kernel, "args": args,
"initrd": image.initrd})
else:
snippet = ("\tkernel %(prefix)s/%(kernel)s %(args)s\n"
"\tinitrd %(prefix)s/%(initrd)s\n"
% {"prefix": self.boot_prefix,
"kernel": image.kernel, "args": args,
"initrd": image.initrd})
stanza = ("title %(label)s (%(version)s)\n"
"\troot %(grub_root)s\n"
"%(snippet)s"
% {"label": image.label, "version": image.version,
"grub_root": grub_root, "snippet": snippet})
else:
stanza = ("title %(label)s\n"
"\trootnoverify %(grub_root)s\n"
"\tchainloader +1\n"
% {"label": image.label,
"grub_root": self.grub_device_name(image.device)})
log.info("bootloader.py: used boot args: %s ", args)
config.write(stanza)
def write_device_map(self):
""" Write out a device map containing all supported devices. """
map_path = os.path.normpath(iutil.getSysroot() + self.device_map_file)
if os.access(map_path, os.R_OK):
os.rename(map_path, map_path + ".anacbak")
dev_map = open(map_path, "w")
dev_map.write("# this device map was generated by anaconda\n")
for disk in self.disks:
dev_map.write("%s %s\n" % (self.grub_device_name(disk),
disk.path))
dev_map.close()
def write_config_post(self):
""" Perform additional configuration after writing config file(s). """
super(GRUB, self).write_config_post()
# make symlink for menu.lst (grub's default config file name)
menu_lst = "%s%s/menu.lst" % (iutil.getSysroot(), self.config_dir)
if os.access(menu_lst, os.R_OK):
try:
os.rename(menu_lst, menu_lst + '.anacbak')
except OSError as e:
log.error("failed to back up %s: %s", menu_lst, e)
try:
os.symlink(self._config_file, menu_lst)
except OSError as e:
log.error("failed to create grub menu.lst symlink: %s", e)
# make symlink to grub.conf in /etc since that's where configs belong
etc_grub = "%s/etc/%s" % (iutil.getSysroot(), self._config_file)
if os.access(etc_grub, os.R_OK):
try:
os.unlink(etc_grub)
except OSError as e:
log.error("failed to remove %s: %s", etc_grub, e)
try:
os.symlink("..%s" % self.config_file, etc_grub)
except OSError as e:
log.error("failed to create /etc/grub.conf symlink: %s", e)
def write_config(self):
""" Write bootloader configuration to disk. """
# write device.map
self.write_device_map()
# this writes the actual configuration file
super(GRUB, self).write_config()
#
# installation
#
@property
def install_targets(self):
""" List of (stage1, stage2) tuples representing install targets. """
targets = []
# make sure we have stage1 and stage2 installed with redundancy
# so that boot can succeed even in the event of failure or removal
# of some of the disks containing the member partitions of the
# /boot array. If the stage1 is not a disk, it probably needs to
# be a partition on a particular disk (biosboot, prepboot), so only
# add the redundant targets if installing stage1 to a disk that is
# a member of the stage2 array.
# Look for both mdraid and btrfs raid
if self.stage2_device.type == "mdarray" and \
self.stage2_device.level == raid.RAID1:
stage2_raid = True
# Set parents to the list of partitions in the RAID
stage2_parents = self.stage2_device.parents
elif self.stage2_device.type == "btrfs subvolume" and \
self.stage2_device.parents[0].dataLevel == raid.RAID1:
stage2_raid = True
# Set parents to the list of partitions in the parent volume
stage2_parents = self.stage2_device.parents[0].parents
else:
stage2_raid = False
if stage2_raid and \
self.stage1_device.isDisk and \
self.stage2_device.dependsOn(self.stage1_device):
for stage2dev in stage2_parents:
# if target disk contains any of /boot array's member
# partitions, set up stage1 on each member's disk
stage1dev = stage2dev.disk
targets.append((stage1dev, self.stage2_device))
else:
targets.append((self.stage1_device, self.stage2_device))
return targets
def install(self, args=None):
rc = iutil.execInSysroot("grub-install", ["--just-copy"])
if rc:
raise BootLoaderError("boot loader install failed")
for (stage1dev, stage2dev) in self.install_targets:
cmd = ("root %(stage2dev)s\n"
"install --stage2=%(config_dir)s/stage2"
" /%(grub_config_dir)s/stage1 d %(stage1dev)s"
" /%(grub_config_dir)s/stage2 p"
" %(stage2dev)s/%(grub_config_dir)s/%(config_basename)s\n"
% {"grub_config_dir": self.grub_config_dir,
"config_dir": self.config_dir,
"config_basename": self._config_file,
"stage1dev": self.grub_device_name(stage1dev),
"stage2dev": self.grub_device_name(stage2dev)})
(pread, pwrite) = os.pipe()
iutil.eintr_retry_call(os.write, pwrite, cmd.encode("utf-8"))
iutil.eintr_retry_call(os.close, pwrite)
args = ["--batch", "--no-floppy",
"--device-map=%s" % self.device_map_file]
rc = iutil.execInSysroot("grub", args, stdin=pread)
iutil.eintr_retry_call(os.close, pread)
if rc:
raise BootLoaderError("boot loader install failed")
def update(self):
self.install()
#
# miscellaneous
#
def has_windows(self, devices):
""" Potential boot devices containing non-linux operating systems. """
# make sure we don't clobber error/warning lists
errors = self.errors[:]
warnings = self.warnings[:]
ret = [d for d in devices if self.is_valid_stage2_device(d, linux=False, non_linux=True)]
self.errors = errors
self.warnings = warnings
return bool(ret)
# Add a warning about certain RAID situations to is_valid_stage2_device
def is_valid_stage2_device(self, device, linux=True, non_linux=False):
valid = super(GRUB, self).is_valid_stage2_device(device, linux, non_linux)
# If the stage2 device is on a raid1, check that the stage1 device is also redundant,
# either by also being part of an array or by being a disk (which is expanded
# to every disk in the array by install_targets).
if self.stage1_device and self.stage2_device and \
self.stage2_device.type == "mdarray" and \
self.stage2_device.level == raid.RAID1 and \
self.stage1_device.type != "mdarray":
if not self.stage1_device.isDisk:
msg = _("boot loader stage2 device %(stage2dev)s is on a multi-disk array, but boot loader stage1 device %(stage1dev)s is not. " \
"A drive failure in %(stage2dev)s could render the system unbootable.") % \
{"stage1dev" : self.stage1_device.name,
"stage2dev" : self.stage2_device.name}
self.warnings.append(msg)
elif not self.stage2_device.dependsOn(self.stage1_device):
msg = _("boot loader stage2 device %(stage2dev)s is on a multi-disk array, but boot loader stage1 device %(stage1dev)s is not part of this array. " \
"The stage1 boot loader will only be installed to a single drive.") % \
{"stage1dev" : self.stage1_device.name,
"stage2dev" : self.stage2_device.name}
self.warnings.append(msg)
return valid
class GRUB2(GRUB):
""" GRUBv2
- configuration
- password (insecure), password_pbkdf2
- http://www.gnu.org/software/grub/manual/grub.html#Invoking-grub_002dmkpasswd_002dpbkdf2
- --users per-entry specifies which users can access, otherwise
entry is unrestricted
- /etc/grub/custom.cfg
- how does grub resolve names of md arrays?
- disable automatic use of grub-mkconfig?
- on upgrades?
- BIOS boot partition (GPT)
- parted /dev/sda set <partition_number> bios_grub on
- can't contain a file system
- 31KiB min, 1MiB recommended
"""
name = "GRUB2"
packages = ["grub2"]
_config_file = "grub.cfg"
_config_dir = "grub2"
defaults_file = "/etc/default/grub"
terminal_type = "console"
stage2_max_end = None
# requirements for boot devices
stage2_device_types = ["partition", "mdarray", "lvmlv"]
stage2_raid_levels = [raid.RAID0, raid.RAID1, raid.RAID4,
raid.RAID5, raid.RAID6, raid.RAID10]
stage2_raid_metadata = ["0", "0.90", "1.0", "1.2"]
@property
def stage2_format_types(self):
if productName.startswith("Red Hat "):
return ["xfs", "ext4", "ext3", "ext2", "btrfs"]
else:
return ["ext4", "ext3", "ext2", "btrfs", "xfs"]
def __init__(self):
super(GRUB2, self).__init__()
# XXX we probably need special handling for raid stage1 w/ gpt disklabel
# since it's unlikely there'll be a bios boot partition on each disk
#
# grub-related conveniences
#
def grub_device_name(self, device):
""" Return a grub-friendly representation of device.
Disks and partitions use the (hdX,Y) notation, while lvm and
md devices just use their names.
"""
disk = None
name = "(%s)" % device.name
if device.isDisk:
disk = device
elif hasattr(device, "disk"):
disk = device.disk
if disk is not None:
name = "(hd%d" % self.disks.index(disk)
if hasattr(device, "disk"):
lt = device.disk.format.labelType
name += ",%s%d" % (lt, device.partedPartition.number)
name += ")"
return name
def write_config_console(self, config):
if not self.console:
return
console_arg = "console=%s" % self.console
if self.console_options:
console_arg += ",%s" % self.console_options
self.boot_args.add(console_arg)
def write_device_map(self):
""" Write out a device map containing all supported devices. """
map_path = os.path.normpath(iutil.getSysroot() + self.device_map_file)
if os.access(map_path, os.R_OK):
os.rename(map_path, map_path + ".anacbak")
devices = self.disks
if self.stage1_device not in devices:
devices.append(self.stage1_device)
for disk in self.stage2_device.disks:
if disk not in devices:
devices.append(disk)
devices = [d for d in devices if d.isDisk]
if len(devices) == 0:
return
dev_map = open(map_path, "w")
dev_map.write("# this device map was generated by anaconda\n")
for drive in devices:
dev_map.write("%s %s\n" % (self.grub_device_name(drive),
drive.path))
dev_map.close()
def write_defaults(self):
defaults_file = "%s%s" % (iutil.getSysroot(), self.defaults_file)
defaults = open(defaults_file, "w+")
defaults.write("GRUB_TIMEOUT=%d\n" % self.timeout)
defaults.write("GRUB_DISTRIBUTOR=\"$(sed 's, release .*$,,g' /etc/system-release)\"\n")
defaults.write("GRUB_DEFAULT=saved\n")
defaults.write("GRUB_DISABLE_SUBMENU=true\n")
if self.console and self.has_serial_console:
defaults.write("GRUB_TERMINAL=\"serial console\"\n")
defaults.write("GRUB_SERIAL_COMMAND=\"%s\"\n" % self.serial_command)
else:
defaults.write("GRUB_TERMINAL_OUTPUT=\"%s\"\n" % self.terminal_type)
# this is going to cause problems for systems containing multiple
# linux installations or even multiple boot entries with different
# boot arguments
log.info("bootloader.py: used boot args: %s ", self.boot_args)
defaults.write("GRUB_CMDLINE_LINUX=\"%s\"\n" % self.boot_args)
defaults.write("GRUB_DISABLE_RECOVERY=\"true\"\n")
#defaults.write("GRUB_THEME=\"/boot/grub2/themes/system/theme.txt\"\n")
defaults.close()
def _encrypt_password(self):
""" Make sure self.encrypted_password is set up properly. """
if self.encrypted_password:
return
if not self.password:
raise RuntimeError("cannot encrypt empty password")
(pread, pwrite) = os.pipe()
passwords = "%s\n%s\n" % (self.password, self.password)
iutil.eintr_retry_call(os.write, pwrite, passwords.encode("utf-8"))
iutil.eintr_retry_call(os.close, pwrite)
buf = iutil.execWithCapture("grub2-mkpasswd-pbkdf2", [],
stdin=pread,
root=iutil.getSysroot())
iutil.eintr_retry_call(os.close, pread)
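        # grub2-mkpasswd-pbkdf2 ends its output with the hash, roughly
        # (illustrative): "... is grub.pbkdf2.sha512.10000.<salt>.<hash>",
        # so the last whitespace-separated token is the part we keep.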
self.encrypted_password = buf.split()[-1].strip()
if not self.encrypted_password.startswith("grub.pbkdf2."):
raise BootLoaderError("failed to encrypt boot loader password")
def write_password_config(self):
if not self.password and not self.encrypted_password:
return
users_file = iutil.getSysroot() + "/etc/grub.d/01_users"
header = open(users_file, "w")
header.write("#!/bin/sh -e\n\n")
header.write("cat << \"EOF\"\n")
# XXX FIXME: document somewhere that the username is "root"
header.write("set superusers=\"root\"\n")
header.write("export superusers\n")
self._encrypt_password()
password_line = "password_pbkdf2 root " + self.encrypted_password
header.write("%s\n" % password_line)
header.write("EOF\n")
header.close()
iutil.eintr_retry_call(os.chmod, users_file, 0o700)
def write_config(self):
self.write_config_console(None)
# See if we have a password and if so update the boot args before we
# write out the defaults file.
if self.password or self.encrypted_password:
self.boot_args.add("rd.shell=0")
self.write_defaults()
        # if we fail to set up password auth we should complete the
# installation so the system is at least bootable
try:
self.write_password_config()
except (BootLoaderError, OSError, RuntimeError) as e:
log.error("boot loader password setup failed: %s", e)
# make sure the default entry is the OS we are installing
if self.default is not None:
entry_title = "0"
rc = iutil.execInSysroot("grub2-set-default", [entry_title])
if rc:
log.error("failed to set default menu entry to %s", productName)
# now tell grub2 to generate the main configuration file
rc = iutil.execInSysroot("grub2-mkconfig",
["-o", self.config_file])
if rc:
raise BootLoaderError("failed to write boot loader configuration")
#
# installation
#
def install(self, args=None):
if args is None:
args = []
# XXX will installing to multiple drives work as expected with GRUBv2?
for (stage1dev, stage2dev) in self.install_targets:
grub_args = args + ["--no-floppy", stage1dev.path]
if stage1dev == stage2dev:
# This is hopefully a temporary hack. GRUB2 currently refuses
# to install to a partition's boot block without --force.
grub_args.insert(0, '--force')
else:
if flags.nombr:
grub_args.insert(0, '--grub-setup=/bin/true')
log.info("bootloader.py: mbr update by grub2 disabled")
else:
log.info("bootloader.py: mbr will be updated for grub2")
rc = iutil.execWithRedirect("grub2-install", grub_args,
root=iutil.getSysroot(),
env_prune=['MALLOC_PERTURB_'])
if rc:
raise BootLoaderError("boot loader install failed")
def write(self):
""" Write the bootloader configuration and install the bootloader. """
if self.skip_bootloader:
return
if self.update_only:
self.update()
return
try:
self.write_device_map()
self.stage2_device.format.sync(root=iutil.getTargetPhysicalRoot())
sync()
self.install()
sync()
self.stage2_device.format.sync(root=iutil.getTargetPhysicalRoot())
finally:
self.write_config()
sync()
self.stage2_device.format.sync(root=iutil.getTargetPhysicalRoot())
def check(self):
""" When installing to the mbr of a disk grub2 needs enough space
            before the first partition in order to embed its core.img.
            Until we have a way to ask grub2 what the size is, we check to
            make sure it starts >= 512K; otherwise we return an error.
"""
ret = True
base_gap_bytes = 32256 # 31.5KiB
advanced_gap_bytes = 524288 # 512KiB
self.errors = []
self.warnings = []
if self.stage1_device == self.stage2_device:
return ret
# These are small enough to fit
if self.stage2_device.type == "partition":
min_start = base_gap_bytes
else:
min_start = advanced_gap_bytes
if not self.stage1_disk:
return False
# If the first partition starts too low and there is no biosboot partition show an error.
error_msg = None
biosboot = False
parts = self.stage1_disk.format.partedDisk.partitions
for p in parts:
if p.getFlag(PARTITION_BIOS_GRUB):
biosboot = True
break
start = p.geometry.start * p.disk.device.sectorSize
if start < min_start:
error_msg = _("%(deviceName)s may not have enough space for grub2 to embed "
"core.img when using the %(fsType)s file system on %(deviceType)s") \
% {"deviceName": self.stage1_device.name, "fsType": self.stage2_device.format.type,
"deviceType": self.stage2_device.type}
if error_msg and not biosboot:
log.error(error_msg)
self.errors.append(error_msg)
ret = False
return ret
class EFIGRUB(GRUB2):
packages = ["grub2-efi", "efibootmgr", "shim"]
can_dual_boot = False
stage2_is_valid_stage1 = False
stage2_bootable = False
_efi_binary = "\\shim.efi"
@property
def _config_dir(self):
return "efi/EFI/%s" % (self.efi_dir,)
def __init__(self):
super(EFIGRUB, self).__init__()
self.efi_dir = 'BOOT'
def efibootmgr(self, *args, **kwargs):
if flags.imageInstall or flags.dirInstall:
log.info("Skipping efibootmgr for image/directory install.")
return ""
if "noefi" in flags.cmdline:
log.info("Skipping efibootmgr for noefi")
return ""
if kwargs.pop("capture", False):
exec_func = iutil.execWithCapture
else:
exec_func = iutil.execWithRedirect
if "root" not in kwargs:
kwargs["root"] = iutil.getSysroot()
return exec_func("efibootmgr", list(args), **kwargs)
#
# installation
#
def remove_efi_boot_target(self):
buf = self.efibootmgr(capture=True)
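        # efibootmgr output lines look roughly like (illustrative):
        #   Boot0002* Fedora
        # so slot[4:8] below extracts the four hex digits of the entry id.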
for line in buf.splitlines():
try:
(slot, _product) = line.split(None, 1)
except ValueError:
continue
if _product == productName.split("-")[0]:
slot_id = slot[4:8]
                # slot_id is hex, so we can't simply use .isdigit();
                # validate it with this regex instead:
if not re.match("^[0-9a-fA-F]+$", slot_id):
log.warning("failed to parse efi boot slot (%s)", slot)
continue
rc = self.efibootmgr("-b", slot_id, "-B")
if rc:
raise BootLoaderError("failed to remove old efi boot entry. This is most likely a kernel or firmware bug.")
@property
def efi_dir_as_efifs_dir(self):
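        # EFI firmware expects backslash-separated paths relative to the EFI
        # system partition, so e.g. "efi/EFI/BOOT" becomes "\EFI\BOOT".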
ret = self._config_dir.replace('efi/', '')
return "\\" + ret.replace('/', '\\')
def _add_single_efi_boot_target(self, partition):
boot_disk = partition.disk
boot_part_num = str(partition.partedPartition.number)
rc = self.efibootmgr("-c", "-w", "-L", productName.split("-")[0],
"-d", boot_disk.path, "-p", boot_part_num,
"-l",
self.efi_dir_as_efifs_dir + self._efi_binary,
root=iutil.getSysroot())
if rc:
raise BootLoaderError("failed to set new efi boot target. This is most likely a kernel or firmware bug.")
def add_efi_boot_target(self):
if self.stage1_device.type == "partition":
self._add_single_efi_boot_target(self.stage1_device)
elif self.stage1_device.type == "mdarray":
for parent in self.stage1_device.parents:
self._add_single_efi_boot_target(parent)
def install(self, args=None):
if not flags.leavebootorder:
self.remove_efi_boot_target()
self.add_efi_boot_target()
def update(self):
self.install()
#
# installation
#
def write(self):
""" Write the bootloader configuration and install the bootloader. """
if self.skip_bootloader:
return
if self.update_only:
self.update()
return
try:
sync()
self.stage2_device.format.sync(root=iutil.getTargetPhysicalRoot())
self.install()
finally:
self.write_config()
def check(self):
return True
class Aarch64EFIGRUB(EFIGRUB):
_serial_consoles = ["ttyAMA", "ttyS"]
class MacEFIGRUB(EFIGRUB):
def mactel_config(self):
if os.path.exists(iutil.getSysroot() + "/usr/libexec/mactel-boot-setup"):
rc = iutil.execInSysroot("/usr/libexec/mactel-boot-setup", [])
if rc:
log.error("failed to configure Mac boot loader")
def install(self, args=None):
super(MacEFIGRUB, self).install()
self.mactel_config()
def is_valid_stage1_device(self, device, early=False):
valid = super(MacEFIGRUB, self).is_valid_stage1_device(device, early)
# Make sure we don't pick the OSX root partition
if valid and getattr(device.format, "name", "") != "Linux HFS+ ESP":
valid = False
if hasattr(device.format, "name"):
log.debug("device.format.name is '%s'", device.format.name)
log.debug("MacEFIGRUB.is_valid_stage1_device(%s) returning %s", device.name, valid)
return valid
# Inherit abstract methods from BootLoader
# pylint: disable=abstract-method
class YabootBase(BootLoader):
def write_config_password(self, config):
if self.password:
config.write("password=%s\n" % self.password)
config.write("restricted\n")
def write_config_images(self, config):
for image in self.images:
if not isinstance(image, LinuxBootLoaderImage):
# mac os images are handled specially in the header on mac
continue
args = Arguments()
if self.password:
args.add("rd.shell=0")
if image.initrd:
initrd_line = "\tinitrd=%s/%s\n" % (self.boot_prefix,
image.initrd)
else:
initrd_line = ""
root_device_spec = image.device.fstabSpec
if root_device_spec.startswith("/"):
root_line = "\troot=%s\n" % root_device_spec
else:
args.add("root=%s" % root_device_spec)
root_line = ""
args.update(self.boot_args)
log.info("bootloader.py: used boot args: %s ", args)
stanza = ("image=%(boot_prefix)s%(kernel)s\n"
"\tlabel=%(label)s\n"
"\tread-only\n"
"%(initrd_line)s"
"%(root_line)s"
"\tappend=\"%(args)s\"\n\n"
% {"kernel": image.kernel, "initrd_line": initrd_line,
"label": self.image_label(image),
"root_line": root_line, "args": args,
"boot_prefix": self.boot_prefix})
config.write(stanza)
class Yaboot(YabootBase):
name = "Yaboot"
_config_file = "yaboot.conf"
prog = "ybin"
image_label_attr = "short_label"
packages = ["yaboot"]
# stage2 device requirements
stage2_device_types = ["partition", "mdarray"]
stage2_device_raid_levels = [raid.RAID1]
#
# configuration
#
@property
def config_dir(self):
conf_dir = "/etc"
if self.stage2_device.format.mountpoint == "/boot":
conf_dir = "/boot/etc"
return conf_dir
@property
def config_file(self):
return "%s/%s" % (self.config_dir, self._config_file)
def write_config_header(self, config):
if self.stage2_device.type == "mdarray":
boot_part_num = self.stage2_device.parents[0].partedPartition.number
else:
boot_part_num = self.stage2_device.partedPartition.number
# yaboot.conf timeout is in tenths of a second. Brilliant.
header = ("# yaboot.conf generated by anaconda\n\n"
"boot=%(stage1dev)s\n"
"init-message=\"Welcome to %(product)s!\\nHit <TAB> for "
"boot options\"\n\n"
"partition=%(part_num)d\n"
"timeout=%(timeout)d\n"
"install=/usr/lib/yaboot/yaboot\n"
"delay=5\n"
"enablecdboot\n"
"enableofboot\n"
"enablenetboot\n"
% {"stage1dev": self.stage1_device.path,
"product": productName, "part_num": boot_part_num,
"timeout": self.timeout * 10})
config.write(header)
self.write_config_variant_header(config)
self.write_config_password(config)
config.write("\n")
def write_config_variant_header(self, config):
config.write("nonvram\n")
config.write("mntpoint=/boot/yaboot\n")
config.write("usemount\n")
def write_config_post(self):
super(Yaboot, self).write_config_post()
# make symlink in /etc to yaboot.conf if config is in /boot/etc
etc_yaboot_conf = iutil.getSysroot() + "/etc/yaboot.conf"
if not os.access(etc_yaboot_conf, os.R_OK):
try:
os.symlink("../boot/etc/yaboot.conf", etc_yaboot_conf)
except OSError as e:
log.error("failed to create /etc/yaboot.conf symlink: %s", e)
def write_config(self):
if not os.path.isdir(iutil.getSysroot() + self.config_dir):
os.mkdir(iutil.getSysroot() + self.config_dir)
# this writes the config
super(Yaboot, self).write_config()
#
# installation
#
def install(self, args=None):
args = ["-f", "-C", self.config_file]
rc = iutil.execInSysroot(self.prog, args)
if rc:
raise BootLoaderError("boot loader installation failed")
class IPSeriesYaboot(Yaboot):
prog = "mkofboot"
#
# configuration
#
def write_config_variant_header(self, config):
config.write("nonvram\n") # only on pSeries?
config.write("fstype=raw\n")
#
# installation
#
def install(self, args=None):
self.updatePowerPCBootList()
super(IPSeriesYaboot, self).install()
def updatePowerPCBootList(self):
if not can_touch_runtime_system("updatePowerPCBootList", touch_live=True):
return
log.debug("updatePowerPCBootList: self.stage1_device.path = %s", self.stage1_device.path)
buf = iutil.execWithCapture("nvram",
["--print-config=boot-device"])
if len(buf) == 0:
log.error("FAIL: nvram --print-config=boot-device")
return
boot_list = buf.strip().split()
log.debug("updatePowerPCBootList: boot_list = %s", boot_list)
buf = iutil.execWithCapture("ofpathname",
[self.stage1_device.path])
if len(buf) > 0:
boot_disk = buf.strip()
log.debug("updatePowerPCBootList: boot_disk = %s", boot_disk)
else:
log.error("FAIL: ofpathname %s", self.stage1_device.path)
return
# Place the disk containing the PReP partition first.
        # Remove all other occurrences of it.
boot_list = [boot_disk] + [x for x in boot_list if x != boot_disk]
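        # e.g. (illustrative) boot_list ["disk2", "disk1", "disk3"] with
        # boot_disk "disk1" becomes ["disk1", "disk2", "disk3"].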
log.debug("updatePowerPCBootList: updated boot_list = %s", boot_list)
update_value = "boot-device=%s" % " ".join(boot_list)
rc = iutil.execWithRedirect("nvram", ["--update-config", update_value])
if rc:
log.error("FAIL: nvram --update-config %s", update_value)
else:
log.info("Updated PPC boot list with the command: nvram --update-config %s", update_value)
class IPSeriesGRUB2(GRUB2):
# GRUB2 sets /boot bootable and not the PReP partition. This causes the Open Firmware BIOS not
# to present the disk as a bootable target. If stage2_bootable is False, then the PReP partition
# will be marked bootable. Confusing.
stage2_bootable = False
terminal_type = "ofconsole"
#
# installation
#
def install(self, args=None):
if flags.leavebootorder:
log.info("leavebootorder passed as an option. Will not update the NVRAM boot list.")
else:
self.updateNVRAMBootList()
super(IPSeriesGRUB2, self).install(args=["--no-nvram"])
    # This will update the PowerPC's (ppc) bios boot device order list
def updateNVRAMBootList(self):
if not can_touch_runtime_system("updateNVRAMBootList", touch_live=True):
return
log.debug("updateNVRAMBootList: self.stage1_device.path = %s", self.stage1_device.path)
buf = iutil.execWithCapture("nvram",
["--print-config=boot-device"])
if len(buf) == 0:
log.error("Failed to determine nvram boot device")
return
boot_list = buf.strip().replace("\"", "").split()
log.debug("updateNVRAMBootList: boot_list = %s", boot_list)
buf = iutil.execWithCapture("ofpathname",
[self.stage1_device.path])
if len(buf) > 0:
boot_disk = buf.strip()
else:
log.error("Failed to translate boot path into device name")
return
# Place the disk containing the PReP partition first.
        # Remove all other occurrences of it.
boot_list = [boot_disk] + [x for x in boot_list if x != boot_disk]
update_value = "boot-device=%s" % " ".join(boot_list)
rc = iutil.execWithRedirect("nvram", ["--update-config", update_value])
if rc:
log.error("Failed to update new boot device order")
#
# In addition to the normal grub configuration variable, add one more to set the size of the
# console's window to a standard 80x24
#
def write_defaults(self):
super(IPSeriesGRUB2, self).write_defaults()
defaults_file = "%s%s" % (iutil.getSysroot(), self.defaults_file)
defaults = open(defaults_file, "a+")
# The terminfo's X and Y size, and output location could change in the future
defaults.write("GRUB_TERMINFO=\"terminfo -g 80x24 console\"\n")
defaults.close()
class MacYaboot(Yaboot):
prog = "mkofboot"
can_dual_boot = True
#
# configuration
#
def write_config_variant_header(self, config):
try:
mac_os = [i for i in self.chain_images if i.label][0]
except IndexError:
pass
else:
config.write("macosx=%s\n" % mac_os.device.path)
config.write("magicboot=/usr/lib/yaboot/ofboot\n")
class ZIPL(BootLoader):
name = "ZIPL"
config_file = "/etc/zipl.conf"
packages = ["s390utils-base"]
# stage2 device requirements
stage2_device_types = ["partition"]
@property
def stage2_format_types(self):
if productName.startswith("Red Hat "):
return ["xfs", "ext4", "ext3", "ext2"]
else:
return ["ext4", "ext3", "ext2", "xfs"]
image_label_attr = "short_label"
preserve_args = ["cio_ignore", "rd.znet", "rd_ZNET"]
def __init__(self):
super(ZIPL, self).__init__()
self.stage1_name = None
#
# configuration
#
@property
def boot_dir(self):
return "/boot"
def write_config_images(self, config):
for image in self.images:
if "kdump" in (image.initrd or image.kernel):
# no need to create bootloader entries for kdump
continue
args = Arguments()
if image.initrd:
initrd_line = "\tramdisk=%s/%s\n" % (self.boot_dir,
image.initrd)
else:
initrd_line = ""
args.add("root=%s" % image.device.fstabSpec)
args.update(self.boot_args)
if image.device.type == "btrfs subvolume":
args.update(["rootflags=subvol=%s" % image.device.name])
log.info("bootloader.py: used boot args: %s ", args)
stanza = ("[%(label)s]\n"
"\timage=%(boot_dir)s/%(kernel)s\n"
"%(initrd_line)s"
"\tparameters=\"%(args)s\"\n"
% {"label": self.image_label(image),
"kernel": image.kernel, "initrd_line": initrd_line,
"args": args,
"boot_dir": self.boot_dir})
config.write(stanza)
def write_config_header(self, config):
header = ("[defaultboot]\n"
"defaultauto\n"
"prompt=1\n"
"timeout=%(timeout)d\n"
"default=%(default)s\n"
"target=/boot\n"
% {"timeout": self.timeout,
"default": self.image_label(self.default)})
config.write(header)
#
# installation
#
def install(self, args=None):
buf = iutil.execWithCapture("zipl", [], root=iutil.getSysroot())
for line in buf.splitlines():
if line.startswith("Preparing boot device: "):
# Output here may look like:
# Preparing boot device: dasdb (0200).
# Preparing boot device: dasdl.
# We want to extract the device name and pass that.
name = re.sub(r".+?: ", "", line)
self.stage1_name = re.sub(r"(\s\(.+\))?\.$", "", name)
# a limitation of s390x is that the kernel parameter list must not
# exceed 896 bytes; there is nothing we can do about this, so just
# catch the error and show it to the user instead of crashing
elif line.startswith("Error: The length of the parameters "):
errorHandler.cb(ZIPLError(line))
if not self.stage1_name:
raise BootLoaderError("could not find IPL device")
# do the reipl
iutil.reIPL(self.stage1_name)
class EXTLINUX(BootLoader):
name = "EXTLINUX"
_config_file = "extlinux.conf"
_config_dir = "/boot/extlinux"
# stage1 device requirements
stage1_device_types = ["disk"]
# stage2 device requirements
stage2_format_types = ["ext4", "ext3", "ext2"]
stage2_device_types = ["partition"]
stage2_bootable = True
packages = ["syslinux-extlinux"]
@property
def config_file(self):
return "%s/%s" % (self._config_dir, self._config_file)
@property
def boot_prefix(self):
""" Prefix, if any, to paths in /boot. """
if self.stage2_device.format.mountpoint == "/":
prefix = "/boot"
else:
prefix = ""
return prefix
def write_config_console(self, config):
if not self.console:
return
console_arg = "console=%s" % self.console
if self.console_options:
console_arg += ",%s" % self.console_options
self.boot_args.add(console_arg)
def write_config_images(self, config):
self.write_config_console(config)
for image in self.images:
args = Arguments()
args.update(["root=%s" % image.device.fstabSpec, "ro"])
if image.device.type == "btrfs subvolume":
args.update(["rootflags=subvol=%s" % image.device.name])
args.update(self.boot_args)
log.info("bootloader.py: used boot args: %s ", args)
stanza = ("label %(label)s (%(version)s)\n"
"\tkernel %(boot_prefix)s/%(kernel)s\n"
"\tinitrd %(boot_prefix)s/%(initrd)s\n"
"\tappend %(args)s\n\n"
% {"label": self.image_label(image),
"version": image.version,
"kernel": image.kernel,
"initrd": image.initrd,
"args": args,
"boot_prefix": self.boot_prefix})
config.write(stanza)
def write_config_header(self, config):
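        # syslinux expresses timeouts in tenths of a second, hence the
        # multiplication by 10 below.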
header = ("# extlinux.conf generated by anaconda\n\n"
"ui menu.c32\n\n"
"menu autoboot Welcome to %(productName)s. Automatic boot in # second{,s}. Press a key for options.\n"
"menu title %(productName)s Boot Options.\n"
"menu hidden\n\n"
"timeout %(timeout)d\n"
"#totaltimeout 9000\n\n"
% {"productName": productName, "timeout": self.timeout *10})
config.write(header)
if self.default is not None:
config.write("default %(default)s\n\n" % {"default" : self.image_label(self.default)})
self.write_config_password(config)
def write_config_password(self, config):
if self.password:
config.write("menu master passwd %s\n" % self.password)
config.write("menu notabmsg Press [Tab] and enter the password to edit options")
def write_config_post(self):
etc_extlinux = os.path.normpath(iutil.getSysroot() + "/etc/" + self._config_file)
if not os.access(etc_extlinux, os.R_OK):
try:
os.symlink("../boot/%s" % self._config_file, etc_extlinux)
except OSError as e:
log.warning("failed to create /etc/extlinux.conf symlink: %s", e)
def write_config(self):
super(EXTLINUX, self).write_config()
#
# installation
#
def install(self, args=None):
args = ["--install", self._config_dir]
rc = iutil.execInSysroot("extlinux", args)
if rc:
raise BootLoaderError("boot loader install failed")
# every platform that wants a bootloader needs to be in this dict
bootloader_by_platform = {platform.X86: GRUB2,
platform.EFI: EFIGRUB,
platform.MacEFI: MacEFIGRUB,
platform.PPC: GRUB2,
platform.IPSeriesPPC: IPSeriesGRUB2,
platform.NewWorldPPC: MacYaboot,
platform.S390: ZIPL,
platform.Aarch64EFI: Aarch64EFIGRUB,
platform.ARM: EXTLINUX,
platform.omapARM: EXTLINUX}
def get_bootloader():
platform_name = platform.platform.__class__.__name__
if flags.extlinux:
cls = EXTLINUX
else:
cls = bootloader_by_platform.get(platform.platform.__class__, BootLoader)
log.info("bootloader %s on %s platform", cls.__name__, platform_name)
return cls()
# anaconda-specific functions
def writeSysconfigKernel(storage, version, instClass):
# get the name of the default kernel package based on the version
kernel_basename = "vmlinuz-" + version
kernel_file = "/boot/%s" % kernel_basename
if not os.path.isfile(iutil.getSysroot() + kernel_file):
kernel_file = "/boot/efi/EFI/%s/%s" % (instClass.efi_dir, kernel_basename)
if not os.path.isfile(iutil.getSysroot() + kernel_file):
log.error("failed to recreate path to default kernel image")
return
try:
import rpm
except ImportError:
log.error("failed to import rpm python module")
return
ts = rpm.TransactionSet(iutil.getSysroot())
mi = ts.dbMatch('basenames', kernel_file)
try:
h = next(mi)
except StopIteration:
log.error("failed to get package name for default kernel")
return
kernel = h.name
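    # A sketch of the file written below (illustrative values):
    #   UPDATEDEFAULT=yes
    #   DEFAULTKERNEL=kernel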
f = open(iutil.getSysroot() + "/etc/sysconfig/kernel", "w+")
f.write("# UPDATEDEFAULT specifies if new-kernel-pkg should make\n"
"# new kernels the default\n")
# only update the default if we're setting the default to linux (#156678)
if storage.bootloader.default.device == storage.rootDevice:
f.write("UPDATEDEFAULT=yes\n")
else:
f.write("UPDATEDEFAULT=no\n")
f.write("\n")
f.write("# DEFAULTKERNEL specifies the default kernel package type\n")
f.write("DEFAULTKERNEL=%s\n" % kernel)
if storage.bootloader.trusted_boot:
f.write("# HYPERVISOR specifies the default multiboot kernel\n")
f.write("HYPERVISOR=/boot/tboot.gz\n")
f.write("HYPERVISOR_ARGS=logging=vga,serial,memory\n")
f.close()
def writeBootLoaderFinal(storage, payload, instClass, ksdata):
""" Do the final write of the bootloader. """
# set up dracut/fips boot args
# XXX FIXME: do this from elsewhere?
storage.bootloader.set_boot_args(storage=storage,
payload=payload)
try:
storage.bootloader.write()
except BootLoaderError as e:
log.error("bootloader.write failed: %s", e)
if errorHandler.cb(e) == ERROR_RAISE:
raise
def writeBootLoader(storage, payload, instClass, ksdata):
""" Write bootloader configuration to disk.
When we get here, the bootloader will already have a default linux
image. We only have to add images for the non-default kernels and
adjust the default to reflect whatever the default variant is.
"""
if not storage.bootloader.skip_bootloader:
stage1_device = storage.bootloader.stage1_device
log.info("boot loader stage1 target device is %s", stage1_device.name)
stage2_device = storage.bootloader.stage2_device
log.info("boot loader stage2 target device is %s", stage2_device.name)
# Bridge storage EFI configuration to bootloader
if hasattr(storage.bootloader, 'efi_dir'):
storage.bootloader.efi_dir = instClass.efi_dir
if isinstance(payload, RPMOSTreePayload):
if storage.bootloader.skip_bootloader:
log.info("skipping boot loader install per user request")
return
writeBootLoaderFinal(storage, payload, instClass, ksdata)
return
# get a list of installed kernel packages
# add whatever rescue kernels we can find to the end
kernel_versions = list(payload.kernelVersionList)
rescue_versions = glob(iutil.getSysroot() + "/boot/vmlinuz-*-rescue-*")
rescue_versions += glob(iutil.getSysroot() + "/boot/efi/EFI/%s/vmlinuz-*-rescue-*" % instClass.efi_dir)
kernel_versions += (f.split("/")[-1][8:] for f in rescue_versions)
if not kernel_versions:
log.warning("no kernel was installed -- boot loader config unchanged")
return
# all the linux images' labels are based on the default image's
base_label = productName
base_short_label = "linux"
# The first one is the default kernel. Update the bootloader's default
# entry to reflect the details of the default kernel.
version = kernel_versions.pop(0)
default_image = LinuxBootLoaderImage(device=storage.rootDevice,
version=version,
label=base_label,
short=base_short_label)
storage.bootloader.add_image(default_image)
storage.bootloader.default = default_image
# write out /etc/sysconfig/kernel
writeSysconfigKernel(storage, version, instClass)
if storage.bootloader.skip_bootloader:
log.info("skipping boot loader install per user request")
return
# now add an image for each of the other kernels
for version in kernel_versions:
label = "%s-%s" % (base_label, version)
short = "%s-%s" % (base_short_label, version)
if storage.bootloader.trusted_boot:
image = TbootLinuxBootLoaderImage(
device=storage.rootDevice,
version=version,
label=label, short=short)
else:
image = LinuxBootLoaderImage(device=storage.rootDevice,
version=version,
label=label, short=short)
storage.bootloader.add_image(image)
writeBootLoaderFinal(storage, payload, instClass, ksdata)
| gpl-2.0 | 3,438,288,245,029,872,000 | 35.304789 | 165 | 0.563378 | false |
benediktkr/lokun-record | record/sec.py | 1 | 2077 | from random import randint
def compare1toN(str1, strl):
return any([compare(str1, a) for a in strl])
def compare(str1, str2):
return compare_const2(str1, str2)
def compare_const2(str1, str2):
if len(str1) != len(str2):
return False
result = 0
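    # XOR yields non-zero bits wherever the characters differ; OR-ing them
    # into `result` lets the loop visit every character, so the runtime does
    # not depend on where the first mismatch occurs.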
for x, y in zip(str1, str2):
result |= ord(x) ^ ord(y)
return result == 0
def compare_const(str1, str2):
"""Constant-time string comparasion, to avoid timing attacks.
Leaks the lenght, but that's ok since we are always comparing
hashes, and the only information the adversary has to gain by
the length of a hash as a better guess at what hashing algorithm
is being used. At which point, i'd like to point out Shannons
Maxim."""
length = min(len(str1), len(str2))
ret = True
for i in xrange(length):
if str1[i] != str2[i]:
ret = False
if len(str1) != len(str2):
ret = False
return ret
def compare_noleak(str1, str2):
"""A non-random version that doesn't leak the length, made for Baldur :)
    str1 should be the user-supplied string, and str2 the string you compare
    against.
    NOTE: Pads with 0x00, only intended to compare strings, not byte-lists."""
    l1 = len(str1)
    l2 = len(str2)
    if l1 > l2:
        # If the user string is longer than the source string, pad.
        delta = l1 - l2
        str2 += "\x00"*delta
    elif l2 > l1:
        # Pad the user string as well; the loop below must cover the longer
        # of the two strings, otherwise any prefix of str2 would wrongly
        # compare equal.
        str1 += "\x00"*(l2 - l1)
    ret = True
    for i in xrange(max(l1, l2)):
        if str1[i] != str2[i]:
            ret = False
    return ret
def compare_rnd(str1, str2):
"""Constant-time string comparasion, to avoid timing attacks.
Start in a random char of the string.
Doesn't leak the length, since the starting point (and thus the
breaking point) as randomly chosen."""
length = min(len(str1), len(str2))
start = randint(0, length-1)
for i in xrange(length):
j = (start+i) % length
if str1[j] != str2[j]:
return False
if len(str1) != len(str2):
return False
return True
| agpl-3.0 | -8,581,726,397,058,848,000 | 24.329268 | 77 | 0.601348 | false |
dcrosta/mongo-disco | app/job.py | 1 | 2372 | #!/usr/bin/env python
# encoding: utf-8
'''
File: DiscoJob.py
Author: NYU ITP team
Description: Disco Job Wrapper
'''
from disco.core import Job, result_iterator
from disco.worker.classic.worker import Params
from disco.worker.classic.modutil import locate_modules, find_modules
from mongodb_io import mongodb_output_stream,mongodb_input_stream
from splitter import calculate_splits as do_split
class DiscoJob():
def __init__(self,config,map,reduce):
import config_util
self.config = config_util.config
        # if the user doesn't specify output, print to stdout
if not config.get('output_uri') and not config.get('print_to_stdout'):
config['print_to_stdout'] = True
for item in config:
self.config[item] = config[item]
self.map = map
self.reduce = reduce
self.job = Job()
self.params = Params()
for key in self.config:
self.params.__dict__[key] = self.config[key]
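    # A minimal usage sketch (hypothetical config keys and callables):
    #   job = DiscoJob({'input_uri': 'mongodb://localhost/db.collection',
    #                   'print_to_stdout': True}, map=my_map, reduce=my_reduce)
    #   job.run()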
def run(self):
if self.config['print_to_stdout']:
self.job.run(input = do_split(self.config),
map = self.map,
reduce = self.reduce,
params = self.params,
map_input_stream = mongodb_input_stream,
required_modules= ['mongodb_io',
'mongodb_input',
'config_util',
'mongo_util',
'mongodb_output'])
for key, value in result_iterator(self.job.wait(show=True)):
print key, value
else:
self.job.run(input = do_split(self.config),
map = self.map,
reduce = self.reduce,
params = self.params,
map_input_stream = mongodb_input_stream,
reduce_output_stream = mongodb_output_stream,
required_modules= ['mongodb_io',
'mongodb_input',
'config_util',
'mongo_util',
'mongodb_output'])
if self.config.get("job_wait",False):
self.job.wait(show=True)
| apache-2.0 | 1,969,891,187,076,187,000 | 32.885714 | 78 | 0.49747 | false |
ropable/resource_tracking | tracking/migrations/0004_auto_20200102_0914.py | 1 | 1126 | # Generated by Django 2.1.11 on 2020-01-02 01:14
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tracking', '0003_auto_20190308_1114'),
]
operations = [
migrations.AlterField(
model_name='device',
name='symbol',
field=models.CharField(choices=[('2 wheel drive', '2-Wheel Drive'), ('4 wheel drive passenger', '4-Wheel Drive Passenger'), ('4 wheel drive ute', '4-Wheel Drive (Ute)'), ('light unit', 'Light Unit'), ('heavy duty', 'Heavy Duty'), ('gang truck', 'Gang Truck'), ('snorkel', 'Snorkel'), ('dozer', 'Dozer'), ('grader', 'Grader'), ('loader', 'Loader'), ('tender', 'Tender'), ('float', 'Float'), ('fixed wing aircraft', 'Waterbomber'), ('rotary aircraft', 'Rotary'), ('spotter aircraft', 'Spotter'), ('helitac', 'Helitac'), ('rescue helicopter', 'Rescue Helicopter'), ('aviation fuel truck', 'Aviation Fuel Truck'), (None, ''), ('comms bus', 'Communications Bus'), ('boat', 'Boat'), ('person', 'Person'), ('other', 'Other'), ('unknown', 'Unknown')], default='other', max_length=32),
),
]
| bsd-3-clause | -3,249,550,416,547,287,600 | 61.555556 | 788 | 0.60746 | false |
NathanW2/QGIS | tests/src/python/test_qgsfieldformatters.py | 1 | 13493 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for field formatters.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Matthias Kuhn'
__date__ = '05/12/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis # NOQA
from qgis.core import (QgsFeature, QgsProject, QgsRelation, QgsVectorLayer,
QgsValueMapFieldFormatter, QgsValueRelationFieldFormatter,
QgsRelationReferenceFieldFormatter, QgsRangeFieldFormatter, QgsSettings)
from qgis.testing import start_app, unittest
start_app()
class TestQgsValueMapFieldFormatter(unittest.TestCase):
VALUEMAP_NULL_TEXT = "{2839923C-8B7D-419E-B84B-CA2FE9B80EC7}"
def test_representValue(self):
QgsSettings().setValue("qgis/nullValue", "NULL")
layer = QgsVectorLayer("none?field=number1:integer&field=number2:double&field=text1:string&field=number3:integer&field=number4:double&field=text2:string",
"layer", "memory")
self.assertTrue(layer.isValid())
QgsProject.instance().addMapLayer(layer)
f = QgsFeature()
f.setAttributes([2, 2.5, 'NULL', None, None, None])
layer.dataProvider().addFeatures([f])
fieldFormatter = QgsValueMapFieldFormatter()
# Tests with different value types occurring in the value map
config = {'map': {'two': '2', 'twoandhalf': '2.5', 'NULL text': 'NULL',
'nothing': self.VALUEMAP_NULL_TEXT}}
self.assertEqual(fieldFormatter.representValue(layer, 0, config, None, 2), 'two')
self.assertEqual(fieldFormatter.representValue(layer, 1, config, None, 2.5), 'twoandhalf')
self.assertEqual(fieldFormatter.representValue(layer, 2, config, None, 'NULL'), 'NULL text')
# Tests with null values of different types, if value map contains null
self.assertEqual(fieldFormatter.representValue(layer, 3, config, None, None), 'nothing')
self.assertEqual(fieldFormatter.representValue(layer, 4, config, None, None), 'nothing')
self.assertEqual(fieldFormatter.representValue(layer, 5, config, None, None), 'nothing')
# Tests with fallback display for different value types
config = {}
self.assertEqual(fieldFormatter.representValue(layer, 0, config, None, 2), '(2)')
self.assertEqual(fieldFormatter.representValue(layer, 1, config, None, 2.5), '(2.50000)')
self.assertEqual(fieldFormatter.representValue(layer, 2, config, None, 'NULL'), '(NULL)')
# Tests with fallback display for null in different types of fields
self.assertEqual(fieldFormatter.representValue(layer, 3, config, None, None), '(NULL)')
self.assertEqual(fieldFormatter.representValue(layer, 4, config, None, None), '(NULL)')
self.assertEqual(fieldFormatter.representValue(layer, 5, config, None, None), '(NULL)')
QgsProject.instance().removeAllMapLayers()
class TestQgsValueRelationFieldFormatter(unittest.TestCase):
def test_representValue(self):
first_layer = QgsVectorLayer("none?field=foreign_key:integer",
"first_layer", "memory")
self.assertTrue(first_layer.isValid())
second_layer = QgsVectorLayer("none?field=pkid:integer&field=decoded:string",
"second_layer", "memory")
self.assertTrue(second_layer.isValid())
QgsProject.instance().addMapLayer(second_layer)
f = QgsFeature()
f.setAttributes([123])
first_layer.dataProvider().addFeatures([f])
f = QgsFeature()
f.setAttributes([123, 'decoded_val'])
second_layer.dataProvider().addFeatures([f])
fieldFormatter = QgsValueRelationFieldFormatter()
# Everything valid
config = {'Layer': second_layer.id(), 'Key': 'pkid', 'Value': 'decoded'}
self.assertEqual(fieldFormatter.representValue(first_layer, 0, config, None, '123'), 'decoded_val')
        # Could not find a match in the foreign layer
config = {'Layer': second_layer.id(), 'Key': 'pkid', 'Value': 'decoded'}
self.assertEqual(fieldFormatter.representValue(first_layer, 0, config, None, '456'), '(456)')
# Missing Layer
config = {'Key': 'pkid', 'Value': 'decoded'}
self.assertEqual(fieldFormatter.representValue(first_layer, 0, config, None, '456'), '(456)')
# Invalid Layer
config = {'Layer': 'invalid', 'Key': 'pkid', 'Value': 'decoded'}
self.assertEqual(fieldFormatter.representValue(first_layer, 0, config, None, '456'), '(456)')
# Invalid Key
config = {'Layer': second_layer.id(), 'Key': 'invalid', 'Value': 'decoded'}
self.assertEqual(fieldFormatter.representValue(first_layer, 0, config, None, '456'), '(456)')
# Invalid Value
config = {'Layer': second_layer.id(), 'Key': 'pkid', 'Value': 'invalid'}
self.assertEqual(fieldFormatter.representValue(first_layer, 0, config, None, '456'), '(456)')
QgsProject.instance().removeMapLayer(second_layer.id())
def test_valueToStringList(self):
def _test(a, b):
self.assertEqual(QgsValueRelationFieldFormatter.valueToStringList(a), b)
_test([1, 2, 3], ["1", "2", "3"])
_test("{1,2,3}", ["1", "2", "3"])
_test(['1', '2', '3'], ["1", "2", "3"])
_test('not an array', ['not an array'])
class TestQgsRelationReferenceFieldFormatter(unittest.TestCase):
def test_representValue(self):
first_layer = QgsVectorLayer("none?field=foreign_key:integer",
"first_layer", "memory")
self.assertTrue(first_layer.isValid())
second_layer = QgsVectorLayer("none?field=pkid:integer&field=decoded:string",
"second_layer", "memory")
self.assertTrue(second_layer.isValid())
QgsProject.instance().addMapLayers([first_layer, second_layer])
f = QgsFeature()
f.setAttributes([123])
first_layer.dataProvider().addFeatures([f])
f = QgsFeature()
f.setAttributes([123, 'decoded_val'])
second_layer.dataProvider().addFeatures([f])
relMgr = QgsProject.instance().relationManager()
fieldFormatter = QgsRelationReferenceFieldFormatter()
rel = QgsRelation()
rel.setId('rel1')
rel.setName('Relation Number One')
rel.setReferencingLayer(first_layer.id())
rel.setReferencedLayer(second_layer.id())
rel.addFieldPair('foreign_key', 'pkid')
self.assertTrue(rel.isValid())
relMgr.addRelation(rel)
# Everything valid
config = {'Relation': rel.id()}
second_layer.setDisplayExpression('decoded')
self.assertEqual(fieldFormatter.representValue(first_layer, 0, config, None, '123'), 'decoded_val')
        # Value with no match in the foreign layer falls back to the raw value
config = {'Relation': rel.id()}
second_layer.setDisplayExpression('decoded')
self.assertEqual(fieldFormatter.representValue(first_layer, 0, config, None, '456'), '456')
# Invalid relation id
config = {'Relation': 'invalid'}
second_layer.setDisplayExpression('decoded')
self.assertEqual(fieldFormatter.representValue(first_layer, 0, config, None, '123'), '123')
# No display expression
config = {'Relation': rel.id()}
second_layer.setDisplayExpression(None)
self.assertEqual(fieldFormatter.representValue(first_layer, 0, config, None, '123'), '123')
# Invalid display expression
config = {'Relation': rel.id()}
second_layer.setDisplayExpression('invalid +')
self.assertEqual(fieldFormatter.representValue(first_layer, 0, config, None, '123'), '123')
# Missing relation
config = {}
second_layer.setDisplayExpression('decoded')
self.assertEqual(fieldFormatter.representValue(first_layer, 0, config, None, '123'), '123')
# Inconsistent layer provided to representValue()
config = {'Relation': rel.id()}
second_layer.setDisplayExpression('decoded')
self.assertEqual(fieldFormatter.representValue(second_layer, 0, config, None, '123'), '123')
# Inconsistent idx provided to representValue()
config = {'Relation': rel.id()}
second_layer.setDisplayExpression('decoded')
self.assertEqual(fieldFormatter.representValue(first_layer, 1, config, None, '123'), '123')
# Invalid relation
rel = QgsRelation()
rel.setId('rel2')
rel.setName('Relation Number Two')
rel.setReferencingLayer(first_layer.id())
rel.addFieldPair('foreign_key', 'pkid')
self.assertFalse(rel.isValid())
relMgr.addRelation(rel)
config = {'Relation': rel.id()}
second_layer.setDisplayExpression('decoded')
self.assertEqual(fieldFormatter.representValue(first_layer, 0, config, None, '123'), '123')
QgsProject.instance().removeAllMapLayers()
class TestQgsRangeFieldFormatter(unittest.TestCase):
def test_representValue(self):
layer = QgsVectorLayer("point?field=int:integer&field=double:double",
"layer", "memory")
self.assertTrue(layer.isValid())
QgsProject.instance().addMapLayers([layer])
fieldFormatter = QgsRangeFieldFormatter()
# Precision is ignored for integers
self.assertEqual(fieldFormatter.representValue(layer, 0, {'Precision': 1}, None, '123'), '123')
self.assertEqual(fieldFormatter.representValue(layer, 0, {'Precision': 1}, None, '123000'), '123000')
self.assertEqual(fieldFormatter.representValue(layer, 0, {'Precision': 1}, None, None), 'NULL')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 1}, None, None), 'NULL')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 1}, None, '123'), '123.0')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, None), 'NULL')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '123000'), '123000.00')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '0'), '0.00')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '123'), '123.00')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '0.123'), '0.12')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '0.127'), '0.13')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 3}, None, '0'), '0.000')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 3}, None, '0.127'), '0.127')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 3}, None, '1.27e-1'), '0.127')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '-123'), '-123.00')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '-0.123'), '-0.12')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '-0.127'), '-0.13')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 3}, None, '-0.127'), '-0.127')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 3}, None, '-1.27e-1'), '-0.127')
QgsSettings().setValue("locale/overrideFlag", True)
QgsSettings().setValue("locale/userLocale", 'it')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, None), 'NULL')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '123000'), '123000,00')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '0'), '0,00')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '123'), '123,00')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '0.123'), '0,12')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '0.127'), '0,13')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 3}, None, '0'), '0,000')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 3}, None, '0.127'), '0,127')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 3}, None, '1.27e-1'), '0,127')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '-123'), '-123,00')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '-0.123'), '-0,12')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 2}, None, '-0.127'), '-0,13')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 3}, None, '-0.127'), '-0,127')
self.assertEqual(fieldFormatter.representValue(layer, 1, {'Precision': 3}, None, '-1.27e-1'), '-0,127')
QgsProject.instance().removeAllMapLayers()
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 9,146,336,332,509,080,000 | 49.347015 | 162 | 0.646261 | false |
flacjacket/sympy | sympy/core/tests/test_expr.py | 1 | 48018 |
from __future__ import division
from sympy import (Add, Basic, S, Symbol, Wild, Float, Integer, Rational, I,
sin, cos, tan, exp, log, nan, oo, sqrt, symbols, Integral, sympify,
WildFunction, Poly, Function, Derivative, Number, pi, NumberSymbol, zoo,
Piecewise, Mul, Pow, nsimplify, ratsimp, trigsimp, radsimp, powsimp,
simplify, together, collect, factorial, apart, combsimp, factor, refine,
cancel, Tuple, default_sort_key, DiracDelta, gamma, Dummy, Sum, E,
exp_polar, Lambda)
from sympy.core.function import AppliedUndef
from sympy.abc import a, b, c, d, e, n, t, u, x, y, z
from sympy.physics.secondquant import FockState
from sympy.physics.units import meter
from sympy.utilities.pytest import raises, XFAIL
class DummyNumber(object):
"""
Minimal implementation of a number that works with SymPy.
If one has a Number class (e.g. Sage Integer, or some other custom class)
that one wants to work well with SymPy, one has to implement at least the
methods of this class DummyNumber, resp. its subclasses I5 and F1_1.
Basically, one just needs to implement either __int__() or __float__() and
then one needs to make sure that the class works with Python integers and
with itself.
"""
def __radd__(self, a):
if isinstance(a, (int, float)):
return a + self.number
return NotImplemented
def __truediv__(a, b):
return a.__div__(b)
def __rtruediv__(a, b):
return a.__rdiv__(b)
def __add__(self, a):
if isinstance(a, (int, float, DummyNumber)):
return self.number + a
return NotImplemented
def __rsub__(self, a):
if isinstance(a, (int, float)):
return a - self.number
return NotImplemented
def __sub__(self, a):
if isinstance(a, (int, float, DummyNumber)):
return self.number - a
return NotImplemented
def __rmul__(self, a):
if isinstance(a, (int, float)):
return a * self.number
return NotImplemented
def __mul__(self, a):
if isinstance(a, (int, float, DummyNumber)):
return self.number * a
return NotImplemented
def __rdiv__(self, a):
if isinstance(a, (int, float)):
return a / self.number
return NotImplemented
def __div__(self, a):
if isinstance(a, (int, float, DummyNumber)):
return self.number / a
return NotImplemented
def __rpow__(self, a):
if isinstance(a, (int, float)):
return a ** self.number
return NotImplemented
def __pow__(self, a):
if isinstance(a, (int, float, DummyNumber)):
return self.number ** a
return NotImplemented
def __pos__(self):
return self.number
def __neg__(self):
return - self.number
class I5(DummyNumber):
number = 5
def __int__(self):
return self.number
class F1_1(DummyNumber):
number = 1.1
def __float__(self):
return self.number
i5 = I5()
f1_1 = F1_1()
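# Illustrative note: SymPy coerces these through __int__/__float__ when they
# meet sympy objects, so e.g. ``x + i5`` evaluates to ``x + 5``, while pure
# Python arithmetic such as ``f1_1 + 1`` stays a plain float (2.1).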
# basic sympy objects
basic_objs = [
Rational(2),
Float("1.3"),
x,
y,
pow(x,y)*y,
]
# all supported objects
all_objs = basic_objs + [
5,
5.5,
i5,
f1_1
]
def dotest(s):
for x in all_objs:
for y in all_objs:
s(x,y)
return True
def test_basic():
def j(a,b):
x = a
x = +a
x = -a
x = a+b
x = a-b
x = a*b
x = a/b
x = a**b
assert dotest(j)
def test_ibasic():
def s(a,b):
x = a
x += b
x = a
x -= b
x = a
x *= b
x = a
x /= b
assert dotest(s)
def test_relational():
assert (pi < 3) == False
assert (pi <= 3) == False
assert (pi > 3) == True
assert (pi >= 3) == True
assert (-pi < 3) == True
assert (-pi <= 3) == True
assert (-pi > 3) == False
assert (-pi >= 3) == False
assert (x - 2 < x - 3) == False
def test_relational_noncommutative():
from sympy import Lt, Gt, Le, Ge
A, B = symbols('A,B', commutative=False)
assert (A < B) == Lt(A, B)
assert (A <= B) == Le(A, B)
assert (A > B) == Gt(A, B)
assert (A >= B) == Ge(A, B)
def test_basic_nostr():
for obj in basic_objs:
raises(TypeError, lambda: obj + '1')
raises(TypeError, lambda: obj - '1')
if obj == 2:
if hasattr(int, '__index__'): # Python 2.5+ (PEP 357)
assert obj * '1' == '11'
else:
raises(TypeError, lambda: obj * '1')
raises(TypeError, lambda: obj / '1')
raises(TypeError, lambda: obj ** '1')
def test_leadterm():
assert (3+2*x**(log(3)/log(2)-1)).leadterm(x) == (3,0)
assert (1/x**2+1+x+x**2).leadterm(x)[1] == -2
assert (1/x+1+x+x**2).leadterm(x)[1] == -1
assert (x**2+1/x).leadterm(x)[1] == -1
assert (1+x**2).leadterm(x)[1] == 0
assert (x+1).leadterm(x)[1] == 0
assert (x+x**2).leadterm(x)[1] == 1
assert (x**2).leadterm(x)[1] == 2
def test_as_leading_term():
assert (3+2*x**(log(3)/log(2)-1)).as_leading_term(x) == 3
assert (1/x**2+1+x+x**2).as_leading_term(x) == 1/x**2
assert (1/x+1+x+x**2).as_leading_term(x) == 1/x
assert (x**2+1/x).as_leading_term(x) == 1/x
assert (1+x**2).as_leading_term(x) == 1
assert (x+1).as_leading_term(x) == 1
assert (x+x**2).as_leading_term(x) == x
assert (x**2).as_leading_term(x) == x**2
assert (x + oo).as_leading_term(x) == oo
def test_leadterm2():
assert (x*cos(1)*cos(1 + sin(1)) + sin(1 + sin(1))).leadterm(x) == \
(sin(1 + sin(1)), 0)
def test_leadterm3():
assert (y+z+x).leadterm(x) == (y+z, 0)
def test_as_leading_term2():
assert (x*cos(1)*cos(1 + sin(1)) + sin(1 + sin(1))).as_leading_term(x) == \
sin(1 + sin(1))
def test_as_leading_term3():
assert (2+pi+x).as_leading_term(x) == 2 + pi
assert (2*x+pi*x+x**2).as_leading_term(x) == (2+pi)*x
def test_as_leading_term_stub():
class foo(Function):
pass
assert foo(1/x).as_leading_term(x) == foo(1/x)
assert foo(1).as_leading_term(x) == foo(1)
raises(NotImplementedError, lambda: foo(x).as_leading_term(x))
def test_atoms():
assert sorted(list(x.atoms())) == [x]
assert sorted(list((1+x).atoms())) == sorted([1, x])
assert sorted(list((1+2*cos(x)).atoms(Symbol))) == [x]
assert sorted(list((1+2*cos(x)).atoms(Symbol,Number))) == sorted([1, 2, x])
assert sorted(list((2*(x**(y**x))).atoms())) == sorted([2, x, y])
assert sorted(list(Rational(1,2).atoms())) == [S.Half]
assert sorted(list(Rational(1,2).atoms(Symbol))) == []
assert sorted(list(sin(oo).atoms(oo))) == [oo]
assert sorted(list(Poly(0, x).atoms())) == [S.Zero]
assert sorted(list(Poly(1, x).atoms())) == [S.One]
assert sorted(list(Poly(x, x).atoms())) == [x]
assert sorted(list(Poly(x, x, y).atoms())) == [x]
assert sorted(list(Poly(x + y, x, y).atoms())) == sorted([x, y])
assert sorted(list(Poly(x + y, x, y, z).atoms())) == sorted([x, y])
assert sorted(list(Poly(x + y*t, x, y, z).atoms())) == sorted([t, x, y])
assert list((I*pi).atoms(NumberSymbol)) == [pi]
assert sorted((I*pi).atoms(NumberSymbol, I)) == \
sorted((I*pi).atoms(I,NumberSymbol)) == [pi, I]
assert exp(exp(x)).atoms(exp) == set([exp(exp(x)), exp(x)])
assert (1 + x*(2 + y)+exp(3 + z)).atoms(Add) == set(
[1 + x*(2 + y)+exp(3 + z),
2 + y,
3 + z])
# issue 3033
f = Function('f')
e = (f(x) + sin(x) + 2)
assert e.atoms(AppliedUndef) == \
set([f(x)])
assert e.atoms(AppliedUndef, Function) == \
set([f(x), sin(x)])
assert e.atoms(Function) == \
set([f(x), sin(x)])
assert e.atoms(AppliedUndef, Number) == \
set([f(x), S(2)])
assert e.atoms(Function, Number) == \
set([S(2), sin(x), f(x)])
def test_is_polynomial():
k = Symbol('k', nonnegative=True, integer=True)
assert Rational(2).is_polynomial(x, y, z) == True
assert (S.Pi).is_polynomial(x, y, z) == True
assert x.is_polynomial(x) == True
assert x.is_polynomial(y) == True
assert (x**2).is_polynomial(x) == True
assert (x**2).is_polynomial(y) == True
assert (x**(-2)).is_polynomial(x) == False
assert (x**(-2)).is_polynomial(y) == True
assert (2**x).is_polynomial(x) == False
assert (2**x).is_polynomial(y) == True
assert (x**k).is_polynomial(x) == False
assert (x**k).is_polynomial(k) == False
assert (x**x).is_polynomial(x) == False
assert (k**k).is_polynomial(k) == False
assert (k**x).is_polynomial(k) == False
assert (x**(-k)).is_polynomial(x) == False
assert ((2*x)**k).is_polynomial(x) == False
assert (x**2 + 3*x - 8).is_polynomial(x) == True
assert (x**2 + 3*x - 8).is_polynomial(y) == True
assert (x**2 + 3*x - 8).is_polynomial() == True
assert sqrt(x).is_polynomial(x) == False
assert (sqrt(x)**3).is_polynomial(x) == False
assert (x**2 + 3*x*sqrt(y) - 8).is_polynomial(x) == True
assert (x**2 + 3*x*sqrt(y) - 8).is_polynomial(y) == False
assert ((x**2)*(y**2) + x*(y**2) + y*x + exp(2)).is_polynomial() == True
assert ((x**2)*(y**2) + x*(y**2) + y*x + exp(x)).is_polynomial() == False
assert ((x**2)*(y**2) + x*(y**2) + y*x + exp(2)).is_polynomial(x, y) == True
assert ((x**2)*(y**2) + x*(y**2) + y*x + exp(x)).is_polynomial(x, y) == False
def test_is_rational_function():
assert Integer(1).is_rational_function() == True
assert Integer(1).is_rational_function(x) == True
assert Rational(17,54).is_rational_function() == True
assert Rational(17,54).is_rational_function(x) == True
assert (12/x).is_rational_function() == True
assert (12/x).is_rational_function(x) == True
assert (x/y).is_rational_function() == True
assert (x/y).is_rational_function(x) == True
assert (x/y).is_rational_function(x, y) == True
assert (x**2+1/x/y).is_rational_function() == True
assert (x**2+1/x/y).is_rational_function(x) == True
assert (x**2+1/x/y).is_rational_function(x, y) == True
assert (sin(y)/x).is_rational_function() == False
assert (sin(y)/x).is_rational_function(y) == False
assert (sin(y)/x).is_rational_function(x) == True
assert (sin(y)/x).is_rational_function(x, y) == False
def test_SAGE1():
#see http://code.google.com/p/sympy/issues/detail?id=247
class MyInt:
def _sympy_(self):
return Integer(5)
m = MyInt()
e = Rational(2)*m
assert e == 10
raises(TypeError, lambda: Rational(2)*MyInt)
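# sympify() consults the _sympy_() hook before falling back to conversions
# like __int__ (exercised in test_SAGE2 below), so sympify(MyInt()) here
# would likewise yield Integer(5).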
def test_SAGE2():
class MyInt(object):
def __int__(self):
return 5
assert sympify(MyInt()) == 5
e = Rational(2)*MyInt()
assert e == 10
raises(TypeError, lambda: Rational(2)*MyInt)
def test_SAGE3():
class MySymbol:
def __rmul__(self, other):
return ('mys', other, self)
o = MySymbol()
e = x*o
assert e == ('mys', x, o)
def test_len():
e = x*y
assert len(e.args) == 2
e = x+y+z
assert len(e.args) == 3
def test_doit():
a = Integral(x**2, x)
assert isinstance(a.doit(), Integral) == False
assert isinstance(a.doit(integrals=True), Integral) == False
assert isinstance(a.doit(integrals=False), Integral) == True
assert (2*Integral(x, x)).doit() == x**2
def test_attribute_error():
raises(AttributeError, lambda: x.cos())
raises(AttributeError, lambda: x.sin())
raises(AttributeError, lambda: x.exp())
def test_args():
assert (x*y).args in ((x, y), (y, x))
assert (x+y).args in ((x, y), (y, x))
assert (x*y+1).args in ((x*y, 1), (1, x*y))
assert sin(x*y).args == (x*y,)
assert sin(x*y).args[0] == x*y
assert (x**y).args == (x,y)
assert (x**y).args[0] == x
assert (x**y).args[1] == y
def test_iter_basic_args():
assert list(sin(x*y).iter_basic_args()) == [x*y]
assert list((x**y).iter_basic_args()) == [x, y]
def test_noncommutative_expand_issue658():
A, B, C = symbols('A,B,C', commutative=False)
assert A*B - B*A != 0
assert (A*(A+B)*B).expand() == A**2*B + A*B**2
assert (A*(A+B+C)*B).expand() == A**2*B + A*B**2 + A*C*B
def test_as_numer_denom():
a, b, c = symbols('a, b, c')
assert nan.as_numer_denom() == (nan, 1)
assert oo.as_numer_denom() == (oo, 1)
assert (-oo).as_numer_denom() == (-oo, 1)
assert zoo.as_numer_denom() == (zoo, 1)
assert (-zoo).as_numer_denom() == (zoo, 1)
assert x.as_numer_denom() == (x, 1)
assert (1/x).as_numer_denom() == (1, x)
assert (x/y).as_numer_denom() == (x, y)
assert (x/2).as_numer_denom() == (x, 2)
assert (x*y/z).as_numer_denom() == (x*y, z)
assert (x/(y*z)).as_numer_denom() == (x, y*z)
assert Rational(1, 2).as_numer_denom() == (1, 2)
assert (1/y**2).as_numer_denom() == (1, y**2)
assert (x/y**2).as_numer_denom() == (x, y**2)
assert ((x**2+1)/y).as_numer_denom() == (x**2+1, y)
assert (x*(y+1)/y**7).as_numer_denom() == (x*(y+1), y**7)
assert (x**-2).as_numer_denom() == (1, x**2)
assert (a/x + b/2/x + c/3/x).as_numer_denom() == \
(6*a + 3*b + 2*c, 6*x)
assert (a/x + b/2/x + c/3/y).as_numer_denom() == \
(2*c*x + y*(6*a + 3*b), 6*x*y)
assert (a/x + b/2/x + c/.5/x).as_numer_denom() == \
(2*a + b + 4.0*c, 2*x)
# this should take no more than a few seconds
assert int(log(Add(*[Dummy()/i/x for i in xrange(1, 705)]
).as_numer_denom()[1]/x).n(4)) == 705
for i in [S.Infinity, S.NegativeInfinity, S.ComplexInfinity]:
assert (i + x/3).as_numer_denom() == \
(x + i, 3)
assert (S.Infinity + x/3 + y/4).as_numer_denom() == \
(4*x + 3*y + S.Infinity, 12)
assert (oo*x + zoo*y).as_numer_denom() == \
(zoo*y + oo*x, 1)
A, B, C = symbols('A,B,C', commutative=False)
assert (A*B*C**-1).as_numer_denom() == (A*B*C**-1, 1)
assert (A*B*C**-1/x).as_numer_denom() == (A*B*C**-1, x)
assert (C**-1*A*B).as_numer_denom() == (C**-1*A*B, 1)
assert (C**-1*A*B/x).as_numer_denom() == (C**-1*A*B, x)
assert ((A*B*C)**-1).as_numer_denom() == ((A*B*C)**-1, 1)
assert ((A*B*C)**-1/x).as_numer_denom() == ((A*B*C)**-1, x)
def test_as_independent():
assert (2*x*sin(x)+y+x).as_independent(x) == (y, x + 2*x*sin(x))
assert (2*x*sin(x)+y+x).as_independent(y) == (x + 2*x*sin(x), y)
assert (2*x*sin(x)+y+x).as_independent(x, y) == (0, y + x + 2*x*sin(x))
assert (x*sin(x)*cos(y)).as_independent(x) == (cos(y), x*sin(x))
assert (x*sin(x)*cos(y)).as_independent(y) == (x*sin(x), cos(y))
assert (x*sin(x)*cos(y)).as_independent(x, y) == (1, x*sin(x)*cos(y))
assert (sin(x)).as_independent(x) == (1, sin(x))
assert (sin(x)).as_independent(y) == (sin(x), 1)
assert (2*sin(x)).as_independent(x) == (2, sin(x))
assert (2*sin(x)).as_independent(y) == (2*sin(x), 1)
# issue 1804 = 1766b
n1, n2, n3 = symbols('n1 n2 n3', commutative=False)
assert (n1 + n1*n2).as_independent(n2) == (n1, n1*n2)
assert (n2*n1 + n1*n2).as_independent(n2) == (0, n1*n2 + n2*n1)
assert (n1*n2*n1).as_independent(n2) == (n1, n2*n1)
assert (n1*n2*n1).as_independent(n1) == (1, n1*n2*n1)
assert (3*x).as_independent(x, as_Add=True) == (0, 3*x)
assert (3*x).as_independent(x, as_Add=False) == (3, x)
assert (3+x).as_independent(x, as_Add=True) == (3, x)
assert (3+x).as_independent(x, as_Add=False) == (1, 3 + x)
# issue 2380
assert (3*x).as_independent(Symbol) == (3, x)
# issue 2549
assert (n1*x*y).as_independent(x) == (n1*y, x)
assert ((x + n1)*(x - y)).as_independent(x) == (1, (x + n1)*(x - y))
assert ((x + n1)*(x - y)).as_independent(y) == (x + n1, x - y)
assert (DiracDelta(x - n1)*DiracDelta(x - y)).as_independent(x) == (1, DiracDelta(x - n1)*DiracDelta(x - y))
assert (x*y*n1*n2*n3).as_independent(n2) == (x*y*n1, n2*n3)
assert (x*y*n1*n2*n3).as_independent(n1) == (x*y, n1*n2*n3)
assert (x*y*n1*n2*n3).as_independent(n3) == (x*y*n1*n2, n3)
assert (DiracDelta(x - n1)*DiracDelta(y - n1)*DiracDelta(x - n2)).as_independent(y) == \
(DiracDelta(x - n1), DiracDelta(y - n1)*DiracDelta(x - n2))
# issue 2685
assert (x + Integral(x, (x, 1, 2))).as_independent(x, strict=True) == \
(Integral(x, (x, 1, 2)), x)
def test_call():
# See the long history of this in issues 1927 and 2006.
# No effect as there are no callables
assert sin(x)(1) == sin(x)
assert (1+sin(x))(1) == 1+sin(x)
    # Effect in the presence of callables
l = Lambda(x, 2*x)
assert (l+x)(y) == 2*y+x
assert (x**l)(2) == x**4
# TODO UndefinedFunction does not subclass Expr
#f = Function('f')
#assert (2*f)(x) == 2*f(x)
def test_replace():
f = log(sin(x)) + tan(sin(x**2))
assert f.replace(sin, cos) == log(cos(x)) + tan(cos(x**2))
assert f.replace(sin, lambda a: sin(2*a)) == log(sin(2*x)) + tan(sin(2*x**2))
a = Wild('a')
assert f.replace(sin(a), cos(a)) == log(cos(x)) + tan(cos(x**2))
assert f.replace(sin(a), lambda a: sin(2*a)) == log(sin(2*x)) + tan(sin(2*x**2))
g = 2*sin(x**3)
assert g.replace(lambda expr: expr.is_Number, lambda expr: expr**2) == 4*sin(x**9)
assert cos(x).replace(cos, sin, map=True) == (sin(x), {cos(x): sin(x)})
assert sin(x).replace(cos, sin) == sin(x)
assert (y*sin(x)).replace(sin, lambda expr: sin(expr)/y) == sin(x)
def test_find():
expr = (x + y + 2 + sin(3*x))
assert expr.find(lambda u: u.is_Integer) == set([S(2), S(3)])
assert expr.find(lambda u: u.is_Symbol) == set([x, y])
assert expr.find(lambda u: u.is_Integer, group=True) == {S(2): 1, S(3): 1}
assert expr.find(lambda u: u.is_Symbol, group=True) == {x: 2, y: 1}
assert expr.find(Integer) == set([S(2), S(3)])
assert expr.find(Symbol) == set([x, y])
assert expr.find(Integer, group=True) == {S(2): 1, S(3): 1}
assert expr.find(Symbol, group=True) == {x: 2, y: 1}
a = Wild('a')
expr = sin(sin(x)) + sin(x) + cos(x) + x
assert expr.find(lambda u: type(u) is sin) == set([sin(x), sin(sin(x))])
assert expr.find(lambda u: type(u) is sin, group=True) == {sin(x): 2, sin(sin(x)): 1}
assert expr.find(sin(a)) == set([sin(x), sin(sin(x))])
assert expr.find(sin(a), group=True) == {sin(x): 2, sin(sin(x)): 1}
assert expr.find(sin) == set([sin(x), sin(sin(x))])
assert expr.find(sin, group=True) == {sin(x): 2, sin(sin(x)): 1}
def test_count():
expr = (x + y + 2 + sin(3*x))
assert expr.count(lambda u: u.is_Integer) == 2
assert expr.count(lambda u: u.is_Symbol) == 3
assert expr.count(Integer) == 2
assert expr.count(Symbol) == 3
assert expr.count(2) == 1
a = Wild('a')
assert expr.count(sin) == 1
assert expr.count(sin(a)) == 1
assert expr.count(lambda u: type(u) is sin) == 1
def test_has_basics():
f = Function('f')
g = Function('g')
p = Wild('p')
assert sin(x).has(x)
assert sin(x).has(sin)
assert not sin(x).has(y)
assert not sin(x).has(cos)
assert f(x).has(x)
assert f(x).has(f)
assert not f(x).has(y)
assert not f(x).has(g)
assert f(x).diff(x).has(x)
assert f(x).diff(x).has(f)
assert f(x).diff(x).has(Derivative)
assert not f(x).diff(x).has(y)
assert not f(x).diff(x).has(g)
assert not f(x).diff(x).has(sin)
assert (x**2).has(Symbol)
assert not (x**2).has(Wild)
assert (2*p).has(Wild)
assert not x.has()
def test_has_multiple():
f = x**2*y + sin(2**t + log(z))
assert f.has(x)
assert f.has(y)
assert f.has(z)
assert f.has(t)
assert not f.has(u)
assert f.has(x, y, z, t)
assert f.has(x, y, z, t, u)
i = Integer(4400)
assert not i.has(x)
assert (i*x**i).has(x)
assert not (i*y**i).has(x)
assert (i*y**i).has(x, y)
assert not (i*y**i).has(x, z)
def test_has_piecewise():
f = (x*y + 3/y)**(3 + 2)
g = Function('g')
h = Function('h')
p = Piecewise((g(x), x < -1), (1, x <= 1), (f, True))
assert p.has(x)
assert p.has(y)
assert not p.has(z)
assert p.has(1)
assert p.has(3)
assert not p.has(4)
assert p.has(f)
assert p.has(g)
assert not p.has(h)
def test_has_iterative():
A, B, C = symbols('A,B,C', commutative=False)
f = x*gamma(x)*sin(x)*exp(x*y)*A*B*C*cos(x*A*B)
assert f.has(x)
assert f.has(x*y)
assert f.has(x*sin(x))
assert not f.has(x*sin(y))
assert f.has(x*A)
assert f.has(x*A*B)
assert not f.has(x*A*C)
assert f.has(x*A*B*C)
assert not f.has(x*A*C*B)
assert f.has(x*sin(x)*A*B*C)
assert not f.has(x*sin(x)*A*C*B)
assert not f.has(x*sin(y)*A*B*C)
assert f.has(x*gamma(x))
assert not f.has(x + sin(x))
assert (x & y & z).has(x & z)
def test_has_integrals():
f = Integral(x**2 + sin(x*y*z), (x, 0, x + y + z))
assert f.has(x + y)
assert f.has(x + z)
assert f.has(y + z)
assert f.has(x*y)
assert f.has(x*z)
assert f.has(y*z)
assert not f.has(2*x + y)
assert not f.has(2*x*y)
def test_has_tuple():
f = Function('f')
g = Function('g')
h = Function('h')
assert Tuple(x, y).has(x)
assert not Tuple(x, y).has(z)
assert Tuple(f(x), g(x)).has(x)
assert not Tuple(f(x), g(x)).has(y)
assert Tuple(f(x), g(x)).has(f)
assert Tuple(f(x), g(x)).has(f(x))
assert not Tuple(f, g).has(x)
assert Tuple(f, g).has(f)
assert not Tuple(f, g).has(h)
assert Tuple(True).has(True) is True # .has(1) will also be True
def test_has_units():
from sympy.physics.units import m, s
assert (x*m/s).has(x)
assert (x*m/s).has(y, z) is False
def test_has_polys():
poly = Poly(x**2 + x*y*sin(z), x, y, t)
assert poly.has(x)
assert poly.has(x, y, z)
assert poly.has(x, y, z, t)
def test_has_physics():
assert FockState((x, y)).has(x)
def test_as_poly_as_expr():
f = x**2 + 2*x*y
assert f.as_poly().as_expr() == f
assert f.as_poly(x, y).as_expr() == f
assert (f + sin(x)).as_poly(x, y) is None
p = Poly(f, x, y)
assert p.as_poly() == p
def test_nonzero():
assert bool(S.Zero) == False
assert bool(S.One) == True
assert bool(x) == True
assert bool(x+y) == True
assert bool(x-x) == False
assert bool(x*y) == True
assert bool(x*1) == True
assert bool(x*0) == False
def test_is_number():
assert Float(3.14).is_number == True
assert Integer(737).is_number == True
assert Rational(3, 2).is_number == True
assert Rational(8).is_number == True
assert x.is_number == False
assert (2*x).is_number == False
assert (x + y).is_number == False
assert log(2).is_number == True
assert log(x).is_number == False
assert (2 + log(2)).is_number == True
assert (8+log(2)).is_number == True
assert (2 + log(x)).is_number == False
assert (8+log(2)+x).is_number == False
assert (1+x**2/x-x).is_number == True
assert Tuple(Integer(1)).is_number == False
assert Add(2, x).is_number == False
assert Mul(3, 4).is_number == True
assert Pow(log(2), 2).is_number == True
assert oo.is_number == True
g = WildFunction('g')
assert g.is_number == False
assert (2*g).is_number == False
assert (x**2).subs(x, 3).is_number == True
# test extensibility of .is_number
# on subinstances of Basic
class A(Basic):
pass
a = A()
assert a.is_number == False
def test_as_coeff_add():
assert S(2).as_coeff_add() == (2, ())
assert S(3.0).as_coeff_add() == (0, (S(3.0),))
assert S(-3.0).as_coeff_add() == (0, (S(-3.0),))
assert x .as_coeff_add() == ( 0, (x,))
assert (-1+x).as_coeff_add() == (-1, (x,))
assert ( 2+x).as_coeff_add() == ( 2, (x,))
assert ( 1+x).as_coeff_add() == ( 1, (x,))
assert (x + y).as_coeff_add(y) == (x, (y,))
assert (3*x).as_coeff_add(y) == (3*x, ())
# don't do expansion
e = (x + y)**2
assert e.as_coeff_add(y) == (0, (e,))
def test_as_coeff_mul():
assert S(2).as_coeff_mul() == (2, ())
assert S(3.0).as_coeff_mul() == (1, (S(3.0),))
assert S(-3.0).as_coeff_mul() == (-1, (S(3.0),))
assert x .as_coeff_mul() == ( 1, (x,))
assert (-x).as_coeff_mul() == (-1, (x,))
assert (2*x).as_coeff_mul() == (2, (x,))
assert (x*y).as_coeff_mul(y) == (x, (y,))
assert (3 + x).as_coeff_mul(y) == (3 + x, ())
# don't do expansion
e = exp(x + y)
assert e.as_coeff_mul(y) == (1, (e,))
e = 2**(x + y)
assert e.as_coeff_mul(y) == (1, (e,))
def test_as_coeff_exponent():
assert (3*x**4).as_coeff_exponent(x) == (3, 4)
assert (2*x**3).as_coeff_exponent(x) == (2, 3)
assert (4*x**2).as_coeff_exponent(x) == (4, 2)
assert (6*x**1).as_coeff_exponent(x) == (6, 1)
assert (3*x**0).as_coeff_exponent(x) == (3, 0)
assert (2*x**0).as_coeff_exponent(x) == (2, 0)
assert (1*x**0).as_coeff_exponent(x) == (1, 0)
assert (0*x**0).as_coeff_exponent(x) == (0, 0)
assert (-1*x**0).as_coeff_exponent(x) == (-1, 0)
assert (-2*x**0).as_coeff_exponent(x) == (-2, 0)
assert (2*x**3+pi*x**3).as_coeff_exponent(x) == (2+pi, 3)
assert (x*log(2)/(2*x + pi*x)).as_coeff_exponent(x) == \
(log(2)/(2+pi), 0)
# 1685
D = Derivative
f = Function('f')
fx = D(f(x), x)
assert fx.as_coeff_exponent(f(x)) == (fx ,0)
def test_extractions():
assert ((x*y)**3).extract_multiplicatively(x**2 * y) == x*y**2
assert ((x*y)**3).extract_multiplicatively(x**4 * y) == None
assert (2*x).extract_multiplicatively(2) == x
assert (2*x).extract_multiplicatively(3) == None
assert (2*x).extract_multiplicatively(-1) == None
assert (Rational(1, 2)*x).extract_multiplicatively(3) == x/6
assert (sqrt(x)).extract_multiplicatively(x) == None
assert (sqrt(x)).extract_multiplicatively(1/x) == None
assert ((x*y)**3).extract_additively(1) == None
assert (x + 1).extract_additively(x) == 1
assert (x + 1).extract_additively(2*x) == None
assert (x + 1).extract_additively(-x) == None
assert (-x + 1).extract_additively(2*x) == None
assert (2*x + 3).extract_additively(x) == x + 3
assert (2*x + 3).extract_additively(2) == 2*x + 1
assert (2*x + 3).extract_additively(3) == 2*x
assert (2*x + 3).extract_additively(-2) == None
assert (2*x + 3).extract_additively(3*x) == None
assert (2*x + 3).extract_additively(2*x) == 3
assert x.extract_additively(0) == x
assert S(2).extract_additively(x) is None
assert S(2.).extract_additively(2) == S.Zero
assert S(2*x + 3).extract_additively(x + 1) == x + 2
assert S(2*x + 3).extract_additively(y + 1) is None
assert S(2*x - 3).extract_additively(x + 1) is None
assert S(2*x - 3).extract_additively(y + z) is None
assert ((a + 1)*x*4 + y).extract_additively(x).expand() == \
4*a*x + 3*x + y
assert ((a + 1)*x*4 + 3*y).extract_additively(x + 2*y).expand() == \
4*a*x + 3*x + y
assert (y*(x + 1)).extract_additively(x + 1) is None
assert ((y + 1)*(x + 1) + 3).extract_additively(x + 1) == \
y*(x + 1) + 3
assert ((x + y)*(x + 1) + x + y + 3).extract_additively(x + y) == \
x*(x + y) + 3
assert (x + y + 2*((x + y)*(x + 1)) + 3).extract_additively((x + y)*(x + 1)) == \
x + y + (x + 1)*(x + y) + 3
assert ((y + 1)*(x + 2*y + 1) + 3).extract_additively(y + 1) == \
(x + 2*y)*(y + 1) + 3
n = Symbol("n", integer=True)
assert (Integer(-3)).could_extract_minus_sign() == True
assert (-n*x+x).could_extract_minus_sign() != (n*x-x).could_extract_minus_sign()
assert (x-y).could_extract_minus_sign() != (-x+y).could_extract_minus_sign()
assert (1-x-y).could_extract_minus_sign() == True
assert (1-x+y).could_extract_minus_sign() == False
assert ((-x-x*y)/y).could_extract_minus_sign() == True
assert (-(x+x*y)/y).could_extract_minus_sign() == True
assert ((x+x*y)/(-y)).could_extract_minus_sign() == True
assert ((x+x*y)/y).could_extract_minus_sign() == False
    assert (x*(-x-x**3)).could_extract_minus_sign() == True # used to cause infinite recursion
assert ((-x-y)/(x+y)).could_extract_minus_sign() == True # is_Mul odd case
    # The result of each of these is machine-dependent: exactly which of the
    # two variants returns False depends on term ordering, so both variants
    # are or-ed together below.
assert ((-x-y)/(x-y)).could_extract_minus_sign() == False or\
((-x-y)/(y-x)).could_extract_minus_sign() == False # is_Mul even case
assert ( x - y).could_extract_minus_sign() == False
assert (-x + y).could_extract_minus_sign() == True
def test_coeff():
assert (x+1).coeff(x+1) == 1
assert (3*x).coeff(0) == 0
assert (z*(1+x)*x**2).coeff(1+x) == z*x**2
assert (1+2*x*x**(1+x)).coeff(x*x**(1+x)) == 2
assert (1+2*x**(y+z)).coeff(x**(y+z)) == 2
assert (3+2*x+4*x**2).coeff(1) == 0
assert (3+2*x+4*x**2).coeff(-1) == 0
assert (3+2*x+4*x**2).coeff(x) == 2
assert (3+2*x+4*x**2).coeff(x**2) == 4
assert (3+2*x+4*x**2).coeff(x**3) == 0
assert (-x/8 + x*y).coeff(x) == -S(1)/8 + y
assert (-x/8 + x*y).coeff(-x) == S(1)/8
assert (4*x).coeff(2*x) == 0
assert (2*x).coeff(2*x) == 1
assert (-oo*x).coeff(x*oo) == -1
n1, n2 = symbols('n1 n2', commutative=False)
assert (n1*n2).coeff(n1) == 1
assert (n1*n2).coeff(n2) == n1
assert (n1*n2 + x*n1).coeff(n1) == 1 # 1*n1*(n2+x)
assert (n2*n1 + x*n1).coeff(n1) == n2 + x
assert (n2*n1 + x*n1**2).coeff(n1) == n2
assert (n1**x).coeff(n1) == 0
assert (n1*n2 + n2*n1).coeff(n1) == 0
assert (2*(n1+n2)*n2).coeff(n1+n2, right=1) == n2
assert (2*(n1+n2)*n2).coeff(n1+n2, right=0) == 2
f = Function('f')
assert (2*f(x) + 3*f(x).diff(x)).coeff(f(x)) == 2
expr = z*(x+y)**2
expr2 = z*(x+y)**2 + z*(2*x + 2*y)**2
assert expr.coeff(z) == (x+y)**2
assert expr.coeff(x+y) == 0
assert expr2.coeff(z) == (x+y)**2 + (2*x + 2*y)**2
assert (x + y + 3*z).coeff(1) == x + y
assert (-x + 2*y).coeff(-1) == x
assert (x - 2*y).coeff(-1) == 2*y
assert (3 + 2*x + 4*x**2).coeff(1) == 0
assert (-x - 2*y).coeff(2) == -y
assert (x + sqrt(2)*x).coeff(sqrt(2)) == x
assert (3 + 2*x + 4*x**2).coeff(x) == 2
assert (3 + 2*x + 4*x**2).coeff(x**2) == 4
assert (3 + 2*x + 4*x**2).coeff(x**3) == 0
assert (z*(x + y)**2).coeff((x + y)**2) == z
assert (z*(x + y)**2).coeff(x + y) == 0
assert (2 + 2*x + (x + 1)*y).coeff(x + 1) == y
assert (x + 2*y + 3).coeff(1) == x
assert (x + 2*y + 3).coeff(x, 0) == 2*y + 3
assert (x**2 + 2*y + 3*x).coeff(x**2, 0) == 2*y + 3*x
assert x.coeff(0, 0) == 0
assert x.coeff(x, 0) == 0
n, m, o, l = symbols('n m o l', commutative=False)
assert n.coeff(n) == 1
assert y.coeff(n) == 0
assert (3*n).coeff(n) == 3
assert (2 + n).coeff(x*m) == 0
assert (2*x*n*m).coeff(x) == 2*n*m
assert (2 + n).coeff(x*m*n + y) == 0
assert (2*x*n*m).coeff(3*n) == 0
assert (n*m + m*n*m).coeff(n) == 1 + m
assert (n*m + m*n*m).coeff(n, right=True) == m # = (1 + m)*n*m
assert (n*m + m*n).coeff(n) == 0
assert (n*m + o*m*n).coeff(m*n) == o
assert (n*m + o*m*n).coeff(m*n, right=1) == 1
assert (n*m + n*m*n).coeff(n*m, right=1) == 1 + n # = n*m*(n + 1)
def test_coeff2():
r, kappa = symbols('r, kappa')
psi = Function("psi")
g = 1/r**2 * (2*r*psi(r).diff(r, 1) + r**2 * psi(r).diff(r, 2))
g = g.expand()
assert g.coeff((psi(r).diff(r))) == 2/r
def test_coeff2_0():
r, kappa = symbols('r, kappa')
psi = Function("psi")
g = 1/r**2 * (2*r*psi(r).diff(r, 1) + r**2 * psi(r).diff(r, 2))
g = g.expand()
assert g.coeff(psi(r).diff(r, 2)) == 1
def test_coeff_expand():
expr = z*(x+y)**2
expr2 = z*(x+y)**2 + z*(2*x + 2*y)**2
assert expr.coeff(z) == (x+y)**2
assert expr2.coeff(z) == (x+y)**2 + (2*x + 2*y)**2
def test_integrate():
assert x.integrate(x) == x**2/2
assert x.integrate((x, 0, 1)) == S(1)/2
def test_as_base_exp():
assert x.as_base_exp() == (x, S.One)
assert (x*y*z).as_base_exp() == (x*y*z, S.One)
assert (x+y+z).as_base_exp() == (x+y+z, S.One)
assert ((x+y)**z).as_base_exp() == (x+y, z)
def test_issue1864():
assert hasattr(Mul(x, y), "is_commutative")
assert hasattr(Mul(x, y, evaluate=False), "is_commutative")
assert hasattr(Pow(x, y), "is_commutative")
assert hasattr(Pow(x, y, evaluate=False), "is_commutative")
expr = Mul(Pow(2, 2, evaluate=False), 3, evaluate=False) + 1
assert hasattr(expr, "is_commutative")
def test_action_verbs():
assert nsimplify((1/(exp(3*pi*x/5)+1))) == (1/(exp(3*pi*x/5)+1)).nsimplify()
assert ratsimp(1/x + 1/y) == (1/x + 1/y).ratsimp()
assert trigsimp(log(x), deep=True) == (log(x)).trigsimp(deep = True)
assert radsimp(1/(2+sqrt(2))) == (1/(2+sqrt(2))).radsimp()
assert powsimp(x**y*x**z*y**z, combine='all') == (x**y*x**z*y**z).powsimp(combine='all')
assert simplify(x**y*x**z*y**z) == (x**y*x**z*y**z).simplify()
assert together(1/x + 1/y) == (1/x + 1/y).together()
# Not tested because it's deprecated
#assert separate((x*(y*z)**3)**2) == ((x*(y*z)**3)**2).separate()
assert collect(a*x**2 + b*x**2 + a*x - b*x + c, x) == (a*x**2 + b*x**2 + a*x - b*x + c).collect(x)
assert apart(y/(y+2)/(y+1), y) == (y/(y+2)/(y+1)).apart(y)
assert combsimp(y/(x+2)/(x+1)) == (y/(x+2)/(x+1)).combsimp()
assert factor(x**2+5*x+6) == (x**2+5*x+6).factor()
assert refine(sqrt(x**2)) == sqrt(x**2).refine()
assert cancel((x**2+5*x+6)/(x+2)) == ((x**2+5*x+6)/(x+2)).cancel()
def test_as_powers_dict():
assert x.as_powers_dict() == {x: 1}
assert (x**y*z).as_powers_dict() == {x: y, z: 1}
assert Mul(2, 2, **dict(evaluate=False)).as_powers_dict() == {S(2): S(2)}
def test_as_coefficients_dict():
check = [S(1), x, y, x*y, 1]
assert [Add(3*x, 2*x, y, 3).as_coefficients_dict()[i] for i in check] == \
[3, 5, 1, 0, 0]
assert [(3*x*y).as_coefficients_dict()[i] for i in check] == \
[0, 0, 0, 3, 0]
assert (3.0*x*y).as_coefficients_dict()[3.0*x*y] == 1
def test_args_cnc():
A = symbols('A', commutative=False)
assert (x+A).args_cnc() == \
[[], [x + A]]
assert (x+a).args_cnc() == \
[[a + x], []]
assert (x*a).args_cnc() == \
[[a, x], []]
assert (x*y*A*(A+1)).args_cnc(cset=True) == \
[set([x, y]), [A, 1 + A]]
assert Mul(x, x, evaluate=False).args_cnc(cset=True, warn=False) == \
[set([x]), []]
assert Mul(x, x**2, evaluate=False).args_cnc(cset=True, warn=False) == \
[set([x, x**2]), []]
raises(ValueError, lambda: Mul(x, x, evaluate=False).args_cnc(cset=True))
assert Mul(x, y, x, evaluate=False).args_cnc() == \
[[x, y, x], []]
def test_new_rawargs():
n = Symbol('n', commutative=False)
a = x + n
assert a.is_commutative is False
assert a._new_rawargs(x).is_commutative
assert a._new_rawargs(x, y).is_commutative
assert a._new_rawargs(x, n).is_commutative is False
assert a._new_rawargs(x, y, n).is_commutative is False
m = x*n
assert m.is_commutative is False
assert m._new_rawargs(x).is_commutative
assert m._new_rawargs(n).is_commutative is False
assert m._new_rawargs(x, y).is_commutative
assert m._new_rawargs(x, n).is_commutative is False
assert m._new_rawargs(x, y, n).is_commutative is False
assert m._new_rawargs(x, n, reeval=False).is_commutative is False
assert m._new_rawargs(S.One) is S.One
def test_2127():
assert Add(evaluate=False) == 0
assert Mul(evaluate=False) == 1
assert Mul(x+y, evaluate=False).is_Add
def test_free_symbols():
# free_symbols should return the free symbols of an object
assert S(1).free_symbols == set()
assert (x).free_symbols == set([x])
assert Integral(x, (x, 1, y)).free_symbols == set([y])
assert (-Integral(x, (x, 1, y))).free_symbols == set([y])
assert meter.free_symbols == set()
assert (meter**x).free_symbols == set([x])
def test_issue2201():
x = Symbol('x', commutative=False)
assert x*sqrt(2)/sqrt(6) == x*sqrt(3)/3
def test_issue_2061():
assert sqrt(-1.0*x) == 1.0*sqrt(-x)
assert sqrt(1.0*x) == 1.0*sqrt(x)
def test_as_coeff_Mul():
assert Integer(3).as_coeff_Mul() == (Integer(3), Integer(1))
assert Rational(3, 4).as_coeff_Mul() == (Rational(3, 4), Integer(1))
assert Float(5.0).as_coeff_Mul() == (Float(5.0), Integer(1))
assert (Integer(3)*x).as_coeff_Mul() == (Integer(3), x)
assert (Rational(3, 4)*x).as_coeff_Mul() == (Rational(3, 4), x)
assert (Float(5.0)*x).as_coeff_Mul() == (Float(5.0), x)
assert (Integer(3)*x*y).as_coeff_Mul() == (Integer(3), x*y)
assert (Rational(3, 4)*x*y).as_coeff_Mul() == (Rational(3, 4), x*y)
assert (Float(5.0)*x*y).as_coeff_Mul() == (Float(5.0), x*y)
assert (x).as_coeff_Mul() == (S.One, x)
assert (x*y).as_coeff_Mul() == (S.One, x*y)
def test_as_coeff_Add():
assert Integer(3).as_coeff_Add() == (Integer(3), Integer(0))
assert Rational(3, 4).as_coeff_Add() == (Rational(3, 4), Integer(0))
assert Float(5.0).as_coeff_Add() == (Float(5.0), Integer(0))
assert (Integer(3) + x).as_coeff_Add() == (Integer(3), x)
assert (Rational(3, 4) + x).as_coeff_Add() == (Rational(3, 4), x)
assert (Float(5.0) + x).as_coeff_Add() == (Float(5.0), x)
assert (Integer(3) + x + y).as_coeff_Add() == (Integer(3), x + y)
assert (Rational(3, 4) + x + y).as_coeff_Add() == (Rational(3, 4), x + y)
assert (Float(5.0) + x + y).as_coeff_Add() == (Float(5.0), x + y)
assert (x).as_coeff_Add() == (S.Zero, x)
assert (x*y).as_coeff_Add() == (S.Zero, x*y)
def test_expr_sorting():
f, g = symbols('f,g', cls=Function)
exprs = [1/x**2, 1/x, sqrt(sqrt(x)), sqrt(x), x, sqrt(x)**3, x**2]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [x, 2*x, 2*x**2, 2*x**3, x**n, 2*x**n, sin(x), sin(x)**n, sin(x**2), cos(x), cos(x**2), tan(x)]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [x + 1, x**2 + x + 1, x**3 + x**2 + x + 1]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [S(4), x - 3*I/2, x + 3*I/2, x - 4*I + 1, x + 4*I + 1]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [f(1), f(2), f(3), f(1, 2, 3), g(1), g(2), g(3), g(1, 2, 3)]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [f(x), g(x), exp(x), sin(x), cos(x), factorial(x)]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [Tuple(x, y), Tuple(x, z), Tuple(x, y, z)]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [[3], [1, 2]]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [[1, 2], [2, 3]]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [[1, 2], [1, 2, 3]]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [{x: -y}, {x: y}]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [set([1]), set([1, 2])]
assert sorted(exprs, key=default_sort_key) == exprs
def test_as_ordered_factors():
f, g = symbols('f,g', cls=Function)
assert x.as_ordered_factors() == [x]
assert (2*x*x**n*sin(x)*cos(x)).as_ordered_factors() == [Integer(2), x, x**n, sin(x), cos(x)]
args = [f(1), f(2), f(3), f(1, 2, 3), g(1), g(2), g(3), g(1, 2, 3)]
expr = Mul(*args)
assert expr.as_ordered_factors() == args
A, B = symbols('A,B', commutative=False)
assert (A*B).as_ordered_factors() == [A, B]
assert (B*A).as_ordered_factors() == [B, A]
def test_as_ordered_terms():
f, g = symbols('f,g', cls=Function)
assert x.as_ordered_terms() == [x]
assert (sin(x)**2*cos(x) + sin(x)*cos(x)**2 + 1).as_ordered_terms() == [sin(x)**2*cos(x), sin(x)*cos(x)**2, 1]
args = [f(1), f(2), f(3), f(1, 2, 3), g(1), g(2), g(3), g(1, 2, 3)]
expr = Add(*args)
assert expr.as_ordered_terms() == args
assert (1 + 4*sqrt(3)*pi*x).as_ordered_terms() == [4*pi*x*sqrt(3), 1]
assert ( 2 + 3*I).as_ordered_terms() == [ 2, 3*I]
assert (-2 + 3*I).as_ordered_terms() == [-2, 3*I]
assert ( 2 - 3*I).as_ordered_terms() == [ 2, -3*I]
assert (-2 - 3*I).as_ordered_terms() == [-2, -3*I]
assert ( 4 + 3*I).as_ordered_terms() == [ 4, 3*I]
assert (-4 + 3*I).as_ordered_terms() == [-4, 3*I]
assert ( 4 - 3*I).as_ordered_terms() == [ 4, -3*I]
assert (-4 - 3*I).as_ordered_terms() == [-4, -3*I]
f = x**2*y**2 + x*y**4 + y + 2
assert f.as_ordered_terms(order="lex") == [x**2*y**2, x*y**4, y, 2]
assert f.as_ordered_terms(order="grlex") == [x*y**4, x**2*y**2, y, 2]
assert f.as_ordered_terms(order="rev-lex") == [2, y, x*y**4, x**2*y**2]
assert f.as_ordered_terms(order="rev-grlex") == [2, y, x**2*y**2, x*y**4]
def test_sort_key_atomic_expr():
from sympy.physics.units import m, s
assert sorted([-m, s], key=lambda arg: arg.sort_key()) == [-m, s]
def test_issue_1100():
# first subs and limit gives NaN
a = x/y
assert a._eval_interval(x, 0, oo)._eval_interval(y, oo, 0) is S.NaN
# second subs and limit gives NaN
assert a._eval_interval(x, 0, oo)._eval_interval(y, 0, oo) is S.NaN
# difference gives S.NaN
a = x - y
assert a._eval_interval(x, 1, oo)._eval_interval(y, oo, 1) is S.NaN
raises(ValueError, lambda: x._eval_interval(x, None, None))
def test_primitive():
assert (3*(x + 1)**2).primitive() == (3, (x + 1)**2)
assert (6*x + 2).primitive() == (2, 3*x + 1)
assert (x/2 + 3).primitive() == (S(1)/2, x + 6)
eq = (6*x + 2)*(x/2 + 3)
assert eq.primitive()[0] == 1
eq = (2 + 2*x)**2
assert eq.primitive()[0] == 1
assert (4.0*x).primitive() == (1, 4.0*x)
assert (4.0*x + y/2).primitive() == (S.Half, 8.0*x + y)
assert (-2*x).primitive() == (2, -x)
assert Add(5*z/7, 0.5*x, 3*y/2, evaluate=False).primitive() == \
(S(1)/14, 7.0*x + 21*y + 10*z)
for i in [S.Infinity, S.NegativeInfinity, S.ComplexInfinity]:
assert (i + x/3).primitive() == \
(S(1)/3, i + x)
assert (S.Infinity + 2*x/3 + 4*y/7).primitive() == \
(S(1)/21, 14*x + 12*y + oo)
assert S.Zero.primitive() == (S.One, S.Zero)
def test_issue_2744():
a = 1 + x
assert (2*a).extract_multiplicatively(a) == 2
assert (4*a).extract_multiplicatively(2*a) == 2
assert ((3*a)*(2*a)).extract_multiplicatively(a) == 6*a
def test_is_constant():
from sympy.solvers.solvers import checksol
    assert Sum(x, (x, 1, 10)).is_constant() == True
    assert Sum(x, (x, 1, n)).is_constant() == False
    assert Sum(x, (x, 1, n)).is_constant(y) == True
    assert Sum(x, (x, 1, n)).is_constant(n) == False
    assert Sum(x, (x, 1, n)).is_constant(x) == True
    eq = a*cos(x)**2 + a*sin(x)**2 - a
    assert eq.is_constant() == True
assert eq.subs({x:pi, a:2}) == eq.subs({x:pi, a:3}) == 0
assert x.is_constant() is False
assert x.is_constant(y) is True
assert checksol(x, x, Sum(x, (x, 1, n))) == False
assert checksol(x, x, Sum(x, (x, 1, n))) == False
f = Function('f')
assert checksol(x, x, f(x)) == False
p = symbols('p', positive=True)
assert Pow(x, S(0), evaluate=False).is_constant() == True # == 1
assert Pow(S(0), x, evaluate=False).is_constant() == False # == 0 or 1
assert Pow(S(0), p, evaluate=False).is_constant() == True # == 1
assert (2**x).is_constant() == False
assert Pow(S(2), S(3), evaluate=False).is_constant() == True
z1, z2 = symbols('z1 z2', zero=True)
assert (z1 + 2*z2).is_constant() is True
assert meter.is_constant() is True
assert (3*meter).is_constant() is True
assert (x*meter).is_constant() is False
def test_equals():
assert (-3 - sqrt(5) + (-sqrt(10)/2 - sqrt(2)/2)**2).equals(0)
assert (x**2 - 1).equals((x + 1)*(x - 1))
assert (cos(x)**2 + sin(x)**2).equals(1)
assert (a*cos(x)**2 + a*sin(x)**2).equals(a)
r = sqrt(2)
assert (-1/(r + r*x) + 1/r/(1 + x)).equals(0)
assert factorial(x + 1).equals((x + 1)*factorial(x))
assert sqrt(3).equals(2*sqrt(3)) is False
assert (sqrt(5)*sqrt(3)).equals(sqrt(3)) is False
assert (sqrt(5) + sqrt(3)).equals(0) is False
assert (sqrt(5) + pi).equals(0) is False
assert meter.equals(0) is False
assert (3*meter**2).equals(0) is False
# from integrate(x*sqrt(1+2*x), x);
# diff is zero only when assumptions allow
i = 2*sqrt(2)*x**(S(5)/2)*(1 + 1/(2*x))**(S(5)/2)/5 + \
2*sqrt(2)*x**(S(3)/2)*(1 + 1/(2*x))**(S(5)/2)/(-6 - 3/x)
ans = sqrt(2*x + 1)*(6*x**2 + x - 1)/15
diff = i - ans
assert diff.equals(0) is False
assert diff.subs(x, -S.Half/2) == 7*sqrt(2)/120
# there are regions for x for which the expression is True, for
# example, when x < -1/2 or x > 0 the expression is zero
p = Symbol('p', positive=True)
assert diff.subs(x, p).equals(0) is True
assert diff.subs(x, -1).equals(0) is True
def test_random():
from sympy import posify
assert posify(x)[0]._random() is not None
def test_round():
from sympy.abc import x
assert Float('0.1249999').round(2) == 0.12
d20 = 12345678901234567890
ans = S(d20).round(2)
assert ans.is_Float and ans == d20
ans = S(d20).round(-2)
assert ans.is_Float and ans == 12345678901234567900
assert S('1/7').round(4) == 0.1429
assert S('.[12345]').round(4) == 0.1235
assert S('.1349').round(2) == 0.13
n = S(12345)
ans = n.round()
assert ans.is_Float
assert ans == n
ans = n.round(1)
assert ans.is_Float
assert ans == n
ans = n.round(4)
assert ans.is_Float
assert ans == n
assert n.round(-1) == 12350
r = n.round(-4)
assert r == 10000
# in fact, it should equal many values since __eq__
# compares at equal precision
assert all(r == i for i in range(9984, 10049))
assert n.round(-5) == 0
assert (pi + sqrt(2)).round(2) == 4.56
assert (10*(pi + sqrt(2))).round(-1) == 50
raises(TypeError, lambda: round(x + 2, 2))
assert S(2.3).round(1) == 2.3
e = S(12.345).round(2)
assert e == round(12.345, 2)
assert type(e) is Float
assert (Float(.3, 3) + 2*pi).round() == 7
assert (Float(.3, 3) + 2*pi*100).round() == 629
assert (Float(.03, 3) + 2*pi/100).round(5) == 0.09283
assert (Float(.03, 3) + 2*pi/100).round(4) == 0.0928
assert (pi + 2*E*I).round() == 3 + 5*I
assert S.Zero.round() == 0
a = (Add(1, Float('1.'+'9'*27, ''), evaluate=0))
assert a.round(10) == Float('3.0000000000','')
assert a.round(25) == Float('3.0000000000000000000000000','')
assert a.round(26) == Float('3.00000000000000000000000000','')
assert a.round(27) == Float('2.999999999999999999999999999','')
assert a.round(30) == Float('2.999999999999999999999999999','')
raises(TypeError, lambda: x.round())
# exact magnitude of 10
assert str(S(1).round()) == '1.'
assert str(S(100).round()) == '100.'
# applied to real and imaginary portions
assert (2*pi + E*I).round() == 6 + 3*I
assert (2*pi + I/10).round() == 6
assert (pi/10 + 2*I).round() == 2*I
# the lhs re and im parts are Float with dps of 2
# and those on the right have dps of 15 so they won't compare
# equal unless we use string or compare components (which will
# then coerce the floats to the same precision) or re-create
# the floats
assert str((pi/10 + E*I).round(2)) == '0.31 + 2.72*I'
assert (pi/10 + E*I).round(2).as_real_imag() == (0.31, 2.72)
assert (pi/10 + E*I).round(2) == Float(0.31, 2) + I*Float(2.72, 3)
# issue 3815
assert (I**(I+3)).round(3) == Float('-0.208','')*I
def test_extract_branch_factor():
assert exp_polar(2.0*I*pi).extract_branch_factor() == (1, 1)
| bsd-3-clause | 625,356,421,304,624,600 | 33.645022 | 114 | 0.54313 | false |
praekelt/vumi-go | go/apps/tests/view_helpers.py | 1 | 2691 |
from django.core.urlresolvers import reverse
from zope.interface import implements
from vumi.tests.helpers import generate_proxies, IHelper
from go.base import utils as base_utils
from go.base.tests.helpers import DjangoVumiApiHelper
from go.vumitools.tests.helpers import GoMessageHelper
from .helpers import ApplicationHelper
class AppViewsHelper(object):
implements(IHelper)
def __init__(self, conversation_type):
self.conversation_type = conversation_type
self.vumi_helper = DjangoVumiApiHelper()
self._app_helper = ApplicationHelper(
conversation_type, self.vumi_helper)
# Proxy methods from our helpers.
generate_proxies(self, self._app_helper)
generate_proxies(self, self.vumi_helper)
def setup(self):
        # Set up the API helper and create a Django user for the tests.
self.vumi_helper.setup()
self.vumi_helper.make_django_user()
def cleanup(self):
return self.vumi_helper.cleanup()
def get_new_view_url(self):
return reverse('conversations:new_conversation')
def get_conversation_helper(self, conversation):
return ConversationViewHelper(self, conversation.key)
def create_conversation_helper(self, *args, **kw):
conversation = self.create_conversation(*args, **kw)
return self.get_conversation_helper(conversation)
def get_api_commands_sent(self):
return base_utils.connection.get_commands()
class ConversationViewHelper(object):
def __init__(self, app_views_helper, conversation_key):
self.conversation_key = conversation_key
self.conversation_type = app_views_helper.conversation_type
self.app_helper = app_views_helper
def get_view_url(self, view):
view_def = base_utils.get_conversation_view_definition(
self.conversation_type)
return view_def.get_view_url(
view, conversation_key=self.conversation_key)
def get_action_view_url(self, action_name):
return reverse('conversations:conversation_action', kwargs={
'conversation_key': self.conversation_key,
'action_name': action_name,
})
def get_conversation(self):
return self.app_helper.get_conversation(self.conversation_key)
def add_stored_inbound(self, count, **kw):
msg_helper = GoMessageHelper(vumi_helper=self.app_helper)
conv = self.get_conversation()
return msg_helper.add_inbound_to_conv(conv, count, **kw)
def add_stored_replies(self, msgs):
msg_helper = GoMessageHelper(vumi_helper=self.app_helper)
conv = self.get_conversation()
return msg_helper.add_replies_to_conv(conv, msgs)
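# Illustrative usage sketch (hypothetical test code; the conversation type,
# keyword arguments, and view name are assumptions, not part of this module):
#
#     helper = AppViewsHelper(u'bulk_message')
#     helper.setup()
#     conv_helper = helper.create_conversation_helper(name=u'myconv')
#     response = self.client.get(conv_helper.get_view_url('show'))
#     helper.cleanup()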
| bsd-3-clause | -5,930,899,879,608,765,000 | 33.5 | 70 | 0.687105 | false |
beaker-project/beaker | Client/src/bkr/client/command.py | 1 | 13879 |
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import optparse
import os
import sys
from optparse import Option
import six
from bkr.common.hub import HubProxy
from bkr.common.pyconfig import PyConfigParser
def username_prompt(prompt=None, default_value=None):
"""
Ask for a username.
"""
if default_value is not None:
return default_value
prompt = prompt or "Enter your username: "
sys.stderr.write(prompt)
    return sys.stdin.readline().strip()  # strip the trailing newline
def password_prompt(prompt=None, default_value=None):
"""
Ask for a password.
"""
import getpass
if default_value is not None:
return default_value
prompt = prompt or "Enter your password: "
try:
# try to use stderr stream
result = getpass.getpass(prompt, stream=sys.stderr)
except TypeError:
# fall back to stdout
result = getpass.getpass(prompt)
return result
def yes_no_prompt(prompt, default_value=None):
"""
Give a yes/no (y/n) question.
"""
if default_value is not None:
if default_value not in ("Y", "N"):
raise ValueError("Invalid default value: %s" % default_value)
default_value = default_value.upper()
prompt = "%s [%s/%s]: " % (prompt, ("y", "Y")[default_value == "Y"], ("n", "N")[default_value == "N"])
sys.stderr.write(prompt)
while True:
user_input = sys.stdin.readline().strip().upper()
if user_input == "" and default_value is not None:
user_input = default_value
if user_input == "Y":
return True
if user_input == "N":
return False
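# For example, yes_no_prompt("Continue?", default_value="Y") renders as
# "Continue? [Y/n]: " and returns True on empty input.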
def are_you_sure_prompt(prompt=None):
"""
Give a yes/no (y/n) question.
"""
prompt = prompt or "Are you sure? Enter 'YES' to continue: "
sys.stderr.write(prompt)
user_input = sys.stdin.readline().strip()
if user_input == "YES":
return True
return False
class Plugin(object):
"""A plugin base class."""
author = None
version = None
enabled = False
def __getattr__(self, name):
"""
        Get a missing attribute from the container.
        This is quite hackish, but it allows settings and methods to be
        defined per container.
"""
return getattr(self.container, name)
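# Because of Plugin.__getattr__, a plugin transparently reads attributes
# defined on its container once _get_plugin() has set plugin.container;
# e.g. a Command can refer to self.conf when hosted by the
# ClientCommandContainer defined below.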
class Command(Plugin):
"""
An abstract class representing a command for CommandOptionParser.
"""
enabled = False
admin = False
username_prompt = staticmethod(username_prompt)
password_prompt = staticmethod(password_prompt)
yes_no_prompt = staticmethod(yes_no_prompt)
are_you_sure_prompt = staticmethod(are_you_sure_prompt)
def __init__(self, parser):
Plugin.__init__(self)
self.parser = parser
def options(self):
"""
Add options to self.parser.
"""
pass
def run(self, *args, **kwargs):
"""
Run a command. Arguments contain parsed options.
"""
raise NotImplementedError()
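# Minimal concrete command, as an illustrative sketch (the class name and
# option are hypothetical, not part of the original module):
class ExampleHello(Command):
    """Greet the user; shows the options()/run() contract."""
    enabled = False  # flip to True so register_plugin() will accept it
    def options(self):
        # self.parser is the optparse parser supplied by the container
        self.parser.add_option("--greeting", default="Hello", help="greeting to use")
    def run(self, *args, **kwargs):
        print("%s, world!" % kwargs.get("greeting", "Hello"))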
class PluginContainer(object):
"""
A plugin container.
Usage: Inherit PluginContainer and register plugins to the new class.
"""
def __getitem__(self, name):
return self._get_plugin(name)
def __iter__(self):
return six.iterkeys(self.plugins)
@classmethod
def normalize_name(cls, name):
return name
@classmethod
def _get_plugins(cls):
"""
Return dictionary of registered plugins.
"""
result = {}
parent_plugins = cls._get_parent_plugins(cls.normalize_name) # pylint: disable=no-member
class_plugins = getattr(cls, "_class_plugins", {})
d = parent_plugins.copy()
d.update(class_plugins)
for name, plugin_class in d.items():
result[name] = plugin_class
return result
@classmethod
def _get_parent_plugins(cls, normalize_function):
result = {}
for parent in cls.__bases__:
if parent is PluginContainer:
# don't use PluginContainer itself - plugins have to be registered to subclasses
continue
if not issubclass(parent, PluginContainer):
# skip parents which are not PluginContainer subclasses
continue
# read inherited plugins first (conflicts are resolved recursively)
plugins = parent._get_parent_plugins(normalize_function) # pylint: disable=no-member
# read class plugins, override inherited on name conflicts
if hasattr(parent, "_class_plugins"):
for plugin_class in parent._class_plugins.values(): # pylint: disable=no-member
normalized_name = normalize_function(plugin_class.__name__)
plugins[normalized_name] = plugin_class
            for name, value in six.iteritems(plugins):
                if result.get(name, value) != value:
                    raise RuntimeError(
                        "Cannot register plugin '%s'. "
                        "Another plugin with the same normalized name (%s) "
                        "is already in the container." % (str(value), name))
result.update(plugins)
return result
@property
def plugins(self):
if not hasattr(self, "_plugins"):
self._plugins = self.__class__._get_plugins()
return self._plugins
def _get_plugin(self, name):
"""
Return a plugin or raise KeyError.
"""
normalized_name = self.normalize_name(name)
if normalized_name not in self.plugins:
raise KeyError("Plugin not found: %s" % normalized_name)
plugin = self.plugins[normalized_name]
plugin.container = self
plugin.normalized_name = normalized_name
return plugin
@classmethod
def register_plugin(cls, plugin, name=None):
"""
Register a new plugin. Return normalized plugin name.
"""
if cls is PluginContainer:
raise TypeError("Can't register plugin to the PluginContainer base class.")
if "_class_plugins" not in cls.__dict__:
cls._class_plugins = {}
if not getattr(plugin, "enabled", False):
return
if not name:
name = cls.normalize_name(plugin.__name__)
cls._class_plugins[name] = plugin
return name
@classmethod
def register_module(cls, module, prefix=None, skip_broken=False):
"""
Register all plugins in a module's sub-modules.
@param module: a python module that contains plugin sub-modules
@type module: module
@param prefix: if specified, only modules with this prefix will be processed
@type prefix: str
@param skip_broken: skip broken sub-modules and print a warning
@type skip_broken: bool
"""
path = os.path.dirname(module.__file__)
module_list = []
for fn in os.listdir(path):
if not fn.endswith(".py"):
continue
if fn.startswith("_"):
continue
if prefix and not fn.startswith(prefix):
continue
if not os.path.isfile(os.path.join(path, fn)):
continue
module_list.append(fn[:-3])
if skip_broken:
for mod in module_list[:]:
try:
__import__(module.__name__, {}, {}, [mod])
            except Exception:
                sys.stderr.write("WARNING: Skipping broken plugin module: "
                                 "%s.%s\n" % (module.__name__, mod))
module_list.remove(mod)
else:
__import__(module.__name__, {}, {}, module_list)
for mn in module_list:
mod = getattr(module, mn)
for pn in dir(mod):
plugin = getattr(mod, pn)
if type(plugin) is type and issubclass(plugin, Plugin) and plugin is not Plugin:
cls.register_plugin(plugin)
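
    # Hedged usage note (the package and prefix names below are
    # hypothetical, not part of the original module): given a package
    # whose sub-modules each define Command subclasses,
    #
    #     import myapp.commands
    #     CommandContainer.register_module(myapp.commands, prefix='cmd_')
    #
    # imports every matching 'cmd_*.py' sub-module and registers the
    # enabled plugins it defines.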
class BeakerClientConfigurationError(ValueError):
"""
Raised to indicate that the Beaker client is not configured properly.
"""
pass
class CommandContainer(PluginContainer):
"""
Container for Command classes.
"""
@classmethod
def normalize_name(cls, name):
"""
Replace some characters in command names.
"""
return name.lower().replace('_', '-').replace(' ', '-')
class ClientCommandContainer(CommandContainer):
def __init__(self, conf, **kwargs):
self.conf = PyConfigParser()
self.conf.load_from_conf(conf)
self.conf.load_from_dict(kwargs)
def set_hub(self, username=None, password=None, auto_login=True, proxy_user=None):
if username:
            if password is None:
                password = password_prompt()
self.conf["AUTH_METHOD"] = "password"
self.conf["USERNAME"] = username
self.conf["PASSWORD"] = password
if proxy_user:
self.conf["PROXY_USER"] = proxy_user
cacert = self.conf.get('CA_CERT')
if cacert and not os.path.exists(cacert):
raise BeakerClientConfigurationError(
'CA_CERT configuration points to non-existing file: %s' % cacert)
self.hub = HubProxy(conf=self.conf, auto_login=auto_login)
class CommandOptionParser(optparse.OptionParser):
"""Enhanced OptionParser with plugin support."""
def __init__(self,
usage=None,
option_list=None,
option_class=Option,
version=None,
conflict_handler="error",
description=None,
formatter=None,
add_help_option=True,
prog=None,
command_container=None,
default_command="help",
add_username_password_options=False):
usage = usage or "%prog <command> [args] [--help]"
self.container = command_container
self.default_command = default_command
self.command = None
formatter = formatter or optparse.IndentedHelpFormatter(max_help_position=33)
optparse.OptionParser.__init__(self, usage, option_list, option_class, version,
conflict_handler, description, formatter, add_help_option,
prog)
if add_username_password_options:
option_list = [
optparse.Option("--username", help="specify user"),
optparse.Option("--password", help="specify password"),
]
self._populate_option_list(option_list, add_help=False)
def print_help(self, file=None, admin=False):
if file is None:
file = sys.stdout
file.write(self.format_help())
if self.command in (None, "help", "help-admin"):
file.write("\n")
file.write(self.format_help_commands(admin=admin))
def format_help_commands(self, admin=False):
commands = []
admin_commands = []
for name, plugin in sorted(six.iteritems(self.container.plugins)):
if getattr(plugin, 'hidden', False):
continue
is_admin = getattr(plugin, "admin", False)
text = " %-30s %s" % (name, plugin.__doc__.strip() if plugin.__doc__ else "")
if is_admin:
if admin:
admin_commands.append(text)
else:
commands.append(text)
if commands:
commands.insert(0, "commands:")
commands.append("")
if admin_commands:
admin_commands.insert(0, "admin commands:")
admin_commands.append("")
return "\n".join(commands + admin_commands)
def parse_args(self, args=None, values=None):
"""
Return (command_instance, opts, args)
"""
args = self._get_args(args)
if len(args) > 0 and not args[0].startswith("-"):
command = args[0]
args = args[1:]
else:
command = self.default_command
# keep args as is
        if command not in self.container.plugins:
self.error("unknown command: %s" % command)
CommandClass = self.container[command]
cmd = CommandClass(self)
        self.command = cmd.normalized_name
cmd.options()
cmd_opts, cmd_args = optparse.OptionParser.parse_args(self, args, values)
return cmd, cmd_opts, cmd_args
def run(self, args=None, values=None):
"""
Parse arguments and run a command
"""
cmd, cmd_opts, cmd_args = self.parse_args(args, values)
cmd_kwargs = cmd_opts.__dict__
cmd.run(*cmd_args, **cmd_kwargs)
class Help(Command):
"""
Show this help message and exit
"""
enabled = True
def options(self):
pass
def run(self, *args, **kwargs):
self.parser.print_help(admin=False)
class Help_Admin(Command):
"""
Show help message about administrative commands and exit
"""
enabled = True
def options(self):
# override default --help option
opt = self.parser.get_option("--help")
opt.action = "store_true"
opt.dest = "help"
def run(self, *args, **kwargs):
self.parser.print_help(admin=True)
CommandContainer.register_plugin(Help)
CommandContainer.register_plugin(Help_Admin)
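
# Hedged usage sketch (added for illustration; the demo container, the
# Hello_World command and the invocation below are hypothetical, not part
# of the original module):
if __name__ == "__main__":
    class DemoCommandContainer(CommandContainer):
        pass

    class Hello_World(Command):
        """Print a greeting."""
        enabled = True

        def run(self, *args, **kwargs):
            sys.stdout.write("Hello, world!\n")

    # normalize_name() registers the command under the name 'hello-world'.
    DemoCommandContainer.register_plugin(Hello_World)
    demo_parser = CommandOptionParser(
        command_container=DemoCommandContainer(),
        default_command="hello-world")
    demo_parser.run([])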
| gpl-2.0 | 2,149,729,074,512,585,200 | 29.106291 | 106 | 0.572303 | false |
palominodb/tableizer | tableizer/ttt_gui/rrd.py | 1 | 5968 | # rrd.py
# Copyright (C) 2009-2013 PalominoDB, Inc.
#
# You may contact the maintainers at [email protected].
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
from django.conf import settings
import rrdtool
from utilities.utils import flatten, titleize, str_to_datetime, datetime_to_int
class Rrdtool(object):
def server_graph(self, servers, since, type_='full'):
msgs = []
ok = True
for srv in flatten([servers]):
path = settings.FORMATTER_OPTIONS.get('rrd', {}).get('path', '')
rrd_path = os.path.join(path, srv.name, 'server_%s.rrd' % (srv.name))
opts = self.__common_opts('server_%s' % (srv.name), since, type_, 'Server Aggregate - %s' % (srv.name))
opts.append(map(lambda ds: self.__common_ds_opts(ds, rrd_path), [
['data_length', ['AREA%s:STACK', '#00ff40']],
['index_length', ['AREA%s', '#0040ff']],
#['data_free', ['LINE2%s', '#0f00f0']],
]))
opts = flatten(opts)
            opts = [str(opt) for opt in opts]
            try:
                rrdtool.graph(opts)
            except Exception as e:
msgs.append(e)
ok = False
return [ok, msgs]
def database_graph(self, databases, since, type_='full'):
msgs = []
ok = True
for db in flatten([databases]):
path = settings.FORMATTER_OPTIONS.get('rrd', {}).get('path', '')
rrd_path = os.path.join(path, db.server.name, 'database_%s.rrd' % (db.name))
opts = self.__common_opts('database_%s_%s' % (db.server.name, db.name), since,
type_, 'Database Aggregate - %s.%s' % (db.server.name, db.name))
opts.append(map(lambda ds: self.__common_ds_opts(ds, rrd_path), [
['data_length', ['AREA%s:STACK', '#00ff40']],
['index_length', ['AREA%s', '#0040ff']],
#['data_free', ['LINE2%s', '#0f00f0']],
]))
opts = flatten(opts)
            opts = [str(opt) for opt in opts]
            try:
                rrdtool.graph(opts)
            except Exception as e:
msgs.append(e)
ok = False
return [ok, msgs]
def table_graph(self, tables, since, type_='full'):
msgs = []
ok = True
for tbl in flatten([tables]):
path = settings.FORMATTER_OPTIONS.get('rrd', {}).get('path', '')
rrd_path = os.path.join(path, tbl.schema.server.name, tbl.schema.name, '%s.rrd' % (tbl.name))
opts = self.__common_opts('table_%s_%s_%s' % (tbl.schema.server.name, tbl.schema.name, tbl.name),
since, type_, 'Table - %s.%s.%s' % (tbl.schema.server.name, tbl.schema.name, tbl.name))
opts.append(map(lambda ds: self.__common_ds_opts(ds, rrd_path), [
['data_length', ['AREA%s:STACK', '#00ff40']],
['index_length', ['AREA%s', '#0040ff']],
#['data_free', ['LINE2%s', '#0f00f0']],
]))
opts = flatten(opts)
            opts = [str(opt) for opt in opts]
            try:
                rrdtool.graph(opts)
            except Exception as e:
msgs.append(e)
ok = False
return [ok, msgs]
def __common_opts(self, path_frag, since, type_, title):
filename = '%s.%s.%s.png' % (path_frag, since, type_)
since = str_to_datetime(since)
since = datetime_to_int(since)
if not os.path.isdir(os.path.join(settings.MEDIA_ROOT, 'graphs')):
os.makedirs(os.path.join(settings.MEDIA_ROOT, 'graphs'))
path = os.path.join(settings.MEDIA_ROOT, 'graphs', filename)
o = [path, '-s', str(since), '--width', '640' if type_ == 'full' else '128',
'-e', 'now', '--title', '%s' % (str(title))]
if type_ == 'thumb':
o.append('-j')
o.append('--height')
o.append('16')
return o
def __common_ds_opts(self, ds, rrd_path):
dsname = ds[0]
gitems = ds[1:]
ret = []
ret.append('DEF:avg_{0}={1}:{0}:AVERAGE'.format(dsname, rrd_path))
ret.append('DEF:min_{0}={1}:{0}:MIN'.format(dsname, rrd_path))
ret.append('DEF:max_{0}={1}:{0}:MAX'.format(dsname, rrd_path))
ret.append('VDEF:v_last_{0}=avg_{0},LAST'.format(dsname))
ret.append('VDEF:v_avg_{0}=avg_{0},AVERAGE'.format(dsname))
ret.append('VDEF:v_min_{0}=avg_{0},MINIMUM'.format(dsname))
ret.append('VDEF:v_max_{0}=avg_{0},MAXIMUM'.format(dsname))
for gi in gitems:
ret.append(gi[0] % ':avg_{0}{1}:"{2}"'.format(dsname, gi[1], titleize(dsname)))
ret.append('GPRINT:v_last_{0}:"Current\\: %0.2lf%s"'.format(dsname))
ret.append('GPRINT:v_avg_{0}:"Avg\\: %0.2lf%s"'.format(dsname))
ret.append('GPRINT:v_min_{0}:"Min\\: %0.2lf%s"'.format(dsname))
ret.append('GPRINT:v_max_{0}:"Max\\: %0.2lf%s"'.format(dsname))
ret.append('COMMENT:"\\s"')
ret.append('COMMENT:"\\s"')
return ret
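
# Hedged illustration (not part of the original class): the argument list
# that __common_opts/__common_ds_opts assemble for rrdtool.graph looks
# roughly like the following for a single 'data_length' datasource. The
# paths, timestamp and RRD location are invented for the example.
#
#     rrdtool.graph([
#         '/path/to/media/graphs/server_srv1.yesterday.full.png',
#         '-s', '1358000000', '--width', '640', '-e', 'now',
#         '--title', 'Server Aggregate - srv1',
#         'DEF:avg_data_length=/rrd/srv1/server_srv1.rrd:data_length:AVERAGE',
#         'VDEF:v_last_data_length=avg_data_length,LAST',
#         'AREA:avg_data_length#00ff40:"Data Length":STACK',
#         'GPRINT:v_last_data_length:"Current\\: %0.2lf%s"',
#     ])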
| gpl-2.0 | -9,085,685,221,464,438,000 | 42.562044 | 127 | 0.525637 | false |
florian-wagner/gimli | python/pygimli/gui/vtk/wxVTKRenderWindowInteractor.py | 1 | 24830 | # -*- coding: utf-8 -*-
"""
A VTK RenderWindowInteractor widget for wxPython.
Find wxPython info at http://wxPython.org
Created by Prabhu Ramachandran, April 2002
Based on wxVTKRenderWindow.py
Fixes and updates by Charl P. Botha 2003-2008
Updated to new wx namespace and some cleaning up by Andrea Gavana,
December 2006
"""
"""
Please see the example at the end of this file.
----------------------------------------
Creation:
wxVTKRenderWindowInteractor(parent, ID, stereo=0, [wx keywords]):
You should create a wx.PySimpleApp() or some other wx**App before
creating the window.
Behaviour:
Uses __getattr__ to make the wxVTKRenderWindowInteractor behave just
like a vtkGenericRenderWindowInteractor.
----------------------------------------
"""
# import usual libraries
import math
import sys
import os
baseClass = object
_useCapture = None
try:
import wx
# a few configuration items, see what works best on your system
# Use GLCanvas as base class instead of wx.Window.
# This is sometimes necessary under wxGTK or the image is blank.
# (in wxWindows 2.3.1 and earlier, the GLCanvas had scroll bars)
if wx.Platform == "__WXGTK__":
import wx.glcanvas
baseClass = wx.glcanvas.GLCanvas
# Keep capturing mouse after mouse is dragged out of window
# (in wxGTK 2.3.2 there is a bug that keeps this from working,
# but it is only relevant in wxGTK if there are multiple windows)
_useCapture = (wx.Platform == "__WXMSW__")
except ImportError:
    sys.stderr.write("No proper wx installed.\n")
try:
import vtk
except ImportError:
    sys.stderr.write("No proper vtk installed.\n")
# end of configuration items
class EventTimer(wx.Timer):
"""Simple wx.Timer class."""
def __init__(self, iren):
"""
Default class constructor.
@param iren: current render window
"""
wx.Timer.__init__(self)
self.iren = iren
def Notify(self):
"""The timer has expired."""
self.iren.TimerEvent()
class wxVTKRenderWindowInteractor(baseClass):
"""
A wxRenderWindow for wxPython.
Use GetRenderWindow() to get the vtkRenderWindow.
Create with the keyword stereo=1 in order to
generate a stereo-capable window.
"""
# class variable that can also be used to request instances that use
# stereo; this is overridden by the stereo=1/0 parameter. If you set
# it to True, the NEXT instantiated object will attempt to allocate a
# stereo visual. E.g.:
# wxVTKRenderWindowInteractor.USE_STEREO = True
# myRWI = wxVTKRenderWindowInteractor(parent, -1)
USE_STEREO = False
def __init__(self, parent, ID, *args, **kw):
"""
Default class constructor.
@param parent: parent window
@param ID: window id
@param **kw: wxPython keywords (position, size, style) plus the
'stereo' keyword
"""
# private attributes
self.__RenderWhenDisabled = 0
# First do special handling of some keywords:
# stereo, position, size, style
stereo = 0
if 'stereo' in kw:
if kw['stereo']:
stereo = 1
del kw['stereo']
elif self.USE_STEREO:
stereo = 1
position, size = wx.DefaultPosition, wx.DefaultSize
if 'position' in kw:
position = kw['position']
del kw['position']
if 'size' in kw:
size = kw['size']
del kw['size']
# wx.WANTS_CHARS says to give us e.g. TAB
# wx.NO_FULL_REPAINT_ON_RESIZE cuts down resize flicker under GTK
style = wx.WANTS_CHARS | wx.NO_FULL_REPAINT_ON_RESIZE
if 'style' in kw:
style = style | kw['style']
del kw['style']
# the enclosing frame must be shown under GTK or the windows
# don't connect together properly
if wx.Platform != '__WXMSW__':
l = []
p = parent
while p: # make a list of all parents
l.append(p)
p = p.GetParent()
l.reverse() # sort list into descending order
for p in l:
p.Show(1)
if baseClass.__name__ == 'GLCanvas':
# code added by cpbotha to enable stereo and double
# buffering correctly where the user requests this; remember
# that the glXContext in this case is NOT allocated by VTK,
# but by WX, hence all of this.
# Initialize GLCanvas with correct attriblist
attribList = [wx.glcanvas.WX_GL_RGBA,
wx.glcanvas.WX_GL_MIN_RED, 1,
wx.glcanvas.WX_GL_MIN_GREEN, 1,
wx.glcanvas.WX_GL_MIN_BLUE, 1,
wx.glcanvas.WX_GL_DEPTH_SIZE, 16,
wx.glcanvas.WX_GL_DOUBLEBUFFER]
if stereo:
attribList.append(wx.glcanvas.WX_GL_STEREO)
try:
                baseClass.__init__(self, parent, id=ID, pos=position,
                                   size=size, style=style,
                                   attribList=attribList)
except wx.PyAssertionError:
# visual couldn't be allocated, so we go back to default
baseClass.__init__(self, parent, ID, position, size, style)
if stereo:
# and make sure everyone knows that the stereo
# visual wasn't set.
stereo = 0
else:
baseClass.__init__(self, parent, ID, position, size, style)
# create the RenderWindow and initialize it
self._Iren = vtk.vtkGenericRenderWindowInteractor()
self._Iren.SetRenderWindow( vtk.vtkRenderWindow() )
self._Iren.AddObserver('CreateTimerEvent', self.CreateTimer)
self._Iren.AddObserver('DestroyTimerEvent', self.DestroyTimer)
self._Iren.GetRenderWindow().AddObserver('CursorChangedEvent',
self.CursorChangedEvent)
try:
self._Iren.GetRenderWindow().SetSize(size.width, size.height)
except AttributeError:
self._Iren.GetRenderWindow().SetSize(size[0], size[1])
if stereo:
self._Iren.GetRenderWindow().StereoCapableWindowOn()
self._Iren.GetRenderWindow().SetStereoTypeToCrystalEyes()
self.__handle = None
self.BindEvents()
# with this, we can make sure that the reparenting logic in
# Render() isn't called before the first OnPaint() has
# successfully been run (and set up the VTK/WX display links)
self.__has_painted = False
# set when we have captured the mouse.
self._own_mouse = False
# used to store WHICH mouse button led to mouse capture
self._mouse_capture_button = 0
# A mapping for cursor changes.
self._cursor_map = {0: wx.CURSOR_ARROW, # VTK_CURSOR_DEFAULT
1: wx.CURSOR_ARROW, # VTK_CURSOR_ARROW
2: wx.CURSOR_SIZENESW, # VTK_CURSOR_SIZENE
3: wx.CURSOR_SIZENWSE, # VTK_CURSOR_SIZENWSE
4: wx.CURSOR_SIZENESW, # VTK_CURSOR_SIZESW
5: wx.CURSOR_SIZENWSE, # VTK_CURSOR_SIZESE
6: wx.CURSOR_SIZENS, # VTK_CURSOR_SIZENS
7: wx.CURSOR_SIZEWE, # VTK_CURSOR_SIZEWE
8: wx.CURSOR_SIZING, # VTK_CURSOR_SIZEALL
9: wx.CURSOR_HAND, # VTK_CURSOR_HAND
10: wx.CURSOR_CROSS, # VTK_CURSOR_CROSSHAIR
}
def BindEvents(self):
"""Binds all the necessary events for navigation, sizing, drawing."""
# refresh window by doing a Render
self.Bind(wx.EVT_PAINT, self.OnPaint)
# turn off background erase to reduce flicker
self.Bind(wx.EVT_ERASE_BACKGROUND, lambda e: None)
# Bind the events to the event converters
self.Bind(wx.EVT_RIGHT_DOWN, self.OnButtonDown)
self.Bind(wx.EVT_LEFT_DOWN, self.OnButtonDown)
self.Bind(wx.EVT_MIDDLE_DOWN, self.OnButtonDown)
self.Bind(wx.EVT_RIGHT_UP, self.OnButtonUp)
self.Bind(wx.EVT_LEFT_UP, self.OnButtonUp)
self.Bind(wx.EVT_MIDDLE_UP, self.OnButtonUp)
self.Bind(wx.EVT_MOUSEWHEEL, self.OnMouseWheel)
self.Bind(wx.EVT_MOTION, self.OnMotion)
self.Bind(wx.EVT_ENTER_WINDOW, self.OnEnter)
self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeave)
# If we use EVT_KEY_DOWN instead of EVT_CHAR, capital versions
# of all characters are always returned. EVT_CHAR also performs
# other necessary keyboard-dependent translations.
self.Bind(wx.EVT_CHAR, self.OnKeyDown)
self.Bind(wx.EVT_KEY_UP, self.OnKeyUp)
self.Bind(wx.EVT_SIZE, self.OnSize)
# the wx 2.8.7.1 documentation states that you HAVE to handle
# this event if you make use of CaptureMouse, which we do.
if _useCapture and hasattr(wx, 'EVT_MOUSE_CAPTURE_LOST'):
self.Bind(wx.EVT_MOUSE_CAPTURE_LOST,
self.OnMouseCaptureLost)
def __getattr__(self, attr):
"""Makes the object behave like a vtkGenericRenderWindowInteractor."""
if attr == '__vtk__':
return lambda t=self._Iren: t
elif hasattr(self._Iren, attr):
return getattr(self._Iren, attr)
else:
raise AttributeError(self.__class__.__name__ + \
" has no attribute named " + attr)
def CreateTimer(self, obj, evt):
"""Creates a timer."""
self._timer = EventTimer(self)
self._timer.Start(10, True)
def DestroyTimer(self, obj, evt):
"""The timer is a one shot timer so will expire automatically."""
return 1
def _CursorChangedEvent(self, obj, evt):
"""Change the wx cursor if the renderwindow's cursor was changed."""
cur = self._cursor_map[obj.GetCurrentCursor()]
c = wx.StockCursor(cur)
self.SetCursor(c)
def CursorChangedEvent(self, obj, evt):
"""Called when the CursorChangedEvent fires on the render window."""
# This indirection is needed since when the event fires, the
# current cursor is not yet set so we defer this by which time
# the current cursor should have been set.
wx.CallAfter(self._CursorChangedEvent, obj, evt)
def HideCursor(self):
"""Hides the cursor."""
c = wx.StockCursor(wx.CURSOR_BLANK)
self.SetCursor(c)
def ShowCursor(self):
"""Shows the cursor."""
rw = self._Iren.GetRenderWindow()
cur = self._cursor_map[rw.GetCurrentCursor()]
c = wx.StockCursor(cur)
self.SetCursor(c)
def GetDisplayId(self):
"""
Function to get X11 Display ID from WX and return it in a format that
can be used by VTK Python.
We query the X11 Display with a new call that was added in wxPython
2.6.0.1. The call returns a SWIG object which we can query for the
address and subsequently turn into an old-style SWIG-mangled string
representation to pass to VTK.
"""
d = None
try:
d = wx.GetXDisplay()
except NameError:
# wx.GetXDisplay was added by Robin Dunn in wxPython 2.6.0.1
# if it's not available, we can't pass it. In general,
# things will still work; on some setups, it'll break.
pass
else:
# wx returns None on platforms where wx.GetXDisplay is not relevant
if d:
d = hex(d)
# On wxPython-2.6.3.2 and above there is no leading '0x'.
if not d.startswith('0x'):
d = '0x' + d
# we now have 0xdeadbeef
# VTK wants it as: _deadbeef_void_p (pre-SWIG-1.3 style)
d = '_%s_%s' % (d[2:], 'void_p')
return d
def OnMouseCaptureLost(self, event):
"""
This is signalled when we lose mouse capture due to an external event,
such as when a dialog box is shown.
See the wx documentation.
"""
# the documentation seems to imply that by this time we've
# already lost capture. I have to assume that we don't need
# to call ReleaseMouse ourselves.
if _useCapture and self._own_mouse:
self._own_mouse = False
def OnPaint(self,event):
"""Handles the wx.EVT_PAINT event for wxVTKRenderWindowInteractor."""
# wx should continue event processing after this handler.
# We call this BEFORE Render(), so that if Render() raises
# an exception, wx doesn't re-call OnPaint repeatedly.
event.Skip()
dc = wx.PaintDC(self)
# make sure the RenderWindow is sized correctly
self._Iren.GetRenderWindow().SetSize(self.GetSizeTuple())
# Tell the RenderWindow to render inside the wx.Window.
if not self.__handle:
# on relevant platforms, set the X11 Display ID
d = self.GetDisplayId()
if d:
self._Iren.GetRenderWindow().SetDisplayId(d)
# store the handle
self.__handle = self.GetHandle()
# and give it to VTK
self._Iren.GetRenderWindow().SetWindowInfo(str(self.__handle))
# now that we've painted once, the Render() reparenting logic
# is safe
self.__has_painted = True
self.Render()
def OnSize(self,event):
"""Handles the wx.EVT_SIZE event for wxVTKRenderWindowInteractor."""
# event processing should continue (we call this before the
# Render(), in case it raises an exception)
event.Skip()
try:
width, height = event.GetSize()
        except Exception:
width = event.GetSize().width
height = event.GetSize().height
self._Iren.SetSize(width, height)
self._Iren.ConfigureEvent()
# this will check for __handle
self.Render()
def OnMotion(self,event):
"""Handles the wx.EVT_MOTION event for wxVTKRenderWindowInteractor."""
# event processing should continue
# we call this early in case any of the VTK code raises an
# exception.
event.Skip()
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
event.ControlDown(),
event.ShiftDown(),
chr(0), 0, None)
self._Iren.MouseMoveEvent()
def OnEnter(self,event):
"""Handles the wx.EVT_ENTER_WINDOW event for
wxVTKRenderWindowInteractor."""
# event processing should continue
event.Skip()
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
event.ControlDown(),
event.ShiftDown(),
chr(0), 0, None)
self._Iren.EnterEvent()
def OnLeave(self,event):
"""Handles the wx.EVT_LEAVE_WINDOW event for
wxVTKRenderWindowInteractor."""
# event processing should continue
event.Skip()
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
event.ControlDown(),
event.ShiftDown(),
chr(0), 0, None)
self._Iren.LeaveEvent()
def OnButtonDown(self,event):
"""Handles the wx.EVT_LEFT/RIGHT/MIDDLE_DOWN events for
wxVTKRenderWindowInteractor."""
# allow wx event processing to continue
# on wxPython 2.6.0.1, omitting this will cause problems with
# the initial focus, resulting in the wxVTKRWI ignoring keypresses
# until we focus elsewhere and then refocus the wxVTKRWI frame
# we do it this early in case any of the following VTK code
# raises an exception.
event.Skip()
ctrl, shift = event.ControlDown(), event.ShiftDown()
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
ctrl, shift, chr(0), 0, None)
button = 0
if event.RightDown():
self._Iren.RightButtonPressEvent()
button = 'Right'
elif event.LeftDown():
self._Iren.LeftButtonPressEvent()
button = 'Left'
elif event.MiddleDown():
self._Iren.MiddleButtonPressEvent()
button = 'Middle'
# save the button and capture mouse until the button is released
# we only capture the mouse if it hasn't already been captured
if _useCapture and not self._own_mouse:
self._own_mouse = True
self._mouse_capture_button = button
self.CaptureMouse()
def OnButtonUp(self,event):
"""Handles the wx.EVT_LEFT/RIGHT/MIDDLE_UP events for
wxVTKRenderWindowInteractor."""
# event processing should continue
event.Skip()
button = 0
if event.RightUp():
button = 'Right'
elif event.LeftUp():
button = 'Left'
elif event.MiddleUp():
button = 'Middle'
# if the same button is released that captured the mouse, and
# we have the mouse, release it.
# (we need to get rid of this as soon as possible; if we don't
# and one of the event handlers raises an exception, mouse
# is never released.)
if _useCapture and self._own_mouse and \
button==self._mouse_capture_button:
self.ReleaseMouse()
self._own_mouse = False
ctrl, shift = event.ControlDown(), event.ShiftDown()
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
ctrl, shift, chr(0), 0, None)
if button == 'Right':
self._Iren.RightButtonReleaseEvent()
elif button == 'Left':
self._Iren.LeftButtonReleaseEvent()
elif button == 'Middle':
self._Iren.MiddleButtonReleaseEvent()
def OnMouseWheel(self,event):
"""Handles the wx.EVT_MOUSEWHEEL event for
wxVTKRenderWindowInteractor."""
# event processing should continue
event.Skip()
ctrl, shift = event.ControlDown(), event.ShiftDown()
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
ctrl, shift, chr(0), 0, None)
if event.GetWheelRotation() > 0:
self._Iren.MouseWheelForwardEvent()
else:
self._Iren.MouseWheelBackwardEvent()
def OnKeyDown(self,event):
"""Handles the wx.EVT_KEY_DOWN event for
wxVTKRenderWindowInteractor."""
# event processing should continue
event.Skip()
ctrl, shift = event.ControlDown(), event.ShiftDown()
keycode, keysym = event.GetKeyCode(), None
key = chr(0)
if keycode < 256:
key = chr(keycode)
# wxPython 2.6.0.1 does not return a valid event.Get{X,Y}()
# for this event, so we use the cached position.
        x, y = self._Iren.GetEventPosition()
self._Iren.SetEventInformation(x, y,
ctrl, shift, key, 0,
keysym)
self._Iren.KeyPressEvent()
self._Iren.CharEvent()
def OnKeyUp(self,event):
"""Handles the wx.EVT_KEY_UP event for wxVTKRenderWindowInteractor."""
# event processing should continue
event.Skip()
ctrl, shift = event.ControlDown(), event.ShiftDown()
keycode, keysym = event.GetKeyCode(), None
key = chr(0)
if keycode < 256:
key = chr(keycode)
self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(),
ctrl, shift, key, 0,
keysym)
self._Iren.KeyReleaseEvent()
def GetRenderWindow(self):
"""Returns the render window (vtkRenderWindow)."""
return self._Iren.GetRenderWindow()
def Render(self):
"""Actually renders the VTK scene on screen."""
RenderAllowed = 1
if not self.__RenderWhenDisabled:
# the user doesn't want us to render when the toplevel frame
# is disabled - first find the top level parent
topParent = wx.GetTopLevelParent(self)
if topParent:
# if it exists, check whether it's enabled
                # if it's not enabled, RenderAllowed will be false
RenderAllowed = topParent.IsEnabled()
if RenderAllowed:
if self.__handle and self.__handle == self.GetHandle():
self._Iren.GetRenderWindow().Render()
elif self.GetHandle() and self.__has_painted:
# this means the user has reparented us; let's adapt to the
# new situation by doing the WindowRemap dance
self._Iren.GetRenderWindow().SetNextWindowInfo(
str(self.GetHandle()))
# make sure the DisplayId is also set correctly
d = self.GetDisplayId()
if d:
self._Iren.GetRenderWindow().SetDisplayId(d)
# do the actual remap with the new parent information
self._Iren.GetRenderWindow().WindowRemap()
# store the new situation
self.__handle = self.GetHandle()
self._Iren.GetRenderWindow().Render()
def SetRenderWhenDisabled(self, newValue):
"""
Change value of __RenderWhenDisabled ivar.
If __RenderWhenDisabled is false (the default), this widget will not
call Render() on the RenderWindow if the top level frame (i.e. the
containing frame) has been disabled.
This prevents recursive rendering during wx.SafeYield() calls.
wx.SafeYield() can be called during the ProgressMethod() callback of
a VTK object to have progress bars and other GUI elements updated -
it does this by disabling all windows (disallowing user-input to
prevent re-entrancy of code) and then handling all outstanding
GUI events.
However, this often triggers an OnPaint() method for wxVTKRWIs,
resulting in a Render(), resulting in Update() being called whilst
still in progress.
"""
self.__RenderWhenDisabled = bool(newValue)
#--------------------------------------------------------------------
def wxVTKRenderWindowInteractorConeExample():
"""Like it says, just a simple example."""
# every wx app needs an app
app = wx.PySimpleApp()
# create the top-level frame, sizer and wxVTKRWI
frame = wx.Frame(None, -1, "wxVTKRenderWindowInteractor", size=(400,400))
widget = wxVTKRenderWindowInteractor(frame, -1)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(widget, 1, wx.EXPAND)
frame.SetSizer(sizer)
frame.Layout()
# It would be more correct (API-wise) to call widget.Initialize() and
# widget.Start() here, but Initialize() calls RenderWindow.Render().
# That Render() call will get through before we can setup the
# RenderWindow() to render via the wxWidgets-created context; this
# causes flashing on some platforms and downright breaks things on
# other platforms. Instead, we call widget.Enable(). This means
# that the RWI::Initialized ivar is not set, but in THIS SPECIFIC CASE,
# that doesn't matter.
widget.Enable(1)
widget.AddObserver("ExitEvent", lambda o,e,f=frame: f.Close())
ren = vtk.vtkRenderer()
widget.GetRenderWindow().AddRenderer(ren)
cone = vtk.vtkConeSource()
cone.SetResolution(8)
coneMapper = vtk.vtkPolyDataMapper()
coneMapper.SetInput(cone.GetOutput())
coneActor = vtk.vtkActor()
coneActor.SetMapper(coneMapper)
ren.AddActor(coneActor)
# show the window
frame.Show()
app.MainLoop()
if __name__ == "__main__":
wxVTKRenderWindowInteractorConeExample()
| gpl-3.0 | -454,796,966,052,879,700 | 34.573066 | 102 | 0.57499 | false |
spencerlyon2/pygments | pygments/lexers/_clbuiltins.py | 2 | 14050 | # -*- coding: utf-8 -*-
"""
pygments.lexers._clbuiltins
~~~~~~~~~~~~~~~~~~~~~~~~~~~
ANSI Common Lisp builtins.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
BUILTIN_FUNCTIONS = set(( # 638 functions
'<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
'adjustable-array-p', 'adjust-array', 'allocate-instance',
'alpha-char-p', 'alphanumericp', 'append', 'apply', 'apropos',
'apropos-list', 'aref', 'arithmetic-error-operands',
'arithmetic-error-operation', 'array-dimension', 'array-dimensions',
'array-displacement', 'array-element-type', 'array-has-fill-pointer-p',
'array-in-bounds-p', 'arrayp', 'array-rank', 'array-row-major-index',
'array-total-size', 'ash', 'asin', 'asinh', 'assoc', 'assoc-if',
'assoc-if-not', 'atan', 'atanh', 'atom', 'bit', 'bit-and', 'bit-andc1',
'bit-andc2', 'bit-eqv', 'bit-ior', 'bit-nand', 'bit-nor', 'bit-not',
'bit-orc1', 'bit-orc2', 'bit-vector-p', 'bit-xor', 'boole',
'both-case-p', 'boundp', 'break', 'broadcast-stream-streams',
'butlast', 'byte', 'byte-position', 'byte-size', 'caaaar', 'caaadr',
'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', 'cadadr',
'cadar', 'caddar', 'cadddr', 'caddr', 'cadr', 'call-next-method', 'car',
'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr',
'ceiling', 'cell-error-name', 'cerror', 'change-class', 'char', 'char<',
'char<=', 'char=', 'char>', 'char>=', 'char/=', 'character',
'characterp', 'char-code', 'char-downcase', 'char-equal',
'char-greaterp', 'char-int', 'char-lessp', 'char-name',
'char-not-equal', 'char-not-greaterp', 'char-not-lessp', 'char-upcase',
'cis', 'class-name', 'class-of', 'clear-input', 'clear-output',
'close', 'clrhash', 'code-char', 'coerce', 'compile',
'compiled-function-p', 'compile-file', 'compile-file-pathname',
'compiler-macro-function', 'complement', 'complex', 'complexp',
'compute-applicable-methods', 'compute-restarts', 'concatenate',
'concatenated-stream-streams', 'conjugate', 'cons', 'consp',
'constantly', 'constantp', 'continue', 'copy-alist', 'copy-list',
'copy-pprint-dispatch', 'copy-readtable', 'copy-seq', 'copy-structure',
'copy-symbol', 'copy-tree', 'cos', 'cosh', 'count', 'count-if',
'count-if-not', 'decode-float', 'decode-universal-time', 'delete',
'delete-duplicates', 'delete-file', 'delete-if', 'delete-if-not',
'delete-package', 'denominator', 'deposit-field', 'describe',
'describe-object', 'digit-char', 'digit-char-p', 'directory',
'directory-namestring', 'disassemble', 'documentation', 'dpb',
'dribble', 'echo-stream-input-stream', 'echo-stream-output-stream',
'ed', 'eighth', 'elt', 'encode-universal-time', 'endp',
'enough-namestring', 'ensure-directories-exist',
'ensure-generic-function', 'eq', 'eql', 'equal', 'equalp', 'error',
'eval', 'evenp', 'every', 'exp', 'export', 'expt', 'fboundp',
'fceiling', 'fdefinition', 'ffloor', 'fifth', 'file-author',
'file-error-pathname', 'file-length', 'file-namestring',
'file-position', 'file-string-length', 'file-write-date',
'fill', 'fill-pointer', 'find', 'find-all-symbols', 'find-class',
'find-if', 'find-if-not', 'find-method', 'find-package', 'find-restart',
'find-symbol', 'finish-output', 'first', 'float', 'float-digits',
'floatp', 'float-precision', 'float-radix', 'float-sign', 'floor',
'fmakunbound', 'force-output', 'format', 'fourth', 'fresh-line',
'fround', 'ftruncate', 'funcall', 'function-keywords',
'function-lambda-expression', 'functionp', 'gcd', 'gensym', 'gentemp',
'get', 'get-decoded-time', 'get-dispatch-macro-character', 'getf',
'gethash', 'get-internal-real-time', 'get-internal-run-time',
'get-macro-character', 'get-output-stream-string', 'get-properties',
'get-setf-expansion', 'get-universal-time', 'graphic-char-p',
'hash-table-count', 'hash-table-p', 'hash-table-rehash-size',
'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
'host-namestring', 'identity', 'imagpart', 'import',
'initialize-instance', 'input-stream-p', 'inspect',
'integer-decode-float', 'integer-length', 'integerp',
'interactive-stream-p', 'intern', 'intersection',
'invalid-method-error', 'invoke-debugger', 'invoke-restart',
'invoke-restart-interactively', 'isqrt', 'keywordp', 'last', 'lcm',
'ldb', 'ldb-test', 'ldiff', 'length', 'lisp-implementation-type',
'lisp-implementation-version', 'list', 'list*', 'list-all-packages',
'listen', 'list-length', 'listp', 'load',
'load-logical-pathname-translations', 'log', 'logand', 'logandc1',
'logandc2', 'logbitp', 'logcount', 'logeqv', 'logical-pathname',
'logical-pathname-translations', 'logior', 'lognand', 'lognor',
'lognot', 'logorc1', 'logorc2', 'logtest', 'logxor', 'long-site-name',
'lower-case-p', 'machine-instance', 'machine-type', 'machine-version',
'macroexpand', 'macroexpand-1', 'macro-function', 'make-array',
'make-broadcast-stream', 'make-concatenated-stream', 'make-condition',
'make-dispatch-macro-character', 'make-echo-stream', 'make-hash-table',
'make-instance', 'make-instances-obsolete', 'make-list',
'make-load-form', 'make-load-form-saving-slots', 'make-package',
'make-pathname', 'make-random-state', 'make-sequence', 'make-string',
'make-string-input-stream', 'make-string-output-stream', 'make-symbol',
'make-synonym-stream', 'make-two-way-stream', 'makunbound', 'map',
'mapc', 'mapcan', 'mapcar', 'mapcon', 'maphash', 'map-into', 'mapl',
'maplist', 'mask-field', 'max', 'member', 'member-if', 'member-if-not',
'merge', 'merge-pathnames', 'method-combination-error',
'method-qualifiers', 'min', 'minusp', 'mismatch', 'mod',
'muffle-warning', 'name-char', 'namestring', 'nbutlast', 'nconc',
'next-method-p', 'nintersection', 'ninth', 'no-applicable-method',
'no-next-method', 'not', 'notany', 'notevery', 'nreconc', 'nreverse',
'nset-difference', 'nset-exclusive-or', 'nstring-capitalize',
'nstring-downcase', 'nstring-upcase', 'nsublis', 'nsubst', 'nsubst-if',
'nsubst-if-not', 'nsubstitute', 'nsubstitute-if', 'nsubstitute-if-not',
'nth', 'nthcdr', 'null', 'numberp', 'numerator', 'nunion', 'oddp',
'open', 'open-stream-p', 'output-stream-p', 'package-error-package',
'package-name', 'package-nicknames', 'packagep',
'package-shadowing-symbols', 'package-used-by-list', 'package-use-list',
'pairlis', 'parse-integer', 'parse-namestring', 'pathname',
'pathname-device', 'pathname-directory', 'pathname-host',
'pathname-match-p', 'pathname-name', 'pathnamep', 'pathname-type',
'pathname-version', 'peek-char', 'phase', 'plusp', 'position',
'position-if', 'position-if-not', 'pprint', 'pprint-dispatch',
'pprint-fill', 'pprint-indent', 'pprint-linear', 'pprint-newline',
'pprint-tab', 'pprint-tabular', 'prin1', 'prin1-to-string', 'princ',
'princ-to-string', 'print', 'print-object', 'probe-file', 'proclaim',
'provide', 'random', 'random-state-p', 'rassoc', 'rassoc-if',
'rassoc-if-not', 'rational', 'rationalize', 'rationalp', 'read',
'read-byte', 'read-char', 'read-char-no-hang', 'read-delimited-list',
'read-from-string', 'read-line', 'read-preserving-whitespace',
'read-sequence', 'readtable-case', 'readtablep', 'realp', 'realpart',
'reduce', 'reinitialize-instance', 'rem', 'remhash', 'remove',
'remove-duplicates', 'remove-if', 'remove-if-not', 'remove-method',
'remprop', 'rename-file', 'rename-package', 'replace', 'require',
'rest', 'restart-name', 'revappend', 'reverse', 'room', 'round',
'row-major-aref', 'rplaca', 'rplacd', 'sbit', 'scale-float', 'schar',
'search', 'second', 'set', 'set-difference',
'set-dispatch-macro-character', 'set-exclusive-or',
'set-macro-character', 'set-pprint-dispatch', 'set-syntax-from-char',
'seventh', 'shadow', 'shadowing-import', 'shared-initialize',
'short-site-name', 'signal', 'signum', 'simple-bit-vector-p',
'simple-condition-format-arguments', 'simple-condition-format-control',
'simple-string-p', 'simple-vector-p', 'sin', 'sinh', 'sixth', 'sleep',
'slot-boundp', 'slot-exists-p', 'slot-makunbound', 'slot-missing',
'slot-unbound', 'slot-value', 'software-type', 'software-version',
'some', 'sort', 'special-operator-p', 'sqrt', 'stable-sort',
'standard-char-p', 'store-value', 'stream-element-type',
'stream-error-stream', 'stream-external-format', 'streamp', 'string',
'string<', 'string<=', 'string=', 'string>', 'string>=', 'string/=',
'string-capitalize', 'string-downcase', 'string-equal',
'string-greaterp', 'string-left-trim', 'string-lessp',
'string-not-equal', 'string-not-greaterp', 'string-not-lessp',
'stringp', 'string-right-trim', 'string-trim', 'string-upcase',
'sublis', 'subseq', 'subsetp', 'subst', 'subst-if', 'subst-if-not',
    'substitute', 'substitute-if', 'substitute-if-not', 'subtypep', 'svref',
'sxhash', 'symbol-function', 'symbol-name', 'symbolp', 'symbol-package',
'symbol-plist', 'symbol-value', 'synonym-stream-symbol', 'syntax:',
'tailp', 'tan', 'tanh', 'tenth', 'terpri', 'third',
'translate-logical-pathname', 'translate-pathname', 'tree-equal',
'truename', 'truncate', 'two-way-stream-input-stream',
'two-way-stream-output-stream', 'type-error-datum',
'type-error-expected-type', 'type-of', 'typep', 'unbound-slot-instance',
'unexport', 'unintern', 'union', 'unread-char', 'unuse-package',
'update-instance-for-different-class',
'update-instance-for-redefined-class', 'upgraded-array-element-type',
'upgraded-complex-part-type', 'upper-case-p', 'use-package',
'user-homedir-pathname', 'use-value', 'values', 'values-list', 'vector',
'vectorp', 'vector-pop', 'vector-push', 'vector-push-extend', 'warn',
'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
'y-or-n-p', 'zerop',
))
SPECIAL_FORMS = set((
'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
'unwind-protect',
))
MACROS = set((
'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
'define-compiler-macro', 'define-condition', 'define-method-combination',
'define-modify-macro', 'define-setf-expander', 'define-symbol-macro',
'defmacro', 'defmethod', 'defpackage', 'defparameter', 'defsetf',
'defstruct', 'deftype', 'defun', 'defvar', 'destructuring-bind', 'do',
'do*', 'do-all-symbols', 'do-external-symbols', 'dolist', 'do-symbols',
'dotimes', 'ecase', 'etypecase', 'formatter', 'handler-bind',
'handler-case', 'ignore-errors', 'incf', 'in-package', 'lambda', 'loop',
'loop-finish', 'make-method', 'multiple-value-bind', 'multiple-value-list',
'multiple-value-setq', 'nth-value', 'or', 'pop',
'pprint-exit-if-list-exhausted', 'pprint-logical-block', 'pprint-pop',
'print-unreadable-object', 'prog', 'prog*', 'prog1', 'prog2', 'psetf',
'psetq', 'push', 'pushnew', 'remf', 'restart-bind', 'restart-case',
'return', 'rotatef', 'setf', 'shiftf', 'step', 'time', 'trace', 'typecase',
'unless', 'untrace', 'when', 'with-accessors', 'with-compilation-unit',
'with-condition-restarts', 'with-hash-table-iterator',
'with-input-from-string', 'with-open-file', 'with-open-stream',
'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
'with-slots', 'with-standard-io-syntax',
))
LAMBDA_LIST_KEYWORDS = set((
'&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
'&rest', '&whole',
))
DECLARATIONS = set((
'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
'ignorable', 'notinline', 'type',
))
BUILTIN_TYPES = set((
'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
'simple-array', 'simple-base-string', 'simple-bit-vector', 'simple-string',
'simple-vector', 'standard-char', 'unsigned-byte',
# Condition Types
'arithmetic-error', 'cell-error', 'condition', 'control-error',
'division-by-zero', 'end-of-file', 'error', 'file-error',
'floating-point-inexact', 'floating-point-overflow',
'floating-point-underflow', 'floating-point-invalid-operation',
'parse-error', 'package-error', 'print-not-readable', 'program-error',
'reader-error', 'serious-condition', 'simple-condition', 'simple-error',
'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
'undefined-function', 'warning',
))
BUILTIN_CLASSES = set((
'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
'file-stream', 'float', 'function', 'generic-function', 'hash-table',
'integer', 'list', 'logical-pathname', 'method-combination', 'method',
'null', 'number', 'package', 'pathname', 'ratio', 'rational', 'readtable',
'real', 'random-state', 'restart', 'sequence', 'standard-class',
'standard-generic-function', 'standard-method', 'standard-object',
'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
))
| bsd-2-clause | 8,959,820,288,131,650,000 | 59.560345 | 80 | 0.629751 | false |
endlessm/chromium-browser | tools/style_variable_generator/css_generator.py | 1 | 2204 | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from base_generator import Color, Modes, BaseGenerator
class CSSStyleGenerator(BaseGenerator):
'''Generator for CSS Variables'''
def Render(self):
self.Validate()
return self.ApplyTemplate(self, 'css_generator.tmpl',
self.GetParameters())
def GetParameters(self):
return {
'light_variables': self._mode_variables[Modes.LIGHT],
'dark_variables': self._mode_variables[Modes.DARK],
}
def GetFilters(self):
return {
'to_var_name': self._ToVarName,
'css_color': self._CssColor,
'css_color_rgb': self._CssColorRGB,
}
def GetGlobals(self):
return {
'css_color_from_rgb_var': self._CssColorFromRGBVar,
}
def _ToVarName(self, var_name):
return '--%s' % var_name.replace('_', '-')
def _CssColor(self, c):
'''Returns the CSS color representation of |c|'''
assert (isinstance(c, Color))
if c.var:
return 'var(%s)' % self._ToVarName(c.var)
if c.rgb_var:
if c.a != 1:
return 'rgba(var(%s), %g)' % (self._ToVarName(c.rgb_var), c.a)
else:
return 'rgb(var(%s))' % self._ToVarName(c.rgb_var)
if c.a != 1:
return 'rgba(%d, %d, %d, %g)' % (c.r, c.g, c.b, c.a)
else:
return 'rgb(%d, %d, %d)' % (c.r, c.g, c.b)
def _CssColorRGB(self, c):
'''Returns the CSS rgb representation of |c|'''
if c.var:
return 'var(%s-rgb)' % self._ToVarName(c.var)
if c.rgb_var:
return 'var(%s)' % self._ToVarName(c.rgb_var)
return '%d, %d, %d' % (c.r, c.g, c.b)
def _CssColorFromRGBVar(self, name, alpha):
'''Returns the CSS color representation given a color name and alpha'''
if alpha != 1:
return 'rgba(var(%s-rgb), %g)' % (self._ToVarName(name), alpha)
else:
return 'rgb(var(%s-rgb))' % self._ToVarName(name)
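
# Hedged examples (not part of the original class) of what _CssColor
# produces, assuming Color instances from base_generator with the
# attributes used above (r/g/b/a, var, rgb_var):
#
#     r=255, g=0, b=0, a=1    -> 'rgb(255, 0, 0)'
#     r=255, g=0, b=0, a=0.5  -> 'rgba(255, 0, 0, 0.5)'
#     var='text_color'        -> 'var(--text-color)'
#     rgb_var='text_color_rgb', a=0.5 -> 'rgba(var(--text-color-rgb), 0.5)'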
| bsd-3-clause | 4,978,558,994,880,524,000 | 30.942029 | 79 | 0.534483 | false |
mmerce/python | bigml/tests/create_forecast_steps.py | 1 | 1792 | # -*- coding: utf-8 -*-
#
# Copyright 2017-2020 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import time
from nose.tools import assert_almost_equals, eq_
from datetime import datetime
from .world import world
from bigml.api import HTTP_CREATED
from bigml.api import FINISHED, FAULTY
from bigml.api import get_status
from .read_forecast_steps import i_get_the_forecast
def i_create_a_forecast(step, data=None):
if data is None:
data = "{}"
time_series = world.time_series['resource']
data = json.loads(data)
resource = world.api.create_forecast(time_series, data)
world.status = resource['code']
eq_(world.status, HTTP_CREATED)
world.location = resource['location']
world.forecast = resource['object']
world.forecasts.append(resource['resource'])
def the_forecast_is(step, predictions):
predictions = json.loads(predictions)
attrs = ["point_forecast", "model"]
for field_id in predictions:
forecast = world.forecast['forecast']['result'][field_id]
prediction = predictions[field_id]
eq_(len(forecast), len(prediction), "forecast: %s" % forecast)
for index in range(len(forecast)):
for attr in attrs:
eq_(forecast[index][attr], prediction[index][attr])
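
# Shape sketch (hypothetical values) of the JSON consumed above: a mapping
# from field id to a list of per-model forecasts, each carrying the
# attributes compared in the loop.
#
#     the_forecast_is(step, '{"000005": [{"point_forecast": [68.5, 68.6], '
#                           '"model": "A,N,N"}]}')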
| apache-2.0 | 7,671,300,440,271,742,000 | 34.137255 | 75 | 0.704241 | false |
terrycojones/dark-matter | dark/mutations.py | 1 | 16454 | import os
from collections import defaultdict
import numpy as np
try:
import matplotlib
if not os.environ.get('DISPLAY'):
# Use non-interactive Agg backend
matplotlib.use('Agg')
import matplotlib.pyplot as plt
except ImportError:
import platform
if platform.python_implementation() == 'PyPy':
        # PyPy doesn't have a version of matplotlib. Make a fake
        # object whose attribute lookups raise. This allows us to use
        # other 'dark' code that happens to import dark.mutations but not
        # use the functions that rely on matplotlib. (An instance is
        # needed because __getattr__ defined on a class does not intercept
        # attribute access on the class itself.)
        class _MatplotlibUnavailable(object):
            def __getattr__(self, _):
                raise NotImplementedError(
                    'matplotlib is not supported under pypy')
        plt = _MatplotlibUnavailable()
else:
raise
from random import choice, uniform
from dark import ncbidb
def basePlotter(blastHits, title):
"""
Plot the reads and the subject, so that bases in the reads which are
different from the subject are shown. Else a '.' is shown.
like so:
subject_gi ATGCGTACGTACGACACC
read_1 A......TTC..T
@param blastHits: A L{dark.blast.BlastHits} instance.
@param title: A C{str} sequence title that was matched by BLAST. We plot
the reads that matched this title.
"""
result = []
params = blastHits.plotParams
assert params is not None, ('Oops, it looks like you forgot to run '
'computePlotInfo.')
sequence = ncbidb.getSequence(title, blastHits.records.blastDb)
subject = sequence.seq
gi = title.split('|')[1]
sub = '%s\t \t \t%s' % (gi, subject)
result.append(sub)
plotInfo = blastHits.titles[title]['plotInfo']
assert plotInfo is not None, ('Oops, it looks like you forgot to run '
'computePlotInfo.')
items = plotInfo['items']
count = 0
for item in items:
count += 1
hsp = item['hsp']
queryTitle = blastHits.fasta[item['readNum']].id
# If the product of the subject and query frame values is +ve,
# then they're either both +ve or both -ve, so we just use the
# query as is. Otherwise, we need to reverse complement it.
if item['frame']['subject'] * item['frame']['query'] > 0:
query = blastHits.fasta[item['readNum']].seq
reverse = False
else:
# One of the subject or query has negative sense.
query = blastHits.fasta[
item['readNum']].reverse_complement().seq
reverse = True
query = query.upper()
queryStart = hsp['queryStart']
subjectStart = hsp['subjectStart']
queryEnd = hsp['queryEnd']
subjectEnd = hsp['subjectEnd']
# Before comparing the read to the subject, make a string of the
# same length as the subject, which contains the read and
# has ' ' where the read does not match.
# 3 parts need to be taken into account:
# 1) the left offset (if the query doesn't stick out to the left)
# 2) the query. if the frame is -1, it has to be reversed.
# The query consists of 3 parts: left, middle (control for gaps)
# 3) the right offset
# Do part 1) and 2).
if queryStart < 0:
# The query is sticking out to the left.
leftQuery = ''
if subjectStart == 0:
# The match starts at the first base of the subject.
middleLeftQuery = ''
else:
# The match starts into the subject.
# Determine the length of the not matching query
# part to the left.
leftOffset = -1 * queryStart
rightOffset = subjectStart + leftOffset
middleLeftQuery = query[leftOffset:rightOffset]
else:
# The query is not sticking out to the left
# make the left offset.
leftQuery = queryStart * ' '
leftQueryOffset = subjectStart - queryStart
middleLeftQuery = query[:leftQueryOffset]
# Do part 3).
# Disregard gaps in subject while adding.
matchQuery = item['origHsp'].query
matchSubject = item['origHsp'].sbjct
        mid = ''
        for index in range(len(matchQuery)):
            if matchSubject[index] != ' ':
                mid += matchQuery[index]
# if the query has been reversed, turn the matched part around
        if reverse:
            reverseDict = {' ': ' ', '-': '-', 'A': 'T', 'T': 'A',
                           'C': 'G', 'G': 'C', '.': '.', 'N': 'N'}
            mid = ''.join(reverseDict[base] for base in mid)[::-1]
middleQuery = middleLeftQuery + mid
# add right not-matching part of the query
rightQueryOffset = queryEnd - subjectEnd
rightQuery = query[-rightQueryOffset:]
middleQuery += rightQuery
read = leftQuery + middleQuery
# do part 3)
offset = len(subject) - len(read)
# if the read is sticking out to the right
# chop it off
if offset < 0:
read = read[:offset]
# if it's not sticking out, fill the space with ' '
elif offset > 0:
read += offset * ' '
# compare the subject and the read, make a string
# called 'comparison', which contains a '.' if the bases
# are equal and the letter of the read if they are not.
comparison = ''
        for readBase, subjectBase in zip(read, subject):
            if readBase == ' ':
                comparison += ' '
            elif readBase == subjectBase:
                comparison += '.'
            else:
                comparison += readBase
que = '%s \t %s' % (queryTitle, comparison)
result.append(que)
# sanity checks
assert (len(comparison) == len(subject)), (
'%d != %d' % (len(comparison), len(subject)))
index = 0
if comparison[index] == ' ':
index += 1
else:
start = index - 1
assert (start == queryStart or start == -1), (
'%s != %s or %s != -1' % (start, queryStart, start))
return result
def getAPOBECFrequencies(dotAlignment, orig, new, pattern):
"""
Gets mutation frequencies if they are in a certain pattern.
@param dotAlignment: result from calling basePlotter
@param orig: A C{str}, naming the original base
@param new: A C{str}, what orig was mutated to
    @param pattern: A C{str}, which pattern we're looking for
(must be one of 'cPattern', 'tPattern')
"""
cPattern = ['ACA', 'ACC', 'ACG', 'ACT', 'CCA', 'CCC', 'CCG', 'CCT',
'GCA', 'GCC', 'GCG', 'GCT', 'TCA', 'TCC', 'TCG', 'TCT']
tPattern = ['ATA', 'ATC', 'ATG', 'ATT', 'CTA', 'CTC', 'CTG', 'CTT',
'GTA', 'GTC', 'GTG', 'GTT', 'TTA', 'TTC', 'TTG', 'TTT']
# choose the right pattern
if pattern == 'cPattern':
patterns = cPattern
middleBase = 'C'
else:
patterns = tPattern
middleBase = 'T'
# generate the freqs dict with the right pattern
freqs = defaultdict(int)
for pattern in patterns:
freqs[pattern] = 0
# get the subject sequence from dotAlignment
subject = dotAlignment[0].split('\t')[3]
# exclude the subject from the dotAlignment, so just the queries
# are left over
queries = dotAlignment[1:]
for item in queries:
        # basePlotter formats query rows as 'title \t comparison', so
        # split on the embedded ' \t ' to keep the comparison aligned
        # with (and the same length as) the subject.
        query = item.split(' \t ', 1)[1]
        for index, qBase in enumerate(query):
            sBase = subject[index]
            if qBase == new and sBase == orig:
                minusSb = subject[index - 1] if index > 0 else 'start'
                try:
                    plusSb = subject[index + 1]
                except IndexError:
                    plusSb = 'end'
                motif = '%s%s%s' % (minusSb, middleBase, plusSb)
                if motif in freqs:
                    freqs[motif] += 1
return freqs
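
# Hedged mini-example (hypothetical alignment rows, following the formats
# produced by basePlotter above): a single C>T change in a TCA context.
#
#     dotAlignment = ['9629357\t \t \tATCA',
#                     'read_1 \t ..T.']
#     getAPOBECFrequencies(dotAlignment, 'C', 'T', 'cPattern')['TCA'] == 1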
def getCompleteFreqs(blastHits):
"""
Make a dictionary which collects all mutation frequencies from
all reads.
Calls basePlotter to get dotAlignment, which is passed to
getAPOBECFrequencies with the respective parameter, to collect
the frequencies.
@param blastHits: A L{dark.blast.BlastHits} instance.
"""
allFreqs = {}
for title in blastHits.titles:
allFreqs[title] = {
'C>A': {},
'C>G': {},
'C>T': {},
'T>A': {},
'T>C': {},
'T>G': {},
}
basesPlotted = basePlotter(blastHits, title)
for mutation in allFreqs[title]:
orig = mutation[0]
new = mutation[2]
if orig == 'C':
pattern = 'cPattern'
else:
pattern = 'tPattern'
freqs = getAPOBECFrequencies(basesPlotted, orig, new, pattern)
allFreqs[title][mutation] = freqs
numberOfReads = len(blastHits.titles[title]['plotInfo']['items'])
allFreqs[title]['numberOfReads'] = numberOfReads
allFreqs[title]['bitScoreMax'] = blastHits.titles[
title]['plotInfo']['bitScoreMax']
return allFreqs
def makeFrequencyGraph(allFreqs, title, substitution, pattern,
color='blue', createFigure=True, showFigure=True,
readsAx=False):
"""
For a title, make a graph showing the frequencies.
@param allFreqs: result from getCompleteFreqs
@param title: A C{str}, title of virus of which frequencies should be
plotted.
@param substitution: A C{str}, which substitution should be plotted;
must be one of 'C>A', 'C>G', 'C>T', 'T>A', 'T>C', 'T>G'.
@param pattern: A C{str}, which pattern we're looking for ( must be
one of 'cPattern', 'tPattern')
@param color: A C{str}, color of bars.
@param createFigure: If C{True}, create a figure.
@param showFigure: If C{True}, show the created figure.
@param readsAx: If not None, use this as the subplot for displaying reads.
"""
cPattern = ['ACA', 'ACC', 'ACG', 'ACT', 'CCA', 'CCC', 'CCG', 'CCT',
'GCA', 'GCC', 'GCG', 'GCT', 'TCA', 'TCC', 'TCG', 'TCT']
tPattern = ['ATA', 'ATC', 'ATG', 'ATT', 'CTA', 'CTC', 'CTG', 'CTT',
'GTA', 'GTC', 'GTG', 'GTT', 'TTA', 'TTC', 'TTG', 'TTT']
# choose the right pattern
if pattern == 'cPattern':
patterns = cPattern
else:
patterns = tPattern
    if readsAx:
        ax = readsAx
    else:
        fig = plt.figure(figsize=(10, 10))
        ax = fig.add_subplot(111)
# how many bars
N = 16
ind = np.arange(N)
width = 0.4
    # Normalise the counts by the number of reads, in pattern order, so
    # they can be plotted easily.
    divisor = float(allFreqs[title]['numberOfReads'])
    toPlot = allFreqs[title][substitution]
    data = [toPlot[p] / divisor for p in patterns]
# create the bars
ax.bar(ind, data, width, color=color)
maxY = np.max(data) + 5
    # axes and labels
    if createFigure:
        title = title.split('|')[4][:50]
        ax.set_title('%s \n %s' % (title, substitution), fontsize=20)
        ax.set_ylim(0, maxY)
        ax.set_ylabel('Mutations per read', fontsize=16)
        ax.set_xticks(ind + width)
        ax.set_xticklabels(patterns, rotation=45, fontsize=8)
        if showFigure:
            plt.show()
    else:
        # Drawn into a panel subplot: keep the ticks but hide the labels.
        ax.set_xticks(ind + width)
        ax.set_xticklabels(patterns, rotation=45, fontsize=0)
    return maxY
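
# For example (assuming allFreqs came from getCompleteFreqs and title is
# one of its keys):
#
#   makeFrequencyGraph(allFreqs, title, 'C>T', 'cPattern', color='red')
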
def makeFrequencyPanel(allFreqs, patientName):
"""
For a title, make a graph showing the frequencies.
@param allFreqs: result from getCompleteFreqs
@param patientName: A C{str}, title for the panel
"""
    titles = sorted(
        allFreqs,
        key=lambda title: (allFreqs[title]['bitScoreMax'], title))
origMaxY = 0
cols = 6
rows = len(allFreqs)
figure, ax = plt.subplots(rows, cols, squeeze=False)
substitutions = ['C>A', 'C>G', 'C>T', 'T>A', 'T>C', 'T>G']
colors = ['blue', 'black', 'red', 'yellow', 'green', 'orange']
    for i, title in enumerate(titles):
        for index, substitution in enumerate(substitutions):
            if substitution[0] == 'C':
                pattern = 'cPattern'
            else:
                pattern = 'tPattern'
            maxY = makeFrequencyGraph(allFreqs, title, substitution,
                                      pattern, color=colors[index],
                                      createFigure=False, showFigure=False,
                                      readsAx=ax[i][index])
            if maxY > origMaxY:
                origMaxY = maxY
            # Add the row label (virus type and bit score) once per row.
            # If used for other viruses, this will have to be adapted.
            if index == 0:
                gi = title.split('|')[1]
                titleWords = title.split(' ')
                try:
                    typeIndex = titleWords.index('type')
                except ValueError:
                    typeNumber = 'gi: %s' % gi
                else:
                    typeNumber = titleWords[typeIndex + 1]
                ax[i][index].set_ylabel(('Type %s \n maxBitScore: %s' % (
                    typeNumber, allFreqs[title]['bitScoreMax'])), fontsize=10)
            # Add column titles to the first row, and x-axis tick labels
            # to the middle and last rows.
            if i == 0:
                ax[i][index].set_title(substitution, fontsize=13)
            if i == len(allFreqs) - 1 or i == (len(allFreqs) - 1) // 2:
                if index < 3:
                    pat = ['ACA', 'ACC', 'ACG', 'ACT', 'CCA', 'CCC', 'CCG',
                           'CCT', 'GCA', 'GCC', 'GCG', 'GCT', 'TCA', 'TCC',
                           'TCG', 'TCT']
                else:
                    pat = ['ATA', 'ATC', 'ATG', 'ATT', 'CTA', 'CTC', 'CTG',
                           'CTT', 'GTA', 'GTC', 'GTG', 'GTT', 'TTA', 'TTC',
                           'TTG', 'TTT']
                ax[i][index].set_xticklabels(pat, rotation=45, fontsize=8)
# make Y-axis equal
    for row in ax:
        for a in row:
            a.set_ylim([0, origMaxY])
# add title of whole panel
figure.suptitle('Mutation Signatures in %s' % patientName, fontsize=20)
figure.set_size_inches(5 * cols, 3 * rows, forward=True)
figure.show()
return allFreqs
def mutateString(original, n, replacements='acgt'):
"""
Mutate C{original} in C{n} places with chars chosen from C{replacements}.
@param original: The original C{str} to mutate.
@param n: The C{int} number of locations to mutate.
@param replacements: The C{str} of replacement letters.
@return: A new C{str} with C{n} places of C{original} mutated.
    @raises ValueError: if C{n} is too high, or C{replacements} contains
duplicates, or if no replacement can be made at a certain locus
because C{replacements} is of length one, or if C{original} is of
zero length.
"""
if not original:
raise ValueError('Empty original string passed.')
if n > len(original):
raise ValueError('Cannot make %d mutations in a string of length %d' %
(n, len(original)))
if len(replacements) != len(set(replacements)):
raise ValueError('Replacement string contains duplicates')
if len(replacements) == 1 and original.find(replacements) != -1:
raise ValueError('Impossible replacement')
result = list(original)
length = len(original)
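    # Selection sampling (Knuth's Algorithm S): keep each position with
    # probability n_remaining / positions_remaining, which picks exactly
    # C{n} distinct loci uniformly at random in a single pass.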
for offset in range(length):
if uniform(0.0, 1.0) < float(n) / (length - offset):
# Mutate.
while True:
new = choice(replacements)
if new != result[offset]:
result[offset] = new
break
n -= 1
if n == 0:
break
return ''.join(result)
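
# Example (nondeterministic, since loci and replacement bases are chosen
# at random): mutateString('acgtacgtac', 2) might return 'acctacgtag'.
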
| mit | -974,948,910,823,843,200 | 35.64588 | 79 | 0.544427 | false |
nati/fun | cube.py | 1 | 4119 | import copy
import math
import re
import subprocess
import sys
import time
ret = subprocess.check_output(["resize"]).decode()
m = re.match(r"COLUMNS=(\d+);\nLINES=(\d+);", ret)
WIDTH = int(m.group(1))
HEIGHT = int(m.group(2))
SCALE = 7
X = 0
Y = 1
Z = 2
POINTS = [
[-1, -1, 1],
[-1, 1, 1],
[1, 1, 1],
[1, -1, 1],
[-1, -1, -1],
[-1, 1, -1],
[1, 1, -1],
[1, -1, -1]
]
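
# LINES lists the cube's 12 edges as pairs of indices into POINTS.
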
LINES = [
[0, 1],
[1, 2],
[2, 3],
[0, 3],
[4, 5],
[5, 6],
[6, 7],
[7, 4],
[0, 4],
[1, 5],
[2, 6],
[3, 7],
]
POINTS2 = [
[-1, -1, 0],
[-1, 1, 0],
[1, 1, 0],
[1, -1, 0],
[0, 0, 3],
]
LINES2 = [
[0, 1],
[1, 2],
[2, 3],
[3, 0],
[0, 4],
[1, 4],
[2, 4],
[3, 4]
]
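
# POINTS2/LINES2 describe a square-based pyramid: four base corners in
# the z == 0 plane, all joined to an apex at z == 3.
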
class Campas(object):
def draw_line(self, p1, p2):
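        # Bresenham-style rasterisation: walk along the major axis one
        # cell at a time and accumulate an error term to decide when to
        # step the minor axis.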
steep = abs(p2[Y] - p1[Y]) > abs(p2[X] - p1[X])
if steep:
p1[X], p1[Y] = p1[Y], p1[X]
p2[X], p2[Y] = p2[Y], p2[X]
if p1[X] > p2[X]:
p1[X], p2[X] = p2[X], p1[X]
p1[Y], p2[Y] = p2[Y], p1[Y]
dx = p2[X] - p1[X]
dy = abs(p2[Y] - p1[Y])
error = dx / 2.0
y = p1[Y]
if p1[Y] < p2[Y]:
ystep = 1
else:
ystep = -1
for x in range(p1[X], p2[X]):
if steep:
self.draw_point([y, x])
else:
self.draw_point([x, y])
error = error - dy
if error < 0:
y = y + ystep
error = error + dx
def draw_point(self, p, char="#"):
if p[X] >= WIDTH or 0 > p[X]:
return
if p[Y] >= HEIGHT or 0 > p[Y]:
return
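        # "\033[%i;%iH" is the ANSI escape that moves the terminal cursor
        # to (row, col) before printing the character.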
sys.stdout.write("\033[%i;%iH%s" % (p[Y], p[X], char))
def clear_screen(self):
sys.stdout.write("\033[2J")
def flush(self):
sys.stdout.flush()
class Poly(object):
def __init__(self, points, lines, campas):
self.points = copy.deepcopy(points)
self.lines = copy.deepcopy(lines)
self.campas = campas
self.base_point = [0, 0, 1]
def mult(self, transform):
self.points = [self.mult_m_p(transform, p) for p in self.points]
def move(self, axis, distance):
self.base_point[axis] = distance
def mult_m_p(self, m, p):
x, y, z = p
r1 = sum([m[0][0] * x, m[0][1] * y, m[0][2] * z])
r2 = sum([m[1][0] * x, m[1][1] * y, m[1][2] * z])
r3 = sum([m[2][0] * x, m[2][1] * y, m[2][2] * z])
return [r1, r2, r3]
def projection(self, p):
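        # Perspective (pinhole) projection: x and y shrink in proportion
        # to the shape's depth (the z of base_point), then get recentred
        # on the middle of the terminal.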
cx, cy = WIDTH / 2, HEIGHT / 2
x = (p[X] + self.base_point[X]) * SCALE / self.base_point[Z] + cx
y = (p[Y] + self.base_point[Y]) * SCALE / self.base_point[Z] + cy
return [int(x), int(y)]
def draw(self):
if self.base_point[Z] <= 0:
return
for point in self.points:
self.campas.draw_point(self.projection(point))
for line in self.lines:
self.campas.draw_line(self.projection(self.points[line[0]]),
self.projection(self.points[line[1]]))
def matrix_rotate_x(a):
return [[1, 0, 0],
[0, math.cos(a), -math.sin(a)],
[0, math.sin(a), math.cos(a)]]
def matrix_rotate_y(a):
return [[math.cos(a), 0, math.sin(a)],
[0, 1, 0],
[-math.sin(a), 0, math.cos(a)]]
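
# These are the standard 3-D rotation matrices about the x and y axes
# (angle in radians); e.g. matrix_rotate_y(math.pi / 2) maps the point
# [1, 0, 0] to (approximately) [0, 0, -1].
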
campas = Campas()
campas.clear_screen()
cube = Poly(POINTS, LINES, campas)
cube2 = Poly(POINTS2, LINES2, campas)
cube3 = Poly(POINTS, LINES, campas)
i = math.pi / 100.0
j = 0
mx = matrix_rotate_x(i * 1)
my = matrix_rotate_y(i * 5)
while True:
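    # One animation frame: clear the screen, apply a small incremental
    # rotation to every shape, bob the cubes along sine paths, redraw,
    # and sleep briefly to limit the frame rate.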
campas.clear_screen()
cube.mult(mx)
cube.mult(my)
cube3.mult(mx)
cube3.mult(my)
cube.move(Z, math.sin(j) + 1.5)
cube.move(X, 10 * math.cos(j))
cube3.move(Z, math.sin(j + math.pi / 2) + 1.5)
cube3.move(Y, 3 * math.cos(j + math.pi / 2))
j += math.pi / 50.0
cube2.mult(mx)
cube2.mult(my)
cube2.move(Z, 1.5)
cube.draw()
cube2.draw()
cube3.draw()
campas.flush()
time.sleep(0.1)
| apache-2.0 | -4,906,687,155,164,076,000 | 20.793651 | 73 | 0.453265 | false |