# Forms.py
from dolfin import *
class Forms(object):
    """docstring for Forms"""
    def __init__(self, mesh, W, F_M, F_NS, u_k, b_k, params, options={}):
        assert type(options) is dict, 'options must be a dictionary object'
        self.mesh = mesh
        self.W = W
        self.F_M = F_M
        self.F_NS = F_NS
        self.u_k = u_k
        self.b_k = b_k
        self.params = params
        self.options = options

    def printW(self, W):
        print(W)

# def MHD2D(mesh, W,F_M,F_NS, u_k,b_k,params,split):
# (u, p, b, r) = TrialFunctions(W)
# (v, q, c,s ) = TestFunctions(W)
# if (split == "Linear"):
# "'Maxwell Setup'"
# a11 = params[1]*params[0]*inner(curl(b),curl(c))*dx
# a12 = inner(c,grad(r))*dx
# a21 = inner(b,grad(s))*dx
# Lmaxwell = inner(c, F_M)*dx
# maxwell = a11+a12+a21
# "'NS Setup'"
# n = FacetNormal(mesh)
# a11 = params[2]*inner(grad(v), grad(u))*dx +inner((grad(u)*u_k),v)*dx+(1/2)*div(u_k)*inner(u,v)*dx- (1/2)*inner(u_k,n)*inner(u,v)*ds
# a12 = -div(v)*p*dx
# a21 = -div(u)*q*dx
# Lns = inner(v, F_NS)*dx
# ns = a11+a12+a21
# "'Coupling term Setup'"
# CoupleTerm = params[0]*inner(v[0]*b_k[1]-v[1]*b_k[0],curl(b))*dx - params[0]*inner(u[0]*b_k[1]-u[1]*b_k[0],curl(c))*dx
# return ns,maxwell,CoupleTerm,Lmaxwell,Lns
# elif (split == "NoneLinear"):
# "' Linear Setup'"
# m11 = params[1]*params[0]*inner(curl(b),curl(c))*dx
# m12 = inner(c,grad(r))*dx
# m21 = inner(b,grad(s))*dx
# Lmaxwell = inner(c, F_M)*dx
# maxwell = m11+m12+m21
# ns11 = params[2]*inner(grad(v), grad(u))*dx
# ns12 = -div(v)*p*dx
# ns21 = -div(u)*q*dx
# Lns = inner(v, F_NS)*dx
# ns = ns11+ns12+ns21
# linear = ns+maxwell
# RHS = Lns+Lmaxwell
# "' None-Linear Setup'"
# n = FacetNormal(mesh)
# Nlinear = params[0]*inner(v[0]*b_k[1]-v[1]*b_k[0],curl(b))*dx - params[0]*inner(u[0]*b_k[1]-u[1]*b_k[0],curl(c))*dx +inner((grad(u)*u_k),v)*dx+(1/2)*div(u_k)*inner(u,v)*dx- (1/2)*inner(u_k,n)*inner(u,v)*ds
# return linear, Nlinear, RHS
# def MHD3D(mesh, W,F_M,F_NS, u_k,b_k,params):
# (u, p, b, r) = TrialFunctions(W)
# (v, q, c,s ) = TestFunctions(W)
# "'Maxwell Setup'"
# a11 = params[1]*params[0]*inner(curl(b),curl(c))*dx
# a12 = inner(c,grad(r))*dx
# a21 = inner(b,grad(s))*dx
# Lmaxwell = inner(c, F_M)*dx
# maxwell = a11+a12+a21
# "'NS Setup'"
# n = FacetNormal(mesh)
# a11 = params[2]*inner(grad(v), grad(u))*dx +inner((grad(u)*u_k),v)*dx+(1/2)*div(u_k)*inner(u,v)*dx- (1/2)*inner(u_k,n)*inner(u,v)*ds
# a12 = -div(v)*p*dx
# a21 = -div(u)*q*dx
# Lns = inner(v, F_NS)*dx
# ns = a11+a12+a21
# "'Coupling term Setup'"
# CoupleTerm = params[0]*inner(cross(v,b_k),curl(b))*dx - params[0]*inner(cross(u,b_k),curl(c))*dx
# return ns,maxwell,CoupleTerm,Lmaxwell,Lns
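
# Hypothetical usage sketch (not from the original repository): the function
# spaces, forcing terms and parameter ordering below are illustrative and
# assume a legacy DOLFIN build where MixedFunctionSpace accepts a list of
# subspaces.
if __name__ == '__main__':
    mesh = UnitSquareMesh(16, 16)
    V = VectorFunctionSpace(mesh, "CG", 2)   # velocity u
    Q = FunctionSpace(mesh, "CG", 1)         # pressure p
    C = FunctionSpace(mesh, "N1curl", 1)     # magnetic field b
    S = FunctionSpace(mesh, "CG", 1)         # Lagrange multiplier r
    W = MixedFunctionSpace([V, Q, C, S])
    params = [1.0, 1.0, 1.0]                 # assumed order: coupling, magnetic, viscosity
    forms = Forms(mesh, W, Constant((0.0, 0.0)), Constant((0.0, 0.0)),
                  Function(V), Function(C), params)
    forms.printW(W)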

// get_roles.js
{
    "status": {
        "error": false,
        "code": 200,
        "type": "success",
        "message": "Success"
    },
    "pagination": {
        "before_cursor": null,
        "after_cursor": null,
        "previous_link": null,
        "next_link": null
    },
    "data": [
        {
            "id": 1111,
            "name": "marketing"
        }
    ]
}

// TwitterSearchEndpoint.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.twitter.search;

import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.component.twitter.AbstractTwitterEndpoint;
import org.apache.camel.component.twitter.TwitterConfiguration;
import org.apache.camel.component.twitter.TwitterHelper;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriPath;
import org.apache.camel.util.ObjectHelper;
/**
* The Twitter Search component consumes search results.
*/
@UriEndpoint(firstVersion = "2.10.0", scheme = "twitter-search", title = "Twitter Search", syntax = "twitter-search:keywords",
consumerClass = SearchConsumerHandler.class, label = "api,social")
public class TwitterSearchEndpoint extends AbstractTwitterEndpoint {
@UriPath(description = "The search keywords. Multiple values can be separated with comma.")
@Metadata(required = "true")
private String keywords;
public TwitterSearchEndpoint(String uri, String remaining, TwitterSearchComponent component, TwitterConfiguration properties) {
super(uri, component, properties);
this.keywords = remaining;
}
@Override
public Producer createProducer() throws Exception {
return new SearchProducer(this, keywords);
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
return TwitterHelper.createConsumer(processor, this, new SearchConsumerHandler(this, keywords));
}
}<|fim▁end|> | |
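
// Illustrative usage (not part of this file): the endpoint is typically
// consumed from a Camel route, e.g.
//   from("twitter-search:camel?delay=5000").to("log:tweets")
// with Twitter credentials configured on the TwitterSearchComponent.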

# util.py
"""
Copyright (C) 2009,2014
Andreas Engelbredt Dalsgaard <[email protected]>
Martin Toft <[email protected]>
Mads Chr. Olesen <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. """
def get_index_of_last_ident(node):
    last_index = node.leaf
    # parse out entire name (follow dots)
    curnode = node
    while len(curnode.children) == 2 and curnode.children[1].type == 'Identifier':
        curnode = curnode.children[1]
        last_index = curnode.leaf
    if last_index is None:
        return []
    else:
        return last_index.children

def get_last_name_from_complex_identifier(n):
    """Follow the children of a complex identifier node, i.e.
    "a.b.c.d" to just return "d"
    """
    full_str = get_full_name_from_complex_identifier(n)
    if '.' in full_str:
        return full_str.rsplit('.', 1)[1]  # FIXME this could be done without constructing the full string first
    else:
        return full_str
""" Takes an identifier and return the full name:
e.g., myidentifier.someotheridentifier.nestedidentifier.
"""
def get_full_name_from_complex_identifier(identifierNode):
id_str = identifierNode.children[0]
#parse out entire name (follow dots)
curnode = identifierNode
while len(curnode.children) == 2 and curnode.children[1].type == 'Identifier':
curnode = curnode.children[1]
id_str += '.' + curnode.children[0]
return id_str
""" Takes an identifier and return the list of names:
e.g., ['myidentifier', 'someotheridentifier', 'nestedidentifier']
"""
def get_name_list_from_complex_identifier(identifierNode):
n = identifierNode
names = [n.children[0]]
cur = n<|fim▁hole|> cur.children[1].type == 'Identifier':
cur = cur.children[1]
names.append(cur.children[0])
return names<|fim▁end|> | while len(cur.children) == 2 and \ |
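
# Illustrative only (not part of the original module): a minimal stand-in for
# the parser's Node type, showing what the helpers above return for "a.b.c".
class _Node(object):
    def __init__(self, type, children, leaf=None):
        self.type = type
        self.children = children
        self.leaf = leaf

if __name__ == '__main__':
    c = _Node('Identifier', ['c'])
    b = _Node('Identifier', ['b', c])
    a = _Node('Identifier', ['a', b])
    assert get_full_name_from_complex_identifier(a) == 'a.b.c'
    assert get_last_name_from_complex_identifier(a) == 'c'
    assert get_name_list_from_complex_identifier(a) == ['a', 'b', 'c']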

// config.js
var _ = require('lodash');

var localConfig = {};
try {
    localConfig = require('./config.local');
} catch (e) {
    console.log(e.code === 'MODULE_NOT_FOUND');
    if (e.code !== 'MODULE_NOT_FOUND' || e.message.indexOf('config.local') < 0) {
        // config.local module is broken, rethrow the exception
        throw e;
    }
}

var path = require('path');

var defaultConfig = {
    dataSources: [
        {
            type: 'local',
            base: 'tmp/',
            endpoints: ['nodes.json', 'graph.json'],
            interval: 3000
        }
        // {
        //     type: 'remote',
        //     base: 'http://example.com/ffmap/',
        //     endpoints: ['nodes.json', 'graph.json'],
        //     interval: 30000
        // },
    ],
    frontend: {
        path: path.join(__dirname, 'public')
    },
    database: {
        host: 'rethinkdb',
        port: '28015',
        db: 'ffmap'
    },
    port: 8000
};

module.exports = _.defaults({
    defaults: defaultConfig
}, localConfig, defaultConfig);
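
// Note (added commentary): _.defaults only fills in keys that are still
// undefined, so top-level values from config.local override these defaults,
// while the pristine default object stays reachable under the exported
// "defaults" key.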

# layout.py
# -*- coding: utf-8 -*-
"""
This script sets an initial layout for the ProEMOnline software. It uses the
PyQtGraph dockarea system and was designed from the dockarea.py example.

Contains:
Left column: Observing Log
Center column: Plots
Right column: Images and Process Log
Menu bar
"""
import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui
import pyqtgraph.console
import numpy as np
import math
import astropy.io.fits as fits
from pyqtgraph.dockarea import *
#This program operates in four stages.
#Stage 0 - Program Initialized, waiting to open SPE file.
#Stage 1 - SPE file open, stars are being selected
#Stage 2 - Online data reduction and aperture photometry/plotting is done.
#Stage 3 - End of data acquisition detected. Final data written to file. Timestamps verified. Log saved. Weather/time log data saved.
# -> revert back to Stage 0.
stage=0 #start at 0
#Keep track of the current frame:
#One version that we do science on
#One version for display purposes
def newframe(fitsfile):
    """For given filename, return science and display images.
    """
    img = fits.getdata(fitsfile)[0]
    displayimg = np.copy(img)
    # replace everything above 99%tile
    # don't do calculations on this adjusted array!!!
    imgvals = displayimg.flatten()
    img99percentile = np.percentile(imgvals, 99)
    displayimg[displayimg > img99percentile] = img99percentile
    # make color
    displayimg = np.array([displayimg, displayimg, displayimg]).transpose()
    return img, displayimg
#Start with some initial example file
fitsfile = 'ProEMExample.fits' #initial file
img,displayimg = newframe(fitsfile)
#Use a function to display a new image
#Autoscaling levels optional
def displayframe(displayimg, autoscale=False):
    """Display an RGB image
    Autoscale optional.
    Return nothing.
    """
    if autoscale:
        w5.setImage(displayimg, autoRange=True, levels=[np.min(displayimg), np.max(displayimg)-1])
    else:
        w5.setImage(displayimg, autoRange=False, autoLevels=False)
#Set up a list to keep track of star positions
starpos=[]
#Open File functionality
class WithMenu(QtGui.QMainWindow):

    def __init__(self):
        super(WithMenu, self).__init__()
        self.initUI()

    def initUI(self):
        # Note: Exit is protected on Mac. This may work on Windows.
        exitAction = QtGui.QAction('Exit', self)
        exitAction.setShortcut('Ctrl+Q')
        exitAction.setStatusTip('Exit application')
        exitAction.triggered.connect(self.showDialog)
        openFile = QtGui.QAction('Open', self)
        openFile.setShortcut('Ctrl+O')
        openFile.setStatusTip('Open new File')
        openFile.triggered.connect(self.showDialog)
        menubar = self.menuBar()
        fileMenu = menubar.addMenu('File')
        fileMenu.addAction(openFile)
        fileMenu.addAction(exitAction)

    def showDialog(self):
        fname = QtGui.QFileDialog.getOpenFileName(self, 'Open file', '/home')
        #print str(fname)
        img = fits.getdata(str(fname))[0]
        w5.setImage(img)

    def closeEvent(self, event):
        reply = QtGui.QMessageBox.question(self, 'Message',
            "Really quit?", QtGui.QMessageBox.Yes |
            QtGui.QMessageBox.No, QtGui.QMessageBox.No)
        if reply == QtGui.QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()
app = QtGui.QApplication([])
win = WithMenu()
area = DockArea()
win.setCentralWidget(area)
win.resize(1200,600)
win.setWindowTitle('ProEM Online Data Analysis Demo')
## Create docks, place them into the window one at a time.
## Note that size arguments are only a suggestion; docks will still have to
## fill the entire dock area and obey the limits of their internal widgets.
d1 = Dock("Dock1 - Observing Log", size=(500,300))
d2 = Dock("Dock2 - Process Log", size=(500,300))
d3 = Dock("Dock3 - Fourier Transform", size=(500,400))
d4 = Dock("Dock4 (tabbed) - Smoothed", size=(500,200))
d5 = Dock("Dock5 - Image", size=(500,200))
d6 = Dock("Dock6 (tabbed) - Light Curve", size=(500,200))
d7 = Dock("Dock7 (tabbed) - Comparison Counts", size=(500,200))
d8 = Dock("Dock8 (tabbed) - Seeing", size=(500,200))
area.addDock(d1, 'left') ## place d1 at left edge of dock area (it will fill the whole space since there are no other docks yet)
area.addDock(d2, 'right') ## place d2 at right edge of dock area
area.addDock(d3, 'left', d2)## place d3 at the left edge of d2
area.addDock(d4, 'top',d3) ## place d4 on top d3
area.addDock(d5, 'top',d2) ## place d5 on top d2
area.addDock(d6, 'above', d4) ## place d6 above d4
area.addDock(d7, 'top', d3)
area.addDock(d8, 'above', d7)
## Add widgets into each dock
## First dock holds the Observing Log
w1 = pg.LayoutWidget()
observer = QtGui.QLabel('Observer')
target = QtGui.QLabel('Target')
filt = QtGui.QLabel('Filter')
log = QtGui.QLabel('Log')
observerEdit = QtGui.QLineEdit()
targetEdit = QtGui.QLineEdit()
filtEdit = QtGui.QComboBox()
filtEdit.addItems(["BG40","u'","g'","r'","i'","z'","Other"])
logEdit = QtGui.QTextEdit()
w1.addWidget(observer, 1, 0)
w1.addWidget(observerEdit, 1, 1)
w1.addWidget(target, 2, 0)
w1.addWidget(targetEdit, 2, 1)
w1.addWidget(filt, 3, 0)
w1.addWidget(filtEdit, 3, 1)
w1.addWidget(log, 4, 0)
w1.addWidget(logEdit, 4, 1, 6, 1)
d1.addWidget(w1)
## Process Log
w2 = pg.LayoutWidget()
processLog = QtGui.QTextEdit()
processLog.setReadOnly(True)
#processLog.setTextBackgroundColor(QtGui.QColor("black"))
w2.addWidget(processLog, 0, 0, 6, 1)
d2.addWidget(w2)
## Fourier Transform - Just shows random updating noise for now
w3 = pg.PlotWidget(title="Fourier Transform")
curve = w3.plot(pen='y')
data = np.random.normal(size=(10,1000))
ptr = 0
def update():
    global curve, data, ptr, w3
    curve.setData(data[ptr % 10])
    if ptr == 0:
        w3.enableAutoRange('xy', False)  ## stop auto-scaling after the first data set is plotted
    ptr += 1
timer = QtCore.QTimer()
timer.timeout.connect(update)
timer.start(50)
d3.addWidget(w3)
## Smoothed Light Curve
w4 = pg.PlotWidget(title="Dock 4 plot")
w4.plot(np.random.normal(size=100))
d4.addWidget(w4)
## Image
w5 = pg.ImageView()
w5.ui.roiBtn.hide()
w5.ui.normBtn.hide()
displayframe(displayimg,autoscale=True)
def click(event):
    event.accept()
    pos = event.pos()
    #check if we're marking or unmarking a star
    #if pos.
    starpos.append([pos.x(), pos.y()])
    #img[pos.x(),pos.y()]=[255,255-img[pos.x(),pos.y(),1],255-img[pos.x(),pos.y(),1]]
    #w5.setImage(img,autoRange=False)
    processLog.append("Star selected at " + str((int(pos.x()), int(pos.y()))))
w5.getImageItem().mouseClickEvent = click
d5.addWidget(w5)
## Light Curve
w6 = pg.PlotWidget(title="Dock 6 plot")
w6.plot(np.random.normal(size=100))
d6.addWidget(w6)
## Smoothed Light Curve
w7 = pg.PlotWidget(title="Dock 7 plot")
w7.plot(np.random.normal(size=100))
d7.addWidget(w7)
## Smoothed Light Curve
w8 = pg.PlotWidget(title="Dock 8 plot")
w8.plot(np.random.normal(size=100))
d8.addWidget(w8)
win.show()
## Start Qt event loop unless running in interactive mode or using pyside.
if __name__ == '__main__':
    import sys
    if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
        QtGui.QApplication.instance().exec_()

#!/usr/bin/env python
# sigar.py
import datetime
import logging
import os
from urllib.parse import urljoin
from utils import utils, inspector
# https://www.sigar.mil/
archive = 2008
# options:
# standard since/year options for a year range to fetch from.
#
# Notes for IG's web team:
#
SPOTLIGHT_REPORTS_URL = "https://www.sigar.mil/Newsroom/spotlight/spotlight.xml"
SPEECHES_REPORTS_URL = "https://www.sigar.mil/Newsroom/speeches/speeches.xml"
TESTIMONY_REPORTS_URL = "https://www.sigar.mil/Newsroom/testimony/testimony.xml"
PRESS_RELEASES_URL = "https://www.sigar.mil/Newsroom/pressreleases/press-releases.xml"
REPORT_URLS = [
    ("other", SPOTLIGHT_REPORTS_URL),
    ("press", SPEECHES_REPORTS_URL),
    ("testimony", TESTIMONY_REPORTS_URL),
    ("press", PRESS_RELEASES_URL),
    ("audit", "https://www.sigar.mil/audits/auditreports/reports.xml"),
    ("inspection", "https://www.sigar.mil/audits/inspectionreports/inspection-reports.xml"),
    ("audit", "https://www.sigar.mil/audits/financialreports/Financial-Audits.xml"),
    ("other", "https://www.sigar.mil/SpecialProjects/projectreports/reports.xml"),
    ("other", "https://www.sigar.mil/Audits/alertandspecialreports/alert-special-reports.xml"),
    ("semiannual_report", "https://www.sigar.mil/quarterlyreports/index.xml"),
]
BASE_REPORT_URL = "https://www.sigar.mil/allreports/index.aspx"

def run(options):
    year_range = inspector.year_range(options, archive)

    # Pull the reports
    for report_type, report_url in REPORT_URLS:
        doc = utils.beautifulsoup_from_url(report_url)
        results = doc.select("item")
        if not results:
            raise inspector.NoReportsFoundError("SIGAR (%s)" % report_type)
        for result in results:
            report = report_from(result, report_url, report_type, year_range)
            if report:
                inspector.save_report(report)

def report_from(result, landing_url, report_type, year_range):
    report_url = report_url_for_landing_page(result.find("link").next.strip(), landing_url)
    if report_url in ("https://www.sigar.mil/pdf/audits/Financial_Audits/SIGAR _14-15\u2013FA.pdf",
                      "https://www.sigar.mil/pdf/audits/Financial_Audits/SIGAR_14-14\u2013FA.pdf"):
        report_url = report_url.replace("\u2013", "-")
    report_filename = report_url.split("/")[-1]
    report_id, extension = os.path.splitext(report_filename)
    if result.title:
        title = result.title.text.strip()
    else:
        title = report_id
    published_on_text = result.find("pubdate").text.strip()
    published_on = parse_date(published_on_text)
    if report_id == "SIGAR-14-42-AL" and title == "SIGAR 14-42-AL":
        # this report is posted in both "spotlight" and "special reports"
        return
    if report_id == "SIGAR_CSIS_Speech" and published_on.year == 2017:
        # There are two speeches with the same file name
        report_id += "_2017"
    if published_on.year not in year_range:
        logging.debug("[%s] Skipping, not in requested range." % report_url)
        return
    report = {
        'inspector': 'sigar',
        'inspector_url': "https://www.sigar.mil",
        'agency': 'sigar',
        'agency_name': "Special Inspector General for Afghanistan Reconstruction",
        'type': report_type,
        'report_id': report_id,
        'url': report_url,
        'title': title,
        'published_on': datetime.datetime.strftime(published_on, "%Y-%m-%d"),
    }
    if report_url.startswith(("https://www.justice.gov/",
                              "http://www.justice.gov/",
                              "https://www.fbi.gov/",
                              "http://www.fbi.gov/",
                              "https://www.usaid.gov/",
                              "http://www.usaid.gov/")):
        if not os.path.splitext(report_url)[1]:
            report['file_type'] = "html"
    return report

def report_url_for_landing_page(relative_url, landing_url):
    """
    We need to mimic the logic used in https://www.sigar.mil/js/AllReports.js
      case SPOTLIGHT:
        Title = "Spotlight";
        Link = Link.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx");
        Link = Link.replace("../../", "../");
        break;
      case SPEECHES:
        Title = "Speeches";
        Link = Link.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx");
        Link = Link.replace("../../", "../");
        break;
      case TESTIMONY:
        Title = "Testimony";
        Link = Link.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx");
        Link = Link.replace("../../", "../");
        break;
      case PRESSRELEASES:
        Link = Link.replace("../", "../newsroom/");
        Link = Link.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx");
        break;
    """
    relative_url = relative_url.replace("â\x80\x93", "–")
    if landing_url == SPOTLIGHT_REPORTS_URL:
        relative_url = relative_url.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx")
        relative_url = relative_url.replace("../../", "../")
    elif landing_url == SPEECHES_REPORTS_URL:
        relative_url = relative_url.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx")
        relative_url = relative_url.replace("../../", "../")
    elif landing_url == TESTIMONY_REPORTS_URL:
        relative_url = relative_url.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx")
        relative_url = relative_url.replace("../../", "../")
    elif landing_url == PRESS_RELEASES_URL:
        relative_url = relative_url.replace("../", "../newsroom/")
        relative_url = relative_url.replace("../ReadFile.aspx", "../newsroom/ReadFile.aspx")
    return urljoin(BASE_REPORT_URL, relative_url)

def parse_date(text):
    for format in [
        '%A, %B %d, %Y',
        '%A, %B %dst, %Y',
        '%A, %B %dnd, %Y',
        '%A, %B %drd, %Y',
        '%A, %B %dth, %Y'
    ]:
        try:
            return datetime.datetime.strptime(text, format)
        except ValueError:
            pass
    raise Exception("Couldn't parse date from {}".format(text))
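
# Illustrative only: strptime's "%d" consumes the day digits and the literal
# ordinal suffix is matched afterwards, e.g.
#   parse_date("Tuesday, July 21st, 2015") -> datetime.datetime(2015, 7, 21, 0, 0)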

utils.run(run) if (__name__ == "__main__") else None

// ffi.rs
#![allow(dead_code)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
pub use self::glx::types::*;
use libc;
/// GLX bindings
pub mod glx {
    include!(concat!(env!("OUT_DIR"), "/glx_bindings.rs"));
}

/// Functions that are not necessarily always available
pub mod glx_extra {
    include!(concat!(env!("OUT_DIR"), "/glx_extra_bindings.rs"));
}
pub type Atom = libc::c_ulong;
pub type Colormap = XID;
pub type Cursor = XID;
pub type Drawable = XID; // TODO: not sure
pub type KeyCode = libc::c_ulong;
pub type KeySym = XID;
pub type OSMesaContext = *const ();
pub type Status = libc::c_int; // TODO: not sure
pub type Time = libc::c_ulong;
pub type XrmDatabase = *const (); // TODO: not sure
pub type XIC = *mut ();
pub type XIM = *mut ();
pub type Screen = ();
pub const AllocNone: libc::c_int = 0;
pub const AllocAll: libc::c_int = 1;
pub const Button1: libc::c_uint = 1;
pub const Button2: libc::c_uint = 2;
pub const Button3: libc::c_uint = 3;
pub const Button4: libc::c_uint = 4;
pub const Button5: libc::c_uint = 5;
pub const InputOutput: libc::c_uint = 1;
pub const InputOnly: libc::c_uint = 2;
pub const CWBackPixmap: libc::c_ulong = (1<<0);
pub const CWBackPixel: libc::c_ulong = (1<<1);
pub const CWBorderPixmap: libc::c_ulong = (1<<2);
pub const CWBorderPixel: libc::c_ulong = (1<<3);
pub const CWBitGravity: libc::c_ulong = (1<<4);
pub const CWWinGravity: libc::c_ulong = (1<<5);
pub const CWBackingStore: libc::c_ulong = (1<<6);
pub const CWBackingPlanes: libc::c_ulong = (1<<7);
pub const CWBackingPixel: libc::c_ulong = (1<<8);
pub const CWOverrideRedirect: libc::c_ulong = (1<<9);
pub const CWSaveUnder: libc::c_ulong = (1<<10);
pub const CWEventMask: libc::c_ulong = (1<<11);
pub const CWDontPropagate: libc::c_ulong = (1<<12);
pub const CWColormap: libc::c_ulong = (1<<13);
pub const CWCursor: libc::c_ulong = (1<<14);
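
// The CW* values above and the event masks below are bit flags; callers OR
// them together, e.g. CWBackPixel | CWEventMask or KeyPressMask | ExposureMask.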
pub const NoEventMask: libc::c_long = 0;
pub const KeyPressMask: libc::c_long = (1<<0);
pub const KeyReleaseMask: libc::c_long = (1<<1);
pub const ButtonPressMask: libc::c_long = (1<<2);
pub const ButtonReleaseMask: libc::c_long = (1<<3);
pub const EnterWindowMask: libc::c_long = (1<<4);
pub const LeaveWindowMask: libc::c_long = (1<<5);
pub const PointerMotionMask: libc::c_long = (1<<6);
pub const PointerMotionHintMask: libc::c_long = (1<<7);
pub const Button1MotionMask: libc::c_long = (1<<8);
pub const Button2MotionMask: libc::c_long = (1<<9);
pub const Button3MotionMask: libc::c_long = (1<<10);
pub const Button4MotionMask: libc::c_long = (1<<11);
pub const Button5MotionMask: libc::c_long = (1<<12);
pub const ButtonMotionMask: libc::c_long = (1<<13);
pub const KeymapStateMask: libc::c_long = (1<<14);
pub const ExposureMask: libc::c_long = (1<<15);
pub const VisibilityChangeMask: libc::c_long = (1<<16);
pub const StructureNotifyMask: libc::c_long = (1<<17);
pub const ResizeRedirectMask: libc::c_long = (1<<18);
pub const SubstructureNotifyMask: libc::c_long = (1<<19);
pub const SubstructureRedirectMask: libc::c_long = (1<<20);
pub const FocusChangeMask: libc::c_long = (1<<21);
pub const PropertyChangeMask: libc::c_long = (1<<22);
pub const ColormapChangeMask: libc::c_long = (1<<23);
pub const OwnerGrabButtonMask: libc::c_long = (1<<24);
pub const KeyPress: libc::c_int = 2;
pub const KeyRelease: libc::c_int = 3;
pub const ButtonPress: libc::c_int = 4;
pub const ButtonRelease: libc::c_int = 5;
pub const MotionNotify: libc::c_int = 6;
pub const EnterNotify: libc::c_int = 7;
pub const LeaveNotify: libc::c_int = 8;
pub const FocusIn: libc::c_int = 9;
pub const FocusOut: libc::c_int = 10;
pub const KeymapNotify: libc::c_int = 11;
pub const Expose: libc::c_int = 12;
pub const GraphicsExpose: libc::c_int = 13;
pub const NoExpose: libc::c_int = 14;
pub const VisibilityNotify: libc::c_int = 15;
pub const CreateNotify: libc::c_int = 16;
pub const DestroyNotify: libc::c_int = 17;
pub const UnmapNotify: libc::c_int = 18;
pub const MapNotify: libc::c_int = 19;
pub const MapRequest: libc::c_int = 20;
pub const ReparentNotify: libc::c_int = 21;
pub const ConfigureNotify: libc::c_int = 22;
pub const ConfigureRequest: libc::c_int = 23;
pub const GravityNotify: libc::c_int = 24;
pub const ResizeRequest: libc::c_int = 25;
pub const CirculateNotify: libc::c_int = 26;
pub const CirculateRequest: libc::c_int = 27;
pub const PropertyNotify: libc::c_int = 28;
pub const SelectionClear: libc::c_int = 29;
pub const SelectionRequest: libc::c_int = 30;
pub const SelectionNotify: libc::c_int = 31;
pub const ColormapNotify: libc::c_int = 32;
pub const ClientMessage: libc::c_int = 33;
pub const MappingNotify: libc::c_int = 34;
pub const GLX_USE_GL: libc::c_int = 1;
pub const GLX_BUFFER_SIZE: libc::c_int = 2;
pub const GLX_LEVEL: libc::c_int = 3;
pub const GLX_RGBA: libc::c_int = 4;
pub const GLX_DOUBLEBUFFER: libc::c_int = 5;
pub const GLX_STEREO: libc::c_int = 6;
pub const GLX_AUX_BUFFERS: libc::c_int = 7;
pub const GLX_RED_SIZE: libc::c_int = 8;
pub const GLX_GREEN_SIZE: libc::c_int = 9;
pub const GLX_BLUE_SIZE: libc::c_int = 10;
pub const GLX_ALPHA_SIZE: libc::c_int = 11;
pub const GLX_DEPTH_SIZE: libc::c_int = 12;
pub const GLX_STENCIL_SIZE: libc::c_int = 13;
pub const GLX_ACCUM_RED_SIZE: libc::c_int = 14;
pub const GLX_ACCUM_GREEN_SIZE: libc::c_int = 15;
pub const GLX_ACCUM_BLUE_SIZE: libc::c_int = 16;
pub const GLX_ACCUM_ALPHA_SIZE: libc::c_int = 17;
pub const GLX_BAD_SCREEN: libc::c_int = 1;
pub const GLX_BAD_ATTRIBUTE: libc::c_int = 2;
pub const GLX_NO_EXTENSION: libc::c_int = 3;
pub const GLX_BAD_VISUAL: libc::c_int = 4;
pub const GLX_BAD_CONTEXT: libc::c_int = 5;
pub const GLX_BAD_VALUE: libc::c_int = 6;
pub const GLX_BAD_ENUM: libc::c_int = 7;
pub const GLX_VENDOR: libc::c_int = 1;
pub const GLX_VERSION: libc::c_int = 2;
pub const GLX_EXTENSIONS: libc::c_int = 3;
pub const GLX_WINDOW_BIT: libc::c_int = 0x00000001;
pub const GLX_PIXMAP_BIT: libc::c_int = 0x00000002;
pub const GLX_PBUFFER_BIT: libc::c_int = 0x00000004;
pub const GLX_RGBA_BIT: libc::c_int = 0x00000001;
pub const GLX_COLOR_INDEX_BIT: libc::c_int = 0x00000002;
pub const GLX_PBUFFER_CLOBBER_MASK: libc::c_int = 0x08000000;
pub const GLX_FRONT_LEFT_BUFFER_BIT: libc::c_int = 0x00000001;
pub const GLX_FRONT_RIGHT_BUFFER_BIT: libc::c_int = 0x00000002;
pub const GLX_BACK_LEFT_BUFFER_BIT: libc::c_int = 0x00000004;
pub const GLX_BACK_RIGHT_BUFFER_BIT: libc::c_int = 0x00000008;
pub const GLX_AUX_BUFFERS_BIT: libc::c_int = 0x00000010;
pub const GLX_DEPTH_BUFFER_BIT: libc::c_int = 0x00000020;
pub const GLX_STENCIL_BUFFER_BIT: libc::c_int = 0x00000040;
pub const GLX_ACCUM_BUFFER_BIT: libc::c_int = 0x00000080;
pub const GLX_CONFIG_CAVEAT: libc::c_int = 0x20;
pub const GLX_X_VISUAL_TYPE: libc::c_int = 0x22;
pub const GLX_TRANSPARENT_TYPE: libc::c_int = 0x23;
pub const GLX_TRANSPARENT_INDEX_VALUE: libc::c_int = 0x24;
pub const GLX_TRANSPARENT_RED_VALUE: libc::c_int = 0x25;
pub const GLX_TRANSPARENT_GREEN_VALUE: libc::c_int = 0x26;
pub const GLX_TRANSPARENT_BLUE_VALUE: libc::c_int = 0x27;
pub const GLX_TRANSPARENT_ALPHA_VALUE: libc::c_int = 0x28;
#[allow(overflowing_literals)]
pub const GLX_DONT_CARE: libc::c_int = 0xFFFFFFFF;
pub const GLX_NONE: libc::c_int = 0x8000;
pub const GLX_SLOW_CONFIG: libc::c_int = 0x8001;
pub const GLX_TRUE_COLOR: libc::c_int = 0x8002;
pub const GLX_DIRECT_COLOR: libc::c_int = 0x8003;
pub const GLX_PSEUDO_COLOR: libc::c_int = 0x8004;
pub const GLX_STATIC_COLOR: libc::c_int = 0x8005;
pub const GLX_GRAY_SCALE: libc::c_int = 0x8006;
pub const GLX_STATIC_GRAY: libc::c_int = 0x8007;
pub const GLX_TRANSPARENT_RGB: libc::c_int = 0x8008;
pub const GLX_TRANSPARENT_INDEX: libc::c_int = 0x8009;
pub const GLX_VISUAL_ID: libc::c_int = 0x800B;
pub const GLX_SCREEN: libc::c_int = 0x800C;
pub const GLX_NON_CONFORMANT_CONFIG: libc::c_int = 0x800D;
pub const GLX_DRAWABLE_TYPE: libc::c_int = 0x8010;
pub const GLX_RENDER_TYPE: libc::c_int = 0x8011;
pub const GLX_X_RENDERABLE: libc::c_int = 0x8012;
pub const GLX_FBCONFIG_ID: libc::c_int = 0x8013;
pub const GLX_RGBA_TYPE: libc::c_int = 0x8014;
pub const GLX_COLOR_INDEX_TYPE: libc::c_int = 0x8015;
pub const GLX_MAX_PBUFFER_WIDTH: libc::c_int = 0x8016;
pub const GLX_MAX_PBUFFER_HEIGHT: libc::c_int = 0x8017;
pub const GLX_MAX_PBUFFER_PIXELS: libc::c_int = 0x8018;
pub const GLX_PRESERVED_CONTENTS: libc::c_int = 0x801B;
pub const GLX_LARGEST_PBUFFER: libc::c_int = 0x801C;
pub const GLX_WIDTH: libc::c_int = 0x801D;
pub const GLX_HEIGHT: libc::c_int = 0x801E;
pub const GLX_EVENT_MASK: libc::c_int = 0x801F;
pub const GLX_DAMAGED: libc::c_int = 0x8020;
pub const GLX_SAVED: libc::c_int = 0x8021;
pub const GLX_WINDOW: libc::c_int = 0x8022;
pub const GLX_PBUFFER: libc::c_int = 0x8023;
pub const GLX_PBUFFER_HEIGHT: libc::c_int = 0x8040;
pub const GLX_PBUFFER_WIDTH: libc::c_int = 0x8041;
pub const GLX_CONTEXT_MAJOR_VERSION: libc::c_int = 0x2091;
pub const GLX_CONTEXT_MINOR_VERSION: libc::c_int = 0x2092;
pub const GLX_CONTEXT_FLAGS: libc::c_int = 0x2094;
pub const GLX_CONTEXT_PROFILE_MASK: libc::c_int = 0x9126;
pub const GLX_CONTEXT_DEBUG_BIT: libc::c_int = 0x0001;
pub const GLX_CONTEXT_FORWARD_COMPATIBLE_BIT: libc::c_int = 0x0002;
pub const GLX_CONTEXT_CORE_PROFILE_BIT: libc::c_int = 0x00000001;
pub const GLX_CONTEXT_COMPATIBILITY_PROFILE_BIT: libc::c_int = 0x00000002;
pub const XIMPreeditArea: libc::c_long = 0x0001;
pub const XIMPreeditCallbacks: libc::c_long = 0x0002;
pub const XIMPreeditPosition: libc::c_long = 0x0004;
pub const XIMPreeditNothing: libc::c_long = 0x0008;
pub const XIMPreeditNone: libc::c_long = 0x0010;
pub const XIMStatusArea: libc::c_long = 0x0100;
pub const XIMStatusCallbacks: libc::c_long = 0x0200;
pub const XIMStatusNothing: libc::c_long = 0x0400;
pub const XIMStatusNone: libc::c_long = 0x0800;
pub const XK_BackSpace: libc::c_uint = 0xFF08;
pub const XK_Tab: libc::c_uint = 0xFF09;
pub const XK_Linefeed: libc::c_uint = 0xFF0A;
pub const XK_Clear: libc::c_uint = 0xFF0B;
pub const XK_Return: libc::c_uint = 0xFF0D;
pub const XK_Pause: libc::c_uint = 0xFF13;
pub const XK_Scroll_Lock: libc::c_uint = 0xFF14;
pub const XK_Sys_Req: libc::c_uint = 0xFF15;
pub const XK_Escape: libc::c_uint = 0xFF1B;
pub const XK_Delete: libc::c_uint = 0xFFFF;
pub const XK_Multi_key: libc::c_uint = 0xFF20;
pub const XK_Kanji: libc::c_uint = 0xFF21;
pub const XK_Muhenkan: libc::c_uint = 0xFF22;
pub const XK_Henkan_Mode: libc::c_uint = 0xFF23;
pub const XK_Henkan: libc::c_uint = 0xFF23;
pub const XK_Romaji: libc::c_uint = 0xFF24;
pub const XK_Hiragana: libc::c_uint = 0xFF25;
pub const XK_Katakana: libc::c_uint = 0xFF26;
pub const XK_Hiragana_Katakana: libc::c_uint = 0xFF27;
pub const XK_Zenkaku: libc::c_uint = 0xFF28;
pub const XK_Hankaku: libc::c_uint = 0xFF29;
pub const XK_Zenkaku_Hankaku: libc::c_uint = 0xFF2A;
pub const XK_Touroku: libc::c_uint = 0xFF2B;
pub const XK_Massyo: libc::c_uint = 0xFF2C;
pub const XK_Kana_Lock: libc::c_uint = 0xFF2D;
pub const XK_Kana_Shift: libc::c_uint = 0xFF2E;
pub const XK_Eisu_Shift: libc::c_uint = 0xFF2F;
pub const XK_Eisu_toggle: libc::c_uint = 0xFF30;
pub const XK_Home: libc::c_uint = 0xFF50;
pub const XK_Left: libc::c_uint = 0xFF51;
pub const XK_Up: libc::c_uint = 0xFF52;
pub const XK_Right: libc::c_uint = 0xFF53;
pub const XK_Down: libc::c_uint = 0xFF54;
pub const XK_Prior: libc::c_uint = 0xFF55;
pub const XK_Page_Up: libc::c_uint = 0xFF55;
pub const XK_Next: libc::c_uint = 0xFF56;
pub const XK_Page_Down: libc::c_uint = 0xFF56;
pub const XK_End: libc::c_uint = 0xFF57;
pub const XK_Begin: libc::c_uint = 0xFF58;
pub const XK_Win_L: libc::c_uint = 0xFF5B;
pub const XK_Win_R: libc::c_uint = 0xFF5C;
pub const XK_App: libc::c_uint = 0xFF5D;
pub const XK_Select: libc::c_uint = 0xFF60;
pub const XK_Print: libc::c_uint = 0xFF61;
pub const XK_Execute: libc::c_uint = 0xFF62;
pub const XK_Insert: libc::c_uint = 0xFF63;
pub const XK_Undo: libc::c_uint = 0xFF65;
pub const XK_Redo: libc::c_uint = 0xFF66;
pub const XK_Menu: libc::c_uint = 0xFF67;
pub const XK_Find: libc::c_uint = 0xFF68;
pub const XK_Cancel: libc::c_uint = 0xFF69;
pub const XK_Help: libc::c_uint = 0xFF6A;
pub const XK_Break: libc::c_uint = 0xFF6B;
pub const XK_Mode_switch: libc::c_uint = 0xFF7E;
pub const XK_script_switch: libc::c_uint = 0xFF7E;
pub const XK_Num_Lock: libc::c_uint = 0xFF7F;
pub const XK_KP_Space: libc::c_uint = 0xFF80;
pub const XK_KP_Tab: libc::c_uint = 0xFF89;
pub const XK_KP_Enter: libc::c_uint = 0xFF8D;
pub const XK_KP_F1: libc::c_uint = 0xFF91;
pub const XK_KP_F2: libc::c_uint = 0xFF92;
pub const XK_KP_F3: libc::c_uint = 0xFF93;
pub const XK_KP_F4: libc::c_uint = 0xFF94;
pub const XK_KP_Home: libc::c_uint = 0xFF95;
pub const XK_KP_Left: libc::c_uint = 0xFF96;
pub const XK_KP_Up: libc::c_uint = 0xFF97;
pub const XK_KP_Right: libc::c_uint = 0xFF98;
pub const XK_KP_Down: libc::c_uint = 0xFF99;
pub const XK_KP_Prior: libc::c_uint = 0xFF9A;
pub const XK_KP_Page_Up: libc::c_uint = 0xFF9A;
pub const XK_KP_Next: libc::c_uint = 0xFF9B;
pub const XK_KP_Page_Down: libc::c_uint = 0xFF9B;
pub const XK_KP_End: libc::c_uint = 0xFF9C;
pub const XK_KP_Begin: libc::c_uint = 0xFF9D;
pub const XK_KP_Insert: libc::c_uint = 0xFF9E;
pub const XK_KP_Delete: libc::c_uint = 0xFF9F;
pub const XK_KP_Equal: libc::c_uint = 0xFFBD;
pub const XK_KP_Multiply: libc::c_uint = 0xFFAA;
pub const XK_KP_Add: libc::c_uint = 0xFFAB;
pub const XK_KP_Separator: libc::c_uint = 0xFFAC;
pub const XK_KP_Subtract: libc::c_uint = 0xFFAD;
pub const XK_KP_Decimal: libc::c_uint = 0xFFAE;
pub const XK_KP_Divide: libc::c_uint = 0xFFAF;
pub const XK_KP_0: libc::c_uint = 0xFFB0;
pub const XK_KP_1: libc::c_uint = 0xFFB1;
pub const XK_KP_2: libc::c_uint = 0xFFB2;
pub const XK_KP_3: libc::c_uint = 0xFFB3;
pub const XK_KP_4: libc::c_uint = 0xFFB4;
pub const XK_KP_5: libc::c_uint = 0xFFB5;
pub const XK_KP_6: libc::c_uint = 0xFFB6;
pub const XK_KP_7: libc::c_uint = 0xFFB7;
pub const XK_KP_8: libc::c_uint = 0xFFB8;
pub const XK_KP_9: libc::c_uint = 0xFFB9;
pub const XK_F1: libc::c_uint = 0xFFBE;
pub const XK_F2: libc::c_uint = 0xFFBF;
pub const XK_F3: libc::c_uint = 0xFFC0;
pub const XK_F4: libc::c_uint = 0xFFC1;
pub const XK_F5: libc::c_uint = 0xFFC2;
pub const XK_F6: libc::c_uint = 0xFFC3;
pub const XK_F7: libc::c_uint = 0xFFC4;
pub const XK_F8: libc::c_uint = 0xFFC5;
pub const XK_F9: libc::c_uint = 0xFFC6;
pub const XK_F10: libc::c_uint = 0xFFC7;
pub const XK_F11: libc::c_uint = 0xFFC8;
pub const XK_L1: libc::c_uint = 0xFFC8;
pub const XK_F12: libc::c_uint = 0xFFC9;
pub const XK_L2: libc::c_uint = 0xFFC9;
pub const XK_F13: libc::c_uint = 0xFFCA;
pub const XK_L3: libc::c_uint = 0xFFCA;
pub const XK_F14: libc::c_uint = 0xFFCB;
pub const XK_L4: libc::c_uint = 0xFFCB;
pub const XK_F15: libc::c_uint = 0xFFCC;
pub const XK_L5: libc::c_uint = 0xFFCC;
pub const XK_F16: libc::c_uint = 0xFFCD;
pub const XK_L6: libc::c_uint = 0xFFCD;
pub const XK_F17: libc::c_uint = 0xFFCE;
pub const XK_L7: libc::c_uint = 0xFFCE;
pub const XK_F18: libc::c_uint = 0xFFCF;
pub const XK_L8: libc::c_uint = 0xFFCF;
pub const XK_F19: libc::c_uint = 0xFFD0;
pub const XK_L9: libc::c_uint = 0xFFD0;
pub const XK_F20: libc::c_uint = 0xFFD1;
pub const XK_L10: libc::c_uint = 0xFFD1;
pub const XK_F21: libc::c_uint = 0xFFD2;
pub const XK_R1: libc::c_uint = 0xFFD2;
pub const XK_F22: libc::c_uint = 0xFFD3;
pub const XK_R2: libc::c_uint = 0xFFD3;
pub const XK_F23: libc::c_uint = 0xFFD4;
pub const XK_R3: libc::c_uint = 0xFFD4;
pub const XK_F24: libc::c_uint = 0xFFD5;
pub const XK_R4: libc::c_uint = 0xFFD5;
pub const XK_F25: libc::c_uint = 0xFFD6;
pub const XK_R5: libc::c_uint = 0xFFD6;
pub const XK_F26: libc::c_uint = 0xFFD7;
pub const XK_R6: libc::c_uint = 0xFFD7;
pub const XK_F27: libc::c_uint = 0xFFD8;
pub const XK_R7: libc::c_uint = 0xFFD8;
pub const XK_F28: libc::c_uint = 0xFFD9;
pub const XK_R8: libc::c_uint = 0xFFD9;
pub const XK_F29: libc::c_uint = 0xFFDA;
pub const XK_R9: libc::c_uint = 0xFFDA;
pub const XK_F30: libc::c_uint = 0xFFDB;
pub const XK_R10: libc::c_uint = 0xFFDB;
pub const XK_F31: libc::c_uint = 0xFFDC;
pub const XK_R11: libc::c_uint = 0xFFDC;
pub const XK_F32: libc::c_uint = 0xFFDD;
pub const XK_R12: libc::c_uint = 0xFFDD;
pub const XK_F33: libc::c_uint = 0xFFDE;
pub const XK_R13: libc::c_uint = 0xFFDE;
pub const XK_F34: libc::c_uint = 0xFFDF;
pub const XK_R14: libc::c_uint = 0xFFDF;
pub const XK_F35: libc::c_uint = 0xFFE0;
pub const XK_R15: libc::c_uint = 0xFFE0;
pub const XK_Shift_L: libc::c_uint = 0xFFE1;
pub const XK_Shift_R: libc::c_uint = 0xFFE2;
pub const XK_Control_L: libc::c_uint = 0xFFE3;
pub const XK_Control_R: libc::c_uint = 0xFFE4;
pub const XK_Caps_Lock: libc::c_uint = 0xFFE5;
pub const XK_Shift_Lock: libc::c_uint = 0xFFE6;
pub const XK_Meta_L: libc::c_uint = 0xFFE7;
pub const XK_Meta_R: libc::c_uint = 0xFFE8;
pub const XK_Alt_L: libc::c_uint = 0xFFE9;
pub const XK_Alt_R: libc::c_uint = 0xFFEA;
pub const XK_Super_L: libc::c_uint = 0xFFEB;
pub const XK_Super_R: libc::c_uint = 0xFFEC;
pub const XK_Hyper_L: libc::c_uint = 0xFFED;
pub const XK_Hyper_R: libc::c_uint = 0xFFEE;
pub const XK_space: libc::c_uint = 0x020;
pub const XK_exclam: libc::c_uint = 0x021;
pub const XK_quotedbl: libc::c_uint = 0x022;
pub const XK_numbersign: libc::c_uint = 0x023;
pub const XK_dollar: libc::c_uint = 0x024;
pub const XK_percent: libc::c_uint = 0x025;
pub const XK_ampersand: libc::c_uint = 0x026;
pub const XK_apostrophe: libc::c_uint = 0x027;
pub const XK_quoteright: libc::c_uint = 0x027;
pub const XK_parenleft: libc::c_uint = 0x028;
pub const XK_parenright: libc::c_uint = 0x029;
pub const XK_asterisk: libc::c_uint = 0x02a;
pub const XK_plus: libc::c_uint = 0x02b;
pub const XK_comma: libc::c_uint = 0x02c;
pub const XK_minus: libc::c_uint = 0x02d;
pub const XK_period: libc::c_uint = 0x02e;
pub const XK_slash: libc::c_uint = 0x02f;
pub const XK_0: libc::c_uint = 0x030;
pub const XK_1: libc::c_uint = 0x031;
pub const XK_2: libc::c_uint = 0x032;
pub const XK_3: libc::c_uint = 0x033;
pub const XK_4: libc::c_uint = 0x034;
pub const XK_5: libc::c_uint = 0x035;
pub const XK_6: libc::c_uint = 0x036;
pub const XK_7: libc::c_uint = 0x037;
pub const XK_8: libc::c_uint = 0x038;
pub const XK_9: libc::c_uint = 0x039;
pub const XK_colon: libc::c_uint = 0x03a;
pub const XK_semicolon: libc::c_uint = 0x03b;
pub const XK_less: libc::c_uint = 0x03c;
pub const XK_equal: libc::c_uint = 0x03d;
pub const XK_greater: libc::c_uint = 0x03e;
pub const XK_question: libc::c_uint = 0x03f;
pub const XK_at: libc::c_uint = 0x040;
pub const XK_A: libc::c_uint = 0x041;
pub const XK_B: libc::c_uint = 0x042;
pub const XK_C: libc::c_uint = 0x043;
pub const XK_D: libc::c_uint = 0x044;
pub const XK_E: libc::c_uint = 0x045;
pub const XK_F: libc::c_uint = 0x046;
pub const XK_G: libc::c_uint = 0x047;
pub const XK_H: libc::c_uint = 0x048;
pub const XK_I: libc::c_uint = 0x049;
pub const XK_J: libc::c_uint = 0x04a;
pub const XK_K: libc::c_uint = 0x04b;
pub const XK_L: libc::c_uint = 0x04c;
pub const XK_M: libc::c_uint = 0x04d;
pub const XK_N: libc::c_uint = 0x04e;
pub const XK_O: libc::c_uint = 0x04f;
pub const XK_P: libc::c_uint = 0x050;
pub const XK_Q: libc::c_uint = 0x051;
pub const XK_R: libc::c_uint = 0x052;
pub const XK_S: libc::c_uint = 0x053;
pub const XK_T: libc::c_uint = 0x054;
pub const XK_U: libc::c_uint = 0x055;
pub const XK_V: libc::c_uint = 0x056;
pub const XK_W: libc::c_uint = 0x057;
pub const XK_X: libc::c_uint = 0x058;
pub const XK_Y: libc::c_uint = 0x059;
pub const XK_Z: libc::c_uint = 0x05a;
pub const XK_bracketleft: libc::c_uint = 0x05b;
pub const XK_backslash: libc::c_uint = 0x05c;
pub const XK_bracketright: libc::c_uint = 0x05d;
pub const XK_asciicircum: libc::c_uint = 0x05e;
pub const XK_underscore: libc::c_uint = 0x05f;
pub const XK_grave: libc::c_uint = 0x060;
pub const XK_quoteleft: libc::c_uint = 0x060;
pub const XK_a: libc::c_uint = 0x061;
pub const XK_b: libc::c_uint = 0x062;
pub const XK_c: libc::c_uint = 0x063;
pub const XK_d: libc::c_uint = 0x064;
pub const XK_e: libc::c_uint = 0x065;
pub const XK_f: libc::c_uint = 0x066;
pub const XK_g: libc::c_uint = 0x067;
pub const XK_h: libc::c_uint = 0x068;
pub const XK_i: libc::c_uint = 0x069;
pub const XK_j: libc::c_uint = 0x06a;
pub const XK_k: libc::c_uint = 0x06b;
pub const XK_l: libc::c_uint = 0x06c;
pub const XK_m: libc::c_uint = 0x06d;
pub const XK_n: libc::c_uint = 0x06e;
pub const XK_o: libc::c_uint = 0x06f;
pub const XK_p: libc::c_uint = 0x070;
pub const XK_q: libc::c_uint = 0x071;
pub const XK_r: libc::c_uint = 0x072;
pub const XK_s: libc::c_uint = 0x073;
pub const XK_t: libc::c_uint = 0x074;
pub const XK_u: libc::c_uint = 0x075;
pub const XK_v: libc::c_uint = 0x076;
pub const XK_w: libc::c_uint = 0x077;
pub const XK_x: libc::c_uint = 0x078;
pub const XK_y: libc::c_uint = 0x079;
pub const XK_z: libc::c_uint = 0x07a;
pub const XK_braceleft: libc::c_uint = 0x07b;
pub const XK_bar: libc::c_uint = 0x07c;
pub const XK_braceright: libc::c_uint = 0x07d;
pub const XK_asciitilde: libc::c_uint = 0x07e;
pub const XK_nobreakspace: libc::c_uint = 0x0a0;
pub const XK_exclamdown: libc::c_uint = 0x0a1;
pub const XK_cent: libc::c_uint = 0x0a2;
pub const XK_sterling: libc::c_uint = 0x0a3;
pub const XK_currency: libc::c_uint = 0x0a4;
pub const XK_yen: libc::c_uint = 0x0a5;
pub const XK_brokenbar: libc::c_uint = 0x0a6;
pub const XK_section: libc::c_uint = 0x0a7;
pub const XK_diaeresis: libc::c_uint = 0x0a8;
pub const XK_copyright: libc::c_uint = 0x0a9;
pub const XK_ordfeminine: libc::c_uint = 0x0aa;
pub const XK_guillemotleft: libc::c_uint = 0x0ab;
pub const XK_notsign: libc::c_uint = 0x0ac;
pub const XK_hyphen: libc::c_uint = 0x0ad;
pub const XK_registered: libc::c_uint = 0x0ae;
pub const XK_macron: libc::c_uint = 0x0af;
pub const XK_degree: libc::c_uint = 0x0b0;
pub const XK_plusminus: libc::c_uint = 0x0b1;
pub const XK_twosuperior: libc::c_uint = 0x0b2;
pub const XK_threesuperior: libc::c_uint = 0x0b3;
pub const XK_acute: libc::c_uint = 0x0b4;
pub const XK_mu: libc::c_uint = 0x0b5;
pub const XK_paragraph: libc::c_uint = 0x0b6;
pub const XK_periodcentered: libc::c_uint = 0x0b7;
pub const XK_cedilla: libc::c_uint = 0x0b8;
pub const XK_onesuperior: libc::c_uint = 0x0b9;
pub const XK_masculine: libc::c_uint = 0x0ba;
pub const XK_guillemotright: libc::c_uint = 0x0bb;
pub const XK_onequarter: libc::c_uint = 0x0bc;
pub const XK_onehalf: libc::c_uint = 0x0bd;
pub const XK_threequarters: libc::c_uint = 0x0be;
pub const XK_questiondown: libc::c_uint = 0x0bf;
pub const XK_Agrave: libc::c_uint = 0x0c0;
pub const XK_Aacute: libc::c_uint = 0x0c1;
pub const XK_Acircumflex: libc::c_uint = 0x0c2;
pub const XK_Atilde: libc::c_uint = 0x0c3;
pub const XK_Adiaeresis: libc::c_uint = 0x0c4;
pub const XK_Aring: libc::c_uint = 0x0c5;
pub const XK_AE: libc::c_uint = 0x0c6;
pub const XK_Ccedilla: libc::c_uint = 0x0c7;
pub const XK_Egrave: libc::c_uint = 0x0c8;
pub const XK_Eacute: libc::c_uint = 0x0c9;
pub const XK_Ecircumflex: libc::c_uint = 0x0ca;
pub const XK_Ediaeresis: libc::c_uint = 0x0cb;
pub const XK_Igrave: libc::c_uint = 0x0cc;
pub const XK_Iacute: libc::c_uint = 0x0cd;
pub const XK_Icircumflex: libc::c_uint = 0x0ce;
pub const XK_Idiaeresis: libc::c_uint = 0x0cf;
pub const XK_ETH: libc::c_uint = 0x0d0;
pub const XK_Eth: libc::c_uint = 0x0d0;
pub const XK_Ntilde: libc::c_uint = 0x0d1;
pub const XK_Ograve: libc::c_uint = 0x0d2;
pub const XK_Oacute: libc::c_uint = 0x0d3;
pub const XK_Ocircumflex: libc::c_uint = 0x0d4;
pub const XK_Otilde: libc::c_uint = 0x0d5;
pub const XK_Odiaeresis: libc::c_uint = 0x0d6;
pub const XK_multiply: libc::c_uint = 0x0d7;
pub const XK_Ooblique: libc::c_uint = 0x0d8;
pub const XK_Ugrave: libc::c_uint = 0x0d9;
pub const XK_Uacute: libc::c_uint = 0x0da;
pub const XK_Ucircumflex: libc::c_uint = 0x0db;
pub const XK_Udiaeresis: libc::c_uint = 0x0dc;
pub const XK_Yacute: libc::c_uint = 0x0dd;
pub const XK_THORN: libc::c_uint = 0x0de;
pub const XK_Thorn: libc::c_uint = 0x0de;
pub const XK_ssharp: libc::c_uint = 0x0df;
pub const XK_agrave: libc::c_uint = 0x0e0;
pub const XK_aacute: libc::c_uint = 0x0e1;
pub const XK_acircumflex: libc::c_uint = 0x0e2;
pub const XK_atilde: libc::c_uint = 0x0e3;
pub const XK_adiaeresis: libc::c_uint = 0x0e4;
pub const XK_aring: libc::c_uint = 0x0e5;
pub const XK_ae: libc::c_uint = 0x0e6;
pub const XK_ccedilla: libc::c_uint = 0x0e7;
pub const XK_egrave: libc::c_uint = 0x0e8;
pub const XK_eacute: libc::c_uint = 0x0e9;
pub const XK_ecircumflex: libc::c_uint = 0x0ea;
pub const XK_ediaeresis: libc::c_uint = 0x0eb;
pub const XK_igrave: libc::c_uint = 0x0ec;
pub const XK_iacute: libc::c_uint = 0x0ed;
pub const XK_icircumflex: libc::c_uint = 0x0ee;
pub const XK_idiaeresis: libc::c_uint = 0x0ef;
pub const XK_eth: libc::c_uint = 0x0f0;
pub const XK_ntilde: libc::c_uint = 0x0f1;
pub const XK_ograve: libc::c_uint = 0x0f2;
pub const XK_oacute: libc::c_uint = 0x0f3;
pub const XK_ocircumflex: libc::c_uint = 0x0f4;
pub const XK_otilde: libc::c_uint = 0x0f5;
pub const XK_odiaeresis: libc::c_uint = 0x0f6;
pub const XK_division: libc::c_uint = 0x0f7;
pub const XK_oslash: libc::c_uint = 0x0f8;
pub const XK_ugrave: libc::c_uint = 0x0f9;
pub const XK_uacute: libc::c_uint = 0x0fa;
pub const XK_ucircumflex: libc::c_uint = 0x0fb;
pub const XK_udiaeresis: libc::c_uint = 0x0fc;
pub const XK_yacute: libc::c_uint = 0x0fd;
pub const XK_thorn: libc::c_uint = 0x0fe;
pub const XK_ydiaeresis: libc::c_uint = 0x0ff;
pub const XK_Aogonek: libc::c_uint = 0x1a1;
pub const XK_breve: libc::c_uint = 0x1a2;
pub const XK_Lstroke: libc::c_uint = 0x1a3;
pub const XK_Lcaron: libc::c_uint = 0x1a5;
pub const XK_Sacute: libc::c_uint = 0x1a6;
pub const XK_Scaron: libc::c_uint = 0x1a9;
pub const XK_Scedilla: libc::c_uint = 0x1aa;
pub const XK_Tcaron: libc::c_uint = 0x1ab;
pub const XK_Zacute: libc::c_uint = 0x1ac;
pub const XK_Zcaron: libc::c_uint = 0x1ae;
pub const XK_Zabovedot: libc::c_uint = 0x1af;
pub const XK_aogonek: libc::c_uint = 0x1b1;
pub const XK_ogonek: libc::c_uint = 0x1b2;
pub const XK_lstroke: libc::c_uint = 0x1b3;
pub const XK_lcaron: libc::c_uint = 0x1b5;
pub const XK_sacute: libc::c_uint = 0x1b6;
pub const XK_caron: libc::c_uint = 0x1b7;
pub const XK_scaron: libc::c_uint = 0x1b9;
pub const XK_scedilla: libc::c_uint = 0x1ba;
pub const XK_tcaron: libc::c_uint = 0x1bb;
pub const XK_zacute: libc::c_uint = 0x1bc;
pub const XK_doubleacute: libc::c_uint = 0x1bd;
pub const XK_zcaron: libc::c_uint = 0x1be;
pub const XK_zabovedot: libc::c_uint = 0x1bf;
pub const XK_Racute: libc::c_uint = 0x1c0;
pub const XK_Abreve: libc::c_uint = 0x1c3;
pub const XK_Lacute: libc::c_uint = 0x1c5;
pub const XK_Cacute: libc::c_uint = 0x1c6;
pub const XK_Ccaron: libc::c_uint = 0x1c8;
pub const XK_Eogonek: libc::c_uint = 0x1ca;
pub const XK_Ecaron: libc::c_uint = 0x1cc;
pub const XK_Dcaron: libc::c_uint = 0x1cf;
pub const XK_Dstroke: libc::c_uint = 0x1d0;
pub const XK_Nacute: libc::c_uint = 0x1d1;
pub const XK_Ncaron: libc::c_uint = 0x1d2;
pub const XK_Odoubleacute: libc::c_uint = 0x1d5;
pub const XK_Rcaron: libc::c_uint = 0x1d8;
pub const XK_Uring: libc::c_uint = 0x1d9;
pub const XK_Udoubleacute: libc::c_uint = 0x1db;
pub const XK_Tcedilla: libc::c_uint = 0x1de;
pub const XK_racute: libc::c_uint = 0x1e0;
pub const XK_abreve: libc::c_uint = 0x1e3;
pub const XK_lacute: libc::c_uint = 0x1e5;
pub const XK_cacute: libc::c_uint = 0x1e6;
pub const XK_ccaron: libc::c_uint = 0x1e8;
pub const XK_eogonek: libc::c_uint = 0x1ea;
pub const XK_ecaron: libc::c_uint = 0x1ec;
pub const XK_dcaron: libc::c_uint = 0x1ef;
pub const XK_dstroke: libc::c_uint = 0x1f0;
pub const XK_nacute: libc::c_uint = 0x1f1;
pub const XK_ncaron: libc::c_uint = 0x1f2;
pub const XK_odoubleacute: libc::c_uint = 0x1f5;
pub const XK_udoubleacute: libc::c_uint = 0x1fb;
pub const XK_rcaron: libc::c_uint = 0x1f8;
pub const XK_uring: libc::c_uint = 0x1f9;
pub const XK_tcedilla: libc::c_uint = 0x1fe;
pub const XK_abovedot: libc::c_uint = 0x1ff;
pub const XK_Hstroke: libc::c_uint = 0x2a1;
pub const XK_Hcircumflex: libc::c_uint = 0x2a6;
pub const XK_Iabovedot: libc::c_uint = 0x2a9;
pub const XK_Gbreve: libc::c_uint = 0x2ab;
pub const XK_Jcircumflex: libc::c_uint = 0x2ac;
pub const XK_hstroke: libc::c_uint = 0x2b1;
pub const XK_hcircumflex: libc::c_uint = 0x2b6;
pub const XK_idotless: libc::c_uint = 0x2b9;
pub const XK_gbreve: libc::c_uint = 0x2bb;
pub const XK_jcircumflex: libc::c_uint = 0x2bc;
pub const XK_Cabovedot: libc::c_uint = 0x2c5;
pub const XK_Ccircumflex: libc::c_uint = 0x2c6;
pub const XK_Gabovedot: libc::c_uint = 0x2d5;
pub const XK_Gcircumflex: libc::c_uint = 0x2d8;
pub const XK_Ubreve: libc::c_uint = 0x2dd;
pub const XK_Scircumflex: libc::c_uint = 0x2de;
pub const XK_cabovedot: libc::c_uint = 0x2e5;
pub const XK_ccircumflex: libc::c_uint = 0x2e6;
pub const XK_gabovedot: libc::c_uint = 0x2f5;
pub const XK_gcircumflex: libc::c_uint = 0x2f8;
pub const XK_ubreve: libc::c_uint = 0x2fd;
pub const XK_scircumflex: libc::c_uint = 0x2fe;
pub const XK_kra: libc::c_uint = 0x3a2;
pub const XK_kappa: libc::c_uint = 0x3a2;
pub const XK_Rcedilla: libc::c_uint = 0x3a3;
pub const XK_Itilde: libc::c_uint = 0x3a5;
pub const XK_Lcedilla: libc::c_uint = 0x3a6;
pub const XK_Emacron: libc::c_uint = 0x3aa;
pub const XK_Gcedilla: libc::c_uint = 0x3ab;
pub const XK_Tslash: libc::c_uint = 0x3ac;
pub const XK_rcedilla: libc::c_uint = 0x3b3;
pub const XK_itilde: libc::c_uint = 0x3b5;
pub const XK_lcedilla: libc::c_uint = 0x3b6;
pub const XK_emacron: libc::c_uint = 0x3ba;
pub const XK_gcedilla: libc::c_uint = 0x3bb;
pub const XK_tslash: libc::c_uint = 0x3bc;
pub const XK_ENG: libc::c_uint = 0x3bd;
pub const XK_eng: libc::c_uint = 0x3bf;
pub const XK_Amacron: libc::c_uint = 0x3c0;
pub const XK_Iogonek: libc::c_uint = 0x3c7;
pub const XK_Eabovedot: libc::c_uint = 0x3cc;
pub const XK_Imacron: libc::c_uint = 0x3cf;
pub const XK_Ncedilla: libc::c_uint = 0x3d1;
pub const XK_Omacron: libc::c_uint = 0x3d2;
pub const XK_Kcedilla: libc::c_uint = 0x3d3;
pub const XK_Uogonek: libc::c_uint = 0x3d9;
pub const XK_Utilde: libc::c_uint = 0x3dd;
pub const XK_Umacron: libc::c_uint = 0x3de;
pub const XK_amacron: libc::c_uint = 0x3e0;
pub const XK_iogonek: libc::c_uint = 0x3e7;
pub const XK_eabovedot: libc::c_uint = 0x3ec;
pub const XK_imacron: libc::c_uint = 0x3ef;
pub const XK_ncedilla: libc::c_uint = 0x3f1;
pub const XK_omacron: libc::c_uint = 0x3f2;
pub const XK_kcedilla: libc::c_uint = 0x3f3;
pub const XK_uogonek: libc::c_uint = 0x3f9;
pub const XK_utilde: libc::c_uint = 0x3fd;
pub const XK_umacron: libc::c_uint = 0x3fe;
pub const XK_overline: libc::c_uint = 0x47e;
pub const XK_kana_fullstop: libc::c_uint = 0x4a1;
pub const XK_kana_openingbracket: libc::c_uint = 0x4a2;
pub const XK_kana_closingbracket: libc::c_uint = 0x4a3;
pub const XK_kana_comma: libc::c_uint = 0x4a4;
pub const XK_kana_conjunctive: libc::c_uint = 0x4a5;
pub const XK_kana_middledot: libc::c_uint = 0x4a5;
pub const XK_kana_WO: libc::c_uint = 0x4a6;
pub const XK_kana_a: libc::c_uint = 0x4a7;
pub const XK_kana_i: libc::c_uint = 0x4a8;
pub const XK_kana_u: libc::c_uint = 0x4a9;
pub const XK_kana_e: libc::c_uint = 0x4aa;
pub const XK_kana_o: libc::c_uint = 0x4ab;
pub const XK_kana_ya: libc::c_uint = 0x4ac;
pub const XK_kana_yu: libc::c_uint = 0x4ad;
pub const XK_kana_yo: libc::c_uint = 0x4ae;
pub const XK_kana_tsu: libc::c_uint = 0x4af;
pub const XK_kana_tu: libc::c_uint = 0x4af;
pub const XK_prolongedsound: libc::c_uint = 0x4b0;
pub const XK_kana_A: libc::c_uint = 0x4b1;
pub const XK_kana_I: libc::c_uint = 0x4b2;
pub const XK_kana_U: libc::c_uint = 0x4b3;
pub const XK_kana_E: libc::c_uint = 0x4b4;
pub const XK_kana_O: libc::c_uint = 0x4b5;
pub const XK_kana_KA: libc::c_uint = 0x4b6;
pub const XK_kana_KI: libc::c_uint = 0x4b7;
pub const XK_kana_KU: libc::c_uint = 0x4b8;
pub const XK_kana_KE: libc::c_uint = 0x4b9;
pub const XK_kana_KO: libc::c_uint = 0x4ba;
pub const XK_kana_SA: libc::c_uint = 0x4bb;
pub const XK_kana_SHI: libc::c_uint = 0x4bc;
pub const XK_kana_SU: libc::c_uint = 0x4bd;
pub const XK_kana_SE: libc::c_uint = 0x4be;
pub const XK_kana_SO: libc::c_uint = 0x4bf;
pub const XK_kana_TA: libc::c_uint = 0x4c0;
pub const XK_kana_CHI: libc::c_uint = 0x4c1;
pub const XK_kana_TI: libc::c_uint = 0x4c1;
pub const XK_kana_TSU: libc::c_uint = 0x4c2;
pub const XK_kana_TU: libc::c_uint = 0x4c2;
pub const XK_kana_TE: libc::c_uint = 0x4c3;
pub const XK_kana_TO: libc::c_uint = 0x4c4;
pub const XK_kana_NA: libc::c_uint = 0x4c5;
pub const XK_kana_NI: libc::c_uint = 0x4c6;
pub const XK_kana_NU: libc::c_uint = 0x4c7;
pub const XK_kana_NE: libc::c_uint = 0x4c8;
pub const XK_kana_NO: libc::c_uint = 0x4c9;
pub const XK_kana_HA: libc::c_uint = 0x4ca;
pub const XK_kana_HI: libc::c_uint = 0x4cb;
pub const XK_kana_FU: libc::c_uint = 0x4cc;
pub const XK_kana_HU: libc::c_uint = 0x4cc;
pub const XK_kana_HE: libc::c_uint = 0x4cd;
pub const XK_kana_HO: libc::c_uint = 0x4ce;
pub const XK_kana_MA: libc::c_uint = 0x4cf;
pub const XK_kana_MI: libc::c_uint = 0x4d0;
pub const XK_kana_MU: libc::c_uint = 0x4d1;
pub const XK_kana_ME: libc::c_uint = 0x4d2;
pub const XK_kana_MO: libc::c_uint = 0x4d3;
pub const XK_kana_YA: libc::c_uint = 0x4d4;
pub const XK_kana_YU: libc::c_uint = 0x4d5;
pub const XK_kana_YO: libc::c_uint = 0x4d6;
pub const XK_kana_RA: libc::c_uint = 0x4d7;
pub const XK_kana_RI: libc::c_uint = 0x4d8;
pub const XK_kana_RU: libc::c_uint = 0x4d9;
pub const XK_kana_RE: libc::c_uint = 0x4da;
pub const XK_kana_RO: libc::c_uint = 0x4db;
pub const XK_kana_WA: libc::c_uint = 0x4dc;
pub const XK_kana_N: libc::c_uint = 0x4dd;
pub const XK_voicedsound: libc::c_uint = 0x4de;
pub const XK_semivoicedsound: libc::c_uint = 0x4df;
pub const XK_kana_switch: libc::c_uint = 0xFF7E;
pub const XK_Arabic_comma: libc::c_uint = 0x5ac;
pub const XK_Arabic_semicolon: libc::c_uint = 0x5bb;
pub const XK_Arabic_question_mark: libc::c_uint = 0x5bf;
pub const XK_Arabic_hamza: libc::c_uint = 0x5c1;
pub const XK_Arabic_maddaonalef: libc::c_uint = 0x5c2;
pub const XK_Arabic_hamzaonalef: libc::c_uint = 0x5c3;
pub const XK_Arabic_hamzaonwaw: libc::c_uint = 0x5c4;
pub const XK_Arabic_hamzaunderalef: libc::c_uint = 0x5c5;
pub const XK_Arabic_hamzaonyeh: libc::c_uint = 0x5c6;
pub const XK_Arabic_alef: libc::c_uint = 0x5c7;
pub const XK_Arabic_beh: libc::c_uint = 0x5c8;
pub const XK_Arabic_tehmarbuta: libc::c_uint = 0x5c9;
pub const XK_Arabic_teh: libc::c_uint = 0x5ca;
pub const XK_Arabic_theh: libc::c_uint = 0x5cb;
pub const XK_Arabic_jeem: libc::c_uint = 0x5cc;
pub const XK_Arabic_hah: libc::c_uint = 0x5cd;
pub const XK_Arabic_khah: libc::c_uint = 0x5ce;
pub const XK_Arabic_dal: libc::c_uint = 0x5cf;
pub const XK_Arabic_thal: libc::c_uint = 0x5d0;
pub const XK_Arabic_ra: libc::c_uint = 0x5d1;
pub const XK_Arabic_zain: libc::c_uint = 0x5d2;
pub const XK_Arabic_seen: libc::c_uint = 0x5d3;
pub const XK_Arabic_sheen: libc::c_uint = 0x5d4;
pub const XK_Arabic_sad: libc::c_uint = 0x5d5;
pub const XK_Arabic_dad: libc::c_uint = 0x5d6;
pub const XK_Arabic_tah: libc::c_uint = 0x5d7;
pub const XK_Arabic_zah: libc::c_uint = 0x5d8;
pub const XK_Arabic_ain: libc::c_uint = 0x5d9;
pub const XK_Arabic_ghain: libc::c_uint = 0x5da;
pub const XK_Arabic_tatweel: libc::c_uint = 0x5e0;
pub const XK_Arabic_feh: libc::c_uint = 0x5e1;
pub const XK_Arabic_qaf: libc::c_uint = 0x5e2;
pub const XK_Arabic_kaf: libc::c_uint = 0x5e3;
pub const XK_Arabic_lam: libc::c_uint = 0x5e4;
pub const XK_Arabic_meem: libc::c_uint = 0x5e5;
pub const XK_Arabic_noon: libc::c_uint = 0x5e6;
pub const XK_Arabic_ha: libc::c_uint = 0x5e7;
pub const XK_Arabic_heh: libc::c_uint = 0x5e7;
pub const XK_Arabic_waw: libc::c_uint = 0x5e8;
pub const XK_Arabic_alefmaksura: libc::c_uint = 0x5e9;
pub const XK_Arabic_yeh: libc::c_uint = 0x5ea;
pub const XK_Arabic_fathatan: libc::c_uint = 0x5eb;
pub const XK_Arabic_dammatan: libc::c_uint = 0x5ec;
pub const XK_Arabic_kasratan: libc::c_uint = 0x5ed;
pub const XK_Arabic_fatha: libc::c_uint = 0x5ee;
pub const XK_Arabic_damma: libc::c_uint = 0x5ef;
pub const XK_Arabic_kasra: libc::c_uint = 0x5f0;
pub const XK_Arabic_shadda: libc::c_uint = 0x5f1;
pub const XK_Arabic_sukun: libc::c_uint = 0x5f2;
pub const XK_Arabic_switch: libc::c_uint = 0xFF7E;
pub const XK_Serbian_dje: libc::c_uint = 0x6a1;
pub const XK_Macedonia_gje: libc::c_uint = 0x6a2;
pub const XK_Cyrillic_io: libc::c_uint = 0x6a3;
pub const XK_Ukrainian_ie: libc::c_uint = 0x6a4;
pub const XK_Ukranian_je: libc::c_uint = 0x6a4;
pub const XK_Macedonia_dse: libc::c_uint = 0x6a5;
pub const XK_Ukrainian_i: libc::c_uint = 0x6a6;
pub const XK_Ukranian_i: libc::c_uint = 0x6a6;
pub const XK_Ukrainian_yi: libc::c_uint = 0x6a7;
pub const XK_Ukranian_yi: libc::c_uint = 0x6a7;
pub const XK_Cyrillic_je: libc::c_uint = 0x6a8;
pub const XK_Serbian_je: libc::c_uint = 0x6a8;
pub const XK_Cyrillic_lje: libc::c_uint = 0x6a9;
pub const XK_Serbian_lje: libc::c_uint = 0x6a9;
pub const XK_Cyrillic_nje: libc::c_uint = 0x6aa;
pub const XK_Serbian_nje: libc::c_uint = 0x6aa;
pub const XK_Serbian_tshe: libc::c_uint = 0x6ab;
pub const XK_Macedonia_kje: libc::c_uint = 0x6ac;
pub const XK_Byelorussian_shortu: libc::c_uint = 0x6ae;
pub const XK_Cyrillic_dzhe: libc::c_uint = 0x6af;
pub const XK_Serbian_dze: libc::c_uint = 0x6af;
pub const XK_numerosign: libc::c_uint = 0x6b0;
pub const XK_Serbian_DJE: libc::c_uint = 0x6b1;
pub const XK_Macedonia_GJE: libc::c_uint = 0x6b2;
pub const XK_Cyrillic_IO: libc::c_uint = 0x6b3;
pub const XK_Ukrainian_IE: libc::c_uint = 0x6b4;
pub const XK_Ukranian_JE: libc::c_uint = 0x6b4;
pub const XK_Macedonia_DSE: libc::c_uint = 0x6b5;
pub const XK_Ukrainian_I: libc::c_uint = 0x6b6;
pub const XK_Ukranian_I: libc::c_uint = 0x6b6;
pub const XK_Ukrainian_YI: libc::c_uint = 0x6b7;
pub const XK_Ukranian_YI: libc::c_uint = 0x6b7;
pub const XK_Cyrillic_JE: libc::c_uint = 0x6b8;
pub const XK_Serbian_JE: libc::c_uint = 0x6b8;
pub const XK_Cyrillic_LJE: libc::c_uint = 0x6b9;
pub const XK_Serbian_LJE: libc::c_uint = 0x6b9;
pub const XK_Cyrillic_NJE: libc::c_uint = 0x6ba;
pub const XK_Serbian_NJE: libc::c_uint = 0x6ba;
pub const XK_Serbian_TSHE: libc::c_uint = 0x6bb;
pub const XK_Macedonia_KJE: libc::c_uint = 0x6bc;
pub const XK_Byelorussian_SHORTU: libc::c_uint = 0x6be;
pub const XK_Cyrillic_DZHE: libc::c_uint = 0x6bf;
pub const XK_Serbian_DZE: libc::c_uint = 0x6bf;
pub const XK_Cyrillic_yu: libc::c_uint = 0x6c0;
pub const XK_Cyrillic_a: libc::c_uint = 0x6c1;
pub const XK_Cyrillic_be: libc::c_uint = 0x6c2;
pub const XK_Cyrillic_tse: libc::c_uint = 0x6c3;
pub const XK_Cyrillic_de: libc::c_uint = 0x6c4;
pub const XK_Cyrillic_ie: libc::c_uint = 0x6c5;
pub const XK_Cyrillic_ef: libc::c_uint = 0x6c6;
pub const XK_Cyrillic_ghe: libc::c_uint = 0x6c7;
pub const XK_Cyrillic_ha: libc::c_uint = 0x6c8;
pub const XK_Cyrillic_i: libc::c_uint = 0x6c9;
pub const XK_Cyrillic_shorti: libc::c_uint = 0x6ca;
pub const XK_Cyrillic_ka: libc::c_uint = 0x6cb;
pub const XK_Cyrillic_el: libc::c_uint = 0x6cc;
pub const XK_Cyrillic_em: libc::c_uint = 0x6cd;
pub const XK_Cyrillic_en: libc::c_uint = 0x6ce;
pub const XK_Cyrillic_o: libc::c_uint = 0x6cf;
pub const XK_Cyrillic_pe: libc::c_uint = 0x6d0;
pub const XK_Cyrillic_ya: libc::c_uint = 0x6d1;
pub const XK_Cyrillic_er: libc::c_uint = 0x6d2;
pub const XK_Cyrillic_es: libc::c_uint = 0x6d3;
pub const XK_Cyrillic_te: libc::c_uint = 0x6d4;
pub const XK_Cyrillic_u: libc::c_uint = 0x6d5;
pub const XK_Cyrillic_zhe: libc::c_uint = 0x6d6;
pub const XK_Cyrillic_ve: libc::c_uint = 0x6d7;
pub const XK_Cyrillic_softsign: libc::c_uint = 0x6d8;
pub const XK_Cyrillic_yeru: libc::c_uint = 0x6d9;
pub const XK_Cyrillic_ze: libc::c_uint = 0x6da;
pub const XK_Cyrillic_sha: libc::c_uint = 0x6db;
pub const XK_Cyrillic_e: libc::c_uint = 0x6dc;
pub const XK_Cyrillic_shcha: libc::c_uint = 0x6dd;
pub const XK_Cyrillic_che: libc::c_uint = 0x6de;
pub const XK_Cyrillic_hardsign: libc::c_uint = 0x6df;
pub const XK_Cyrillic_YU: libc::c_uint = 0x6e0;
pub const XK_Cyrillic_A: libc::c_uint = 0x6e1;
pub const XK_Cyrillic_BE: libc::c_uint = 0x6e2;
pub const XK_Cyrillic_TSE: libc::c_uint = 0x6e3;
pub const XK_Cyrillic_DE: libc::c_uint = 0x6e4;
pub const XK_Cyrillic_IE: libc::c_uint = 0x6e5;
pub const XK_Cyrillic_EF: libc::c_uint = 0x6e6;
pub const XK_Cyrillic_GHE: libc::c_uint = 0x6e7;
pub const XK_Cyrillic_HA: libc::c_uint = 0x6e8;
pub const XK_Cyrillic_I: libc::c_uint = 0x6e9;
pub const XK_Cyrillic_SHORTI: libc::c_uint = 0x6ea;
pub const XK_Cyrillic_KA: libc::c_uint = 0x6eb;
pub const XK_Cyrillic_EL: libc::c_uint = 0x6ec;
pub const XK_Cyrillic_EM: libc::c_uint = 0x6ed;
pub const XK_Cyrillic_EN: libc::c_uint = 0x6ee;
pub const XK_Cyrillic_O: libc::c_uint = 0x6ef;
pub const XK_Cyrillic_PE: libc::c_uint = 0x6f0;
pub const XK_Cyrillic_YA: libc::c_uint = 0x6f1;
pub const XK_Cyrillic_ER: libc::c_uint = 0x6f2;
pub const XK_Cyrillic_ES: libc::c_uint = 0x6f3;
pub const XK_Cyrillic_TE: libc::c_uint = 0x6f4;
pub const XK_Cyrillic_U: libc::c_uint = 0x6f5;
pub const XK_Cyrillic_ZHE: libc::c_uint = 0x6f6;
pub const XK_Cyrillic_VE: libc::c_uint = 0x6f7;
pub const XK_Cyrillic_SOFTSIGN: libc::c_uint = 0x6f8;
pub const XK_Cyrillic_YERU: libc::c_uint = 0x6f9;
pub const XK_Cyrillic_ZE: libc::c_uint = 0x6fa;
pub const XK_Cyrillic_SHA: libc::c_uint = 0x6fb;
pub const XK_Cyrillic_E: libc::c_uint = 0x6fc;
pub const XK_Cyrillic_SHCHA: libc::c_uint = 0x6fd;
pub const XK_Cyrillic_CHE: libc::c_uint = 0x6fe;
pub const XK_Cyrillic_HARDSIGN: libc::c_uint = 0x6ff;
pub const XK_Greek_ALPHAaccent: libc::c_uint = 0x7a1;
pub const XK_Greek_EPSILONaccent: libc::c_uint = 0x7a2;
pub const XK_Greek_ETAaccent: libc::c_uint = 0x7a3;
pub const XK_Greek_IOTAaccent: libc::c_uint = 0x7a4;
pub const XK_Greek_IOTAdiaeresis: libc::c_uint = 0x7a5;
pub const XK_Greek_OMICRONaccent: libc::c_uint = 0x7a7;
pub const XK_Greek_UPSILONaccent: libc::c_uint = 0x7a8;
pub const XK_Greek_UPSILONdieresis: libc::c_uint = 0x7a9;
pub const XK_Greek_OMEGAaccent: libc::c_uint = 0x7ab;
pub const XK_Greek_accentdieresis: libc::c_uint = 0x7ae;
pub const XK_Greek_horizbar: libc::c_uint = 0x7af;
pub const XK_Greek_alphaaccent: libc::c_uint = 0x7b1;
pub const XK_Greek_epsilonaccent: libc::c_uint = 0x7b2;
pub const XK_Greek_etaaccent: libc::c_uint = 0x7b3;
pub const XK_Greek_iotaaccent: libc::c_uint = 0x7b4;
pub const XK_Greek_iotadieresis: libc::c_uint = 0x7b5;
pub const XK_Greek_iotaaccentdieresis: libc::c_uint = 0x7b6;
pub const XK_Greek_omicronaccent: libc::c_uint = 0x7b7;
pub const XK_Greek_upsilonaccent: libc::c_uint = 0x7b8;
pub const XK_Greek_upsilondieresis: libc::c_uint = 0x7b9;
pub const XK_Greek_upsilonaccentdieresis: libc::c_uint = 0x7ba;
pub const XK_Greek_omegaaccent: libc::c_uint = 0x7bb;
pub const XK_Greek_ALPHA: libc::c_uint = 0x7c1;
pub const XK_Greek_BETA: libc::c_uint = 0x7c2;
pub const XK_Greek_GAMMA: libc::c_uint = 0x7c3;
pub const XK_Greek_DELTA: libc::c_uint = 0x7c4;
pub const XK_Greek_EPSILON: libc::c_uint = 0x7c5;
pub const XK_Greek_ZETA: libc::c_uint = 0x7c6;
pub const XK_Greek_ETA: libc::c_uint = 0x7c7;
pub const XK_Greek_THETA: libc::c_uint = 0x7c8;
pub const XK_Greek_IOTA: libc::c_uint = 0x7c9;
pub const XK_Greek_KAPPA: libc::c_uint = 0x7ca;
pub const XK_Greek_LAMDA: libc::c_uint = 0x7cb;
pub const XK_Greek_LAMBDA: libc::c_uint = 0x7cb;
pub const XK_Greek_MU: libc::c_uint = 0x7cc;
pub const XK_Greek_NU: libc::c_uint = 0x7cd;
pub const XK_Greek_XI: libc::c_uint = 0x7ce;
pub const XK_Greek_OMICRON: libc::c_uint = 0x7cf;
pub const XK_Greek_PI: libc::c_uint = 0x7d0;
pub const XK_Greek_RHO: libc::c_uint = 0x7d1;
pub const XK_Greek_SIGMA: libc::c_uint = 0x7d2;
pub const XK_Greek_TAU: libc::c_uint = 0x7d4;
pub const XK_Greek_UPSILON: libc::c_uint = 0x7d5;
pub const XK_Greek_PHI: libc::c_uint = 0x7d6;
pub const XK_Greek_CHI: libc::c_uint = 0x7d7;
pub const XK_Greek_PSI: libc::c_uint = 0x7d8;
pub const XK_Greek_OMEGA: libc::c_uint = 0x7d9;
pub const XK_Greek_alpha: libc::c_uint = 0x7e1;
pub const XK_Greek_beta: libc::c_uint = 0x7e2;
pub const XK_Greek_gamma: libc::c_uint = 0x7e3;
pub const XK_Greek_delta: libc::c_uint = 0x7e4;
pub const XK_Greek_epsilon: libc::c_uint = 0x7e5;
pub const XK_Greek_zeta: libc::c_uint = 0x7e6;
pub const XK_Greek_eta: libc::c_uint = 0x7e7;
pub const XK_Greek_theta: libc::c_uint = 0x7e8;
pub const XK_Greek_iota: libc::c_uint = 0x7e9;
pub const XK_Greek_kappa: libc::c_uint = 0x7ea;
pub const XK_Greek_lamda: libc::c_uint = 0x7eb;
pub const XK_Greek_lambda: libc::c_uint = 0x7eb;
pub const XK_Greek_mu: libc::c_uint = 0x7ec;
pub const XK_Greek_nu: libc::c_uint = 0x7ed;
pub const XK_Greek_xi: libc::c_uint = 0x7ee;
pub const XK_Greek_omicron: libc::c_uint = 0x7ef;
pub const XK_Greek_pi: libc::c_uint = 0x7f0;
pub const XK_Greek_rho: libc::c_uint = 0x7f1;
pub const XK_Greek_sigma: libc::c_uint = 0x7f2;
pub const XK_Greek_finalsmallsigma: libc::c_uint = 0x7f3;
pub const XK_Greek_tau: libc::c_uint = 0x7f4;
pub const XK_Greek_upsilon: libc::c_uint = 0x7f5;
pub const XK_Greek_phi: libc::c_uint = 0x7f6;
pub const XK_Greek_chi: libc::c_uint = 0x7f7;
pub const XK_Greek_psi: libc::c_uint = 0x7f8;
pub const XK_Greek_omega: libc::c_uint = 0x7f9;
pub const XK_Greek_switch: libc::c_uint = 0xFF7E;
pub const XK_leftradical: libc::c_uint = 0x8a1;
pub const XK_topleftradical: libc::c_uint = 0x8a2;
pub const XK_horizconnector: libc::c_uint = 0x8a3;
pub const XK_topintegral: libc::c_uint = 0x8a4;
pub const XK_botintegral: libc::c_uint = 0x8a5;
pub const XK_vertconnector: libc::c_uint = 0x8a6;
pub const XK_topleftsqbracket: libc::c_uint = 0x8a7;
pub const XK_botleftsqbracket: libc::c_uint = 0x8a8;
pub const XK_toprightsqbracket: libc::c_uint = 0x8a9;
pub const XK_botrightsqbracket: libc::c_uint = 0x8aa;
pub const XK_topleftparens: libc::c_uint = 0x8ab;
pub const XK_botleftparens: libc::c_uint = 0x8ac;
pub const XK_toprightparens: libc::c_uint = 0x8ad;
pub const XK_botrightparens: libc::c_uint = 0x8ae;
pub const XK_leftmiddlecurlybrace: libc::c_uint = 0x8af;
pub const XK_rightmiddlecurlybrace: libc::c_uint = 0x8b0;
pub const XK_topleftsummation: libc::c_uint = 0x8b1;
pub const XK_botleftsummation: libc::c_uint = 0x8b2;
pub const XK_topvertsummationconnector: libc::c_uint = 0x8b3;
pub const XK_botvertsummationconnector: libc::c_uint = 0x8b4;
pub const XK_toprightsummation: libc::c_uint = 0x8b5;
pub const XK_botrightsummation: libc::c_uint = 0x8b6;
pub const XK_rightmiddlesummation: libc::c_uint = 0x8b7;
pub const XK_lessthanequal: libc::c_uint = 0x8bc;
pub const XK_notequal: libc::c_uint = 0x8bd;
pub const XK_greaterthanequal: libc::c_uint = 0x8be;
pub const XK_integral: libc::c_uint = 0x8bf;
pub const XK_therefore: libc::c_uint = 0x8c0;
pub const XK_variation: libc::c_uint = 0x8c1;
pub const XK_infinity: libc::c_uint = 0x8c2;
pub const XK_nabla: libc::c_uint = 0x8c5;
pub const XK_approximate: libc::c_uint = 0x8c8;
pub const XK_similarequal: libc::c_uint = 0x8c9;
pub const XK_ifonlyif: libc::c_uint = 0x8cd;
pub const XK_implies: libc::c_uint = 0x8ce;
pub const XK_identical: libc::c_uint = 0x8cf;
pub const XK_radical: libc::c_uint = 0x8d6;
pub const XK_includedin: libc::c_uint = 0x8da;
pub const XK_includes: libc::c_uint = 0x8db;
pub const XK_intersection: libc::c_uint = 0x8dc;
pub const XK_union: libc::c_uint = 0x8dd;
pub const XK_logicaland: libc::c_uint = 0x8de;
pub const XK_logicalor: libc::c_uint = 0x8df;
pub const XK_partialderivative: libc::c_uint = 0x8ef;
pub const XK_function: libc::c_uint = 0x8f6;
pub const XK_leftarrow: libc::c_uint = 0x8fb;
pub const XK_uparrow: libc::c_uint = 0x8fc;
pub const XK_rightarrow: libc::c_uint = 0x8fd;
pub const XK_downarrow: libc::c_uint = 0x8fe;
pub const XK_blank: libc::c_uint = 0x9df;
pub const XK_soliddiamond: libc::c_uint = 0x9e0;
pub const XK_checkerboard: libc::c_uint = 0x9e1;
pub const XK_ht: libc::c_uint = 0x9e2;
pub const XK_ff: libc::c_uint = 0x9e3;
pub const XK_cr: libc::c_uint = 0x9e4;
pub const XK_lf: libc::c_uint = 0x9e5;
pub const XK_nl: libc::c_uint = 0x9e8;
pub const XK_vt: libc::c_uint = 0x9e9;
pub const XK_lowrightcorner: libc::c_uint = 0x9ea;
pub const XK_uprightcorner: libc::c_uint = 0x9eb;
pub const XK_upleftcorner: libc::c_uint = 0x9ec;
pub const XK_lowleftcorner: libc::c_uint = 0x9ed;
pub const XK_crossinglines: libc::c_uint = 0x9ee;
pub const XK_horizlinescan1: libc::c_uint = 0x9ef;
pub const XK_horizlinescan3: libc::c_uint = 0x9f0;
pub const XK_horizlinescan5: libc::c_uint = 0x9f1;
pub const XK_horizlinescan7: libc::c_uint = 0x9f2;
pub const XK_horizlinescan9: libc::c_uint = 0x9f3;
pub const XK_leftt: libc::c_uint = 0x9f4;
pub const XK_rightt: libc::c_uint = 0x9f5;
pub const XK_bott: libc::c_uint = 0x9f6;
pub const XK_topt: libc::c_uint = 0x9f7;
pub const XK_vertbar: libc::c_uint = 0x9f8;
pub const XK_emspace: libc::c_uint = 0xaa1;
pub const XK_enspace: libc::c_uint = 0xaa2;
pub const XK_em3space: libc::c_uint = 0xaa3;
pub const XK_em4space: libc::c_uint = 0xaa4;
pub const XK_digitspace: libc::c_uint = 0xaa5;
pub const XK_punctspace: libc::c_uint = 0xaa6;
pub const XK_thinspace: libc::c_uint = 0xaa7;
pub const XK_hairspace: libc::c_uint = 0xaa8;
pub const XK_emdash: libc::c_uint = 0xaa9;
pub const XK_endash: libc::c_uint = 0xaaa;
pub const XK_signifblank: libc::c_uint = 0xaac;
pub const XK_ellipsis: libc::c_uint = 0xaae;
pub const XK_doubbaselinedot: libc::c_uint = 0xaaf;
pub const XK_onethird: libc::c_uint = 0xab0;
pub const XK_twothirds: libc::c_uint = 0xab1;
pub const XK_onefifth: libc::c_uint = 0xab2;
pub const XK_twofifths: libc::c_uint = 0xab3;
pub const XK_threefifths: libc::c_uint = 0xab4;
pub const XK_fourfifths: libc::c_uint = 0xab5;
pub const XK_onesixth: libc::c_uint = 0xab6;
pub const XK_fivesixths: libc::c_uint = 0xab7;
pub const XK_careof: libc::c_uint = 0xab8;
pub const XK_figdash: libc::c_uint = 0xabb;
pub const XK_leftanglebracket: libc::c_uint = 0xabc;
pub const XK_decimalpoint: libc::c_uint = 0xabd;
pub const XK_rightanglebracket: libc::c_uint = 0xabe;
pub const XK_marker: libc::c_uint = 0xabf;
pub const XK_oneeighth: libc::c_uint = 0xac3;
pub const XK_threeeighths: libc::c_uint = 0xac4;
pub const XK_fiveeighths: libc::c_uint = 0xac5;
pub const XK_seveneighths: libc::c_uint = 0xac6;
pub const XK_trademark: libc::c_uint = 0xac9;
pub const XK_signaturemark: libc::c_uint = 0xaca;
pub const XK_trademarkincircle: libc::c_uint = 0xacb;
pub const XK_leftopentriangle: libc::c_uint = 0xacc;
pub const XK_rightopentriangle: libc::c_uint = 0xacd;
pub const XK_emopencircle: libc::c_uint = 0xace;
pub const XK_emopenrectangle: libc::c_uint = 0xacf;
pub const XK_leftsinglequotemark: libc::c_uint = 0xad0;
pub const XK_rightsinglequotemark: libc::c_uint = 0xad1;
pub const XK_leftdoublequotemark: libc::c_uint = 0xad2;
pub const XK_rightdoublequotemark: libc::c_uint = 0xad3;
pub const XK_prescription: libc::c_uint = 0xad4;
pub const XK_minutes: libc::c_uint = 0xad6;
pub const XK_seconds: libc::c_uint = 0xad7;
pub const XK_latincross: libc::c_uint = 0xad9;
pub const XK_hexagram: libc::c_uint = 0xada;
pub const XK_filledrectbullet: libc::c_uint = 0xadb;
pub const XK_filledlefttribullet: libc::c_uint = 0xadc;
pub const XK_filledrighttribullet: libc::c_uint = 0xadd;
pub const XK_emfilledcircle: libc::c_uint = 0xade;
pub const XK_emfilledrect: libc::c_uint = 0xadf;
pub const XK_enopencircbullet: libc::c_uint = 0xae0;
pub const XK_enopensquarebullet: libc::c_uint = 0xae1;
pub const XK_openrectbullet: libc::c_uint = 0xae2;
pub const XK_opentribulletup: libc::c_uint = 0xae3;
pub const XK_opentribulletdown: libc::c_uint = 0xae4;
pub const XK_openstar: libc::c_uint = 0xae5;
pub const XK_enfilledcircbullet: libc::c_uint = 0xae6;
pub const XK_enfilledsqbullet: libc::c_uint = 0xae7;
pub const XK_filledtribulletup: libc::c_uint = 0xae8;
pub const XK_filledtribulletdown: libc::c_uint = 0xae9;
pub const XK_leftpointer: libc::c_uint = 0xaea;
pub const XK_rightpointer: libc::c_uint = 0xaeb;
pub const XK_club: libc::c_uint = 0xaec;
pub const XK_diamond: libc::c_uint = 0xaed;
pub const XK_heart: libc::c_uint = 0xaee;
pub const XK_maltesecross: libc::c_uint = 0xaf0;
pub const XK_dagger: libc::c_uint = 0xaf1;
pub const XK_doubledagger: libc::c_uint = 0xaf2;
pub const XK_checkmark: libc::c_uint = 0xaf3;
pub const XK_ballotcross: libc::c_uint = 0xaf4;
pub const XK_musicalsharp: libc::c_uint = 0xaf5;
pub const XK_musicalflat: libc::c_uint = 0xaf6;
pub const XK_malesymbol: libc::c_uint = 0xaf7;
pub const XK_femalesymbol: libc::c_uint = 0xaf8;
pub const XK_telephone: libc::c_uint = 0xaf9;
pub const XK_telephonerecorder: libc::c_uint = 0xafa;
pub const XK_phonographcopyright: libc::c_uint = 0xafb;
pub const XK_caret: libc::c_uint = 0xafc;
pub const XK_singlelowquotemark: libc::c_uint = 0xafd;
pub const XK_doublelowquotemark: libc::c_uint = 0xafe;
pub const XK_cursor: libc::c_uint = 0xaff;
pub const XK_leftcaret: libc::c_uint = 0xba3;
pub const XK_rightcaret: libc::c_uint = 0xba6;
pub const XK_downcaret: libc::c_uint = 0xba8;
pub const XK_upcaret: libc::c_uint = 0xba9;
pub const XK_overbar: libc::c_uint = 0xbc0;
pub const XK_downtack: libc::c_uint = 0xbc2;
pub const XK_upshoe: libc::c_uint = 0xbc3;
pub const XK_downstile: libc::c_uint = 0xbc4;
pub const XK_underbar: libc::c_uint = 0xbc6;
pub const XK_jot: libc::c_uint = 0xbca;
pub const XK_quad: libc::c_uint = 0xbcc;
pub const XK_uptack: libc::c_uint = 0xbce;
pub const XK_circle: libc::c_uint = 0xbcf;
pub const XK_upstile: libc::c_uint = 0xbd3;
pub const XK_downshoe: libc::c_uint = 0xbd6;
pub const XK_rightshoe: libc::c_uint = 0xbd8;
pub const XK_leftshoe: libc::c_uint = 0xbda;
pub const XK_lefttack: libc::c_uint = 0xbdc;
pub const XK_righttack: libc::c_uint = 0xbfc;
pub const XK_hebrew_doublelowline: libc::c_uint = 0xcdf;
pub const XK_hebrew_aleph: libc::c_uint = 0xce0;
pub const XK_hebrew_bet: libc::c_uint = 0xce1;
pub const XK_hebrew_beth: libc::c_uint = 0xce1;
pub const XK_hebrew_gimel: libc::c_uint = 0xce2;
pub const XK_hebrew_gimmel: libc::c_uint = 0xce2;
pub const XK_hebrew_dalet: libc::c_uint = 0xce3;
pub const XK_hebrew_daleth: libc::c_uint = 0xce3;
pub const XK_hebrew_he: libc::c_uint = 0xce4;
pub const XK_hebrew_waw: libc::c_uint = 0xce5;
pub const XK_hebrew_zain: libc::c_uint = 0xce6;
pub const XK_hebrew_zayin: libc::c_uint = 0xce6;
pub const XK_hebrew_chet: libc::c_uint = 0xce7;
pub const XK_hebrew_het: libc::c_uint = 0xce7;
pub const XK_hebrew_tet: libc::c_uint = 0xce8;
pub const XK_hebrew_teth: libc::c_uint = 0xce8;
pub const XK_hebrew_yod: libc::c_uint = 0xce9;
pub const XK_hebrew_finalkaph: libc::c_uint = 0xcea;
pub const XK_hebrew_kaph: libc::c_uint = 0xceb;
pub const XK_hebrew_lamed: libc::c_uint = 0xcec;
pub const XK_hebrew_finalmem: libc::c_uint = 0xced;
pub const XK_hebrew_mem: libc::c_uint = 0xcee;
pub const XK_hebrew_finalnun: libc::c_uint = 0xcef;
pub const XK_hebrew_nun: libc::c_uint = 0xcf0;
pub const XK_hebrew_samech: libc::c_uint = 0xcf1;
pub const XK_hebrew_samekh: libc::c_uint = 0xcf1;
pub const XK_hebrew_ayin: libc::c_uint = 0xcf2;
pub const XK_hebrew_finalpe: libc::c_uint = 0xcf3;
pub const XK_hebrew_pe: libc::c_uint = 0xcf4;
pub const XK_hebrew_finalzade: libc::c_uint = 0xcf5;
pub const XK_hebrew_finalzadi: libc::c_uint = 0xcf5;
pub const XK_hebrew_zade: libc::c_uint = 0xcf6;
pub const XK_hebrew_zadi: libc::c_uint = 0xcf6;
pub const XK_hebrew_qoph: libc::c_uint = 0xcf7;
pub const XK_hebrew_kuf: libc::c_uint = 0xcf7;
pub const XK_hebrew_resh: libc::c_uint = 0xcf8;
pub const XK_hebrew_shin: libc::c_uint = 0xcf9;
pub const XK_hebrew_taw: libc::c_uint = 0xcfa;
pub const XK_hebrew_taf: libc::c_uint = 0xcfa;
pub const XK_Hebrew_switch: libc::c_uint = 0xFF7E;
#[repr(C)]
pub struct XSetWindowAttributes {
pub background_pixmap: Pixmap,
pub background_pixel: libc::c_ulong,
pub border_pixmap: Pixmap,
pub border_pixel: libc::c_ulong,
pub bit_gravity: libc::c_int,
pub win_gravity: libc::c_int,
pub backing_store: libc::c_int,
pub backing_planes: libc::c_ulong,
pub backing_pixel: libc::c_long,
pub save_under: Bool,
pub event_mask: libc::c_long,
pub do_not_propagate_mask: libc::c_long,
pub override_redirect: Bool,
pub colormap: Colormap,
pub cursor: Cursor,
}
#[repr(C)]
pub struct XEvent {
pub type_: libc::c_int,
pad: [libc::c_long; 24],
}
#[repr(C)]
pub struct XClientMessageEvent {
pub type_: libc::c_int,
pub serial: libc::c_ulong,
pub send_event: Bool,
pub display: *mut Display,
pub window: Window,
pub message_type: Atom,
pub format: libc::c_int,
pub l: [libc::c_long; 5],
}
#[repr(C)]
pub struct XResizeRequestEvent {
pub type_: libc::c_int,
pub serial: libc::c_ulong,
pub send_event: Bool,
pub display: *mut Display,
pub window: Window,
pub width: libc::c_int,
pub height: libc::c_int,
}
#[repr(C)]
pub struct XMotionEvent {
pub type_: libc::c_int,
pub serial: libc::c_ulong,
pub send_event: Bool,
pub display: *mut Display,
pub window: Window,
pub root: Window,
pub subwindow: Window,
pub time: Time,
pub x: libc::c_int,
pub y: libc::c_int,
pub x_root: libc::c_int,
pub y_root: libc::c_int,
pub state: libc::c_uint,
pub is_hint: libc::c_char,
pub same_screen: Bool,
}
#[repr(C)]
pub struct XKeyEvent {
pub type_: libc::c_int,
pub serial: libc::c_ulong,
pub send_event: Bool,
pub display: *mut Display,
pub window: Window,
pub root: Window,
pub subwindow: Window,
pub time: Time,
pub x: libc::c_int,
pub y: libc::c_int,
pub x_root: libc::c_int,
pub y_root: libc::c_int,
pub state: libc::c_uint,
pub keycode: libc::c_uint,
pub same_screen: Bool,
}
#[repr(C)]
pub struct XButtonEvent {
pub type_: libc::c_int,
pub serial: libc::c_ulong,
pub send_event: Bool,
pub display: *mut Display,
pub window: Window,
pub root: Window,
pub subwindow: Window,
pub time: Time,
pub x: libc::c_int,
pub y: libc::c_int,
pub x_root: libc::c_int,
pub y_root: libc::c_int,
pub state: libc::c_uint,
pub button: libc::c_uint,
pub same_screen: Bool,
}
#[repr(C)]
pub struct XConfigureEvent {
pub type_: libc::c_int,
pub serial: libc::c_ulong,
pub send_event: Bool,
pub display: *mut Display,
pub event: Window,
pub window: Window,
pub x: libc::c_int,
pub y: libc::c_int,
pub width: libc::c_int,
pub height: libc::c_int,
pub border_width: libc::c_int,
pub above: Window,
pub override_redirect: Bool,
}
#[repr(C)]
pub struct XF86VidModeModeInfo {
pub dotclock: libc::c_uint,
pub hdisplay: libc::c_ushort,
pub hsyncstart: libc::c_ushort,
pub hsyncend: libc::c_ushort,
pub htotal: libc::c_ushort,
pub hskew: libc::c_ushort,
pub vdisplay: libc::c_ushort,
pub vsyncstart: libc::c_ushort,
pub vsyncend: libc::c_ushort,
pub vtotal: libc::c_ushort,
pub flags: libc::c_uint,
privsize: libc::c_int,
private: libc::c_long,
}
#[cfg(feature = "headless")]
#[link(name = "OSMesa")]
extern "C" {
pub fn OSMesaCreateContext(format: libc::c_uint, sharelist: OSMesaContext) -> OSMesaContext;
pub fn OSMesaCreateContextExt(format: libc::c_uint, depthBits: libc::c_int,
stencilBits: libc::c_int, accumBits: libc::c_int, sharelist: OSMesaContext)
-> OSMesaContext;
pub fn OSMesaDestroyContext(ctx: OSMesaContext);
pub fn OSMesaMakeCurrent(ctx: OSMesaContext, buffer: *mut libc::c_void, type_: libc::c_uint,
width: libc::c_int, height: libc::c_int) -> libc::c_uchar;
pub fn OSMesaGetCurrentContext() -> OSMesaContext;
pub fn OSMesaPixelStore(pname: libc::c_int, value: libc::c_int);
pub fn OSMesaGetIntegerv(pname: libc::c_int, value: *mut libc::c_int);
pub fn OSMesaGetDepthBuffer(c: OSMesaContext, width: *mut libc::c_int,
height: *mut libc::c_int, bytesPerValue: *mut libc::c_int,
buffer: *mut *mut libc::c_void);
pub fn OSMesaGetColorBuffer(c: OSMesaContext, width: *mut libc::c_int,
height: *mut libc::c_int, format: *mut libc::c_int, buffer: *mut *mut libc::c_void);
pub fn OSMesaGetProcAddress(funcName: *const libc::c_char) -> *const libc::c_void;
pub fn OSMesaColorClamp(enable: libc::c_uchar);
}
#[cfg(feature = "window")]
#[link(name = "GL")]
#[link(name = "X11")]
#[link(name = "Xxf86vm")]
#[link(name = "Xcursor")]
extern "C" {
pub fn XCloseDisplay(display: *mut Display);
pub fn XCheckMaskEvent(display: *mut Display, event_mask: libc::c_long,
event_return: *mut XEvent) -> Bool;
pub fn XCheckTypedEvent(display: *mut Display, event_type: libc::c_int,
event_return: *mut XEvent) -> Bool;
pub fn XCreateColormap(display: *mut Display, w: Window,
visual: *mut Visual, alloc: libc::c_int) -> Colormap;
pub fn XCreateWindow(display: *mut Display, parent: Window, x: libc::c_int,
y: libc::c_int, width: libc::c_uint, height: libc::c_uint,
border_width: libc::c_uint, depth: libc::c_int, class: libc::c_uint,
visual: *mut Visual, valuemask: libc::c_ulong,
attributes: *mut XSetWindowAttributes) -> Window;
pub fn XDefaultRootWindow(display: *mut Display) -> Window;
pub fn XDefaultScreen(display: *mut Display) -> libc::c_int;
pub fn XDestroyWindow(display: *mut Display, w: Window);
pub fn XFilterEvent(event: *mut XEvent, w: Window) -> Bool;
pub fn XFlush(display: *mut Display);
pub fn XFree(data: *const libc::c_void);
pub fn XGetGeometry(display: *mut Display, d: Drawable, root_return: *mut Window,
x_return: *mut libc::c_int, y_return: *mut libc::c_int,
width_return: *mut libc::c_uint, height_return: *mut libc::c_uint,
border_width_return: *mut libc::c_uint, depth_return: *mut libc::c_uint) -> Status;
pub fn XSendEvent(display: *mut Display, window: Window, propagate: Bool,
event_mask: libc::c_long, event_send: *mut XEvent) -> Status;
pub fn XInternAtom(display: *mut Display, atom_name: *const libc::c_char,
only_if_exists: Bool) -> Atom;
pub fn XKeycodeToKeysym(display: *mut Display, keycode: KeyCode,
index: libc::c_int) -> KeySym;
pub fn XMoveWindow(display: *mut Display, w: Window, x: libc::c_int, y: libc::c_int);
pub fn XMapWindow(display: *mut Display, w: Window);
pub fn XMapRaised(display: *mut Display, w: Window);
pub fn XUnmapWindow(display: *mut Display, w: Window);
pub fn XNextEvent(display: *mut Display, event_return: *mut XEvent);
pub fn XInitThreads() -> Status;
pub fn XOpenDisplay(display_name: *const libc::c_char) -> *mut Display;
pub fn XPeekEvent(display: *mut Display, event_return: *mut XEvent);
pub fn XRefreshKeyboardMapping(event_map: *const XEvent);
pub fn XSetWMProtocols(display: *mut Display, w: Window, protocols: *mut Atom,
count: libc::c_int) -> Status;
pub fn XStoreName(display: *mut Display, w: Window, window_name: *const libc::c_char);
pub fn XScreenCount(display: *mut Display) -> libc::c_int;
pub fn XScreenOfDisplay(display: *mut Display, screen_number: libc::c_int) -> *const Screen;
pub fn XWidthOfScreen(screen: *const Screen) -> libc::c_int;
pub fn XHeightOfScreen(screen: *const Screen) -> libc::c_int;
pub fn XCloseIM(im: XIM) -> Status;
pub fn XOpenIM(display: *mut Display, db: XrmDatabase, res_name: *mut libc::c_char,
res_class: *mut libc::c_char) -> XIM;
// TODO: this is a vararg function
//pub fn XCreateIC(im: XIM, ...) -> XIC;
pub fn XCreateIC(im: XIM, a: *const libc::c_char, b: libc::c_long, c: *const libc::c_char,
d: Window, e: *const ()) -> XIC;
pub fn XDestroyIC(ic: XIC);
pub fn XSetICFocus(ic: XIC);
pub fn XUnsetICFocus(ic: XIC);
pub fn Xutf8LookupString(ic: XIC, event: *mut XKeyEvent,
buffer_return: *mut libc::c_char, bytes_buffer: libc::c_int,
keysym_return: *mut KeySym, status_return: *mut Status) -> libc::c_int;
pub fn XkbSetDetectableAutoRepeat(dpy: *mut Display, detectable: bool, supported_rtm: *mut bool) -> bool;
pub fn XF86VidModeSwitchToMode(dpy: *mut Display, screen: libc::c_int,
modeline: *mut XF86VidModeModeInfo) -> Bool;
pub fn XF86VidModeSetViewPort(dpy: *mut Display, screen: libc::c_int,
x: libc::c_int, y: libc::c_int) -> Bool;
pub fn XF86VidModeGetAllModeLines(dpy: *mut Display, screen: libc::c_int,
modecount_return: *mut libc::c_int, modesinfo: *mut *mut *mut XF86VidModeModeInfo) -> Bool;
pub fn XcursorLibraryLoadCursor(dpy: *mut Display, name: *const libc::c_char) -> Cursor;
pub fn XDefineCursor(dby: *mut Display, w: Window, cursor: Cursor);
}
/*
GLXFBConfig *glXGetFBConfigs (Display *dpy, int screen, int *nelements);
int glXGetFBConfigAttrib (Display *dpy, GLXFBConfig config, int attribute, int *value);
GLXWindow glXCreateWindow (Display *dpy, GLXFBConfig config, Window win, const int *attrib_list);
void glXDestroyWindow (Display *dpy, GLXWindow win);
GLXPixmap glXCreatePixmap (Display *dpy, GLXFBConfig config, Pixmap pixmap, const int *attrib_list);
void glXDestroyPixmap (Display *dpy, GLXPixmap pixmap);
GLXPbuffer glXCreatePbuffer (Display *dpy, GLXFBConfig config, const int *attrib_list);
void glXDestroyPbuffer (Display *dpy, GLXPbuffer pbuf);
void glXQueryDrawable (Display *dpy, GLXDrawable draw, int attribute, unsigned int *value);
GLXContext glXCreateNewContext (Display *dpy, GLXFBConfig config, int render_type, GLXContext share_list, Bool direct);
Bool glXMakeContextCurrent (Display *dpy, GLXDrawable draw, GLXDrawable read, GLXContext ctx);
GLXDrawable glXGetCurrentReadDrawable (void);
int glXQueryContext (Display *dpy, GLXContext ctx, int attribute, int *value);
void glXSelectEvent (Display *dpy, GLXDrawable draw, unsigned long event_mask);
void glXGetSelectedEvent (Display *dpy, GLXDrawable draw, unsigned long *event_mask);
extern void glXCopyContext( Display *dpy, GLXContext src, GLXContext dst,
unsigned long mask );
extern GLXPixmap glXCreateGLXPixmap( Display *dpy, XVisualInfo *visual,
Pixmap pixmap );
extern void glXDestroyGLXPixmap( Display *dpy, GLXPixmap pixmap );
extern Bool glXQueryExtension( Display *dpy, int *errorb, int *event );
extern Bool glXQueryVersion( Display *dpy, int *maj, int *min );
extern Bool glXIsDirect( Display *dpy, GLXContext ctx );
extern int glXGetConfig( Display *dpy, XVisualInfo *visual,
int attrib, int *value );
extern GLXContext glXGetCurrentContext( void );
extern GLXDrawable glXGetCurrentDrawable( void );
extern void glXWaitGL( void );
extern void glXWaitX( void );
extern void glXUseXFont( Font font, int first, int count, int list );
extern const char *glXQueryExtensionsString( Display *dpy, int screen );
extern const char *glXQueryServerString( Display *dpy, int screen, int name );
extern const char *glXGetClientString( Display *dpy, int name );
extern Display *glXGetCurrentDisplay( void );
*/
<|file_name|>check-migration-strategies.js<|end_file_name|>
import { newRxTypeError, newRxError } from '../../rx-error';
import { getPreviousVersions } from '../../rx-schema';
/**
* checks if the migrationStrategies are ok, throws if not
* @throws {Error|TypeError} if not ok
*/
export function checkMigrationStrategies(schema, migrationStrategies) {
// migrationStrategies must be object not array
if (typeof migrationStrategies !== 'object' || Array.isArray(migrationStrategies)) {
throw newRxTypeError('COL11', {
schema: schema
});
}
var previousVersions = getPreviousVersions(schema); // for every previousVersion there must be strategy
if (previousVersions.length !== Object.keys(migrationStrategies).length) {
throw newRxError('COL12', {
have: Object.keys(migrationStrategies),
should: previousVersions
});
  } // every strategy must have number as property and be a function

  previousVersions.map(function (vNr) {
    return {
      v: vNr,
      s: migrationStrategies[vNr + 1]
};
}).filter(function (strat) {
return typeof strat.s !== 'function';
}).forEach(function (strat) {
throw newRxTypeError('COL13', {
version: strat.v,
type: typeof strat,
schema: schema
});
});
return true;
}
//# sourceMappingURL=check-migration-strategies.js.map
<|file_name|>async-unsafe-fn-call-in-safe.rs<|end_file_name|>
// edition:2018
// revisions: mir thir
// [thir]compile-flags: -Z thir-unsafeck
struct S;

impl S {
    async unsafe fn f() {}
}

async unsafe fn f() {}
async fn g() {
S::f(); //~ ERROR call to unsafe function is unsafe
f(); //~ ERROR call to unsafe function is unsafe
}
fn main() {
S::f(); //[mir]~ ERROR call to unsafe function is unsafe
f(); //[mir]~ ERROR call to unsafe function is unsafe
}
<|file_name|>lib.rs<|end_file_name|>
//! Raw bindings to C functions of the Fast Artificial Neural Network library
//!
//!
//! # Creation/Execution
//!
//! The FANN library is designed to be very easy to use.
//! A feedforward ANN can be created by a simple `fann_create_standard` function, while
//! other ANNs can be created just as easily. The ANNs can be trained by `fann_train_on_file`
//! and executed by `fann_run`.
//!
//! All of this can be done without much knowledge of the internals of ANNs, although the ANNs
//! created will still be powerful and effective. If you have more knowledge about ANNs, and desire
//! more control, almost every part of the ANNs can be parametrized to create specialized and highly
//! optimal ANNs.
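//!
//! A minimal create/run sketch is shown below. It is hedged: it assumes the
//! `fann_create_standard_array`, `fann_run` and `fann_destroy` bindings, which
//! are declared further down in this crate but are not shown here:
//!
//! ```no_run
//! extern crate libc;
//! extern crate fann_sys;
//!
//! use libc::c_uint;
//! use fann_sys::*;
//!
//! fn main() {
//!     unsafe {
//!         // Three layers: 2 inputs, 3 hidden neurons, 1 output.
//!         let layers: [c_uint; 3] = [2, 3, 1];
//!         let ann = fann_create_standard_array(layers.len() as c_uint, layers.as_ptr());
//!         // `fann_run` returns a pointer to the network's output array.
//!         let mut input: [fann_type; 2] = [1.0, 0.0];
//!         let output = fann_run(ann, input.as_mut_ptr());
//!         println!("output: {}", *output);
//!         fann_destroy(ann);
//!     }
//! }
//! ```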
//!
//!
//! # Training
//!
//! There are many different ways of training neural networks and the FANN library supports
//! a number of different approaches.
//!
//! Two fundamentally different approaches are the most commonly used:
//!
//! * Fixed topology training - The size and topology of the ANN is determined in advance
//! and the training alters the weights in order to minimize the difference between
//! the desired output values and the actual output values. This kind of training is
//! supported by `fann_train_on_data`; a sketch of such a training loop follows this list.
//!
//! * Evolving topology training - The training starts out with an empty ANN, only consisting
//! of input and output neurons. Hidden neurons and connections are added during training,
//! in order to achieve the same goal as for fixed topology training. This kind of training
//! is supported by FANN Cascade Training.
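//!
//! Below is a hedged sketch of a fixed topology training loop, using the
//! `fann_read_train_from_file`, `fann_train_epoch` and `fann_destroy_train`
//! bindings declared in this file (the file name and the epoch and error
//! bounds are illustrative):
//!
//! ```no_run
//! extern crate fann_sys;
//!
//! use fann_sys::*;
//! use std::ffi::CString;
//!
//! fn main() {
//!     let path = CString::new("xor.data").unwrap();
//!     unsafe {
//!         # let ann: *mut fann = std::ptr::null_mut(); // stand-in for a real network
//!         let data = fann_read_train_from_file(path.as_ptr());
//!         // Train for at most 500 epochs, or until the MSE drops below 0.001.
//!         for _ in 0..500 {
//!             if fann_train_epoch(ann, data) < 0.001 {
//!                 break;
//!             }
//!         }
//!         fann_destroy_train(data);
//!     }
//! }
//! ```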
//!
//!
//! # Cascade Training
//!
//! Cascade training differs from ordinary training in the sense that it starts with an empty neural
//! network and then adds neurons one by one, while it trains the neural network. The main benefit
//! of this approach is that you do not have to guess the number of hidden layers and neurons prior
//! to training, but cascade training has also proved better at solving some problems.
//!
//! The basic idea of cascade training is that a number of candidate neurons are trained separately
//! from the real network, then the most promising of these candidate neurons is inserted into the
//! neural network. Then the output connections are trained and new candidate neurons are prepared.
//! The candidate neurons are created as shortcut connected neurons in a new hidden layer, which
//! means that the final neural network will consist of a number of hidden layers with one shortcut
//! connected neuron in each.
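//!
//! A hedged sketch, assuming the `fann_create_shortcut_array` and
//! `fann_cascadetrain_on_data` bindings (declared elsewhere in this crate;
//! the numbers are illustrative):
//!
//! ```no_run
//! extern crate libc;
//! extern crate fann_sys;
//!
//! use libc::c_uint;
//! use fann_sys::*;
//!
//! fn main() {
//!     unsafe {
//!         // Cascade training starts from a minimal net: inputs and outputs only.
//!         let layers: [c_uint; 2] = [2, 1];
//!         let ann = fann_create_shortcut_array(layers.len() as c_uint, layers.as_ptr());
//!         # let data: *mut fann_train_data = std::ptr::null_mut(); // stand-in
//!         // Add up to 30 candidate neurons, reporting after each new neuron.
//!         fann_cascadetrain_on_data(ann, data, 30, 1, 0.001);
//!         fann_destroy(ann);
//!     }
//! }
//! ```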
//!
//!
//! # File Input/Output
//!
//! It is possible to save an entire ANN to a file with `fann_save` for future loading with
//! `fann_create_from_file`.
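//!
//! A hedged sketch, assuming the `fann_save` and `fann_create_from_file`
//! bindings (declared elsewhere in this crate; the file name is illustrative):
//!
//! ```no_run
//! extern crate fann_sys;
//!
//! use fann_sys::*;
//! use std::ffi::CString;
//!
//! fn main() {
//!     let path = CString::new("net.config").unwrap();
//!     unsafe {
//!         # let ann: *mut fann = std::ptr::null_mut(); // stand-in for a real network
//!         fann_save(ann, path.as_ptr());
//!         // ... later, restore the network from the same file:
//!         let restored = fann_create_from_file(path.as_ptr());
//!         fann_destroy(restored);
//!     }
//! }
//! ```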
//!
//!
//! # Error Handling
//!
//! Errors from the FANN library are usually reported on `stderr`.
//! It is however possible to redirect these error messages to a file,
//! or completely ignore them with the `fann_set_error_log` function.
//!
//! It is also possible to inspect the last error message by using the
//! `fann_get_errno` and `fann_get_errstr` functions.
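//!
//! A short sketch of both, using the `fann_set_error_log`, `fann_get_errno`
//! and `fann_print_error` bindings declared in this file (the `ann` pointer is
//! a stand-in for a network created elsewhere):
//!
//! ```no_run
//! extern crate fann_sys;
//!
//! use fann_sys::*;
//! use std::ptr;
//!
//! fn main() {
//!     unsafe {
//!         # let ann: *mut fann = ptr::null_mut();
//!         // Both `fann` and `fann_train_data` start with the `fann_error`
//!         // fields, so the pointer can be cast for the error API.
//!         let err = ann as *mut fann_error;
//!         // Disable logging to stderr entirely.
//!         fann_set_error_log(err, ptr::null_mut());
//!         match fann_get_errno(err) {
//!             FANN_E_NO_ERROR => println!("no error"),
//!             _ => fann_print_error(err),
//!         }
//!     }
//! }
//! ```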
//!
//!
//! # Datatypes
//!
//! The two main datatypes used in the FANN library are `fann`,
//! which represents an artificial neural network, and `fann_train_data`,
//! which represents training data.
#![allow(non_camel_case_types)]
// TODO: Cross-link the documentation.
extern crate libc;
pub use fann_activationfunc_enum::*;
pub use fann_errno_enum::*;
pub use fann_errorfunc_enum::*;
pub use fann_nettype_enum::*;
pub use fann_stopfunc_enum::*;
pub use fann_train_enum::*;
use libc::{c_char, c_float, c_int, c_uint, c_void};
use libc::FILE;
#[cfg(feature = "double")]
type fann_type_internal = libc::c_double;
#[cfg(not(feature = "double"))]
type fann_type_internal = c_float;
/// The type of weights, inputs and outputs in a neural network. In the Rust bindings, it is
/// defined as `c_float` by default, and as `c_double`, if the `double` feature is configured.
///
/// In the FANN C library, `fann_type` is defined as:
///
/// * `float` - if you include fann.h or floatfann.h
/// * `double` - if you include doublefann.h
/// * `int` - if you include fixedfann.h (only for executing a network, not training).
pub type fann_type = fann_type_internal;
/// Error events on `fann` and `fann_train_data`.
#[repr(C)]
#[derive(Copy, Clone)]
pub enum fann_errno_enum {
/// No error
FANN_E_NO_ERROR = 0,
/// Unable to open configuration file for reading
FANN_E_CANT_OPEN_CONFIG_R,
/// Unable to open configuration file for writing
FANN_E_CANT_OPEN_CONFIG_W,
/// Wrong version of configuration file
FANN_E_WRONG_CONFIG_VERSION,
/// Error reading info from configuration file
FANN_E_CANT_READ_CONFIG,
/// Error reading neuron info from configuration file
FANN_E_CANT_READ_NEURON,
/// Error reading connections from configuration file
FANN_E_CANT_READ_CONNECTIONS,
/// Number of connections not equal to the number expected
FANN_E_WRONG_NUM_CONNECTIONS,
/// Unable to open train data file for writing
FANN_E_CANT_OPEN_TD_W,
/// Unable to open train data file for reading
FANN_E_CANT_OPEN_TD_R,
/// Error reading training data from file
FANN_E_CANT_READ_TD,
/// Unable to allocate memory
FANN_E_CANT_ALLOCATE_MEM,
/// Unable to train with the selected activation function
FANN_E_CANT_TRAIN_ACTIVATION,
/// Unable to use the selected activation function
FANN_E_CANT_USE_ACTIVATION,
/// Irreconcilable differences between two `fann_train_data` structures
FANN_E_TRAIN_DATA_MISMATCH,
/// Unable to use the selected training algorithm
FANN_E_CANT_USE_TRAIN_ALG,
/// Trying to take subset which is not within the training set
FANN_E_TRAIN_DATA_SUBSET,
/// Index is out of bound
FANN_E_INDEX_OUT_OF_BOUND,
/// Scaling parameters not present
FANN_E_SCALE_NOT_PRESENT,
}
/// The Training algorithms used when training on `fann_train_data` with functions like
/// `fann_train_on_data` or `fann_train_on_file`. The incremental training alters the weights
/// after each time it is presented an input pattern, while batch only alters the weights once after
/// it has been presented to all the patterns.
#[repr(C)]
#[derive(Copy, Clone)]
pub enum fann_train_enum {
/// Standard backpropagation algorithm, where the weights are
/// updated after each training pattern. This means that the weights are updated many
/// times during a single epoch. For this reason some problems will train very fast with
/// this algorithm, while other more advanced problems will not train very well.
FANN_TRAIN_INCREMENTAL = 0,
/// Standard backpropagation algorithm, where the weights are updated after calculating the mean
/// square error for the whole training set. This means that the weights are only updated once
/// during an epoch. For this reason some problems will train slower with this algorithm. But
/// since the mean square error is calculated more correctly than in incremental training, some
/// problems will reach better solutions with this algorithm.
FANN_TRAIN_BATCH,
/// A more advanced batch training algorithm which achieves good results
/// for many problems. The RPROP training algorithm is adaptive, and does therefore not
/// use the `learning_rate`. Some other parameters can however be set to change the way the
/// RPROP algorithm works, but it is only recommended for users with insight in how the RPROP
/// training algorithm works. The RPROP training algorithm is described by
/// [Riedmiller and Braun, 1993], but the actual learning algorithm used here is the
/// iRPROP- training algorithm which is described by [Igel and Husken, 2000] which
/// is a variant of the standard RPROP training algorithm.
FANN_TRAIN_RPROP,
/// A more advanced batch training algorithm which achieves good results
/// for many problems. The quickprop training algorithm uses the `learning_rate` parameter
/// along with other more advanced parameters, but it is only recommended to change these
/// advanced parameters for users with insight in how the quickprop training algorithm works.
/// The quickprop training algorithm is described by [Fahlman, 1988].
FANN_TRAIN_QUICKPROP,
}
/// The activation functions used for the neurons during training. The activation functions
/// can either be defined for a group of neurons by `fann_set_activation_function_hidden` and
/// `fann_set_activation_function_output`, or it can be defined for a single neuron by
/// `fann_set_activation_function`.
///
/// The steepness of an activation function is defined in the same way by
/// `fann_set_activation_steepness_hidden`, `fann_set_activation_steepness_output` and
/// `fann_set_activation_steepness`.
///
/// The functions are described with functions where:
///
/// * x is the input to the activation function,
///
/// * y is the output,
///
/// * s is the steepness and
///
/// * d is the derivation.
#[repr(C)]
#[derive(Copy, Clone)]
pub enum fann_activationfunc_enum {
/// Neuron does not exist or does not have an activation function.
FANN_NONE = -1,
/// Linear activation function.
///
/// * span: -inf < y < inf
///
/// * y = x*s, d = 1*s
///
/// * Can NOT be used in fixed point.
FANN_LINEAR = 0,
/// Threshold activation function.
///
/// * x < 0 -> y = 0, x >= 0 -> y = 1
///
/// * Can NOT be used during training.
FANN_THRESHOLD,
    /// Symmetric threshold activation function.
    ///
    /// * x < 0 -> y = -1, x >= 0 -> y = 1
///
/// * Can NOT be used during training.
FANN_THRESHOLD_SYMMETRIC,
/// Sigmoid activation function.
///
/// * One of the most used activation functions.
///
/// * span: 0 < y < 1
///
/// * y = 1/(1 + exp(-2*s*x))
///
/// * d = 2*s*y*(1 - y)
FANN_SIGMOID,
/// Stepwise linear approximation to sigmoid.
///
/// * Faster than sigmoid but a bit less precise.
FANN_SIGMOID_STEPWISE,
/// Symmetric sigmoid activation function, aka. tanh.
///
/// * One of the most used activation functions.
///
/// * span: -1 < y < 1
///
/// * y = tanh(s*x) = 2/(1 + exp(-2*s*x)) - 1
///
/// * d = s*(1-(y*y))
FANN_SIGMOID_SYMMETRIC,
/// Stepwise linear approximation to symmetric sigmoid.
///
/// * Faster than symmetric sigmoid but a bit less precise.
FANN_SIGMOID_SYMMETRIC_STEPWISE,
/// Gaussian activation function.
///
/// * 0 when x = -inf, 1 when x = 0 and 0 when x = inf
///
/// * span: 0 < y < 1
///
/// * y = exp(-x*s*x*s)
///
/// * d = -2*x*s*y*s
FANN_GAUSSIAN,
/// Symmetric gaussian activation function.
///
/// * -1 when x = -inf, 1 when x = 0 and 0 when x = inf
///
/// * span: -1 < y < 1
///
/// * y = exp(-x*s*x*s)*2-1
///
/// * d = -2*x*s*(y+1)*s
FANN_GAUSSIAN_SYMMETRIC,
/// Stepwise linear approximation to gaussian.
/// Faster than gaussian but a bit less precise.
/// NOT implemented yet.
FANN_GAUSSIAN_STEPWISE,
/// Fast (sigmoid like) activation function defined by David Elliott
///
/// * span: 0 < y < 1
///
/// * y = ((x*s) / 2) / (1 + |x*s|) + 0.5
///
/// * d = s*1/(2*(1+|x*s|)*(1+|x*s|))
FANN_ELLIOTT,
/// Fast (symmetric sigmoid like) activation function defined by David Elliott
///
/// * span: -1 < y < 1
///
/// * y = (x*s) / (1 + |x*s|)
///
/// * d = s*1/((1+|x*s|)*(1+|x*s|))
FANN_ELLIOTT_SYMMETRIC,
/// Bounded linear activation function.
///
/// * span: 0 <= y <= 1
///
/// * y = x*s, d = 1*s
FANN_LINEAR_PIECE,
/// Bounded linear activation function.
///
/// * span: -1 <= y <= 1
///
/// * y = x*s, d = 1*s
FANN_LINEAR_PIECE_SYMMETRIC,
/// Periodical sine activation function.
///
/// * span: -1 <= y <= 1
///
/// * y = sin(x*s)
///
/// * d = s*cos(x*s)
FANN_SIN_SYMMETRIC,
/// Periodical cosine activation function.
///
/// * span: -1 <= y <= 1
///
/// * y = cos(x*s)
///
/// * d = s*-sin(x*s)
FANN_COS_SYMMETRIC,
/// Periodical sine activation function.
///
/// * span: 0 <= y <= 1
///
/// * y = sin(x*s)/2+0.5
///
/// * d = s*cos(x*s)/2
FANN_SIN,
/// Periodical cosine activation function.
///
/// * span: 0 <= y <= 1
///
/// * y = cos(x*s)/2+0.5
///
/// * d = s*-sin(x*s)/2
FANN_COS,
}
/// Error function used during training.
#[repr(C)]
#[derive(Copy, Clone)]
pub enum fann_errorfunc_enum {
/// Standard linear error function.
FANN_ERRORFUNC_LINEAR = 0,
/// Tanh error function, usually better but can require a lower learning rate. This error
/// function aggressively targets outputs that differ much from the desired, while not targeting
    /// outputs that only differ slightly. This error function is not recommended for
/// cascade training and incremental training.
FANN_ERRORFUNC_TANH,
}
/// Stop criteria used during training.
#[repr(C)]
#[derive(Copy, Clone)]
pub enum fann_stopfunc_enum {
/// Stop criterion is Mean Square Error (MSE) value.
FANN_STOPFUNC_MSE = 0,
/// Stop criterion is number of bits that fail. The number of bits means the
/// number of output neurons which differ more than the bit fail limit
/// (see `fann_get_bit_fail_limit`, `fann_set_bit_fail_limit`).
/// The bits are counted in all of the training data, so this number can be higher than
/// the number of training data.
FANN_STOPFUNC_BIT,
}
/// Definition of network types used by `fann_get_network_type`.
#[repr(C)]
#[derive(Copy, Clone)]
pub enum fann_nettype_enum {
/// Each layer only has connections to the next layer.
FANN_NETTYPE_LAYER = 0,
/// Each layer has connections to all following layers.
FANN_NETTYPE_SHORTCUT,
}
/// This callback function can be called during training when using `fann_train_on_data`,
/// `fann_train_on_file` or `fann_cascadetrain_on_data`.
///
/// The callback can be set by using `fann_set_callback` and is very useful for doing custom
/// things during training. It is recommended to use this function when implementing custom
/// training procedures, or when visualizing the training in a GUI etc. The parameters which the
/// callback function takes are the parameters given to `fann_train_on_data`, plus an `epochs`
/// parameter which tells how many epochs the training has taken so far.
///
/// The callback function should return an integer, if the callback function returns -1, the
/// training will terminate.
///
/// Example of a callback function:
///
/// ```
/// extern crate libc;
/// extern crate fann_sys;
///
/// use libc::*;
/// use fann_sys::*;
///
/// extern "C" fn cb(ann: *mut fann,
/// train: *mut fann_train_data,
/// max_epochs: c_uint,
/// epochs_between_reports: c_uint,
/// desired_error: c_float,
/// epochs: c_uint) -> c_int {
/// let mse = unsafe { fann_get_MSE(ann) };
/// println!("Epochs: {}. MSE: {}. Desired MSE: {}", epochs, mse, desired_error);
/// 0
/// }
///
/// fn main() {
/// let test_callback: fann_callback_type = Some(cb);
/// }
/// ```
pub type fann_callback_type = Option<
extern "C" fn(
ann: *mut fann,
train: *mut fann_train_data,
max_epochs: c_uint,
epochs_between_reports: c_uint,
desired_error: c_float,
epochs: c_uint,
) -> c_int,
>;
#[repr(C)]
struct fann_neuron {
first_con: c_uint,
last_con: c_uint,
sum: fann_type,
value: fann_type,
activation_steepness: fann_type,
activation_function: fann_activationfunc_enum,
}
#[repr(C)]
struct fann_layer {
first_neuron: *mut fann_neuron,
last_neuron: *mut fann_neuron,
}
/// Structure used to store error-related information, both
/// `fann` and `fann_train_data` can be cast to this type.
///
/// # See also
/// `fann_set_error_log`, `fann_get_errno`
#[repr(C)]
pub struct fann_error {
errno_f: fann_errno_enum,
error_log: *mut FILE,
errstr: *mut c_char,
}
/// The fast artificial neural network (`fann`) structure.
///
/// Data within this structure should never be accessed directly, but only by using the
/// `fann_get_...` and `fann_set_...` functions.
///
/// The fann structure is created using one of the `fann_create_...` functions and each of
/// the functions which operates on the structure takes a `fann` pointer as the first parameter.
///
/// # See also
/// `fann_create_standard`, `fann_destroy`
#[repr(C)]
pub struct fann {
errno_f: fann_errno_enum,
error_log: *mut FILE,
errstr: *mut c_char,
learning_rate: c_float,
learning_momentum: c_float,
connection_rate: c_float,
network_type: fann_nettype_enum,
first_layer: *mut fann_layer,
last_layer: *mut fann_layer,
total_neurons: c_uint,
num_input: c_uint,
num_output: c_uint,
weights: *mut fann_type,
connections: *mut *mut fann_neuron,
train_errors: *mut fann_type,
training_algorithm: fann_train_enum,
total_connections: c_uint,
output: *mut fann_type,
num_mse: c_uint,
mse_value: c_float,
num_bit_fail: c_uint,
bit_fail_limit: fann_type,
train_error_function: fann_errorfunc_enum,
train_stop_function: fann_stopfunc_enum,
callback: fann_callback_type,
user_data: *mut c_void,
cascade_output_change_fraction: c_float,
cascade_output_stagnation_epochs: c_uint,
cascade_candidate_change_fraction: c_float,
cascade_candidate_stagnation_epochs: c_uint,
cascade_best_candidate: c_uint,
cascade_candidate_limit: fann_type,
cascade_weight_multiplier: fann_type,
cascade_max_out_epochs: c_uint,
cascade_max_cand_epochs: c_uint,
cascade_activation_functions: *mut fann_activationfunc_enum,
cascade_activation_functions_count: c_uint,
cascade_activation_steepnesses: *mut fann_type,
cascade_activation_steepnesses_count: c_uint,
cascade_num_candidate_groups: c_uint,
cascade_candidate_scores: *mut fann_type,
total_neurons_allocated: c_uint,
total_connections_allocated: c_uint,
quickprop_decay: c_float,
quickprop_mu: c_float,
rprop_increase_factor: c_float,
rprop_decrease_factor: c_float,
rprop_delta_min: c_float,
rprop_delta_max: c_float,
rprop_delta_zero: c_float,
train_slopes: *mut fann_type,
prev_steps: *mut fann_type,
prev_train_slopes: *mut fann_type,
prev_weights_deltas: *mut fann_type,
scale_mean_in: *mut c_float,
scale_deviation_in: *mut c_float,
scale_new_min_in: *mut c_float,
scale_factor_in: *mut c_float,
scale_mean_out: *mut c_float,
scale_deviation_out: *mut c_float,
scale_new_min_out: *mut c_float,
scale_factor_out: *mut c_float,
}
/// Describes a connection between two neurons and its weight.
///
/// # See Also
/// `fann_get_connection_array`, `fann_set_weight_array`
///
/// This structure appears in FANN >= 2.1.0.
#[repr(C)]
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct fann_connection {
/// Unique number used to identify source neuron
pub from_neuron: c_uint,
/// Unique number used to identify destination neuron
pub to_neuron: c_uint,
/// The numerical value of the weight
pub weight: fann_type,
}
/// Structure used to store data, for use with training.
///
/// The data inside this structure should never be manipulated directly, but should use some
/// of the supplied training data manipulation functions.
///
/// The training data structure is very useful for storing data during training and testing of a
/// neural network.
///
/// # See also
/// `fann_read_train_from_file`, `fann_train_on_data`, `fann_destroy_train`
#[repr(C)]
pub struct fann_train_data {
errno_f: fann_errno_enum,
error_log: *mut FILE,
errstr: *mut c_char,
num_data: c_uint,
num_input: c_uint,
num_output: c_uint,
input: *mut *mut fann_type,
output: *mut *mut fann_type,
}
#[cfg_attr(not(feature = "double"), link(name = "fann"))]
#[cfg_attr(feature = "double", link(name = "doublefann"))]
extern "C" {
pub static mut fann_default_error_log: *mut FILE;
/// Change where errors are logged to. Both `fann` and `fann_data` can be
/// cast to `fann_error`, so this function can be used to set either of these.
///
/// If `log_file` is NULL, no errors will be printed.
///
/// If `errdat` is NULL, the default log will be set. The default log is the log used when
/// creating `fann` and `fann_data`. This default log will also be the default for all new
/// structs that are created.
///
/// The default behavior is to log them to `stderr`.
///
/// # See also
/// `fann_error`
///
/// This function appears in FANN >= 1.1.0.
pub fn fann_set_error_log(errdat: *mut fann_error, log_file: *mut FILE);
/// Returns the last error number.
///
/// # See also
/// `fann_errno_enum`, `fann_reset_errno`
///
/// This function appears in FANN >= 1.1.0.
pub fn fann_get_errno(errdat: *const fann_error) -> fann_errno_enum;
/// Resets the last error number.
///
/// This function appears in FANN >= 1.1.0.
pub fn fann_reset_errno(errdat: *mut fann_error);
/// Resets the last error string.
///
/// This function appears in FANN >= 1.1.0.
pub fn fann_reset_errstr(errdat: *mut fann_error);
/// Returns the last error string.
///
/// This function calls `fann_reset_errno` and `fann_reset_errstr`.
///
/// This function appears in FANN >= 1.1.0.
pub fn fann_get_errstr(errdat: *mut fann_error) -> *mut c_char;
/// Prints the last error to `stderr`.
///
/// This function appears in FANN >= 1.1.0.
pub fn fann_print_error(errdat: *mut fann_error);
/// Train one iteration with a set of inputs, and a set of desired outputs.
/// This training is always incremental training (see `fann_train_enum`), since
/// only one pattern is presented.
///
/// # Parameters
///
/// * `ann` - The neural network structure
/// * `input` - an array of inputs. This array must be exactly `fann_get_num_input`
/// long.
/// * `desired_output` - an array of desired outputs. This array must be exactly
/// `fann_get_num_output` long.
///
/// # See also
/// `fann_train_on_data`, `fann_train_epoch`
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_train(ann: *mut fann, input: *const fann_type, desired_output: *const fann_type);
/// Test with a set of inputs, and a set of desired outputs.
/// This operation updates the mean square error, but does not
/// change the network in any way.
///
/// # See also
/// `fann_test_data`, `fann_train`
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_test(
ann: *mut fann,
input: *const fann_type,
desired_output: *const fann_type,
) -> *mut fann_type;
/// Reads the mean square error from the network.
///
/// This value is calculated during
/// training or testing, and can therefore sometimes be a bit off if the weights
/// have been changed since the last calculation of the value.
///
/// # See also
/// `fann_test_data`
///
/// This function appears in FANN >= 1.1.0.
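    ///
    /// A hedged sketch (the `ann` and `data` pointers stand in for values
    /// created elsewhere):
    ///
    /// ```no_run
    /// # use fann_sys::*;
    /// # unsafe {
    /// # let ann: *mut fann = std::ptr::null_mut();
    /// # let data: *mut fann_train_data = std::ptr::null_mut();
    /// // Update the MSE by testing, then read it back.
    /// fann_test_data(ann, data);
    /// println!("MSE: {}", fann_get_MSE(ann));
    /// # }
    /// ```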
pub fn fann_get_MSE(ann: *const fann) -> c_float;
    /// The number of fail bits, i.e. the number of output neurons which differ more
/// than the bit fail limit (see `fann_get_bit_fail_limit`, `fann_set_bit_fail_limit`).
/// The bits are counted in all of the training data, so this number can be higher than
/// the number of training data.
///
/// This value is reset by `fann_reset_MSE` and updated by all the same functions which also
/// update the MSE value (e.g. `fann_test_data`, `fann_train_epoch`)
///
/// # See also
/// `fann_stopfunc_enum`, `fann_get_MSE`
///
/// This function appears in FANN >= 2.0.0
pub fn fann_get_bit_fail(ann: *const fann) -> c_uint;
/// Resets the mean square error from the network.
///
/// This function also resets the number of bits that fail.
///
/// # See also
/// `fann_get_bit_fail_limit`, `fann_get_MSE`
///
/// This function appears in FANN >= 1.1.0
pub fn fann_reset_MSE(ann: *mut fann);
/// Trains on an entire dataset, for a period of time.
///
/// This training uses the training algorithm chosen by `fann_set_training_algorithm`,
/// and the parameters set for these training algorithms.
///
/// # Parameters
///
/// * `ann` - The neural network
/// * `data` - The data that should be used during training
/// * `max_epochs` - The maximum number of epochs the training should continue
/// * `epochs_between_reports` - The number of epochs between printing a status report to
/// `stdout`. A value of zero means no reports should be printed.
/// * `desired_error` - The desired `fann_get_MSE` or `fann_get_bit_fail`, depending on
/// which stop function is chosen by `fann_set_train_stop_function`.
///
/// Instead of printing out reports every `epochs_between_reports`, a callback function can be
/// called (see `fann_set_callback`).
///
/// # See also
/// `fann_train_on_file`, `fann_train_epoch`
///
/// This function appears in FANN >= 1.0.0.
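    ///
    /// A hedged usage sketch (the file name and the exact numbers are
    /// illustrative):
    ///
    /// ```no_run
    /// # use std::ffi::CString;
    /// # use fann_sys::*;
    /// # unsafe {
    /// # let ann: *mut fann = std::ptr::null_mut();
    /// let path = CString::new("xor.data").unwrap();
    /// let data = fann_read_train_from_file(path.as_ptr());
    /// // At most 1000 epochs, report every 100 epochs, stop once the MSE < 0.001.
    /// fann_train_on_data(ann, data, 1000, 100, 0.001);
    /// fann_destroy_train(data);
    /// # }
    /// ```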
pub fn fann_train_on_data(
ann: *mut fann,
data: *const fann_train_data,
max_epochs: c_uint,
epochs_between_reports: c_uint,
desired_error: c_float,
);
/// Does the same as `fann_train_on_data`, but reads the training data directly from a file.
///
/// # See also
/// `fann_train_on_data`
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_train_on_file(
ann: *mut fann,
filename: *const c_char,
max_epochs: c_uint,
epochs_between_reports: c_uint,
desired_error: c_float,
);
/// Train one epoch with a set of training data.
///
/// Train one epoch with the training data stored in `data`. One epoch is where all of
/// the training data is considered exactly once.
///
/// This function returns the MSE error as it is calculated either before or during
/// the actual training. This is not the actual MSE after the training epoch, but since
/// calculating this will require to go through the entire training set once more, it is
/// more than adequate to use this value during training.
///
/// The training algorithm used by this function is chosen by the `fann_set_training_algorithm`
/// function.
///
/// # See also
/// `fann_train_on_data`, `fann_test_data`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_train_epoch(ann: *mut fann, data: *const fann_train_data) -> c_float;
/// Tests a set of training data and calculates the MSE for the training data.
///
/// This function updates the MSE and the bit fail values.
///
/// # See also
/// `fann_test`, `fann_get_MSE`, `fann_get_bit_fail`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_test_data(ann: *mut fann, data: *const fann_train_data) -> c_float;
/// Reads a file that stores training data.
///
/// The file must be formatted like:
///
/// ```text
/// num_train_data num_input num_output
/// inputdata separated by space
/// outputdata separated by space
/// .
/// .
/// .
/// inputdata separated by space
/// outputdata separated by space
/// ```
///
/// # See also
/// `fann_train_on_data`, `fann_destroy_train`, `fann_save_train`
///
/// This function appears in FANN >= 1.0.0
pub fn fann_read_train_from_file(filename: *const c_char) -> *mut fann_train_data;
    /// Creates the training data struct from a user-supplied function.
    /// As the training samples are numbered (sample 1, sample 2, ...), the user must
    /// write a function that receives the number of a training sample and fills in
    /// that sample's input and output arrays.
///
/// # Parameters
///
/// * `num_data` - The number of training data
/// * `num_input` - The number of inputs per training data
    /// * `num_output` - The number of outputs per training data
/// * `user_function` - The user supplied function
///
/// # Parameters for the user function
///
/// * `num` - The number of the training data set
/// * `num_input` - The number of inputs per training data
    /// * `num_output` - The number of outputs per training data
/// * `input` - The set of inputs
/// * `output` - The set of desired outputs
///
/// # See also
/// `fann_read_train_from_file`, `fann_train_on_data`, `fann_destroy_train`, `fann_save_train`
///
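    /// # Example
    ///
    /// A sketch that generates the four XOR patterns on the fly (an editorial addition;
    /// the callback fills in the sample through the raw `input`/`output` pointers):
    ///
    /// ```notest
    /// extern "C" fn xor_sample(num: c_uint, _num_input: c_uint, _num_output: c_uint,
    ///                          input: *mut fann_type, output: *mut fann_type) {
    ///     let (a, b) = (num & 1, (num >> 1) & 1);
    ///     unsafe {
    ///         *input.offset(0) = a as fann_type;
    ///         *input.offset(1) = b as fann_type;
    ///         *output = (a ^ b) as fann_type;
    ///     }
    /// }
    ///
    /// let data = unsafe { fann_create_train_from_callback(4, 2, 1, Some(xor_sample)) };
    /// ```
    ///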
/// This function appears in FANN >= 2.1.0
pub fn fann_create_train_from_callback(
num_data: c_uint,
num_input: c_uint,
num_output: c_uint,
user_function: Option<
extern "C" fn(
num: c_uint,
num_input: c_uint,
num_output: c_uint,
input: *mut fann_type,
output: *mut fann_type,
),
>,
) -> *mut fann_train_data;
/// Destructs the training data and properly deallocates all of the associated data.
/// Be sure to call this function when finished using the training data.
///
/// This function appears in FANN >= 1.0.0
pub fn fann_destroy_train(train_data: *mut fann_train_data);
/// Shuffles training data, randomizing the order.
/// This is recommended for incremental training, while it has no influence during batch
/// training.
///
/// This function appears in FANN >= 1.1.0.
pub fn fann_shuffle_train_data(train_data: *mut fann_train_data);
/// Scale input and output data based on previously calculated parameters.
///
/// # Parameters
///
/// * `ann` - ANN for which trained parameters were calculated before
/// * `data` - training data that needs to be scaled
///
/// # See also
/// `fann_descale_train`, `fann_set_scaling_params`
///
/// This function appears in FANN >= 2.1.0
pub fn fann_scale_train(ann: *mut fann, data: *mut fann_train_data);
/// Descale input and output data based on previously calculated parameters.
///
/// # Parameters
///
/// * `ann` - ann for which trained parameters were calculated before
/// * `data` - training data that needs to be descaled
///
/// # See also
/// `fann_scale_train`, `fann_set_scaling_params`
///
/// This function appears in FANN >= 2.1.0
pub fn fann_descale_train(ann: *mut fann, data: *mut fann_train_data);
/// Calculate input scaling parameters for future use based on training data.
///
/// # Parameters
///
/// * `ann` - ANN for which parameters need to be calculated
/// * `data` - training data that will be used to calculate scaling parameters
/// * `new_input_min` - desired lower bound in input data after scaling (not strictly followed)
/// * `new_input_max` - desired upper bound in input data after scaling (not strictly followed)
///
/// # See also
/// `fann_set_output_scaling_params`
///
/// This function appears in FANN >= 2.1.0
pub fn fann_set_input_scaling_params(
ann: *mut fann,
data: *const fann_train_data,
new_input_min: c_float,
new_input_max: c_float,
) -> c_int;
/// Calculate output scaling parameters for future use based on training data.
///
/// # Parameters
///
/// * `ann` - ANN for which parameters need to be calculated
/// * `data` - training data that will be used to calculate scaling parameters
/// * `new_output_min` - desired lower bound in output data after scaling (not strictly
/// followed)
/// * `new_output_max` - desired upper bound in output data after scaling (not strictly
/// followed)
///
/// # See also
/// `fann_set_input_scaling_params`
///
/// This function appears in FANN >= 2.1.0
pub fn fann_set_output_scaling_params(
ann: *mut fann,
data: *const fann_train_data,
new_output_min: c_float,
new_output_max: c_float,
) -> c_int;
/// Calculate input and output scaling parameters for future use based on training data.
///
/// # Parameters
///
/// * `ann` - ANN for which parameters need to be calculated
/// * `data` - training data that will be used to calculate scaling parameters
/// * `new_input_min` - desired lower bound in input data after scaling (not strictly followed)
/// * `new_input_max` - desired upper bound in input data after scaling (not strictly followed)
/// * `new_output_min` - desired lower bound in output data after scaling (not strictly
/// followed)
/// * `new_output_max` - desired upper bound in output data after scaling (not strictly
/// followed)
///
/// # See also
/// `fann_set_input_scaling_params`, `fann_set_output_scaling_params`
///
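    /// # Example
    ///
    /// A typical scaling workflow (an editorial sketch; `ann` and `data` are assumed
    /// to have been created earlier):
    ///
    /// ```notest
    /// unsafe {
    ///     // Calculate the parameters from the raw data, then scale the data in place.
    ///     fann_set_scaling_params(ann, data, -1.0, 1.0, -1.0, 1.0);
    ///     fann_scale_train(ann, data);
    ///     fann_train_on_data(ann, data, 1000, 0, 0.001);
    /// }
    /// ```
    ///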
/// This function appears in FANN >= 2.1.0
pub fn fann_set_scaling_params(
ann: *mut fann,
data: *const fann_train_data,
new_input_min: c_float,
new_input_max: c_float,
new_output_min: c_float,
new_output_max: c_float,
) -> c_int;
/// Clears scaling parameters.
///
/// # Parameters
///
/// * `ann` - ann for which to clear scaling parameters
///
/// This function appears in FANN >= 2.1.0
pub fn fann_clear_scaling_params(ann: *mut fann) -> c_int;
/// Scale data in input vector before feeding it to the ANN based on previously calculated
/// parameters.
///
/// # Parameters
///
    /// * `ann` - for which scaling parameters were calculated
    /// * `input_vector` - input vector that will be scaled
///
/// # See also
/// `fann_descale_input`, `fann_scale_output`
///
/// This function appears in FANN >= 2.1.0
pub fn fann_scale_input(ann: *mut fann, input_vector: *mut fann_type);
/// Scale data in output vector before feeding it to the ANN based on previously calculated
/// parameters.
///
/// # Parameters
///
/// * `ann` - for which scaling parameters were calculated
/// * `output_vector` - output vector that will be scaled
///
/// # See also
    /// `fann_descale_output`, `fann_scale_input`
///
/// This function appears in FANN >= 2.1.0
pub fn fann_scale_output(ann: *mut fann, output_vector: *mut fann_type);
    /// Descale data in the input vector after getting it from the ANN, based on previously
    /// calculated parameters.
///
/// # Parameters
///
/// * `ann` - for which scaling parameters were calculated
/// * `input_vector` - input vector that will be descaled
///
/// # See also
/// `fann_scale_input`, `fann_descale_output`
///
/// This function appears in FANN >= 2.1.0
pub fn fann_descale_input(ann: *mut fann, input_vector: *mut fann_type);
    /// Descale data in the output vector after getting it from the ANN, based on previously
    /// calculated parameters.
///
/// # Parameters
///
/// * `ann` - for which scaling parameters were calculated
/// * `output_vector` - output vector that will be descaled
///
/// # See also
/// `fann_descale_input`, `fann_scale_output`
///
/// This function appears in FANN >= 2.1.0
pub fn fann_descale_output(ann: *mut fann, output_vector: *mut fann_type);
/// Scales the inputs in the training data to the specified range.
///
/// # See also
/// `fann_scale_output_train_data`, `fann_scale_train_data`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_scale_input_train_data(
train_data: *mut fann_train_data,
new_min: fann_type,
new_max: fann_type,
);
/// Scales the outputs in the training data to the specified range.
///
/// # See also
/// `fann_scale_input_train_data`, `fann_scale_train_data`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_scale_output_train_data(
train_data: *mut fann_train_data,
new_min: fann_type,
new_max: fann_type,
);
/// Scales the inputs and outputs in the training data to the specified range.
///
/// # See also
/// `fann_scale_output_train_data`, `fann_scale_input_train_data`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_scale_train_data(
train_data: *mut fann_train_data,
new_min: fann_type,
new_max: fann_type,
);
/// Merges the data from `data1` and `data2` into a new `fann_train_data`.
///
/// This function appears in FANN >= 1.1.0.
pub fn fann_merge_train_data(
data1: *const fann_train_data,
data2: *const fann_train_data,
) -> *mut fann_train_data;
/// Returns an exact copy of a `fann_train_data`.
///
/// This function appears in FANN >= 1.1.0.
pub fn fann_duplicate_train_data(data: *const fann_train_data) -> *mut fann_train_data;
    /// Returns a copy of a subset of the `fann_train_data`, starting at position `pos`
    /// and spanning `length` elements.
///
/// ```notest
/// fann_subset_train_data(train_data, 0, fann_length_train_data(train_data))
/// ```
///
/// will do the same as `fann_duplicate_train_data`.
///
/// # See also
/// `fann_length_train_data`
///
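    /// # Example
    ///
    /// Splitting a data set into training and validation subsets (an editorial sketch):
    ///
    /// ```notest
    /// unsafe {
    ///     let total = fann_length_train_data(data);
    ///     let cut = total * 3 / 4;
    ///     let train = fann_subset_train_data(data, 0, cut);
    ///     let validate = fann_subset_train_data(data, cut, total - cut);
    /// }
    /// ```
    ///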
/// This function appears in FANN >= 2.0.0.
pub fn fann_subset_train_data(
data: *const fann_train_data,
pos: c_uint,
length: c_uint,
) -> *mut fann_train_data;
/// Returns the number of training patterns in the `fann_train_data`.
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_length_train_data(data: *const fann_train_data) -> c_uint;
/// Returns the number of inputs in each of the training patterns in the `fann_train_data`.
///
/// # See also
    /// `fann_length_train_data`, `fann_num_output_train_data`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_num_input_train_data(data: *const fann_train_data) -> c_uint;
/// Returns the number of outputs in each of the training patterns in the `fann_train_data`.
///
/// # See also
    /// `fann_length_train_data`, `fann_num_input_train_data`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_num_output_train_data(data: *const fann_train_data) -> c_uint;
/// Save the training structure to a file, with the format specified in
    /// `fann_read_train_from_file`.
///
/// # Return
///
/// The function returns 0 on success and -1 on failure.
///
/// # See also
/// `fann_read_train_from_file`, `fann_save_train_to_fixed`
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_save_train(data: *mut fann_train_data, filename: *const c_char) -> c_int;
/// Saves the training structure to a fixed point data file.
///
/// This function is very useful for testing the quality of a fixed point network.
///
/// # Return
///
/// The function returns 0 on success and -1 on failure.
///
/// # See also
/// `fann_save_train`
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_save_train_to_fixed(
data: *mut fann_train_data,
filename: *const c_char,
decimal_point: c_uint,
) -> c_int;
/// Return the training algorithm as described by `fann_train_enum`. This training algorithm
/// is used by `fann_train_on_data` and associated functions.
///
/// Note that this algorithm is also used during `fann_cascadetrain_on_data`, although only
    /// `FANN_TRAIN_RPROP` and `FANN_TRAIN_QUICKPROP` are allowed during cascade training.
///
/// The default training algorithm is `FANN_TRAIN_RPROP`.
///
/// # See also
/// `fann_set_training_algorithm`, `fann_train_enum`
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_get_training_algorithm(ann: *const fann) -> fann_train_enum;
/// Set the training algorithm.
///
/// More info available in `fann_get_training_algorithm`.
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_set_training_algorithm(ann: *mut fann, training_algorithm: fann_train_enum);
/// Return the learning rate.
///
/// The learning rate is used to determine how aggressive training should be for some of the
/// training algorithms (`FANN_TRAIN_INCREMENTAL`, `FANN_TRAIN_BATCH`, `FANN_TRAIN_QUICKPROP`).
    /// Note, however, that it is not used in `FANN_TRAIN_RPROP`.
///
/// The default learning rate is 0.7.
///
/// # See also
/// `fann_set_learning_rate`, `fann_set_training_algorithm`
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_get_learning_rate(ann: *const fann) -> c_float;
/// Set the learning rate.
///
/// More info available in `fann_get_learning_rate`.
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_set_learning_rate(ann: *mut fann, learning_rate: c_float);
/// Get the learning momentum.
///
    /// The learning momentum can be used to speed up `FANN_TRAIN_INCREMENTAL` training.
    /// Too high a momentum will, however, not benefit training. Setting momentum to 0 will
/// be the same as not using the momentum parameter. The recommended value of this parameter
/// is between 0.0 and 1.0.
///
/// The default momentum is 0.
///
/// # See also
/// `fann_set_learning_momentum`, `fann_set_training_algorithm`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_learning_momentum(ann: *const fann) -> c_float;
/// Set the learning momentum.
///
/// More info available in `fann_get_learning_momentum`.
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_learning_momentum(ann: *mut fann, learning_momentum: c_float);
/// Get the activation function for neuron number `neuron` in layer number `layer`,
/// counting the input layer as layer 0.
///
/// It is not possible to get activation functions for the neurons in the input layer.
///
/// Information about the individual activation functions is available at
/// `fann_activationfunc_enum`.
///
/// # Returns
///
/// The activation function for the neuron or `FANN_NONE` if the neuron is not defined in the
/// neural network.
///
/// # See also
/// `fann_set_activation_function_layer`, `fann_set_activation_function_hidden`,
/// `fann_set_activation_function_output`, `fann_set_activation_steepness`,
/// `fann_set_activation_function`
///
/// This function appears in FANN >= 2.1.0.
pub fn fann_get_activation_function(
ann: *const fann,
layer: c_int,
neuron: c_int,
) -> fann_activationfunc_enum;
/// Set the activation function for neuron number `neuron` in layer number `layer`,
/// counting the input layer as layer 0.
///
/// It is not possible to set activation functions for the neurons in the input layer.
///
/// When choosing an activation function it is important to note that the activation
    /// functions have different ranges: `FANN_SIGMOID`, for example, is in the 0 to 1 range,
    /// while `FANN_SIGMOID_SYMMETRIC` is in the -1 to 1 range and `FANN_LINEAR` is unbounded.
///
/// Information about the individual activation functions is available at
/// `fann_activationfunc_enum`.
///
/// The default activation function is `FANN_SIGMOID_STEPWISE`.
///
/// # See also
/// `fann_set_activation_function_layer`, `fann_set_activation_function_hidden`,
/// `fann_set_activation_function_output`, `fann_set_activation_steepness`,
/// `fann_get_activation_function`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_activation_function(
ann: *mut fann,
activation_function: fann_activationfunc_enum,
layer: c_int,
neuron: c_int,
);
/// Set the activation function for all the neurons in the layer number `layer`,
/// counting the input layer as layer 0.
///
/// It is not possible to set activation functions for the neurons in the input layer.
///
/// # See also
/// `fann_set_activation_function`, `fann_set_activation_function_hidden`,
/// `fann_set_activation_function_output`, `fann_set_activation_steepness_layer`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_activation_function_layer(
ann: *mut fann,
activation_function: fann_activationfunc_enum,
layer: c_int,
);
/// Set the activation function for all of the hidden layers.
///
/// # See also
/// `fann_set_activation_function`, `fann_set_activation_function_layer`,
/// `fann_set_activation_function_output`, `fann_set_activation_steepness_hidden`
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_set_activation_function_hidden(
ann: *mut fann,
activation_function: fann_activationfunc_enum,
);
/// Set the activation function for the output layer.
///
/// # See also
/// `fann_set_activation_function`, `fann_set_activation_function_layer`,
/// `fann_set_activation_function_hidden`, `fann_set_activation_steepness_output`
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_set_activation_function_output(
ann: *mut fann,
activation_function: fann_activationfunc_enum,
);
/// Get the activation steepness for neuron number `neuron` in layer number `layer`,
/// counting the input layer as layer 0.
///
/// It is not possible to get activation steepness for the neurons in the input layer.
///
    /// The steepness of an activation function determines how fast the activation
    /// function goes from the minimum to the maximum. A high steepness value will
    /// also give more aggressive training.
///
/// When training neural networks where the output values should be at the extremes (usually 0
/// and 1, depending on the activation function), a steep activation function can be used (e.g.
/// 1.0).
///
/// The default activation steepness is 0.5.
///
/// # Returns
/// The activation steepness for the neuron or -1 if the neuron is not defined in the neural
/// network.
///
    /// # See also
/// `fann_set_activation_steepness_layer`, `fann_set_activation_steepness_hidden`,
/// `fann_set_activation_steepness_output`, `fann_set_activation_function`,
/// `fann_set_activation_steepness`
///
/// This function appears in FANN >= 2.1.0
pub fn fann_get_activation_steepness(
ann: *const fann,
layer: c_int,
neuron: c_int,
) -> fann_type;
/// Set the activation steepness for neuron number `neuron` in layer number `layer`,
/// counting the input layer as layer 0.
///
/// It is not possible to set activation steepness for the neurons in the input layer.
///
    /// The steepness of an activation function determines how fast the activation
    /// function goes from the minimum to the maximum. A high steepness value will
    /// also give more aggressive training.
///
/// When training neural networks where the output values should be at the extremes (usually 0
/// and 1, depending on the activation function), a steep activation function can be used (e.g.
/// 1.0).
///
/// The default activation steepness is 0.5.
///
/// # See also
/// `fann_set_activation_steepness_layer`, `fann_set_activation_steepness_hidden`,
/// `fann_set_activation_steepness_output`, `fann_set_activation_function`,
/// `fann_get_activation_steepness`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_activation_steepness(
ann: *mut fann,
steepness: fann_type,
layer: c_int,
neuron: c_int,
);
/// Set the activation steepness for all neurons in layer number `layer`,
/// counting the input layer as layer 0.
///
/// It is not possible to set activation steepness for the neurons in the input layer.
///
/// # See also
/// `fann_set_activation_steepness`, `fann_set_activation_steepness_hidden`,
/// `fann_set_activation_steepness_output`, `fann_set_activation_function_layer`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_activation_steepness_layer(ann: *mut fann, steepness: fann_type, layer: c_int);
    /// Set the activation steepness for all of the hidden layers.
    ///
    /// # See also
/// `fann_set_activation_steepness`, `fann_set_activation_steepness_layer`,
/// `fann_set_activation_steepness_output`, `fann_set_activation_function_hidden`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_set_activation_steepness_hidden(ann: *mut fann, steepness: fann_type);
    /// Set the activation steepness for the output layer.
///
/// # See also
/// `fann_set_activation_steepness`, `fann_set_activation_steepness_layer`,
/// `fann_set_activation_steepness_hidden`, `fann_set_activation_function_output`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_set_activation_steepness_output(ann: *mut fann, steepness: fann_type);
/// Returns the error function used during training.
///
/// The error functions are described further in `fann_errorfunc_enum`.
///
    /// The default error function is `FANN_ERRORFUNC_TANH`.
///
/// # See also
/// `fann_set_train_error_function`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_get_train_error_function(ann: *const fann) -> fann_errorfunc_enum;
/// Set the error function used during training.
///
/// The error functions are described further in `fann_errorfunc_enum`.
///
/// # See also
/// `fann_get_train_error_function`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_set_train_error_function(ann: *mut fann, train_error_function: fann_errorfunc_enum);
    /// Returns the stop function used during training.
///
/// The stop function is described further in `fann_stopfunc_enum`.
///
/// The default stop function is `FANN_STOPFUNC_MSE`.
///
/// # See also
    /// `fann_set_train_stop_function`, `fann_get_bit_fail_limit`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_train_stop_function(ann: *const fann) -> fann_stopfunc_enum;
    /// Set the stop function used during training.
    ///
    /// The stop function is described further in `fann_stopfunc_enum`.
///
/// # See also
/// `fann_get_train_stop_function`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_train_stop_function(ann: *mut fann, train_stop_function: fann_stopfunc_enum);
/// Returns the bit fail limit used during training.
///
/// The bit fail limit is used during training where the `fann_stopfunc_enum` is set to
/// `FANN_STOPFUNC_BIT`.
///
/// The limit is the maximum accepted difference between the desired output and the actual
/// output during training. Each output that diverges more than this limit is counted as an
/// error bit. This difference is divided by two when dealing with symmetric activation
    /// functions, so that symmetric and non-symmetric activation functions can use the same limit.
///
/// The default bit fail limit is 0.35.
///
/// # See also
/// `fann_set_bit_fail_limit`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_bit_fail_limit(ann: *const fann) -> fann_type;
/// Set the bit fail limit used during training.
///
/// # See also
/// `fann_get_bit_fail_limit`
///
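    /// # Example
    ///
    /// Configuring bit-fail-based stopping (an editorial sketch; with
    /// `FANN_STOPFUNC_BIT` the `desired_error` passed to the training functions is
    /// interpreted as the acceptable number of failing bits):
    ///
    /// ```notest
    /// unsafe {
    ///     fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
    ///     fann_set_bit_fail_limit(ann, 0.1);
    ///     fann_train_on_data(ann, data, 1000, 0, 0.0);
    /// }
    /// ```
    ///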
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_bit_fail_limit(ann: *mut fann, bit_fail_limit: fann_type);
/// Sets the callback function for use during training.
///
/// See `fann_callback_type` for more information about the callback function.
///
/// The default callback function simply prints out some status information.
///
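    /// # Example
    ///
    /// A reporting callback (an editorial sketch; the parameter list is assumed to
    /// follow the `fann_callback_type` alias defined earlier in this file, where a
    /// negative return value stops training):
    ///
    /// ```notest
    /// extern "C" fn report(ann: *mut fann, _train: *mut fann_train_data,
    ///                      _max_epochs: c_uint, _epochs_between_reports: c_uint,
    ///                      _desired_error: c_float, epochs: c_uint) -> c_int {
    ///     println!("epoch {}: MSE {}", epochs, unsafe { fann_get_MSE(ann) });
    ///     0 // Continue training.
    /// }
    ///
    /// unsafe { fann_set_callback(ann, Some(report)); }
    /// ```
    ///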
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_callback(ann: *mut fann, callback: fann_callback_type);
    /// The decay is a small negative number which is the factor by which the weights
    /// shrink in each iteration of quickprop training. It is used
    /// to make sure that the weights do not become too high during training.
///
/// The default decay is -0.0001.
///
/// # See also
/// `fann_set_quickprop_decay`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_get_quickprop_decay(ann: *const fann) -> c_float;
/// Sets the quickprop decay factor.
///
/// # See also
/// `fann_get_quickprop_decay`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_set_quickprop_decay(ann: *mut fann, quickprop_decay: c_float);
/// The mu factor is used to increase and decrease the step size during quickprop training.
/// The mu factor should always be above 1, since it would otherwise decrease the step size
/// when it was supposed to increase it.
///
/// The default mu factor is 1.75.
///
/// # See also
/// `fann_set_quickprop_mu`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_get_quickprop_mu(ann: *const fann) -> c_float;
/// Sets the quickprop mu factor.
///
/// # See also
/// `fann_get_quickprop_mu`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_set_quickprop_mu(ann: *mut fann, quickprop_mu: c_float);
/// The increase factor is a value larger than 1, which is used to
/// increase the step size during RPROP training.
///
/// The default increase factor is 1.2.
///
/// # See also
/// `fann_set_rprop_increase_factor`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_get_rprop_increase_factor(ann: *const fann) -> c_float;
    /// Sets the increase factor used during RPROP training.
///
/// # See also
/// `fann_get_rprop_increase_factor`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_set_rprop_increase_factor(ann: *mut fann, rprop_increase_factor: c_float);
/// The decrease factor is a value smaller than 1, which is used to decrease the step size
/// during RPROP training.
///
/// The default decrease factor is 0.5.
///
/// # See also
/// `fann_set_rprop_decrease_factor`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_get_rprop_decrease_factor(ann: *const fann) -> c_float;
    /// Sets the decrease factor used during RPROP training.
///
/// # See also
/// `fann_get_rprop_decrease_factor`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_set_rprop_decrease_factor(ann: *mut fann, rprop_decrease_factor: c_float);
    /// The minimum step size is a small positive number determining how small the step size
    /// may become.
    ///
    /// The default delta min is 0.0.
///
/// # See also
/// `fann_set_rprop_delta_min`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_get_rprop_delta_min(ann: *const fann) -> c_float;
    /// The minimum step size is a small positive number determining how small the step size
    /// may become.
///
/// # See also
/// `fann_get_rprop_delta_min`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_set_rprop_delta_min(ann: *mut fann, rprop_delta_min: c_float);
    /// The maximum step size is a positive number determining how large the step size may
    /// grow.
///
/// The default delta max is 50.0.
///
/// # See also
/// `fann_set_rprop_delta_max`, `fann_get_rprop_delta_min`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_get_rprop_delta_max(ann: *const fann) -> c_float;
    /// The maximum step size is a positive number determining how large the step size may
    /// grow.
///
/// # See also
/// `fann_get_rprop_delta_max`, `fann_get_rprop_delta_min`
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_set_rprop_delta_max(ann: *mut fann, rprop_delta_max: c_float);
/// The initial step size is a positive number determining the initial step size.
///
/// The default delta zero is 0.1.
///
/// # See also
/// `fann_set_rprop_delta_zero`, `fann_get_rprop_delta_min`, `fann_get_rprop_delta_max`
///
/// This function appears in FANN >= 2.1.0.
pub fn fann_get_rprop_delta_zero(ann: *const fann) -> c_float;
/// The initial step size is a positive number determining the initial step size.
///
/// # See also
    /// `fann_get_rprop_delta_zero`
///
/// This function appears in FANN >= 2.1.0.
    pub fn fann_set_rprop_delta_zero(ann: *mut fann, rprop_delta_zero: c_float);
/// Trains on an entire dataset, for a period of time using the Cascade2 training algorithm.
/// This algorithm adds neurons to the neural network while training, which means that it
/// needs to start with an ANN without any hidden layers. The neural network should also use
/// shortcut connections, so `fann_create_shortcut` should be used to create the ANN like this:
///
/// ```notest
/// let ann = fann_create_shortcut(2,
/// fann_num_input_train_data(train_data),
/// fann_num_output_train_data(train_data));
/// ```
///
/// This training uses the parameters set using `fann_set_cascade_...`, but it also uses
    /// another training algorithm as its internal training algorithm. This algorithm can be set to
/// either `FANN_TRAIN_RPROP` or `FANN_TRAIN_QUICKPROP` by `fann_set_training_algorithm`, and
/// the parameters set for these training algorithms will also affect the cascade training.
///
/// # Parameters
///
/// * `ann` - The neural network
/// * `data` - The data that should be used during training
    /// * `max_neurons` - The maximum number of neurons to be added to the ANN
/// * `neurons_between_reports` - The number of neurons between printing a status report to
/// stdout. A value of zero means no reports should be printed.
/// * `desired_error` - The desired `fann_get_MSE` or `fann_get_bit_fail`, depending
/// on which stop function is chosen by `fann_set_train_stop_function`.
///
    /// Instead of printing out reports every `neurons_between_reports`, a callback function can be
/// called (see `fann_set_callback`).
///
/// # See also
/// `fann_train_on_data`, `fann_cascadetrain_on_file`
///
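    /// # Example
    ///
    /// A cascade-training sketch (an editorial addition) building on the shortcut
    /// network created above:
    ///
    /// ```notest
    /// unsafe {
    ///     let ann = fann_create_shortcut(2,
    ///                                    fann_num_input_train_data(data),
    ///                                    fann_num_output_train_data(data));
    ///     fann_cascadetrain_on_data(ann, data, 30, 0, 0.001);
    /// }
    /// ```
    ///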
/// This function appears in FANN >= 2.0.0.
pub fn fann_cascadetrain_on_data(
ann: *mut fann,
data: *const fann_train_data,
max_neurons: c_uint,
neurons_between_reports: c_uint,
desired_error: c_float,
);
/// Does the same as `fann_cascadetrain_on_data`, but reads the training data directly from a
/// file.
///
/// # See also
/// `fann_cascadetrain_on_data`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_cascadetrain_on_file(
ann: *mut fann,
filename: *const c_char,
max_neurons: c_uint,
neurons_between_reports: c_uint,
desired_error: c_float,
);
/// The cascade output change fraction is a number between 0 and 1 determining how large a
/// fraction the `fann_get_MSE` value should change within
/// `fann_get_cascade_output_stagnation_epochs` during training of the output connections, in
/// order for the training not to stagnate. If the training stagnates, the training of the
/// output connections will be ended and new candidates will be prepared.
///
/// This means:
/// If the MSE does not change by a fraction of `fann_get_cascade_output_change_fraction` during
/// a period of `fann_get_cascade_output_stagnation_epochs`, the training of the output
/// connections is stopped because the training has stagnated.
///
/// If the cascade output change fraction is low, the output connections will be trained more
/// and if the fraction is high they will be trained less.
///
/// The default cascade output change fraction is 0.01, which is equivalent to a 1% change in
/// MSE.
///
/// # See also
/// `fann_set_cascade_output_change_fraction`, `fann_get_MSE`,
/// `fann_get_cascade_output_stagnation_epochs`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_output_change_fraction(ann: *const fann) -> c_float;
/// Sets the cascade output change fraction.
///
/// # See also
/// `fann_get_cascade_output_change_fraction`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_cascade_output_change_fraction(
ann: *mut fann,
cascade_output_change_fraction: c_float,
);
/// The number of cascade output stagnation epochs determines the number of epochs training is
/// allowed to continue without changing the MSE by a fraction of
/// `fann_get_cascade_output_change_fraction`.
///
/// See more info about this parameter in `fann_get_cascade_output_change_fraction`.
///
/// The default number of cascade output stagnation epochs is 12.
///
/// # See also
/// `fann_set_cascade_output_stagnation_epochs`, `fann_get_cascade_output_change_fraction`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_output_stagnation_epochs(ann: *const fann) -> c_uint;
/// Sets the number of cascade output stagnation epochs.
///
/// # See also
/// `fann_get_cascade_output_stagnation_epochs`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_cascade_output_stagnation_epochs(
ann: *mut fann,
cascade_output_stagnation_epochs: c_uint,
);
/// The cascade candidate change fraction is a number between 0 and 1 determining how large a
/// fraction the `fann_get_MSE` value should change within
/// `fann_get_cascade_candidate_stagnation_epochs` during training of the candidate neurons, in
/// order for the training not to stagnate. If the training stagnates, the training of the
/// candidate neurons will be ended and the best candidate will be selected.
///
/// This means:
/// If the MSE does not change by a fraction of `fann_get_cascade_candidate_change_fraction`
/// during a period of `fann_get_cascade_candidate_stagnation_epochs`, the training of the
/// candidate neurons is stopped because the training has stagnated.
///
/// If the cascade candidate change fraction is low, the candidate neurons will be trained more
/// and if the fraction is high they will be trained less.
///
/// The default cascade candidate change fraction is 0.01, which is equivalent to a 1% change in
/// MSE.
///
/// # See also
/// `fann_set_cascade_candidate_change_fraction`, `fann_get_MSE`,
/// `fann_get_cascade_candidate_stagnation_epochs`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_candidate_change_fraction(ann: *const fann) -> c_float;
/// Sets the cascade candidate change fraction.
///
/// # See also
/// `fann_get_cascade_candidate_change_fraction`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_cascade_candidate_change_fraction(
ann: *mut fann,
cascade_candidate_change_fraction: c_float,
);
/// The number of cascade candidate stagnation epochs determines the number of epochs training
/// is allowed to continue without changing the MSE by a fraction of
/// `fann_get_cascade_candidate_change_fraction`.
///
/// See more info about this parameter in `fann_get_cascade_candidate_change_fraction`.
///
/// The default number of cascade candidate stagnation epochs is 12.
///
/// # See also
/// `fann_set_cascade_candidate_stagnation_epochs`, `fann_get_cascade_candidate_change_fraction`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_candidate_stagnation_epochs(ann: *const fann) -> c_uint;
/// Sets the number of cascade candidate stagnation epochs.
///
/// # See also
/// `fann_get_cascade_candidate_stagnation_epochs`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_cascade_candidate_stagnation_epochs(
ann: *mut fann,
cascade_candidate_stagnation_epochs: c_uint,
);
/// The weight multiplier is a parameter which is used to multiply the weights from the
/// candidate neuron before adding the neuron to the neural network. This parameter is usually
/// between 0 and 1, and is used to make the training a bit less aggressive.
///
    /// The default weight multiplier is 0.4.
///
/// # See also
/// `fann_set_cascade_weight_multiplier`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_weight_multiplier(ann: *const fann) -> fann_type;
/// Sets the weight multiplier.
///
/// # See also
/// `fann_get_cascade_weight_multiplier`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_cascade_weight_multiplier(ann: *mut fann, cascade_weight_multiplier: fann_type);
    /// The candidate limit is a limit for how much the candidate neuron may be trained:
    /// it bounds the proportion between the MSE and the candidate score.
///
/// Set this to a lower value to avoid overfitting and to a higher if overfitting is
/// not a problem.
///
    /// The default candidate limit is 1000.0.
///
/// # See also
/// `fann_set_cascade_candidate_limit`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_candidate_limit(ann: *const fann) -> fann_type;
/// Sets the candidate limit.
///
/// # See also
/// `fann_get_cascade_candidate_limit`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_cascade_candidate_limit(ann: *mut fann, cascade_candidate_limit: fann_type);
/// The maximum out epochs determines the maximum number of epochs the output connections
/// may be trained after adding a new candidate neuron.
///
    /// The default max out epochs is 150.
///
/// # See also
/// `fann_set_cascade_max_out_epochs`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_max_out_epochs(ann: *const fann) -> c_uint;
/// Sets the maximum out epochs.
///
/// # See also
/// `fann_get_cascade_max_out_epochs`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_cascade_max_out_epochs(ann: *mut fann, cascade_max_out_epochs: c_uint);
/// The maximum candidate epochs determines the maximum number of epochs the input
/// connections to the candidates may be trained before adding a new candidate neuron.
///
/// The default max candidate epochs is 150.
///
/// # See also
/// `fann_set_cascade_max_cand_epochs`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_max_cand_epochs(ann: *const fann) -> c_uint;
/// Sets the max candidate epochs.
///
/// # See also
/// `fann_get_cascade_max_cand_epochs`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_cascade_max_cand_epochs(ann: *mut fann, cascade_max_cand_epochs: c_uint);
/// The number of candidates used during training (calculated by multiplying
/// `fann_get_cascade_activation_functions_count`,
/// `fann_get_cascade_activation_steepnesses_count` and
/// `fann_get_cascade_num_candidate_groups`).
///
    /// The actual candidates are defined by the `fann_get_cascade_activation_functions` and
/// `fann_get_cascade_activation_steepnesses` arrays. These arrays define the activation
/// functions and activation steepnesses used for the candidate neurons. If there are 2
/// activation functions in the activation function array and 3 steepnesses in the steepness
/// array, then there will be 2x3=6 different candidates which will be trained. These 6
/// different candidates can be copied into several candidate groups, where the only difference
/// between these groups is the initial weights. If the number of groups is set to 2, then the
/// number of candidate neurons will be 2x3x2=12. The number of candidate groups is defined by
/// `fann_set_cascade_num_candidate_groups`.
///
    /// The default number of candidates is 6x4x2 = 48.
///
/// # See also
/// `fann_get_cascade_activation_functions`, `fann_get_cascade_activation_functions_count`,
/// `fann_get_cascade_activation_steepnesses`, `fann_get_cascade_activation_steepnesses_count`,
/// `fann_get_cascade_num_candidate_groups`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_num_candidates(ann: *const fann) -> c_uint;
/// The number of activation functions in the `fann_get_cascade_activation_functions` array.
///
/// The default number of activation functions is 6.
///
/// # See also
/// `fann_get_cascade_activation_functions`, `fann_set_cascade_activation_functions`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_activation_functions_count(ann: *const fann) -> c_uint;
/// The cascade activation functions array is an array of the different activation functions
/// used by the candidates.
///
/// See `fann_get_cascade_num_candidates` for a description of which candidate neurons will be
/// generated by this array.
///
    /// The default activation functions are {`FANN_SIGMOID`, `FANN_SIGMOID_SYMMETRIC`,
/// `FANN_GAUSSIAN`, `FANN_GAUSSIAN_SYMMETRIC`, `FANN_ELLIOTT`, `FANN_ELLIOTT_SYMMETRIC`,
/// `FANN_SIN_SYMMETRIC`, `FANN_COS_SYMMETRIC`, `FANN_SIN`, `FANN_COS`}
///
/// # See also
/// `fann_get_cascade_activation_functions_count`, `fann_set_cascade_activation_functions`,
/// `fann_activationfunc_enum`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_activation_functions(ann: *const fann)
-> *mut fann_activationfunc_enum;
/// Sets the array of cascade candidate activation functions. The array must be just as long
/// as defined by the count.
///
/// See `fann_get_cascade_num_candidates` for a description of which candidate neurons will be
/// generated by this array.
///
/// # See also
/// `fann_get_cascade_activation_steepnesses_count`, `fann_get_cascade_activation_steepnesses`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_cascade_activation_functions(
ann: *mut fann,
cascade_activation_functions: *const fann_activationfunc_enum,
cascade_activation_functions_count: c_uint,
);
    /// The number of activation steepnesses in the `fann_get_cascade_activation_steepnesses` array.
///
/// The default number of activation steepnesses is 4.
///
/// # See also
    /// `fann_get_cascade_activation_steepnesses`, `fann_set_cascade_activation_steepnesses`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_activation_steepnesses_count(ann: *const fann) -> c_uint;
    /// The cascade activation steepnesses array is an array of the different activation
    /// steepnesses used by the candidates.
///
/// See `fann_get_cascade_num_candidates` for a description of which candidate neurons will be
/// generated by this array.
///
    /// The default activation steepnesses are {0.25, 0.50, 0.75, 1.00}.
///
/// # See also
/// `fann_set_cascade_activation_steepnesses`, `fann_get_cascade_activation_steepnesses_count`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_activation_steepnesses(ann: *const fann) -> *mut fann_type;
/// Sets the array of cascade candidate activation steepnesses. The array must be just as long
/// as defined by the count.
///
/// See `fann_get_cascade_num_candidates` for a description of which candidate neurons will be
/// generated by this array.
///
/// # See also
/// `fann_get_cascade_activation_steepnesses`, `fann_get_cascade_activation_steepnesses_count`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_cascade_activation_steepnesses(
ann: *mut fann,
cascade_activation_steepnesses: *const fann_type,
cascade_activation_steepnesses_count: c_uint,
);
/// The number of candidate groups is the number of groups of identical candidates which will be
/// used during training.
///
/// This number can be used to have more candidates without having to define new parameters for
/// the candidates.
///
/// See `fann_get_cascade_num_candidates` for a description of which candidate neurons will be
/// generated by this parameter.
///
/// The default number of candidate groups is 2.
///
/// # See also
/// `fann_set_cascade_num_candidate_groups`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_get_cascade_num_candidate_groups(ann: *const fann) -> c_uint;
/// Sets the number of candidate groups.
///
/// # See also
/// `fann_get_cascade_num_candidate_groups`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_set_cascade_num_candidate_groups(
ann: *mut fann,
cascade_num_candidate_groups: c_uint,
);
/// Constructs a backpropagation neural network from a configuration file, which has been saved
/// by `fann_save`.
///
/// # See also
/// `fann_save`, `fann_save_to_fixed`
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_create_from_file(configuration_file: *const c_char) -> *mut fann;
/// Save the entire network to a configuration file.
///
/// The configuration file contains all information about the neural network and enables
/// `fann_create_from_file` to create an exact copy of the neural network and all of the
/// parameters associated with the neural network.
///
/// These three parameters (`fann_set_callback`, `fann_set_error_log`,
/// `fann_set_user_data`) are *NOT* saved to the file because they cannot safely be
/// ported to a different location. Also temporary parameters generated during training
/// like `fann_get_MSE` are not saved.
///
/// # Return
/// The function returns 0 on success and -1 on failure.
///
/// # See also
/// `fann_create_from_file`, `fann_save_to_fixed`
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_save(ann: *mut fann, configuration_file: *const c_char) -> c_int;
    /// Saves the entire network to a configuration file, but in fixed point format,
    /// no matter which format it is currently in.
///
    /// This is useful for training a network in floating point,
/// and then later executing it in fixed point.
///
/// The function returns the bit position of the fix point, which
/// can be used to find out how accurate the fixed point network will be.
/// A high value indicates high precision, and a low value indicates low
/// precision.
///
/// A negative value indicates very low precision, and a very strong possibility for overflow.
/// (the actual fix point will be set to 0, since a negative fix point does not make sense).
///
/// Generally, a fix point lower than 6 is bad, and should be avoided.
/// The best way to avoid this is to have fewer connections to each neuron,
/// or just fewer neurons in each layer.
///
/// The fixed point use of this network is only intended for use on machines that
/// have no floating point processor, like an iPAQ. On normal computers the floating
/// point version is actually faster.
///
/// # See also
/// `fann_create_from_file`, `fann_save`
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_save_to_fixed(ann: *mut fann, configuration_file: *const c_char) -> c_int;
/// Creates a standard fully connected backpropagation neural network.
///
/// There will be a bias neuron in each layer (except the output layer),
/// and this bias neuron will be connected to all neurons in the next layer.
/// When running the network, the bias nodes always emit 1.
///
/// To destroy a `fann` use the `fann_destroy` function.
///
/// # Parameters
///
/// * `num_layers` - The total number of layers including the input and the output layer.
/// * `...` - Integer values determining the number of neurons in each layer starting
/// with the input layer and ending with the output layer.
///
/// # Returns
///
/// A pointer to the newly created `fann`.
///
/// # Example
    ///
/// ```
/// // Creating an ANN with 2 input neurons, 1 output neuron,
/// // and two hidden layers with 8 and 9 neurons
/// unsafe {
/// let ann = fann_sys::fann_create_standard(4, 2, 8, 9, 1);
/// }
/// ```
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_create_standard(num_layers: c_uint, ...) -> *mut fann;
/// Just like `fann_create_standard`, but with an array of layer sizes
/// instead of individual parameters.
///
/// # Example
///
/// ```
/// // Creating an ANN with 2 input neurons, 1 output neuron,
/// // and two hidden layers with 8 and 9 neurons
/// let layers = [2, 8, 9, 1];
/// unsafe {
/// let ann = fann_sys::fann_create_standard_array(4, layers.as_ptr());
/// }
/// ```
///
/// # See also
/// `fann_create_standard`, `fann_create_sparse`, `fann_create_shortcut`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_create_standard_array(num_layers: c_uint, layers: *const c_uint) -> *mut fann;
/// Creates a standard backpropagation neural network, which is not fully connected.
///
/// # Parameters
///
/// * `connection_rate` - The connection rate controls how many connections there will be in the
/// network. If the connection rate is set to 1, the network will be fully
/// connected, but if it is set to 0.5, only half of the connections will be set.
/// A connection rate of 1 will yield the same result as `fann_create_standard`.
/// * `num_layers` - The total number of layers including the input and the output layer.
/// * `...` - Integer values determining the number of neurons in each layer
/// starting with the input layer and ending with the output layer.
///
/// # Returns
/// A pointer to the newly created `fann`.
///
/// # See also
/// `fann_create_sparse_array`, `fann_create_standard`, `fann_create_shortcut`
///
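    /// # Example
    ///
    /// A 2-8-1 network with only half of the possible connections (an editorial
    /// sketch):
    ///
    /// ```notest
    /// let ann = unsafe { fann_create_sparse(0.5, 3, 2, 8, 1) };
    /// ```
    ///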
/// This function appears in FANN >= 2.0.0.
pub fn fann_create_sparse(connection_rate: c_float, num_layers: c_uint, ...) -> *mut fann;
/// Just like `fann_create_sparse`, but with an array of layer sizes
/// instead of individual parameters.
///
/// See `fann_create_standard_array` for a description of the parameters.
///
/// # See also
/// `fann_create_sparse`, `fann_create_standard`, `fann_create_shortcut`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_create_sparse_array(
connection_rate: c_float,
num_layers: c_uint,
layers: *const c_uint,
) -> *mut fann;
/// Creates a standard backpropagation neural network, which is not fully connected and which
/// also has shortcut connections.
///
/// Shortcut connections are connections that skip layers. A fully connected network with
/// shortcut connections is a network where all neurons are connected to all neurons in later
    /// layers, including direct connections from the input layer to the output layer.
///
/// See `fann_create_standard` for a description of the parameters.
///
/// # See also
/// `fann_create_shortcut_array`, `fann_create_standard`, `fann_create_sparse`,
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_create_shortcut(num_layers: c_uint, ...) -> *mut fann;
/// Just like `fann_create_shortcut`, but with an array of layer sizes
/// instead of individual parameters.
///
/// See `fann_create_standard_array` for a description of the parameters.
///
/// # See also
/// `fann_create_shortcut`, `fann_create_standard`, `fann_create_sparse`
///
/// This function appears in FANN >= 2.0.0.
pub fn fann_create_shortcut_array(num_layers: c_uint, layers: *const c_uint) -> *mut fann;
/// Destroys the entire network, properly freeing all the associated memory.
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_destroy(ann: *mut fann);
/// Runs input through the neural network, returning an array of outputs, the number of
    /// which equals the number of neurons in the output layer.
///
/// Ownership of the output array remains with the `fann` structure. It may be overwritten by
/// subsequent function calls. Do not deallocate it!
///
/// # See also
/// `fann_test`
///
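    /// # Example
    ///
    /// Reading the outputs as a slice (an editorial sketch; `ann` is assumed to take
    /// two inputs):
    ///
    /// ```notest
    /// unsafe {
    ///     let output = fann_run(ann, [-1.0, 1.0].as_ptr());
    ///     let outputs = std::slice::from_raw_parts(output, fann_get_num_output(ann) as usize);
    ///     println!("{:?}", outputs);
    /// }
    /// ```
    ///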
/// This function appears in FANN >= 1.0.0.
pub fn fann_run(ann: *mut fann, input: *const fann_type) -> *mut fann_type;
/// Give each connection a random weight between `min_weight` and `max_weight`.
///
/// From the beginning the weights are random between -0.1 and 0.1.
///
/// # See also
/// `fann_init_weights`
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_randomize_weights(ann: *mut fann, min_weight: fann_type, max_weight: fann_type);
/// Initialize the weights using Widrow + Nguyen's algorithm.
///
/// This function behaves similarly to `fann_randomize_weights`. It will use the algorithm
/// developed by Derrick Nguyen and Bernard Widrow to set the weights in such a way
/// as to speed up training. This technique is not always successful, and in some cases can be
/// less efficient than a purely random initialization.
///
    /// The algorithm requires access to the range of the input data (i.e., the largest and smallest
/// input), and therefore accepts a second argument, `data`, which is the training data that
/// will be used to train the network.
///
/// # See also
/// `fann_randomize_weights`, `fann_read_train_from_file`
///
/// This function appears in FANN >= 1.1.0.
pub fn fann_init_weights(ann: *mut fann, train_data: *mut fann_train_data);
/// Prints the connections of the ANN in a compact matrix, for easy viewing of the internals
/// of the ANN.
///
/// The output from `fann_print_connections` on a small (2 2 1) network trained on the xor
/// problem:
///
/// ```text
/// Layer / Neuron 012345
/// L 1 / N 3 BBa...
/// L 1 / N 4 BBA...
/// L 1 / N 5 ......
/// L 2 / N 6 ...BBA
/// L 2 / N 7 ......
/// ```
///
/// This network has five real neurons and two bias neurons. This gives a total of seven
/// neurons named from 0 to 6. The connections between these neurons can be seen in the matrix.
/// "." is a place where there is no connection, while a character tells how strong the
/// connection is on a scale from a-z. The two real neurons in the hidden layer (neuron 3 and 4
/// in layer 1) have connections from the three neurons in the previous layer as is visible in
/// the first two lines. The output neuron 6 has connections from the three neurons in the
/// hidden layer 3 - 5 as is visible in the fourth line.
///
    /// To simplify the matrix, output neurons are not shown as neurons that connections can come
    /// from, and input and bias neurons are not shown as neurons that connections can go to.
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_print_connections(ann: *mut fann);
/// Prints all of the parameters and options of the ANN.
///
/// This function appears in FANN >= 1.2.0.
pub fn fann_print_parameters(ann: *mut fann);
/// Get the number of input neurons.
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_get_num_input(ann: *const fann) -> c_uint;
/// Get the number of output neurons.
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_get_num_output(ann: *const fann) -> c_uint;
    /// Get the total number of neurons in the entire network. This number also includes the
    /// bias neurons, so a 2-4-2 network has 2+4+2+2 (bias) = 10 neurons.
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_get_total_neurons(ann: *const fann) -> c_uint;
/// Get the total number of connections in the entire network.
///
/// This function appears in FANN >= 1.0.0.
pub fn fann_get_total_connections(ann: *const fann) -> c_uint;
/// Get the type of neural network it was created as.
///
/// # Parameters
///
/// * `ann` - A previously created neural network structure of type `fann` pointer.
///
/// # Returns
    /// The neural network type from enum `fann_nettype_enum`
///
/// # See also
    /// `fann_nettype_enum`
///
/// This function appears in FANN >= 2.1.0
pub fn fann_get_network_type(ann: *const fann) -> fann_nettype_enum;
/// Get the connection rate used when the network was created.
///
/// # Parameters
///
/// * `ann` - A previously created neural network structure of type `fann` pointer.
///
/// # Returns
/// The connection rate
///
/// This function appears in FANN >= 2.1.0
pub fn fann_get_connection_rate(ann: *const fann) -> c_float;
/// Get the number of layers in the network.
///
/// # Parameters
///
/// * `ann` - A previously created neural network structure of type `fann` pointer.
///
/// # Returns
///
/// The number of layers in the neural network
///
/// # Example
///
/// ```
/// // Obtain the number of layers in a neural network
/// unsafe {
/// let ann = fann_sys::fann_create_standard(4, 2, 8, 9, 1);
/// assert_eq!(4, fann_sys::fann_get_num_layers(ann));
/// }
/// ```
///
/// This function appears in FANN >= 2.1.0
pub fn fann_get_num_layers(ann: *const fann) -> c_uint;
/// Get the number of neurons in each layer in the network.
///
    /// Bias neurons are not included, so the layer sizes match the `fann_create` functions.
///
/// # Parameters
///
/// * `ann` - A previously created neural network structure of type `fann` pointer.
///
    /// The layers array must be preallocated to accommodate at least `fann_get_num_layers` items.
///
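    /// # Example
    ///
    /// Fetching the layer sizes into a `Vec` (an editorial sketch):
    ///
    /// ```notest
    /// unsafe {
    ///     let mut layers = vec![0; fann_get_num_layers(ann) as usize];
    ///     fann_get_layer_array(ann, layers.as_mut_ptr());
    /// }
    /// ```
    ///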
/// This function appears in FANN >= 2.1.0.
pub fn fann_get_layer_array(ann: *const fann, layers: *mut c_uint);
    /// Get the number of bias neurons in each layer in the network.
///
/// # Parameters
///
/// * `ann` - A previously created neural network structure of type `fann` pointer.
///
    /// The bias array must be preallocated to accommodate at least `fann_get_num_layers` items.
///
/// This function appears in FANN >= 2.1.0.
pub fn fann_get_bias_array(ann: *const fann, bias: *mut c_uint);
/// Get the connections in the network.
///
/// # Parameters
///
/// * `ann` - A previously created neural network structure of type `fann` pointer.
///
/// The connections array must be preallocated to accommodate at least
/// `fann_get_total_connections` items.
///
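    /// # Example
    ///
    /// Inspecting all connections (an editorial sketch; `fann_connection` is the
    /// struct defined earlier in this file and is assumed to implement `Clone`, so it
    /// can be zero-initialized just to reserve space):
    ///
    /// ```notest
    /// unsafe {
    ///     let n = fann_get_total_connections(ann) as usize;
    ///     let mut conns: Vec<fann_connection> = vec![std::mem::zeroed(); n];
    ///     fann_get_connection_array(ann, conns.as_mut_ptr());
    /// }
    /// ```
    ///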
/// This function appears in FANN >= 2.1.0.
pub fn fann_get_connection_array(ann: *const fann, connections: *mut fann_connection);
/// Set connections in the network.
///
/// # Parameters
///
/// * `ann` - A previously created neural network structure of type `fann` pointer.
///
    /// Only the weights can be changed; connections and weights are ignored
/// if they do not already exist in the network.
///
/// The array must accommodate `num_connections` items.
///
/// This function appears in FANN >= 2.1.0.
pub fn fann_set_weight_array(
ann: *mut fann,
connections: *mut fann_connection,
num_connections: c_uint,
);
/// Set a connection in the network.
///
/// # Parameters
///
/// * `ann` - A previously created neural network structure of type `fann` pointer.
///
/// Only the weights can be changed. The connection/weight is
/// ignored if it does not already exist in the network.
///
/// This function appears in FANN >= 2.1.0.
pub fn fann_set_weight(
ann: *mut fann,
from_neuron: c_uint,
to_neuron: c_uint,
weight: fann_type,
);
/// Store a pointer to user defined data. The pointer can be retrieved with `fann_get_user_data`
/// for example in a callback. It is the user's responsibility to allocate and deallocate any
/// data that the pointer might point to.
///
/// # Parameters
///
/// * `ann` - A previously created neural network structure of type `fann` pointer.
/// * `user_data` - A void pointer to user defined data.
///
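    /// # Example
    ///
    /// Sharing a counter with a training callback (an editorial sketch; the counter
    /// must outlive every use of the stored pointer):
    ///
    /// ```notest
    /// let mut epochs_seen: u32 = 0;
    /// unsafe {
    ///     fann_set_user_data(ann, &mut epochs_seen as *mut u32 as *mut c_void);
    ///     // Later, e.g. inside a training callback:
    ///     let counter = fann_get_user_data(ann) as *mut u32;
    ///     *counter += 1;
    /// }
    /// ```
    ///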
/// This function appears in FANN >= 2.1.0.
pub fn fann_set_user_data(ann: *mut fann, user_data: *mut c_void);
/// Get a pointer to user defined data that was previously set with `fann_set_user_data`. It is
/// the user's responsibility to allocate and deallocate any data that the pointer might point
/// to.
///<|fim▁hole|> /// * `ann` - A previously created neural network structure of type `fann` pointer.
///
/// # Returns
/// A void pointer to user defined data.
///
/// This function appears in FANN >= 2.1.0.
pub fn fann_get_user_data(ann: *mut fann) -> *mut c_void;
}
#[cfg(test)]
mod tests {
use super::*;
use std::ffi::CString;
use std::fs::remove_file;
use std::str::from_utf8;
const EPSILON: fann_type = 0.2;
#[test]
fn test_tutorial_example() {
let c_trainfile = CString::new(&b"test_files/xor.data"[..]).unwrap();
let p_trainfile = c_trainfile.as_ptr();
let c_savefile = CString::new(&b"test_files/xor.net"[..]).unwrap();
let p_savefile = c_savefile.as_ptr();
// Train an ANN with a data set and then save the ANN to a file.
let num_input = 2;
let num_output = 1;
let num_layers = 3;
let num_neurons_hidden = 3;
let desired_error = 0.001;
let max_epochs = 500_000;
let epochs_between_reports = 1000;
unsafe {
let ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output);
fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);
fann_train_on_file(
ann,
p_trainfile,
max_epochs,
epochs_between_reports,
desired_error,
);
fann_save(ann, p_savefile);
fann_destroy(ann);
}
// Load the ANN and execute input.
unsafe {
let ann = fann_create_from_file(p_savefile);
assert!(EPSILON > (1.0 - *fann_run(ann, [-1.0, 1.0].as_ptr())).abs());
assert!(EPSILON > (1.0 - *fann_run(ann, [1.0, -1.0].as_ptr())).abs());
assert!(EPSILON > (-1.0 - *fann_run(ann, [1.0, 1.0].as_ptr())).abs());
assert!(EPSILON > (-1.0 - *fann_run(ann, [-1.0, -1.0].as_ptr())).abs());
fann_destroy(ann);
}
// Delete the ANN file created by the test.
remove_file(from_utf8(c_savefile.to_bytes()).unwrap()).unwrap();
}
}<|fim▁end|> | /// # Parameters
/// |
<|file_name|>WebForm.cpp<|end_file_name|><|fim▁begin|>#include "WebForm.h"
WebForm::WebForm(void)
:__pWeb(null), __phonegapCommand(null)
{
geolocation = null;
device = null;
accel = null;
network = null;
console = null;
compass = null;
contacts = null;
}
WebForm::~WebForm(void) {
}
bool
WebForm::Initialize()
{
return true;
}
result
WebForm::OnInitializing(void)
{
result r = E_SUCCESS;
// TODO: Add your initialization code here
r = CreateWebControl();
if (IsFailed(r))
{
AppLog("CreateMainForm() has failed.\n");
goto CATCH;
}
__pWeb->LoadUrl("file:///Res/index.html");
//__pWeb->LoadUrl("file:///Res/mobile-spec/index.html");
return r;
CATCH:
return false;
}
result
WebForm::OnTerminating(void)
{
result r = E_SUCCESS;
// delete __phonegapCommand;
// delete geolocation;
// delete device;
// delete accel;
// delete network;
// delete console;
// delete compass;
// delete contacts;
// delete notification;
// delete camera;
return r;
}
void
WebForm::OnActionPerformed(const Osp::Ui::Control& source, int actionId)
{
switch(actionId)
{
default:
break;
}<|fim▁hole|>void
WebForm::LaunchBrowser(const String& url) {
ArrayList* pDataList = null;
pDataList = new ArrayList();
pDataList->Construct();
String* pData = null;
pData = new String(L"url:");
pData->Append(url);
AppLogDebug("Launching Stock Browser with %S", pData->GetPointer());
pDataList->Add(*pData);
AppControl* pAc = AppManager::FindAppControlN(APPCONTROL_BROWSER, "");
if(pAc) {
pAc->Start(pDataList, null);
delete pAc;
}
pDataList->RemoveAll(true);
delete pDataList;
}
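// Navigation filter: gap:// URLs are recorded in __phonegapCommand and later
// dispatched from OnLoadingCompleted(), while external http(s) URLs are
// handed off to the device's stock browser via LaunchBrowser().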
bool
WebForm::OnLoadingRequested (const Osp::Base::String& url, WebNavigationType type) {
AppLogDebug("URL REQUESTED %S", url.GetPointer());
if(url.StartsWith("gap://", 0)) {
// __phonegapCommand = null;
__phonegapCommand = new String(url);
// FIXME: for some reason this does not work if we return true. Web freezes.
// __pWeb->StopLoading();
// String* test;
// test = __pWeb->EvaluateJavascriptN(L"'test'");
// AppLogDebug("String is %S", test->GetPointer());
// delete test;
// return true;
return false;
} else if(url.StartsWith("http://", 0) || url.StartsWith("https://", 0)) {
AppLogDebug("Non PhoneGap command. External URL. Launching WebBrowser");
LaunchBrowser(url);
return false;
}
return false;
}
void
WebForm::OnLoadingCompleted() {
// Setting DeviceInfo to initialize PhoneGap (should be done only once) and firing onNativeReady event
String* deviceInfo;
deviceInfo = __pWeb->EvaluateJavascriptN(L"window.device.uuid");
if(deviceInfo->IsEmpty()) {
device->SetDeviceInfo();
__pWeb->EvaluateJavascriptN("PhoneGap.onNativeReady.fire();");
} else {
//AppLogDebug("DeviceInfo = %S;", deviceInfo->GetPointer());
}
delete deviceInfo;
// Analyzing PhoneGap command
if(__phonegapCommand) {
if(__phonegapCommand->StartsWith(L"gap://com.phonegap.Geolocation", 0)) {
geolocation->Run(*__phonegapCommand);
}
else if(__phonegapCommand->StartsWith(L"gap://com.phonegap.Accelerometer", 0)) {
accel->Run(*__phonegapCommand);
}
else if(__phonegapCommand->StartsWith(L"gap://com.phonegap.Network", 0)) {
network->Run(*__phonegapCommand);
}
else if(__phonegapCommand->StartsWith(L"gap://com.phonegap.DebugConsole", 0)) {
console->Run(*__phonegapCommand);
}
else if(__phonegapCommand->StartsWith(L"gap://com.phonegap.Compass", 0)) {
compass->Run(*__phonegapCommand);
}
else if(__phonegapCommand->StartsWith(L"gap://com.phonegap.Contacts", 0)) {
contacts->Run(*__phonegapCommand);
}
else if(__phonegapCommand->StartsWith(L"gap://com.phonegap.Notification", 0)) {
notification->Run(*__phonegapCommand);
}
else if(__phonegapCommand->StartsWith(L"gap://com.phonegap.Camera", 0)) {
camera->Run(*__phonegapCommand);
}
// Tell the JS code that we got this command, and we're ready for another
__pWeb->EvaluateJavascriptN(L"PhoneGap.queue.ready = true;");
delete __phonegapCommand;
__phonegapCommand = null;
}
else {
AppLogDebug("Non PhoneGap command completed");
}
}
result
WebForm::CreateWebControl(void)
{
result r = E_SUCCESS;
int screen_width = 0;
int screen_height = 0;
/*screen*/
r = SystemInfo::GetValue("ScreenWidth", screen_width);
TryCatch(r == E_SUCCESS, , "SystemInfo: To get a value is failed");
r = SystemInfo::GetValue("ScreenHeight", screen_height);
TryCatch(r == E_SUCCESS, , "SystemInfo: To get a value is failed");
/*Web*/
__pWeb = new Web();
r = __pWeb->Construct(Rectangle(0, 0, screen_width, screen_height - 38));
TryCatch(r == E_SUCCESS, ,"Web is not constructed\n ");
r = this->AddControl(*__pWeb);
TryCatch(r == E_SUCCESS, ,"Web is not attached\n ");
__pWeb->SetLoadingListener(this);
__pWeb->SetFocus();
if(__pWeb) {
geolocation = new GeoLocation(__pWeb);
device = new Device(__pWeb);
accel = new Accelerometer(__pWeb);
network = new Network(__pWeb);
console = new DebugConsole(__pWeb);
compass = new Compass(__pWeb);
contacts = new Contacts(__pWeb);
notification = new Notification(__pWeb);
camera = new Kamera(__pWeb);
}
return r;
CATCH:
AppLog("Error = %s\n", GetErrorMessage(r));
return r;
}<|fim▁end|> | }
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
__all__ = ['pdb']
__version__ = '0.9.0'
import sys
# backwards compatibility to support `from fairseq.meters import AverageMeter`
from fairseq.logging import meters, metrics, progress_bar # noqa<|fim▁hole|>
import fairseq.criterions # noqa
import fairseq.models # noqa
import fairseq.modules # noqa
import fairseq.optim # noqa
import fairseq.optim.lr_scheduler # noqa
import fairseq.pdb # noqa
import fairseq.tasks # noqa
import fairseq.benchmark # noqa
import fairseq.model_parallel # noqa<|fim▁end|> | sys.modules['fairseq.meters'] = meters
sys.modules['fairseq.metrics'] = metrics
sys.modules['fairseq.progress_bar'] = progress_bar |
<|file_name|>Notebook.cpp<|end_file_name|><|fim▁begin|>#include <SFGUI/Engines/BREW.hpp>
#include <SFGUI/Context.hpp>
#include <SFGUI/Renderer.hpp>
#include <SFGUI/Notebook.hpp>
namespace sfg {
namespace eng {
RenderQueue* BREW::CreateNotebookDrawable( SharedPtr<const Notebook> notebook ) const {
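	// The drawable is assembled as a flat render queue: first the content
	// pane, then a border/background group per visible tab, and finally the
	// optional scroll buttons. The four branches below repeat this once per
	// tab position.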
sf::Color border_color( GetProperty<sf::Color>( "BorderColor", notebook ) );
sf::Color border_color_light( border_color );
sf::Color border_color_dark( border_color );
int border_color_shift( GetProperty<int>( "BorderColorShift", notebook ) );
sf::Color background_color( GetProperty<sf::Color>( "BackgroundColor", notebook ) );
sf::Color background_color_dark( GetProperty<sf::Color>( "BackgroundColorDark", notebook ) );
sf::Color background_color_prelight( GetProperty<sf::Color>( "BackgroundColorPrelight", notebook ) );
float padding( GetProperty<float>( "Padding", notebook ) );
float border_width( GetProperty<float>( "BorderWidth", notebook ) );
float scroll_button_size( GetProperty<float>( "ScrollButtonSize", notebook ) );
sf::Color arrow_color( GetProperty<sf::Color>( "Color", notebook ) );<|fim▁hole|> ShiftBorderColors( border_color_light, border_color_dark, border_color_shift );
RenderQueue* queue( new RenderQueue );
Notebook::IndexType page_count = notebook->GetPageCount();
if( !page_count ) {
return queue;
}
Notebook::IndexType current_page = notebook->GetCurrentPage();
Notebook::IndexType prelight_tab = notebook->GetPrelightTab();
// Get size in the dimension all tabs have uniform size.
sf::Vector2f tab_size( notebook->GetNthTabLabel( 0 )->GetAllocation().width, notebook->GetNthTabLabel( 0 )->GetAllocation().height );
// Get size in the dimension all children have uniform size.
sf::Vector2f child_size( notebook->GetNthPage( 0 )->GetAllocation().width, notebook->GetNthPage( 0 )->GetAllocation().height );
if( notebook->GetTabPosition() == Notebook::TOP ) {
// Tabs are positioned at top.
// Pane.
queue->Add(
Renderer::Get().CreatePane(
sf::Vector2f( 0.f, tab_size.y + 2.f * ( border_width + padding ) ),
sf::Vector2f( child_size.x + 2.f * ( border_width + padding ), child_size.y + 2.f * ( border_width + padding ) ),
border_width,
background_color,
border_color,
border_color_shift
)
);
// First tab label left border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? scroll_button_size : 0.f, 0.f ),
sf::Vector2f( notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? scroll_button_size : 0.f, tab_size.y + 3.f * border_width + 2.f * padding ),
border_color_light,
border_width
)
);
// Tab labels
for( Notebook::IndexType index = notebook->GetFirstDisplayedTab(); index < notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount(); ++index ) {
Widget::Ptr label = notebook->GetNthTabLabel( index );
sf::FloatRect label_allocation = label->GetAllocation();
// Top border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top - border_width - padding ),
sf::Vector2f( label_allocation.left + label_allocation.width + padding, label_allocation.top - border_width - padding ),
border_color_light,
border_width
)
);
// Right border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( label_allocation.left + label_allocation.width - border_width + padding, label_allocation.top - border_width - padding ),
sf::Vector2f( label_allocation.left + label_allocation.width - border_width + padding, label_allocation.top + label_allocation.height + border_width + padding ),
border_color_dark,
border_width
)
);
if( index == current_page ) {
// Active left border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top - border_width - padding ),
sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top + label_allocation.height + 2.f * border_width + padding ),
border_color_light,
border_width
)
);
// Active background
queue->Add(
Renderer::Get().CreateRect(
sf::FloatRect(
label_allocation.left - padding,
label_allocation.top - padding,
label_allocation.width + 2.f * padding - border_width,
label_allocation.height + 2.f * ( border_width + padding )
),
background_color
)
);
}
else {
// Inactive background
queue->Add(
Renderer::Get().CreateRect(
sf::FloatRect(
label_allocation.left - padding,
label_allocation.top - padding,
label_allocation.width + 2.f * padding - border_width,
label_allocation.height + 2.f * padding
),
( index == prelight_tab ) ? background_color_prelight : background_color_dark
)
);
}
}
if( notebook->GetScrollable() ) {
// Forward button
if( ( notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount() ) < notebook->GetPageCount() ) {
queue->Add(
Renderer::Get().CreatePane(
sf::Vector2f( notebook->GetAllocation().width - scroll_button_size, 0.f ),
sf::Vector2f( scroll_button_size, tab_size.y + 2.f * ( padding + border_width ) ),
border_width,
notebook->IsForwardScrollPrelight() ? scroll_button_prelight : background_color,
border_color,
notebook->IsScrollingForward() ? -border_color_shift : border_color_shift
)
);
queue->Add(
Renderer::Get().CreateTriangle(
sf::Vector2f( notebook->GetAllocation().width - scroll_button_size * .66f, .33f * ( tab_size.y + 2.f * ( padding + border_width ) ) ),
sf::Vector2f( notebook->GetAllocation().width - scroll_button_size * .66f, .66f * ( tab_size.y + 2.f * ( padding + border_width ) ) ),
sf::Vector2f( notebook->GetAllocation().width - scroll_button_size * .33f, .5f * ( tab_size.y + 2.f * ( padding + border_width ) ) ),
arrow_color
)
);
}
// Backward button
if( notebook->GetFirstDisplayedTab() != 0 ) {
queue->Add(
Renderer::Get().CreatePane(
sf::Vector2f( 0.f, 0.f ),
sf::Vector2f( scroll_button_size, tab_size.y + 2.f * ( padding + border_width ) ),
border_width,
notebook->IsBackwardScrollPrelight() ? scroll_button_prelight : background_color,
border_color,
notebook->IsScrollingBackward() ? -border_color_shift : border_color_shift
)
);
queue->Add(
Renderer::Get().CreateTriangle(
sf::Vector2f( scroll_button_size * .66f, .66f * ( tab_size.y + 2.f * ( padding + border_width ) ) ),
sf::Vector2f( scroll_button_size * .66f, .33f * ( tab_size.y + 2.f * ( padding + border_width ) ) ),
sf::Vector2f( scroll_button_size * .33f, .5f * ( tab_size.y + 2.f * ( padding + border_width ) ) ),
arrow_color
)
);
}
}
}
else if( notebook->GetTabPosition() == Notebook::BOTTOM ) {
// Tabs are positioned at bottom.
// Pane.
queue->Add(
Renderer::Get().CreatePane(
sf::Vector2f( 0.f, 0.f ),
sf::Vector2f( child_size.x + 2.f * ( border_width + padding ), child_size.y + 2.f * ( border_width + padding ) ),
border_width,
background_color,
border_color,
border_color_shift
)
);
// First tab label left border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? scroll_button_size : 0.f, child_size.y + 2.f * border_width + 2.f * padding ),
sf::Vector2f( notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? scroll_button_size : 0.f, child_size.y + tab_size.y + 3.f * border_width + 4.f * padding ),
border_color_light,
border_width
)
);
// Tab labels
for( Notebook::IndexType index = notebook->GetFirstDisplayedTab(); index < notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount(); ++index ) {
Widget::Ptr label = notebook->GetNthTabLabel( index );
sf::FloatRect label_allocation = label->GetAllocation();
// Bottom border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top + label_allocation.height + border_width + padding ),
sf::Vector2f( label_allocation.left + label_allocation.width + padding, label_allocation.top + label_allocation.height + border_width + padding ),
border_color_dark,
border_width
)
);
// Right border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( label_allocation.left + label_allocation.width + padding - border_width, label_allocation.top - padding ),
sf::Vector2f( label_allocation.left + label_allocation.width + padding - border_width, label_allocation.top + label_allocation.height + border_width + padding ),
border_color_dark,
border_width
)
);
if( index == current_page ) {
// Active left border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top - padding ),
sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top + label_allocation.height + border_width + padding ),
border_color_light,
border_width
)
);
// Active background
queue->Add(
Renderer::Get().CreateRect(
sf::FloatRect(
label_allocation.left - padding,
label_allocation.top - padding - border_width,
label_allocation.width + 2.f * padding - border_width,
label_allocation.height + 2.f * padding + 2.f * border_width
),
background_color
)
);
}
else {
// Inactive background
queue->Add(
Renderer::Get().CreateRect(
sf::FloatRect(
label_allocation.left - padding,
label_allocation.top - padding,
label_allocation.width + 2.f * padding - border_width,
label_allocation.height + 2.f * padding + border_width
),
( index == prelight_tab ) ? background_color_prelight : background_color_dark
)
);
}
}
if( notebook->GetScrollable() ) {
// Forward button
if( ( notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount() ) < notebook->GetPageCount() ) {
queue->Add(
Renderer::Get().CreatePane(
sf::Vector2f( notebook->GetAllocation().width - scroll_button_size, notebook->GetAllocation().height - ( scroll_button_size + padding ) ),
sf::Vector2f( scroll_button_size, tab_size.y + 2.f * ( padding + border_width ) ),
border_width,
notebook->IsForwardScrollPrelight() ? scroll_button_prelight : background_color,
border_color,
notebook->IsScrollingForward() ? -border_color_shift : border_color_shift
)
);
queue->Add(
Renderer::Get().CreateTriangle(
sf::Vector2f( notebook->GetAllocation().width - scroll_button_size * .66f, notebook->GetAllocation().height - ( scroll_button_size + padding ) + .33f * ( tab_size.y + 2.f * ( padding + border_width ) ) ),
sf::Vector2f( notebook->GetAllocation().width - scroll_button_size * .66f, notebook->GetAllocation().height - ( scroll_button_size + padding ) + .66f * ( tab_size.y + 2.f * ( padding + border_width ) ) ),
sf::Vector2f( notebook->GetAllocation().width - scroll_button_size * .33f, notebook->GetAllocation().height - ( scroll_button_size + padding ) + .5f * ( tab_size.y + 2.f * ( padding + border_width ) ) ),
arrow_color
)
);
}
// Backward button
if( notebook->GetFirstDisplayedTab() != 0 ) {
queue->Add(
Renderer::Get().CreatePane(
sf::Vector2f( 0.f, notebook->GetAllocation().height - ( scroll_button_size + padding ) ),
sf::Vector2f( scroll_button_size, tab_size.y + 2.f * ( padding + border_width ) ),
border_width,
notebook->IsBackwardScrollPrelight() ? scroll_button_prelight : background_color,
border_color,
notebook->IsScrollingBackward() ? -border_color_shift : border_color_shift
)
);
queue->Add(
Renderer::Get().CreateTriangle(
sf::Vector2f( scroll_button_size * .66f, notebook->GetAllocation().height - ( scroll_button_size + padding ) + .66f * ( tab_size.y + 2.f * ( padding + border_width ) ) ),
sf::Vector2f( scroll_button_size * .66f, notebook->GetAllocation().height - ( scroll_button_size + padding ) + .33f * ( tab_size.y + 2.f * ( padding + border_width ) ) ),
sf::Vector2f( scroll_button_size * .33f, notebook->GetAllocation().height - ( scroll_button_size + padding ) + .5f * ( tab_size.y + 2.f * ( padding + border_width ) ) ),
arrow_color
)
);
}
}
}
else if( notebook->GetTabPosition() == Notebook::LEFT ) {
// Tabs are positioned at left.
// Pane.
queue->Add(
Renderer::Get().CreatePane(
sf::Vector2f( tab_size.x + 2.f * ( border_width + padding ), 0.f ),
sf::Vector2f( child_size.x + 2.f * ( border_width + padding ), child_size.y + 2.f * ( border_width + padding ) ),
border_width,
background_color,
border_color,
border_color_shift
)
);
// First tab label top border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( 0.f, notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? scroll_button_size : 0.f ),
sf::Vector2f( tab_size.x + 2.f * padding + 3.f * border_width, notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? scroll_button_size : 0.f ),
border_color_light,
border_width
)
);
// Tab labels
for( Notebook::IndexType index = notebook->GetFirstDisplayedTab(); index < notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount(); ++index ) {
Widget::Ptr label = notebook->GetNthTabLabel( index );
sf::FloatRect label_allocation = label->GetAllocation();
// Left border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top - border_width - padding ),
sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top + label_allocation.height + border_width + padding ),
border_color_light,
border_width
)
);
// Bottom border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top + label_allocation.height + padding ),
sf::Vector2f( label_allocation.left + label_allocation.width + ( index == current_page ? border_width : 0.f ) + border_width + padding, label_allocation.top + label_allocation.height + padding ),
border_color_dark,
border_width
)
);
if( index == current_page ) {
// Active top border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top - border_width - padding ),
sf::Vector2f( label_allocation.left + label_allocation.width + 2.f * border_width + padding, label_allocation.top - border_width - padding ),
border_color_light,
border_width
)
);
// Active background
queue->Add(
Renderer::Get().CreateRect(
sf::FloatRect(
label_allocation.left - padding,
label_allocation.top - padding,
label_allocation.width + 2.f * ( border_width + padding ),
label_allocation.height + 2.f * padding
),
background_color
)
);
}
else {
// Inactive background
queue->Add(
Renderer::Get().CreateRect(
sf::FloatRect(
label_allocation.left - padding,
label_allocation.top - padding,
label_allocation.width + 2.f * padding,
label_allocation.height + 2.f * padding - border_width
),
( index == prelight_tab ) ? background_color_prelight : background_color_dark
)
);
}
}
if( notebook->GetScrollable() ) {
// Forward button
if( ( notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount() ) < notebook->GetPageCount() ) {
queue->Add(
Renderer::Get().CreatePane(
sf::Vector2f( 0.f, notebook->GetAllocation().height - ( scroll_button_size ) ),
sf::Vector2f( tab_size.x + 2.f * ( padding + border_width ), scroll_button_size ),
border_width,
notebook->IsForwardScrollPrelight() ? scroll_button_prelight : background_color,
border_color,
notebook->IsScrollingForward() ? -border_color_shift : border_color_shift
)
);
queue->Add(
Renderer::Get().CreateTriangle(
sf::Vector2f( ( tab_size.x + 2.f * ( padding + border_width ) ) * .6f, .33f * scroll_button_size + notebook->GetAllocation().height - scroll_button_size ),
sf::Vector2f( ( tab_size.x + 2.f * ( padding + border_width ) ) * .4f, .33f * scroll_button_size + notebook->GetAllocation().height - scroll_button_size ),
sf::Vector2f( ( tab_size.x + 2.f * ( padding + border_width ) ) * .5f, .66f * scroll_button_size + notebook->GetAllocation().height - scroll_button_size ),
arrow_color
)
);
}
// Backward button
if( notebook->GetFirstDisplayedTab() != 0 ) {
queue->Add(
Renderer::Get().CreatePane(
sf::Vector2f( 0.f, 0.f ),
sf::Vector2f( tab_size.x + 2.f * ( padding + border_width ), scroll_button_size ),
border_width,
notebook->IsBackwardScrollPrelight() ? scroll_button_prelight : background_color,
border_color,
notebook->IsScrollingBackward() ? -border_color_shift : border_color_shift
)
);
queue->Add(
Renderer::Get().CreateTriangle(
sf::Vector2f( ( tab_size.x + 2.f * ( padding + border_width ) ) * .4f, .66f * scroll_button_size ),
sf::Vector2f( ( tab_size.x + 2.f * ( padding + border_width ) ) * .6f, .66f * scroll_button_size ),
sf::Vector2f( ( tab_size.x + 2.f * ( padding + border_width ) ) * .5f, .33f * scroll_button_size ),
arrow_color
)
);
}
}
}
else if( notebook->GetTabPosition() == Notebook::RIGHT ) {
// Tabs are positioned at right.
// Pane.
queue->Add(
Renderer::Get().CreatePane(
sf::Vector2f( 0.f, 0.f ),
sf::Vector2f( child_size.x + 2.f * ( border_width + padding ), child_size.y + 2.f * ( border_width + padding ) ),
border_width,
background_color,
border_color,
border_color_shift
)
);
// First tab label top border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( child_size.x + 2.f * ( border_width + padding ), notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? scroll_button_size : 0.f ),
sf::Vector2f( child_size.x + 4.f * padding + 3.f * border_width + tab_size.x, notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? scroll_button_size : 0.f ),
border_color_light,
border_width
)
);
// Tab labels
for( Notebook::IndexType index = notebook->GetFirstDisplayedTab(); index < notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount(); ++index ) {
Widget::Ptr label = notebook->GetNthTabLabel( index );
sf::FloatRect label_allocation = label->GetAllocation();
// Right border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( label_allocation.left + label_allocation.width + border_width + padding, label_allocation.top - border_width - padding ),
sf::Vector2f( label_allocation.left + label_allocation.width + border_width + padding, label_allocation.top + label_allocation.height + border_width + padding ),
border_color_dark,
border_width
)
);
// Bottom border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( label_allocation.left - padding, label_allocation.top + label_allocation.height + padding ),
sf::Vector2f( label_allocation.left + label_allocation.width + border_width + padding, label_allocation.top + label_allocation.height + padding ),
border_color_dark,
border_width
)
);
if( index == current_page ) {
// Active top border
queue->Add(
Renderer::Get().CreateLine(
sf::Vector2f( label_allocation.left - padding - border_width, label_allocation.top - border_width - padding ),
sf::Vector2f( label_allocation.left + label_allocation.width + border_width + padding, label_allocation.top - border_width - padding ),
border_color_light,
border_width
)
);
// Active background
queue->Add(
Renderer::Get().CreateRect(
sf::FloatRect(
label_allocation.left - padding - border_width,
label_allocation.top - padding,
label_allocation.width + 2.f * padding + 2.f * border_width,
label_allocation.height + 2.f * padding
),
background_color
)
);
}
else {
// Inactive background
queue->Add(
Renderer::Get().CreateRect(
sf::FloatRect(
label_allocation.left - padding,
label_allocation.top - padding,
label_allocation.width + 2.f * padding + border_width,
label_allocation.height + 2.f * padding
),
						( index == prelight_tab ) ? background_color_prelight : background_color_dark
)
);
}
}
if( notebook->GetScrollable() ) {
// Forward button
if( ( notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount() ) < notebook->GetPageCount() ) {
queue->Add(
Renderer::Get().CreatePane(
sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * padding + border_width ), notebook->GetAllocation().height - ( scroll_button_size ) ),
sf::Vector2f( tab_size.x + 2.f * ( padding + border_width ), scroll_button_size ),
border_width,
notebook->IsForwardScrollPrelight() ? scroll_button_prelight : background_color,
border_color,
notebook->IsScrollingForward() ? -border_color_shift : border_color_shift
)
);
queue->Add(
Renderer::Get().CreateTriangle(
sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * ( padding + border_width ) ) * .4f, .33f * scroll_button_size + notebook->GetAllocation().height - scroll_button_size ),
sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * ( padding + border_width ) ) * .6f, .33f * scroll_button_size + notebook->GetAllocation().height - scroll_button_size ),
sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * ( padding + border_width ) ) * .5f, .66f * scroll_button_size + notebook->GetAllocation().height - scroll_button_size ),
arrow_color
)
);
}
// Backward button
if( notebook->GetFirstDisplayedTab() != 0 ) {
queue->Add(
Renderer::Get().CreatePane(
sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * padding + border_width ), 0.f ),
sf::Vector2f( tab_size.x + 2.f * ( padding + border_width ), scroll_button_size ),
border_width,
notebook->IsBackwardScrollPrelight() ? scroll_button_prelight : background_color,
border_color,
notebook->IsScrollingBackward() ? -border_color_shift : border_color_shift
)
);
queue->Add(
Renderer::Get().CreateTriangle(
sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * ( padding + border_width ) ) * .6f, .66f * scroll_button_size ),
sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * ( padding + border_width ) ) * .4f, .66f * scroll_button_size ),
sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * ( padding + border_width ) ) * .5f, .33f * scroll_button_size ),
arrow_color
)
);
}
}
}
return queue;
}
}
}<|fim▁end|> | sf::Color scroll_button_prelight( GetProperty<sf::Color>( "ScrollButtonPrelightColor", notebook ) );
|
<|file_name|>model.go<|end_file_name|><|fim▁begin|>// Copyright 2020 Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package common
import "istio.io/istio/pkg/config/protocol"
// TLSSettings defines TLS configuration for Echo server
type TLSSettings struct {
RootCert string
ClientCert string
Key string
// If provided, override the host name used for the connection
	// This is needed for integration tests, as we are connecting using a port-forward (127.0.0.1), so
// any DNS certs will not validate.
Hostname string
}
// Port represents a network port where a service is listening for
// connections. The port should be annotated with the type of protocol
// used by the port.
type Port struct {
// Name ascribes a human readable name for the port object. When a
// service has multiple ports, the name field is mandatory<|fim▁hole|> // service.
Port int
// Protocol to be used for the port.
Protocol protocol.Instance
// TLS determines if the port will use TLS.
TLS bool
}
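// Example (illustrative, not part of the upstream API): a TLS-enabled port.
// The protocol constants are assumed to exist in the imported protocol package.
//
//	https := &Port{Name: "https", Port: 443, Protocol: protocol.HTTPS, TLS: true}
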
// PortList is a set of ports
type PortList []*Port<|fim▁end|> | Name string
// Port number where the service can be reached. Does not necessarily
// map to the corresponding port numbers for the instances behind the |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from colorama import Fore
from rtxlib import info, error, debug
from rtxlib.preprocessors.SparkPreProcessor import SparkPreProcessor
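# Illustrative sketch of the workflow shape these helpers expect (assumed,
# not taken from the project docs):
#     wf.pre_processors = [{"type": "spark", ...}]
# init_pre_processors stores each running preprocessor under p["instance"].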
def init_pre_processors(wf):
""" we look into the workflows definition and run the required preprocessors """
if hasattr(wf, "pre_processors"):
pp = wf.pre_processors
for p in pp:
if p["type"] == "spark":
p["instance"] = SparkPreProcessor(wf, p)
else:
info("> Preprocessor | None", Fore.CYAN)
def kill_pre_processors(wf):
""" after the experiment, we stop all preprocessors """
try:
for p in wf.pre_processors:
p["instance"].shutdown()
info("> Shutting down Spark preprocessor")<|fim▁hole|><|fim▁end|> | except AttributeError:
pass |
<|file_name|>base.py<|end_file_name|><|fim▁begin|># (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import itertools
import operator
import uuid
from functools import partial
from inspect import getmembers
from io import FileIO
from six import iteritems, string_types, text_type
from jinja2.exceptions import UndefinedError
from ansible.errors import AnsibleParserError
from ansible.parsing import DataLoader
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.template import Templar
from ansible.utils.boolean import boolean
from ansible.utils.debug import debug
from ansible.utils.vars import combine_vars, isidentifier
from ansible.template import template
class Base:
# connection/transport
_connection = FieldAttribute(isa='string')
_port = FieldAttribute(isa='int')
_remote_user = FieldAttribute(isa='string')
# variables
_vars = FieldAttribute(isa='dict', default=dict(), priority=100)
# flags and misc. settings
_environment = FieldAttribute(isa='list')
_no_log = FieldAttribute(isa='bool')
# param names which have been deprecated/removed
DEPRECATED_ATTRIBUTES = [
'sudo', 'sudo_user', 'sudo_pass', 'sudo_exe', 'sudo_flags',
'su', 'su_user', 'su_pass', 'su_exe', 'su_flags',
]
def __init__(self):
# initialize the data loader and variable manager, which will be provided
# later when the object is actually loaded
self._loader = None
self._variable_manager = None
# every object gets a random uuid:
self._uuid = uuid.uuid4()
# and initialize the base attributes
self._initialize_base_attributes()
try:
from __main__ import display
self._display = display
except ImportError:
from ansible.utils.display import Display
self._display = Display()
    # The following three functions are used to programmatically define data
# descriptors (aka properties) for the Attributes of all of the playbook
# objects (tasks, blocks, plays, etc).
#
# The function signature is a little strange because of how we define
# them. We use partial to give each method the name of the Attribute that
# it is for. Since partial prefills the positional arguments at the
# beginning of the function we end up with the first positional argument
# being allocated to the name instead of to the class instance (self) as
# normal. To deal with that we make the property name field the first
# positional argument and self the second arg.
#
# Because these methods are defined inside of the class, they get bound to
# the instance when the object is created. After we run partial on them
# and put the result back into the class as a property, they get bound
# a second time. This leads to self being placed in the arguments twice.
# To work around that, we mark the functions as @staticmethod so that the
# first binding to the instance doesn't happen.
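    # For example (illustrative), an attribute named "port" ends up wired as:
    #   Base.port = property(partial(Base._generic_g, 'port'),
    #                        partial(Base._generic_s, 'port'),
    #                        partial(Base._generic_d, 'port'))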
@staticmethod
def _generic_g(prop_name, self):
method = "_get_attr_%s" % prop_name
if hasattr(self, method):
return getattr(self, method)()
value = self._attributes[prop_name]
if value is None and hasattr(self, '_get_parent_attribute'):
value = self._get_parent_attribute(prop_name)
return value
@staticmethod
def _generic_s(prop_name, self, value):
self._attributes[prop_name] = value
@staticmethod
def _generic_d(prop_name, self):
del self._attributes[prop_name]
def _get_base_attributes(self):
'''
Returns the list of attributes for this class (or any subclass thereof).
If the attribute name starts with an underscore, it is removed
'''
base_attributes = dict()
for (name, value) in getmembers(self.__class__):
if isinstance(value, Attribute):
if name.startswith('_'):
name = name[1:]
base_attributes[name] = value
return base_attributes
def _initialize_base_attributes(self):
# each class knows attributes set upon it, see Task.py for example
self._attributes = dict()
for (name, value) in self._get_base_attributes().items():
getter = partial(self._generic_g, name)
setter = partial(self._generic_s, name)
deleter = partial(self._generic_d, name)
# Place the property into the class so that cls.name is the
# property functions.
setattr(Base, name, property(getter, setter, deleter))
# Place the value into the instance so that the property can
            # process and hold that value.
setattr(self, name, value.default)
def preprocess_data(self, ds):
''' infrequently used method to do some pre-processing of legacy terms '''
for base_class in self.__class__.mro():
method = getattr(self, "_preprocess_data_%s" % base_class.__name__.lower(), None)
if method:
return method(ds)
return ds
def load_data(self, ds, variable_manager=None, loader=None):
''' walk the input datastructure and assign any values '''
assert ds is not None
# cache the datastructure internally
setattr(self, '_ds', ds)
# the variable manager class is used to manage and merge variables
# down to a single dictionary for reference in templating, etc.
self._variable_manager = variable_manager
# the data loader class is used to parse data from strings and files
if loader is not None:<|fim▁hole|>
# call the preprocess_data() function to massage the data into
# something we can more easily parse, and then call the validation
# function on it to ensure there are no incorrect key values
ds = self.preprocess_data(ds)
self._validate_attributes(ds)
# Walk all attributes in the class. We sort them based on their priority
# so that certain fields can be loaded before others, if they are dependent.
# FIXME: we currently don't do anything with private attributes but
# may later decide to filter them out of 'ds' here.
base_attributes = self._get_base_attributes()
for name, attr in sorted(base_attributes.items(), key=operator.itemgetter(1)):
# copy the value over unless a _load_field method is defined
if name in ds:
method = getattr(self, '_load_%s' % name, None)
if method:
self._attributes[name] = method(name, ds[name])
else:
self._attributes[name] = ds[name]
# run early, non-critical validation
self.validate()
# return the constructed object
return self
def get_ds(self):
try:
return getattr(self, '_ds')
except AttributeError:
return None
def get_loader(self):
return self._loader
def get_variable_manager(self):
return self._variable_manager
def _validate_attributes(self, ds):
'''
Ensures that there are no keys in the datastructure which do
not map to attributes for this object.
'''
valid_attrs = frozenset(name for name in self._get_base_attributes())
for key in ds:
if key not in valid_attrs:
raise AnsibleParserError("'%s' is not a valid attribute for a %s" % (key, self.__class__.__name__), obj=ds)
def validate(self, all_vars=dict()):
''' validation that is done at parse time, not load time '''
# walk all fields in the object
for (name, attribute) in iteritems(self._get_base_attributes()):
# run validator only if present
method = getattr(self, '_validate_%s' % name, None)
if method:
method(attribute, name, getattr(self, name))
else:
# and make sure the attribute is of the type it should be
value = getattr(self, name)
if value is not None:
if attribute.isa == 'string' and isinstance(value, (list, dict)):
raise AnsibleParserError("The field '%s' is supposed to be a string type, however the incoming data structure is a %s" % (name, type(value)), obj=self.get_ds())
def copy(self):
'''
Create a copy of this object and return it.
'''
new_me = self.__class__()
for name in self._get_base_attributes():
setattr(new_me, name, getattr(self, name))
new_me._loader = self._loader
new_me._variable_manager = self._variable_manager
# if the ds value was set on the object, copy it to the new copy too
if hasattr(self, '_ds'):
new_me._ds = self._ds
return new_me
def post_validate(self, templar):
'''
we can't tell that everything is of the right type until we have
all the variables. Run basic types (from isa) as well as
any _post_validate_<foo> functions.
'''
basedir = None
if self._loader is not None:
basedir = self._loader.get_basedir()
# save the omit value for later checking
omit_value = templar._available_variables.get('omit')
for (name, attribute) in iteritems(self._get_base_attributes()):
if getattr(self, name) is None:
if not attribute.required:
continue
else:
raise AnsibleParserError("the field '%s' is required but was not set" % name)
elif not attribute.always_post_validate and self.__class__.__name__ not in ('Task', 'Handler', 'PlayContext'):
# Intermediate objects like Play() won't have their fields validated by
# default, as their values are often inherited by other objects and validated
# later, so we don't want them to fail out early
continue
try:
# Run the post-validator if present. These methods are responsible for
# using the given templar to template the values, if required.
method = getattr(self, '_post_validate_%s' % name, None)
if method:
value = method(attribute, getattr(self, name), templar)
else:
# if the attribute contains a variable, template it now
value = templar.template(getattr(self, name))
# if this evaluated to the omit value, set the value back to
# the default specified in the FieldAttribute and move on
if omit_value is not None and value == omit_value:
value = attribute.default
continue
# and make sure the attribute is of the type it should be
if value is not None:
if attribute.isa == 'string':
value = text_type(value)
elif attribute.isa == 'int':
value = int(value)
elif attribute.isa == 'float':
value = float(value)
elif attribute.isa == 'bool':
value = boolean(value)
elif attribute.isa == 'percent':
# special value, which may be an integer or float
# with an optional '%' at the end
if isinstance(value, string_types) and '%' in value:
value = value.replace('%', '')
value = float(value)
elif attribute.isa == 'list':
if value is None:
value = []
elif not isinstance(value, list):
value = [ value ]
if attribute.listof is not None:
for item in value:
if not isinstance(item, attribute.listof):
raise AnsibleParserError("the field '%s' should be a list of %s, but the item '%s' is a %s" % (name, attribute.listof, item, type(item)), obj=self.get_ds())
elif attribute.required and attribute.listof == string_types:
if item is None or item.strip() == "":
raise AnsibleParserError("the field '%s' is required, and cannot have empty values" % (name,), obj=self.get_ds())
elif attribute.isa == 'set':
if value is None:
value = set()
else:
if not isinstance(value, (list, set)):
value = [ value ]
if not isinstance(value, set):
value = set(value)
elif attribute.isa == 'dict':
if value is None:
value = dict()
elif not isinstance(value, dict):
raise TypeError("%s is not a dictionary" % value)
# and assign the massaged value back to the attribute field
setattr(self, name, value)
except (TypeError, ValueError) as e:
raise AnsibleParserError("the field '%s' has an invalid value (%s), and could not be converted to an %s. Error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds())
except UndefinedError as e:
if templar._fail_on_undefined_errors and name != 'name':
raise AnsibleParserError("the field '%s' has an invalid value, which appears to include a variable that is undefined. The error was: %s" % (name,e), obj=self.get_ds())
def serialize(self):
'''
Serializes the object derived from the base object into
a dictionary of values. This only serializes the field
attributes for the object, so this may need to be overridden
for any classes which wish to add additional items not stored
as field attributes.
'''
repr = dict()
for name in self._get_base_attributes():
repr[name] = getattr(self, name)
# serialize the uuid field
repr['uuid'] = getattr(self, '_uuid')
return repr
def deserialize(self, data):
'''
Given a dictionary of values, load up the field attributes for
this object. As with serialize(), if there are any non-field
attribute data members, this method will need to be overridden
and extended.
'''
assert isinstance(data, dict)
for (name, attribute) in iteritems(self._get_base_attributes()):
if name in data:
setattr(self, name, data[name])
else:
setattr(self, name, attribute.default)
# restore the UUID field
setattr(self, '_uuid', data.get('uuid'))
def _load_vars(self, attr, ds):
'''
Vars in a play can be specified either as a dictionary directly, or
        as a list of dictionaries. If the latter, this method will turn the
list into a single dictionary.
'''
def _validate_variable_keys(ds):
for key in ds:
if not isidentifier(key):
raise TypeError("%s is not a valid variable name" % key)
try:
if isinstance(ds, dict):
_validate_variable_keys(ds)
return ds
elif isinstance(ds, list):
all_vars = dict()
for item in ds:
if not isinstance(item, dict):
raise ValueError
_validate_variable_keys(item)
all_vars = combine_vars(all_vars, item)
return all_vars
elif ds is None:
return {}
else:
raise ValueError
except ValueError:
raise AnsibleParserError("Vars in a %s must be specified as a dictionary, or a list of dictionaries" % self.__class__.__name__, obj=ds)
except TypeError as e:
raise AnsibleParserError("Invalid variable name in vars specified for %s: %s" % (self.__class__.__name__, e), obj=ds)
def _extend_value(self, value, new_value):
'''
Will extend the value given with new_value (and will turn both
        into lists if they are not so already). Adjacent duplicate values
        are collapsed (via itertools.groupby) while preserving their order.
'''
if not isinstance(value, list):
value = [ value ]
if not isinstance(new_value, list):
new_value = [ new_value ]
#return list(set(value + new_value))
return [i for i,_ in itertools.groupby(value + new_value)]
def __getstate__(self):
return self.serialize()
def __setstate__(self, data):
self.__init__()
self.deserialize(data)<|fim▁end|> | self._loader = loader
else:
self._loader = DataLoader() |
<|file_name|>LoginActivity.java<|end_file_name|><|fim▁begin|>package com.ke2g.cued_recall;
import android.app.Activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.support.v7.app.ActionBarActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.EditText;
import android.widget.Toast;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.util.ArrayList;
import java.util.List;
public class LoginActivity extends ActionBarActivity {
public static final String TAG = LoginActivity.class.getSimpleName();
private int discretization;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
public void doLogin(View V){
if(!getUsername().equals("")) {
Intent i = new Intent(this, returnIntent.class);
startActivityForResult(i, 1);
}
}
private ArrayList<Point> getUserPoints(String username){
SharedPreferences appSharedPrefs = PreferenceManager
.getDefaultSharedPreferences(this.getApplicationContext());
SharedPreferences.Editor prefsEditor = appSharedPrefs.edit();
Gson gson = new Gson();
String json = appSharedPrefs.getString(username, "");
if(json.equals("")){
return null;
} else {
User u = gson.fromJson(json, User.class);
return u.getPoints();
}
}
private String getUserHash(String username){
SharedPreferences appSharedPrefs = PreferenceManager
.getDefaultSharedPreferences(this.getApplicationContext());
SharedPreferences.Editor prefsEditor = appSharedPrefs.edit();
Gson gson = new Gson();
String json = appSharedPrefs.getString(username, "");
if(json.equals("")){
return null;
} else {
User u = gson.fromJson(json, User.class);
return u.getHash();
}
}
private void setTry(String username, int total, int invalid) {
SharedPreferences appSharedPrefs = PreferenceManager
.getDefaultSharedPreferences(this.getApplicationContext());
SharedPreferences.Editor prefsEditor = appSharedPrefs.edit();
Gson gson = new Gson();
String json = appSharedPrefs.getString(username, "");
if(!json.equals("")){
User u = gson.fromJson(json, User.class);
u.setInvalidLogins(invalid);
u.setTotalLogins(total);
json = gson.toJson(u);
prefsEditor.putString(username, json);
prefsEditor.commit();
}
}
private int getTolerance(){
SharedPreferences SP = PreferenceManager.getDefaultSharedPreferences(getBaseContext());
return Integer.parseInt(SP.getString("tolerance", "75"));
}
private String getUsername() {
EditText et = (EditText) findViewById(R.id.editText_login);
return et.getText().toString().trim();
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == Activity.RESULT_OK) {
ArrayList<Point> points = data.getParcelableArrayListExtra("RESULT_CLICKS");
//check if points are the saved ones
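            // The cued-recall screen returns the clicked points; verify them
            // against the stored credential for the entered username. In
            // discretized mode only a hash of the points is compared, otherwise
            // the raw coordinates are matched within the configured tolerance.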
if(getDiscretization() == 1){
if (CuedRecallIntent.areHashEqual(points, getUserHash(getUsername()), getTolerance())) {
setTry(getUsername(), 1, 0);
Toast.makeText(this, "Correct username and password", Toast.LENGTH_LONG).show();
} else {
setTry(getUsername(), 1, 1);
Toast.makeText(this, "Username and password don't match", Toast.LENGTH_LONG).show();
}
} else {
if (CuedRecallIntent.arePointsEqual(points, getUserPoints(getUsername()), getTolerance())) {
setTry(getUsername(), 1, 0);<|fim▁hole|> setTry(getUsername(), 1, 1);
Toast.makeText(this, "Username and password don't match", Toast.LENGTH_LONG).show();
}
}
}
}
public int getDiscretization() {
SharedPreferences SP = PreferenceManager.getDefaultSharedPreferences(getBaseContext());
return Integer.parseInt(SP.getString("discreType", "0"));
}
}<|fim▁end|> | Toast.makeText(this, "Correct username and password", Toast.LENGTH_LONG).show();
} else { |
<|file_name|>sorting.js<|end_file_name|><|fim▁begin|>const simple_sort = (key, a, b) => {
if (a[key] < b[key]) return -1
if (a[key] > b[key]) return 1
return 0
}
const name_sort = (a, b) => simple_sort('name', a, b)
const skill_sort = (a, b) => simple_sort('skill', a, b)
const speed_sort = (a, b) => simple_sort('speed', a, b)
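// Example (illustrative): the comparators sort an array in place.
//   const roster = [{ name: 'Bo', skill: 5 }, { name: 'Al', skill: 9 }]
//   roster.sort(name_sort)  // ascending by name
//   roster.sort(skill_sort) // ascending by skill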
export {
simple_sort,
name_sort,
skill_sort,<|fim▁hole|><|fim▁end|> | speed_sort
} |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import MapMesh from './MapMesh';
import { MapMeshOptions } from './MapMesh';
import DefaultMapViewController from "./DefaultMapViewController"
import Grid from './Grid';
import {TileData} from "./interfaces"
export {
MapMesh,
MapMeshOptions,
Grid,<|fim▁hole|> TileData
}<|fim▁end|> | DefaultMapViewController, |
<|file_name|>capture-analysis-2.rs<|end_file_name|><|fim▁begin|>// edition:2021
#![feature(rustc_attrs)]
#[derive(Debug)]
struct Point {
x: String,
y: i32,
}
fn main() {
let mut p = Point { x: String::new(), y: 10 };
let c = #[rustc_capture_analysis]
//~^ ERROR: attributes on expressions are experimental
//~| NOTE: see issue #15701 <https://github.com/rust-lang/rust/issues/15701>
|| {
//~^ First Pass analysis includes:
//~| Min Capture analysis includes:
let _x = p.x;
//~^ NOTE: Capturing p[(0, 0)] -> ByValue
//~| NOTE: p[] captured as ByValue here<|fim▁hole|> };
}<|fim▁end|> | println!("{:?}", p);
//~^ NOTE: Capturing p[] -> ImmBorrow
//~| NOTE: Min Capture p[] -> ByValue
//~| NOTE: p[] used here |
<|file_name|>test_cargo_bench.rs<|end_file_name|><|fim▁begin|>use std::old_path;
use std::str;
use support::{project, execs, basic_bin_manifest, basic_lib_manifest};
use support::{COMPILING, cargo_dir, FRESH, RUNNING};
use support::paths::PathExt;
use hamcrest::{assert_that, existing_file};
use cargo::util::process;
fn setup() {}
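// Each `test!` case below scaffolds a throwaway Cargo project, runs
// `cargo bench` against it and asserts on the expected (wildcarded) output.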
test!(cargo_bench_simple {
let p = project("foo")
.file("Cargo.toml", basic_bin_manifest("foo").as_slice())
.file("src/foo.rs", r#"
extern crate test;
fn hello() -> &'static str {
"hello"
}
pub fn main() {
println!("{}", hello())
}
#[bench]
fn bench_hello(_b: &mut test::Bencher) {
assert_eq!(hello(), "hello")
}"#);
assert_that(p.cargo_process("build"), execs());
assert_that(&p.bin("foo"), existing_file());
assert_that(
process(p.bin("foo")).unwrap(),
execs().with_stdout("hello\n"));
assert_that(p.process(cargo_dir().join("cargo")).arg("bench"),
execs().with_stdout(format!("\
{} foo v0.5.0 ({})
{} target[..]release[..]foo-[..]
running 1 test
test bench_hello ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
COMPILING, p.url(),
RUNNING)));
});
test!(bench_tarname {
let prj = project("foo")
.file("Cargo.toml" , r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[bin]]
name="bin1"
path="src/bin1.rs"
[[bin]]
name="bin2"
path="src/bin2.rs"
"#)
.file("src/bin1.rs", r#"
extern crate test;
#[bench] fn run1(_ben: &mut test::Bencher) { }"#)
.file("src/bin2.rs", r#"
extern crate test;
#[bench] fn run2(_ben: &mut test::Bencher) { }"#);
let expected_stdout = format!("\
{compiling} foo v0.0.1 ({dir})
{running} target[..]release[..]bin2[..]
running 1 test
test run2 ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
compiling = COMPILING,
        running = RUNNING,
dir = prj.url());
assert_that(prj.cargo_process("bench").arg("--bench").arg("bin2"),
execs().with_status(0).with_stdout(expected_stdout.as_slice()));
});
test!(cargo_bench_verbose {
let p = project("foo")
.file("Cargo.toml", basic_bin_manifest("foo").as_slice())
.file("src/foo.rs", r#"
extern crate test;
fn main() {}
#[bench] fn bench_hello(_b: &mut test::Bencher) {}
"#);
assert_that(p.cargo_process("bench").arg("-v").arg("hello"),
execs().with_stdout(format!("\
{compiling} foo v0.5.0 ({url})
{running} `rustc src[..]foo.rs [..]`
{running} `[..]target[..]release[..]foo-[..] hello --bench`
running 1 test
test bench_hello ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
compiling = COMPILING, url = p.url(), running = RUNNING)));
});
test!(many_similar_names {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
extern crate test;
pub fn foo() {}
#[bench] fn lib_bench(_b: &mut test::Bencher) {}
")
.file("src/main.rs", "
extern crate foo;
extern crate test;
fn main() {}
#[bench] fn bin_bench(_b: &mut test::Bencher) { foo::foo() }
")
.file("benches/foo.rs", r#"
extern crate foo;
extern crate test;
#[bench] fn bench_bench(_b: &mut test::Bencher) { foo::foo() }
"#);
let output = p.cargo_process("bench").exec_with_output().unwrap();
let output = str::from_utf8(output.output.as_slice()).unwrap();
assert!(output.contains("test bin_bench"), "bin_bench missing\n{}", output);
assert!(output.contains("test lib_bench"), "lib_bench missing\n{}", output);
assert!(output.contains("test bench_bench"), "bench_bench missing\n{}", output);
});
test!(cargo_bench_failing_test {
let p = project("foo")
.file("Cargo.toml", basic_bin_manifest("foo").as_slice())
.file("src/foo.rs", r#"
#![feature(test)]
extern crate test;
fn hello() -> &'static str {
"hello"
}
pub fn main() {
println!("{}", hello())
}
#[bench]
fn bench_hello(_b: &mut test::Bencher) {
assert_eq!(hello(), "nope")
}"#);
assert_that(p.cargo_process("build"), execs());
assert_that(&p.bin("foo"), existing_file());
assert_that(
process(p.bin("foo")).unwrap(),
execs().with_stdout("hello\n"));
assert_that(p.process(cargo_dir().join("cargo")).arg("bench"),
execs().with_stdout(format!("\
{} foo v0.5.0 ({})
{} target[..]release[..]foo-[..]
running 1 test
test bench_hello ... ",
COMPILING, p.url(), RUNNING))
.with_stderr(format!("\
thread '<main>' panicked at 'assertion failed: \
`(left == right) && (right == left)` (left: \
`\"hello\"`, right: `\"nope\"`)', src{sep}foo.rs:14
", sep = old_path::SEP))
.with_status(101));
});
test!(bench_with_lib_dep {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
[[bin]]
name = "baz"
path = "src/main.rs"
"#)
.file("src/lib.rs", r#"
extern crate test;
///
/// ```rust
/// extern crate foo;
/// fn main() {
/// println!("{}", foo::foo());
/// }
/// ```
///
pub fn foo(){}
#[bench] fn lib_bench(_b: &mut test::Bencher) {}
"#)
.file("src/main.rs", "
extern crate foo;
extern crate test;
fn main() {}
#[bench]
fn bin_bench(_b: &mut test::Bencher) {}
");
assert_that(p.cargo_process("bench"),
execs().with_stdout(format!("\
{} foo v0.0.1 ({})
{running} target[..]release[..]baz-[..]
running 1 test
test bin_bench ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
{running} target[..]release[..]foo-[..]
running 1 test
test lib_bench ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
COMPILING, p.url(), running = RUNNING)))
});
test!(bench_with_deep_lib_dep {
let p = project("bar")
.file("Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[dependencies.foo]
path = "../foo"
"#)
.file("src/lib.rs", "
extern crate foo;
extern crate test;
#[bench]
fn bar_bench(_b: &mut test::Bencher) {
foo::foo();
}
");
let p2 = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
extern crate test;
pub fn foo() {}
#[bench]
fn foo_bench(_b: &mut test::Bencher) {}
");
p2.build();
assert_that(p.cargo_process("bench"),
execs().with_status(0)
.with_stdout(format!("\
{compiling} foo v0.0.1 ({dir})
{compiling} bar v0.0.1 ({dir})
{running} target[..]
running 1 test
test bar_bench ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
compiling = COMPILING, running = RUNNING,
dir = p.url()).as_slice()));
});
test!(external_bench_explicit {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
[[bench]]
name = "bench"
path = "src/bench.rs"
"#)
.file("src/lib.rs", r#"
extern crate test;
pub fn get_hello() -> &'static str { "Hello" }
#[bench]
fn internal_bench(_b: &mut test::Bencher) {}
"#)
.file("src/bench.rs", r#"
extern crate foo;
extern crate test;
#[bench]
fn external_bench(_b: &mut test::Bencher) {}
"#);
assert_that(p.cargo_process("bench"),
execs().with_stdout(format!("\
{} foo v0.0.1 ({})
{running} target[..]release[..]bench-[..]
running 1 test
test external_bench ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
{running} target[..]release[..]foo-[..]
running 1 test
test internal_bench ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
COMPILING, p.url(), running = RUNNING)))
});
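// When the manifest declares no explicit [[bench]] target, Cargo discovers
// bench files implicitly from the `benches/` directory, as exercised below.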
test!(external_bench_implicit {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", r#"
extern crate test;
pub fn get_hello() -> &'static str { "Hello" }
#[bench]
fn internal_bench(_b: &mut test::Bencher) {}
"#)
.file("benches/external.rs", r#"
extern crate foo;
extern crate test;
#[bench]
fn external_bench(_b: &mut test::Bencher) {}
"#);
assert_that(p.cargo_process("bench"),
execs().with_stdout(format!("\
{} foo v0.0.1 ({})
{running} target[..]release[..]external-[..]
running 1 test
test external_bench ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
{running} target[..]release[..]foo-[..]
running 1 test
test internal_bench ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
COMPILING, p.url(), running = RUNNING)))
});
test!(dont_run_examples {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", r#"
"#)
.file("examples/dont-run-me-i-will-fail.rs", r#"
fn main() { panic!("Examples should not be run by 'cargo test'"); }
"#);
assert_that(p.cargo_process("bench"),
execs().with_status(0));
});
test!(pass_through_command_line {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
extern crate test;
#[bench] fn foo(_b: &mut test::Bencher) {}
#[bench] fn bar(_b: &mut test::Bencher) {}
");
assert_that(p.cargo_process("bench").arg("bar"),
execs().with_status(0)
.with_stdout(format!("\
{compiling} foo v0.0.1 ({dir})
{running} target[..]release[..]foo-[..]
running 1 test
test bar ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
compiling = COMPILING, running = RUNNING,
dir = p.url()).as_slice()));
assert_that(p.cargo_process("bench").arg("foo"),
execs().with_status(0)
.with_stdout(format!("\
{compiling} foo v0.0.1 ({dir})
{running} target[..]release[..]foo-[..]
running 1 test
test foo ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
compiling = COMPILING, running = RUNNING,
dir = p.url()).as_slice()));
});
// Regression test for running cargo-bench twice with
// tests in an rlib
test!(cargo_bench_twice {
let p = project("test_twice")
.file("Cargo.toml", basic_lib_manifest("test_twice").as_slice())
.file("src/test_twice.rs", r#"
#![crate_type = "rlib"]
extern crate test;
#[bench]
fn dummy_bench(b: &mut test::Bencher) { }
"#);
p.cargo_process("build");
for _ in range(0, 2) {
assert_that(p.process(cargo_dir().join("cargo")).arg("bench"),
execs().with_status(0));
}
});
test!(lib_bin_same_name {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
[[bin]]
name = "foo"
"#)
.file("src/lib.rs", "
extern crate test;
#[bench] fn lib_bench(_b: &mut test::Bencher) {}
")
.file("src/main.rs", "
extern crate foo;
extern crate test;
#[bench]
fn bin_bench(_b: &mut test::Bencher) {}
");
assert_that(p.cargo_process("bench"),
execs().with_stdout(format!("\
{} foo v0.0.1 ({})
{running} target[..]release[..]foo-[..]
running 1 test
test [..] ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
{running} target[..]release[..]foo-[..]
running 1 test
test [..] ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
COMPILING, p.url(), running = RUNNING)))
});
test!(lib_with_standard_name {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
extern crate test;
/// ```
/// syntax::foo();
/// ```
pub fn foo() {}
#[bench]
fn foo_bench(_b: &mut test::Bencher) {}
")
.file("benches/bench.rs", "
extern crate syntax;
extern crate test;
#[bench]<|fim▁hole|> fn bench(_b: &mut test::Bencher) { syntax::foo() }
");
assert_that(p.cargo_process("bench"),
execs().with_status(0)
.with_stdout(format!("\
{compiling} syntax v0.0.1 ({dir})
{running} target[..]release[..]bench-[..]
running 1 test
test bench ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
{running} target[..]release[..]syntax-[..]
running 1 test
test foo_bench ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
compiling = COMPILING, running = RUNNING,
dir = p.url()).as_slice()));
});
test!(lib_with_standard_name2 {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
[lib]
name = "syntax"
bench = false
doctest = false
"#)
.file("src/lib.rs", "
pub fn foo() {}
")
.file("src/main.rs", "
extern crate syntax;
extern crate test;
fn main() {}
#[bench]
fn bench(_b: &mut test::Bencher) { syntax::foo() }
");
assert_that(p.cargo_process("bench"),
execs().with_status(0)
.with_stdout(format!("\
{compiling} syntax v0.0.1 ({dir})
{running} target[..]release[..]syntax-[..]
running 1 test
test bench ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
compiling = COMPILING, running = RUNNING,
dir = p.url()).as_slice()));
});
test!(bin_there_for_integration {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", "
extern crate test;
fn main() { std::os::set_exit_status(1); }
#[bench] fn main_bench(_b: &mut test::Bencher) {}
")
.file("benches/foo.rs", r#"
extern crate test;
use std::old_io::Command;
#[bench]
fn bench_bench(_b: &mut test::Bencher) {
let status = Command::new("target/release/foo").status().unwrap();
assert!(status.matches_exit_status(1));
}
"#);
let output = p.cargo_process("bench").exec_with_output().unwrap();
let output = str::from_utf8(output.output.as_slice()).unwrap();
assert!(output.contains("main_bench ... bench: 0 ns/iter (+/- 0)"),
"no main_bench\n{}",
output);
assert!(output.contains("bench_bench ... bench: 0 ns/iter (+/- 0)"),
"no bench_bench\n{}",
output);
});
test!(bench_dylib {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
crate_type = ["dylib"]
[dependencies.bar]
path = "bar"
"#)
.file("src/lib.rs", r#"
extern crate "bar" as the_bar;
extern crate test;
pub fn bar() { the_bar::baz(); }
#[bench]
fn foo(_b: &mut test::Bencher) {}
"#)
.file("benches/bench.rs", r#"
extern crate "foo" as the_foo;
extern crate test;
#[bench]
fn foo(_b: &mut test::Bencher) { the_foo::bar(); }
"#)
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[lib]
name = "bar"
crate_type = ["dylib"]
"#)
.file("bar/src/lib.rs", "
pub fn baz() {}
");
assert_that(p.cargo_process("bench").arg("-v"),
execs().with_status(0)
.with_stdout(format!("\
{compiling} bar v0.0.1 ({dir})
{running} [..] -C opt-level=3 [..]
{compiling} foo v0.0.1 ({dir})
{running} [..] -C opt-level=3 [..]
{running} [..] -C opt-level=3 [..]
{running} [..] -C opt-level=3 [..]
{running} [..]target[..]release[..]bench-[..]
running 1 test
test foo ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
{running} [..]target[..]release[..]foo-[..]
running 1 test
test foo ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
compiling = COMPILING, running = RUNNING,
dir = p.url()).as_slice()));
p.root().move_into_the_past().unwrap();
assert_that(p.process(cargo_dir().join("cargo")).arg("bench").arg("-v"),
execs().with_status(0)
.with_stdout(format!("\
{fresh} bar v0.0.1 ({dir})
{fresh} foo v0.0.1 ({dir})
{running} [..]target[..]release[..]bench-[..]
running 1 test
test foo ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
{running} [..]target[..]release[..]foo-[..]
running 1 test
test foo ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
fresh = FRESH, running = RUNNING,
dir = p.url()).as_slice()));
});
test!(bench_twice_with_build_cmd {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
"#)
.file("build.rs", "fn main() {}")
.file("src/lib.rs", "
extern crate test;
#[bench]
fn foo(_b: &mut test::Bencher) {}
");
assert_that(p.cargo_process("bench"),
execs().with_status(0)
.with_stdout(format!("\
{compiling} foo v0.0.1 ({dir})
{running} target[..]release[..]foo-[..]
running 1 test
test foo ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
compiling = COMPILING, running = RUNNING,
dir = p.url()).as_slice()));
assert_that(p.process(cargo_dir().join("cargo")).arg("bench"),
execs().with_status(0)
.with_stdout(format!("\
{running} target[..]release[..]foo-[..]
running 1 test
test foo ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
running = RUNNING)));
});
test!(bench_with_examples {
let p = project("testbench")
.file("Cargo.toml", r#"
[package]
name = "testbench"
version = "6.6.6"
authors = []
[[example]]
name = "teste1"
[[bench]]
name = "testb1"
"#)
.file("src/lib.rs", r#"
extern crate test;
use test::Bencher;
pub fn f1() {
println!("f1");
}
pub fn f2() {}
#[bench]
fn bench_bench1(_b: &mut Bencher) {
f2();
}
"#)
.file("benches/testb1.rs", "
extern crate testbench;
extern crate test;
use test::Bencher;
#[bench]
fn bench_bench2(_b: &mut Bencher) {
testbench::f2();
}
")
.file("examples/teste1.rs", r#"
extern crate testbench;
fn main() {
println!("example1");
testbench::f1();
}
"#);
assert_that(p.cargo_process("bench").arg("-v"),
execs().with_status(0)
.with_stdout(format!("\
{compiling} testbench v6.6.6 ({url})
{running} `rustc src{sep}lib.rs --crate-name testbench --crate-type lib [..]`
{running} `rustc src{sep}lib.rs --crate-name testbench --crate-type lib [..]`
{running} `rustc benches{sep}testb1.rs --crate-name testb1 --crate-type bin \
[..] --test [..]`
{running} `{dir}{sep}target{sep}release{sep}testb1-[..] --bench`
running 1 test
test bench_bench2 ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
{running} `{dir}{sep}target{sep}release{sep}testbench-[..] --bench`
running 1 test
test bench_bench1 ... bench: 0 ns/iter (+/- 0)
test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
",
compiling = COMPILING,
running = RUNNING,
dir = p.root().display(),
url = p.url(),
sep = old_path::SEP).as_slice()));
});<|fim▁end|> | |
<|file_name|>lineroot.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
'''
.. module:: lineroot
Definition of the base class LineRoot and base classes LineSingle/LineMultiple
to define interfaces and hierarchy for the real operational classes
.. moduleauthor:: Daniel Rodriguez
'''
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import operator
import six
from . import metabase
class MetaLineRoot(metabase.MetaParams):
'''
Once the object is created (effectively pre-init) the "owner" of this
class is sought
'''
def donew(cls, *args, **kwargs):
_obj, args, kwargs = super(MetaLineRoot, cls).donew(*args, **kwargs)
# Find the owner and store it
# startlevel = 4 ... to skip intermediate call stacks
ownerskip = kwargs.pop('_ownerskip', None)
_obj._owner = metabase.findowner(_obj,
_obj._OwnerCls or LineMultiple,
skip=ownerskip)
# Parameter values have now been set before __init__
return _obj, args, kwargs<|fim▁hole|>class LineRoot(six.with_metaclass(MetaLineRoot, object)):
'''
Defines a common base and interfaces for Single and Multiple
LineXXX instances
Period management
Iteration management
Operation (dual/single operand) Management
Rich Comparison operator definition
'''
_OwnerCls = None
_minperiod = 1
_opstage = 1
IndType, StratType, ObsType = range(3)
def _stage1(self):
self._opstage = 1
def _stage2(self):
self._opstage = 2
def _operation(self, other, operation, r=False, intify=False):
if self._opstage == 1:
return self._operation_stage1(
other, operation, r=r, intify=intify)
return self._operation_stage2(other, operation)
def _operationown(self, operation):
if self._opstage == 1:
return self._operationown_stage1(operation)
return self._operationown_stage2(operation)
def setminperiod(self, minperiod):
'''
Direct minperiod manipulation. It could be used for example
by a strategy
to not wait for all indicators to produce a value
'''
self._minperiod = minperiod
def updateminperiod(self, minperiod):
'''
Update the minperiod if needed. The minperiod will have been
calculated elsewhere
        and has to take over if greater than self's
'''
self._minperiod = max(self._minperiod, minperiod)
def addminperiod(self, minperiod):
'''
Add a minperiod to own ... to be defined by subclasses
'''
raise NotImplementedError
def incminperiod(self, minperiod):
'''
Increment the minperiod with no considerations
'''
raise NotImplementedError
def prenext(self):
'''
It will be called during the "minperiod" phase of an iteration.
'''
pass
def nextstart(self):
'''
It will be called when the minperiod phase is over for the 1st
post-minperiod value. Only called once and defaults to automatically
calling next
'''
self.next()
def next(self):
'''
Called to calculate values when the minperiod is over
'''
pass
def preonce(self, start, end):
'''
It will be called during the "minperiod" phase of a "once" iteration
'''
pass
def oncestart(self, start, end):
'''
It will be called when the minperiod phase is over for the 1st
post-minperiod value
Only called once and defaults to automatically calling once
'''
self.once(start, end)
def once(self, start, end):
'''
Called to calculate values at "once" when the minperiod is over
'''
pass
# Arithmetic operators
def _makeoperation(self, other, operation, r=False, _ownerskip=None):
raise NotImplementedError
def _makeoperationown(self, operation, _ownerskip=None):
raise NotImplementedError
def _operationown_stage1(self, operation):
'''
Operation with single operand which is "self"
'''
return self._makeoperationown(operation, _ownerskip=self)
def _roperation(self, other, operation, intify=False):
'''
        Relies on self._operation and passes r=True to define a
        reverse operation
'''
return self._operation(other, operation, r=True, intify=intify)
def _operation_stage1(self, other, operation, r=False, intify=False):
'''
Two operands' operation. Scanning of other happens to understand
if other must be directly an operand or rather a subitem thereof
'''
if isinstance(other, LineMultiple):
other = other.lines[0]
return self._makeoperation(other, operation, r, self)
def _operation_stage2(self, other, operation):
'''
Rich Comparison operators. Scans other and returns either an
operation with other directly or a subitem from other
'''
if isinstance(other, LineRoot):
other = other[0]
# operation(float, other) ... expecting other to be a float
return operation(self[0], other)
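        # Note on the two stages: while _opstage == 1 (the default, active
        # while indicators/strategies are being built) operators go through
        # _makeoperation and return deferred "line" operation objects. Once
        # _stage2() has been called, operators are applied eagerly to the
        # current value self[0], so e.g. `a < b` yields a plain bool here.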
def _operationown_stage2(self, operation):
return operation(self[0])
def __add__(self, other):
return self._operation(other, operator.__add__)
def __radd__(self, other):
return self._roperation(other, operator.__add__)
def __sub__(self, other):
return self._operation(other, operator.__sub__)
def __rsub__(self, other):
return self._roperation(other, operator.__sub__)
def __mul__(self, other):
return self._operation(other, operator.__mul__)
def __rmul__(self, other):
return self._roperation(other, operator.__mul__)
def __truediv__(self, other):
return self._operation(other, operator.__truediv__)
def __rtruediv__(self, other):
return self._roperation(other, operator.__truediv__)
def __pow__(self, other):
return self._operation(other, operator.__pow__)
def __rpow__(self, other):
return self._roperation(other, operator.__pow__)
def __abs__(self):
return self._operationown(operator.__abs__)
def __lt__(self, other):
return self._operation(other, operator.__lt__)
def __gt__(self, other):
return self._operation(other, operator.__gt__)
def __le__(self, other):
return self._operation(other, operator.__le__)
def __ge__(self, other):
return self._operation(other, operator.__ge__)
def __eq__(self, other):
return self._operation(other, operator.__eq__)
def __ne__(self, other):
return self._operation(other, operator.__ne__)
def __nonzero__(self):
return self._operationown(bool)
__bool__ = __nonzero__
# Python 3 forces explicit implementation of hash if
# the class has redefined __eq__
__hash__ = object.__hash__
class LineMultiple(LineRoot):
'''
Base class for LineXXX instances that hold more than one line
'''
def reset(self):
self._stage1()
self.lines.reset()
def _stage1(self):
super(LineMultiple, self)._stage1()
for line in self.lines:
line._stage1()
def _stage2(self):
super(LineMultiple, self)._stage2()
for line in self.lines:
line._stage2()
def addminperiod(self, minperiod):
'''
        The passed minperiod is fed to the lines
'''
# pass it down to the lines
for line in self.lines:
line.addminperiod(minperiod)
def incminperiod(self, minperiod):
'''
        The passed minperiod is fed to the lines
'''
# pass it down to the lines
for line in self.lines:
line.incminperiod(minperiod)
def _makeoperation(self, other, operation, r=False, _ownerskip=None):
return self.lines[0]._makeoperation(other, operation, r, _ownerskip)
def _makeoperationown(self, operation, _ownerskip=None):
return self.lines[0]._makeoperationown(operation, _ownerskip)
class LineSingle(LineRoot):
'''
Base class for LineXXX instances that hold a single line
'''
def addminperiod(self, minperiod):
'''
        Add the minperiod (subtracting the overlapping 1 minimum period)
'''
self._minperiod += minperiod - 1
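        # Example: if self._minperiod is 3, addminperiod(5) yields
        # 3 + 5 - 1 = 7, because the bar that completes the first value is
        # shared by (overlaps) both period calculations.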
def incminperiod(self, minperiod):
'''
Increment the minperiod with no considerations
'''
self._minperiod += minperiod<|fim▁end|> | |
<|file_name|>appsync.go<|end_file_name|><|fim▁begin|>package events
import "encoding/json"
// AppSyncResolverTemplate represents the requests from AppSync to Lambda
type AppSyncResolverTemplate struct {
Version string `json:"version"`
Operation AppSyncOperation `json:"operation"`
Payload json.RawMessage `json:"payload"`
}
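// exampleDecodePayload is an illustrative sketch (not part of the upstream
// file): it shows how a handler might decode the raw Payload of a single
// "Invoke" request into a generic map. The actual payload layout depends on
// the AppSync request mapping template, so a map is used here as a neutral
// assumption.
func exampleDecodePayload(req AppSyncResolverTemplate) (map[string]interface{}, error) {
	if req.Operation != OperationInvoke {
		// BatchInvoke payloads arrive as JSON arrays and need their own handling.
		return nil, nil
	}
	var args map[string]interface{}
	if err := json.Unmarshal(req.Payload, &args); err != nil {
		return nil, err
	}
	return args, nil
}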
<|fim▁hole|> CognitoIdentityID string `json:"cognitoIdentityId"`
SourceIP []string `json:"sourceIp"`
Username string `json:"username"`
UserARN string `json:"userArn"`
}
// AppSyncCognitoIdentity contains information about the caller authed via Cognito.
type AppSyncCognitoIdentity struct {
Sub string `json:"sub"`
Issuer string `json:"issuer"`
Username string `json:"username"`
Claims map[string]interface{} `json:"claims"`
SourceIP []string `json:"sourceIp"`
DefaultAuthStrategy string `json:"defaultAuthStrategy"`
}
// AppSyncOperation specifies the operation type supported by Lambda operations
type AppSyncOperation string
const (
// OperationInvoke lets AWS AppSync know to call your Lambda function for every GraphQL field resolver
OperationInvoke AppSyncOperation = "Invoke"
// OperationBatchInvoke instructs AWS AppSync to batch requests for the current GraphQL field
OperationBatchInvoke AppSyncOperation = "BatchInvoke"
)<|fim▁end|> | // AppSyncIAMIdentity contains information about the caller authed via IAM.
type AppSyncIAMIdentity struct {
AccountID string `json:"accountId"`
CognitoIdentityPoolID string `json:"cognitoIdentityPoolId"` |
<|file_name|>gzip.rs<|end_file_name|><|fim▁begin|>extern crate extra;
extern crate libflate;
use extra::option::OptionalExt;
use libflate::gzip::Encoder;
use std::io::Write;
use std::{env, fs, io, process};
fn main() {
let mut stderr = io::stderr();
let mut keep = false;
let mut files = Vec::new();
for arg in env::args().skip(1) {
if arg == "-k" {
keep = true;
} else {
files.push(arg)
}
}
if files.is_empty() {<|fim▁hole|> }
for arg in files {
{
let output = fs::File::create(&format!("{}.gz", &arg)).try(&mut stderr);
let mut encoder = Encoder::new(output).try(&mut stderr);
let mut input = fs::File::open(&arg).try(&mut stderr);
io::copy(&mut input, &mut encoder).try(&mut stderr);
let mut encoded = encoder.finish().into_result().try(&mut stderr);
encoded.flush().try(&mut stderr);
}
if !keep {
fs::remove_file(&arg).try(&mut stderr);
}
}
}<|fim▁end|> | eprintln!("gzip: no files provided");
process::exit(1); |
<|file_name|>scrapertools.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# Download Tools
# Based on the code from VideoMonkey XBMC Plugin
#------------------------------------------------------------
# pelisalacarta
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
# Created by:
# Jesús ([email protected])
# jurrabi ([email protected])
# bandavi ([email protected])
# License: GPL (http://www.gnu.org/licenses/gpl-3.0.html)
#------------------------------------------------------------
# Change history:
#------------------------------------------------------------
import urlparse,urllib2,urllib
import time
import os
import config
import logger
import re
import downloadtools
import socket
logger.info("[scrapertools.py] init")
# True - Shows the HTTP headers in the log
# False - Does not show them
DEBUG_LEVEL = True
CACHE_ACTIVA = "0" # Automatic
CACHE_SIEMPRE = "1" # Cache everything
CACHE_NUNCA = "2" # Never cache
CACHE_PATH = config.get_setting("cache.dir")
logger.info("[scrapertools.py] CACHE_PATH="+CACHE_PATH)
DEBUG = False
def cache_page(url,post=None,headers=[['User-Agent', 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; es-ES; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12']],modo_cache=CACHE_ACTIVA, timeout=socket.getdefaulttimeout()):
return cachePage(url,post,headers,modo_cache,timeout=timeout)
# TODO: (3.1) Remove the modoCache parameter (it is now taken from the configuration)
# TODO: (3.2) Use lowercase_with_underscores naming for functions and variables as recommended by Python http://www.python.org/dev/peps/pep-0008/
def cachePage(url,post=None,headers=[['User-Agent', 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; es-ES; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12']],modoCache=CACHE_ACTIVA, timeout=socket.getdefaulttimeout()):
logger.info("[scrapertools.py] cachePage url="+url)
modoCache = config.get_setting("cache.mode")
'''
if config.get_platform()=="plex":
from PMS import HTTP
try:
logger.info("url="+url)
data = HTTP.Request(url)
logger.info("descargada")
except:
data = ""
logger.error("Error descargando "+url)
import sys
for line in sys.exc_info():
logger.error( "%s" % line )
return data
'''
    # CACHE_NUNCA: always goes to the URL to download,
    # mandatory for POST requests
if modoCache == CACHE_NUNCA or post is not None:
logger.info("[scrapertools.py] MODO_CACHE=2 (no cachear)")
try:
data = downloadpage(url,post,headers, timeout=timeout)
except:
data=""
    # CACHE_SIEMPRE: always reads from the cache, without checking dates, except when the entry does not exist
elif modoCache == CACHE_SIEMPRE:
logger.info("[scrapertools.py] MODO_CACHE=1 (cachear todo)")
        # Gets the cached file names for this url
cachedFile, newFile = getCacheFileNames(url)
        # If there is none, download
if cachedFile == "":
logger.debug("[scrapertools.py] No está en cache")
            # Downloads it
data = downloadpage(url,post,headers)
            # Saves it to the cache
outfile = open(newFile,"w")
outfile.write(data)
outfile.flush()
outfile.close()
logger.info("[scrapertools.py] Grabado a " + newFile)
else:
logger.info("[scrapertools.py] Leyendo de cache " + cachedFile)
infile = open( cachedFile )
data = infile.read()
infile.close()
    # CACHE_ACTIVA: reads from the cache if the page has not changed
else:
logger.info("[scrapertools.py] MODO_CACHE=0 (automática)")
        # Downloaded data
data = ""
        # Gets the cached file names for this url
cachedFile, newFile = getCacheFileNames(url)
        # If there is none, download
if cachedFile == "":
logger.debug("[scrapertools.py] No está en cache")
            # Downloads it
data = downloadpage(url,post,headers)
            # Saves it to the cache
outfile = open(newFile,"w")
outfile.write(data)
outfile.flush()
outfile.close()
logger.info("[scrapertools.py] Grabado a " + newFile)
        # If there is exactly one, checks the timestamp (makes an if-modified-since request)
else:
            # Extracts the old timestamp from the file name
oldtimestamp = time.mktime( time.strptime(cachedFile[-20:-6], "%Y%m%d%H%M%S") )
logger.info("[scrapertools.py] oldtimestamp="+cachedFile[-20:-6])
logger.info("[scrapertools.py] oldtimestamp="+time.ctime(oldtimestamp))
            # Makes the request
updated,data = downloadtools.downloadIfNotModifiedSince(url,oldtimestamp)
            # If it has changed
if updated:
                # Deletes the old one
logger.debug("[scrapertools.py] Borrando "+cachedFile)
os.remove(cachedFile)
                # Saves the new one to the cache
outfile = open(newFile,"w")
outfile.write(data)
outfile.flush()
outfile.close()
logger.info("[scrapertools.py] Grabado a " + newFile)
            # Returns the content of the cached file
else:
logger.info("[scrapertools.py] Leyendo de cache " + cachedFile)
infile = open( cachedFile )
data = infile.read()
infile.close()
return data
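# Illustrative sketch (not part of the original module): a typical caller just
# fetches a page and lets the configured cache.mode decide whether the network
# or the on-disk cache is used. The function name and URL are placeholders.
def _cache_page_example():
    # With cache.mode == CACHE_NUNCA every call hits the network; with
    # CACHE_SIEMPRE the first call fills the cache and later calls read from disk.
    data = cache_page("http://www.example.com/")
    return data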
def getCacheFileNames(url):
    # Gets the cache directory for this url
siteCachePath = getSiteCachePath(url)
    # Gets the cache ID (md5 of the URL)
cacheId = get_md5(url)
logger.debug("[scrapertools.py] cacheId="+cacheId)
    # Current timestamp
nowtimestamp = time.strftime("%Y%m%d%H%M%S", time.localtime())
logger.debug("[scrapertools.py] nowtimestamp="+nowtimestamp)
    # File name
    # The cache is stored in a CACHE + URL structure
ruta = os.path.join( siteCachePath , cacheId[:2] , cacheId[2:] )
newFile = os.path.join( ruta , nowtimestamp + ".cache" )
logger.debug("[scrapertools.py] newFile="+newFile)
if not os.path.exists(ruta):
os.makedirs( ruta )
    # Looks for that file in the cache
cachedFile = getCachedFile(siteCachePath,cacheId)
return cachedFile, newFile
# Looks for that file in the cache
def getCachedFile(siteCachePath,cacheId):
mascara = os.path.join(siteCachePath,cacheId[:2],cacheId[2:],"*.cache")
logger.debug("[scrapertools.py] mascara="+mascara)
import glob
ficheros = glob.glob( mascara )
logger.debug("[scrapertools.py] Hay %d ficheros con ese id" % len(ficheros))
cachedFile = ""
    # If there is more than one, deletes them (probably left over from development) and downloads again
if len(ficheros)>1:
logger.debug("[scrapertools.py] Cache inválida")
for fichero in ficheros:
logger.debug("[scrapertools.py] Borrando "+fichero)
os.remove(fichero)
cachedFile = ""
    # There is one: the cached file
elif len(ficheros)==1:
cachedFile = ficheros[0]
return cachedFile
def getSiteCachePath(url):
    # Gets the main domain of the URL
dominio = urlparse.urlparse(url)[1]
logger.debug("[scrapertools.py] dominio="+dominio)
nombres = dominio.split(".")
if len(nombres)>1:
dominio = nombres[len(nombres)-2]+"."+nombres[len(nombres)-1]
else:
dominio = nombres[0]
logger.debug("[scrapertools.py] dominio="+dominio)
    # Creates a cache directory for addresses of that domain
siteCachePath = os.path.join( CACHE_PATH , dominio )
if not os.path.exists(CACHE_PATH):
try:
os.mkdir( CACHE_PATH )
except:
logger.error("[scrapertools.py] Error al crear directorio "+CACHE_PATH)
if not os.path.exists(siteCachePath):
try:
os.mkdir( siteCachePath )
except:
logger.error("[scrapertools.py] Error al crear directorio "+siteCachePath)
logger.debug("[scrapertools.py] siteCachePath="+siteCachePath)
return siteCachePath
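# For example (hypothetical values): for url "http://www.example.com/page" the
# domain becomes "example.com" and a cached copy ends up under
#   CACHE_PATH/example.com/<md5[:2]>/<md5[2:]>/<YYYYMMDDHHMMSS>.cache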
def cachePage2(url,headers):
logger.info("Descargando " + url)
inicio = time.clock()
req = urllib2.Request(url)
for header in headers:
logger.info(header[0]+":"+header[1])
req.add_header(header[0], header[1])
try:
response = urllib2.urlopen(req)
except:
req = urllib2.Request(url.replace(" ","%20"))
for header in headers:
logger.info(header[0]+":"+header[1])
req.add_header(header[0], header[1])
response = urllib2.urlopen(req)
data=response.read()
response.close()
fin = time.clock()
logger.info("Descargado en %d segundos " % (fin-inicio+1))
'''
outfile = open(localFileName,"w")
outfile.write(data)
outfile.flush()
outfile.close()
logger.info("Grabado a " + localFileName)
'''
return data
def cachePagePost(url,post):
logger.info("Descargando " + url)
inicio = time.clock()
req = urllib2.Request(url,post)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
try:
response = urllib2.urlopen(req)
except:
req = urllib2.Request(url.replace(" ","%20"),post)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
data=response.read()
response.close()
fin = time.clock()
logger.info("Descargado en %d segundos " % (fin-inicio+1))
'''
outfile = open(localFileName,"w")
outfile.write(data)
outfile.flush()
outfile.close()
logger.info("Grabado a " + localFileName)
'''
return data
class NoRedirectHandler(urllib2.HTTPRedirectHandler):
def http_error_302(self, req, fp, code, msg, headers):
infourl = urllib.addinfourl(fp, headers, req.get_full_url())
infourl.status = code
infourl.code = code
return infourl
http_error_300 = http_error_302
http_error_301 = http_error_302
http_error_303 = http_error_302
http_error_307 = http_error_302
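# NoRedirectHandler returns the 3xx response itself instead of following it,
# which lets callers such as get_header_from_response() read the "location"
# header of the redirect rather than the body of the final page.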
def downloadpage(url,post=None,headers=[['User-Agent', 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; es-ES; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12']],follow_redirects=True, timeout=socket.getdefaulttimeout(), header_to_get=None):
logger.info("[scrapertools.py] downloadpage")
logger.info("[scrapertools.py] url="+url)
if post is not None:
logger.info("[scrapertools.py] post="+post)
else:
logger.info("[scrapertools.py] post=None")
    # ---------------------------------
    # Install the cookies
    # ---------------------------------
    # Initialize the cookies library
ficherocookies = os.path.join( config.get_setting("cookies.dir"), 'cookies.dat' )
logger.info("[scrapertools.py] ficherocookies="+ficherocookies)
cj = None
ClientCookie = None
cookielib = None
# Let's see if cookielib is available
try:
logger.info("[scrapertools.py] Importando cookielib")
import cookielib
except ImportError:
logger.info("[scrapertools.py] cookielib no disponible")
# If importing cookielib fails
# let's try ClientCookie
try:
logger.info("[scrapertools.py] Importando ClientCookie")
import ClientCookie
except ImportError:
logger.info("[scrapertools.py] ClientCookie no disponible")
# ClientCookie isn't available either
urlopen = urllib2.urlopen
Request = urllib2.Request
else:
logger.info("[scrapertools.py] ClientCookie disponible")
# imported ClientCookie
urlopen = ClientCookie.urlopen
Request = ClientCookie.Request
cj = ClientCookie.MozillaCookieJar()
else:
logger.info("[scrapertools.py] cookielib disponible")
# importing cookielib worked
urlopen = urllib2.urlopen
Request = urllib2.Request
logger.info("[scrapertools.py] cambio en politicas")
#cj = cookielib.LWPCookieJar(ficherocookies,policy=MyCookiePolicy())
#cj = cookielib.MozillaCookieJar(ficherocookies,policy=MyCookiePolicy)
#cj = cookielib.FileCookieJar(ficherocookies)
try:
cj = cookielib.MozillaCookieJar()
cj.set_policy(MyCookiePolicy())
except:
import traceback
logger.info(traceback.format_exc())
if cj is not None:
# we successfully imported
# one of the two cookie handling modules
logger.info("[scrapertools.py] Hay cookies")
if os.path.isfile(ficherocookies):
logger.info("[scrapertools.py] Leyendo fichero cookies")
# if we have a cookie file already saved
# then load the cookies into the Cookie Jar
try:
cj.load(ficherocookies,ignore_discard=True)
except:
logger.info("[scrapertools.py] El fichero de cookies existe pero es ilegible, se borra")
os.remove(ficherocookies)
# Now we need to get our Cookie Jar
# installed in the opener;
# for fetching URLs
if cookielib is not None:
logger.info("[scrapertools.py] opener usando urllib2 (cookielib)")
# if we use cookielib
# then we get the HTTPCookieProcessor
# and install the opener in urllib2
if not follow_redirects:
opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=DEBUG_LEVEL),urllib2.HTTPCookieProcessor(cj),NoRedirectHandler())
else:
opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=DEBUG_LEVEL),urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
else:
logger.info("[scrapertools.py] opener usando ClientCookie")
# if we use ClientCookie
# then we get the HTTPCookieProcessor
# and install the opener in ClientCookie
opener = ClientCookie.build_opener(ClientCookie.HTTPCookieProcessor(cj))
ClientCookie.install_opener(opener)
# -------------------------------------------------
    # Cookies installed, launch the request
# -------------------------------------------------
    # Counter
inicio = time.clock()
    # Dictionary for the headers
txheaders = {}
    # Builds the request
if post is None:
logger.info("[scrapertools.py] petición GET")
else:
logger.info("[scrapertools.py] petición POST")
    # Adds the headers
logger.info("[scrapertools.py] ---------------------------")
for header in headers:
logger.info("[scrapertools.py] header %s=%s" % (str(header[0]),str(header[1])) )
txheaders[header[0]]=header[1]
logger.info("[scrapertools.py] ---------------------------")
req = Request(url, post, txheaders)
try:
if timeout is None:
logger.info("[scrapertools.py] Peticion sin timeout")
handle=urlopen(req)
else:
logger.info("[scrapertools.py] Peticion con timeout")
            #For all versions:
deftimeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(timeout)
handle=urlopen(req)
socket.setdefaulttimeout(deftimeout)
logger.info("[scrapertools.py] ...hecha")
        # Updates the cookie store
logger.info("[scrapertools.py] Grabando cookies...")
cj.save(ficherocookies,ignore_discard=True) # ,ignore_expires=True
logger.info("[scrapertools.py] ...hecho")
        # Reads the data and closes
if handle.info().get('Content-Encoding') == 'gzip':
logger.info("[scrapertools.py] gzipped")
fin = inicio
import StringIO
data=handle.read()
compressedstream = StringIO.StringIO(data)
import gzip
gzipper = gzip.GzipFile(fileobj=compressedstream)
data = gzipper.read()
gzipper.close()
fin = time.clock()
else:
logger.info("[scrapertools.py] normal")
data = handle.read()
except urllib2.HTTPError,e:
import traceback
logger.info(traceback.format_exc())
data = e.read()
#logger.info("data="+repr(data))
return data
info = handle.info()
logger.info("[scrapertools.py] Respuesta")
logger.info("[scrapertools.py] ---------------------------")
for header in info:
logger.info("[scrapertools.py] "+header+"="+info[header])
        # Trick to return the value of a single header instead of the whole body
if header_to_get is not None:
if header==header_to_get:
data=info[header]
handle.close()
logger.info("[scrapertools.py] ---------------------------")
'''
# Lanza la petición
try:
response = urllib2.urlopen(req)
# Si falla la repite sustituyendo caracteres especiales
except:
req = urllib2.Request(url.replace(" ","%20"))
# Añade las cabeceras
for header in headers:
req.add_header(header[0],header[1])
response = urllib2.urlopen(req)
'''
    # Elapsed time
fin = time.clock()
logger.info("[scrapertools.py] Descargado en %d segundos " % (fin-inicio+1))
return data
import cookielib
class MyCookiePolicy(cookielib.DefaultCookiePolicy):
def set_ok(self, cookie, request):
#logger.info("set_ok Cookie "+repr(cookie)+" request "+repr(request))
#cookie.discard = False
#cookie.
devuelve = cookielib.DefaultCookiePolicy.set_ok(self, cookie, request)
#logger.info("set_ok "+repr(devuelve))
return devuelve
def return_ok(self, cookie, request):
#logger.info("return_ok Cookie "+repr(cookie)+" request "+repr(request))
#cookie.discard = False
devuelve = cookielib.DefaultCookiePolicy.return_ok(self, cookie, request)
#logger.info("return_ok "+repr(devuelve))
return devuelve
def domain_return_ok(self, domain, request):
#logger.info("domain_return_ok domain "+repr(domain)+" request "+repr(request))
devuelve = cookielib.DefaultCookiePolicy.domain_return_ok(self, domain, request)
#logger.info("domain_return_ok "+repr(devuelve))
return devuelve
def path_return_ok(self,path, request):
#logger.info("path_return_ok path "+repr(path)+" request "+repr(request))
devuelve = cookielib.DefaultCookiePolicy.path_return_ok(self, path, request)
#logger.info("path_return_ok "+repr(devuelve))
return devuelve
def downloadpagewithcookies(url):
    # ---------------------------------
    # Install the cookies
    # ---------------------------------
    # Initialize the cookies library
ficherocookies = os.path.join( config.get_data_path(), 'cookies.dat' )
logger.info("[scrapertools.py] Cookiefile="+ficherocookies)
cj = None
ClientCookie = None
cookielib = None
# Let's see if cookielib is available
try:
import cookielib
except ImportError:
# If importing cookielib fails
# let's try ClientCookie
try:
import ClientCookie
except ImportError:
# ClientCookie isn't available either
urlopen = urllib2.urlopen
Request = urllib2.Request
else:
# imported ClientCookie
urlopen = ClientCookie.urlopen
Request = ClientCookie.Request
cj = ClientCookie.MozillaCookieJar()
else:
# importing cookielib worked
urlopen = urllib2.urlopen
Request = urllib2.Request
cj = cookielib.MozillaCookieJar()
# This is a subclass of FileCookieJar
# that has useful load and save methods
if cj is not None:
# we successfully imported
# one of the two cookie handling modules
if os.path.isfile(ficherocookies):
# if we have a cookie file already saved
# then load the cookies into the Cookie Jar
try:
cj.load(ficherocookies)
except:
logger.info("[scrapertools.py] El fichero de cookies existe pero es ilegible, se borra")
os.remove(ficherocookies)
# Now we need to get our Cookie Jar
# installed in the opener;
# for fetching URLs
if cookielib is not None:
# if we use cookielib
# then we get the HTTPCookieProcessor
# and install the opener in urllib2
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
else:
# if we use ClientCookie
# then we get the HTTPCookieProcessor
# and install the opener in ClientCookie
opener = ClientCookie.build_opener(ClientCookie.HTTPCookieProcessor(cj))
ClientCookie.install_opener(opener)
#print "-------------------------------------------------------"
theurl = url
# an example url that sets a cookie,
# try different urls here and see the cookie collection you can make !
#txheaders = {'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3',
# 'Referer':'http://www.megavideo.com/?s=signup'}
txheaders = {
'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3',
'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Host':'www.meristation.com',
'Accept-Language':'es-es,es;q=0.8,en-us;q=0.5,en;q=0.3',
'Accept-Charset':'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
'Keep-Alive':'300',
'Connection':'keep-alive'}
# fake a user agent, some websites (like google) don't like automated exploration
req = Request(theurl, None, txheaders)
handle = urlopen(req)
cj.save(ficherocookies) # save the cookies again
data=handle.read()
handle.close()
return data
def downloadpageWithoutCookies(url):
logger.info("[scrapertools.py] Descargando " + url)
inicio = time.clock()
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 6.0; es-ES; rv:1.9.0.14) Gecko/2009082707 Firefox/3.0.14')
req.add_header('X-Requested-With','XMLHttpRequest')
try:
response = urllib2.urlopen(req)
except:
req = urllib2.Request(url.replace(" ","%20"))
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 6.0; es-ES; rv:1.9.0.14) Gecko/2009082707 Firefox/3.0.14')
response = urllib2.urlopen(req)
data=response.read()
response.close()
fin = time.clock()
logger.info("[scrapertools.py] Descargado en %d segundos " % (fin-inicio+1))
return data
def downloadpageGzip(url):
    # Initialize the cookies library
ficherocookies = os.path.join( config.get_data_path(), 'cookies.dat' )
logger.info("Cookiefile="+ficherocookies)
inicio = time.clock()
cj = None
ClientCookie = None
cookielib = None
# Let's see if cookielib is available
try:
import cookielib
except ImportError:
# If importing cookielib fails
# let's try ClientCookie
try:
import ClientCookie
except ImportError:
# ClientCookie isn't available either
urlopen = urllib2.urlopen
Request = urllib2.Request
else:
# imported ClientCookie
urlopen = ClientCookie.urlopen
Request = ClientCookie.Request
cj = ClientCookie.MozillaCookieJar()
else:
# importing cookielib worked
urlopen = urllib2.urlopen
Request = urllib2.Request
cj = cookielib.MozillaCookieJar()
# This is a subclass of FileCookieJar
# that has useful load and save methods
# ---------------------------------
    # Install the cookies
# ---------------------------------
if cj is not None:
# we successfully imported
# one of the two cookie handling modules
if os.path.isfile(ficherocookies):
# if we have a cookie file already saved
# then load the cookies into the Cookie Jar
try:
cj.load(ficherocookies)
except:
logger.info("[scrapertools.py] El fichero de cookies existe pero es ilegible, se borra")
os.remove(ficherocookies)
# Now we need to get our Cookie Jar
# installed in the opener;
# for fetching URLs
if cookielib is not None:
# if we use cookielib
# then we get the HTTPCookieProcessor
# and install the opener in urllib2
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
else:
# if we use ClientCookie
# then we get the HTTPCookieProcessor
# and install the opener in ClientCookie
opener = ClientCookie.build_opener(ClientCookie.HTTPCookieProcessor(cj))
ClientCookie.install_opener(opener)
#print "-------------------------------------------------------"
theurl = url
# an example url that sets a cookie,
# try different urls here and see the cookie collection you can make !
#txheaders = {'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3',
# 'Referer':'http://www.megavideo.com/?s=signup'}
import httplib
parsedurl = urlparse.urlparse(url)
logger.info("parsedurl="+str(parsedurl))
txheaders = {
'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3',
'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Language':'es-es,es;q=0.8,en-us;q=0.5,en;q=0.3',
'Accept-Charset':'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
'Accept-Encoding':'gzip,deflate',
'Keep-Alive':'300',
'Connection':'keep-alive',
'Referer':parsedurl[0]+"://"+parsedurl[1]}
logger.info(str(txheaders))
# fake a user agent, some websites (like google) don't like automated exploration
req = Request(theurl, None, txheaders)
handle = urlopen(req)
cj.save(ficherocookies) # save the cookies again
data=handle.read()
handle.close()
fin = time.clock()
logger.info("[scrapertools.py] Descargado 'Gzipped data' en %d segundos " % (fin-inicio+1))
    # Decompresses the Gzip data file
try:
fin = inicio
import StringIO
compressedstream = StringIO.StringIO(data)
import gzip
gzipper = gzip.GzipFile(fileobj=compressedstream)
data1 = gzipper.read()
gzipper.close()<|fim▁hole|> except:
return data
def printMatches(matches):
i = 0
for match in matches:
logger.info("[scrapertools.py] %d %s" % (i , match))
i = i + 1
def get_match(data,patron,index=0):
matches = re.findall( patron , data , flags=re.DOTALL )
return matches[index]
def find_single_match(data,patron,index=0):
try:
matches = re.findall( patron , data , flags=re.DOTALL )
return matches[index]
except:
return ""
def entityunescape(cadena):
return unescape(cadena)
def unescape(text):
"""Removes HTML or XML character references
and entities from a text string.
    keep &amp;, &gt;, &lt; in the source code.
from Fredrik Lundh
http://effbot.org/zone/re-sub.htm#unescape-html
"""
def fixup(m):
text = m.group(0)
if text[:2] == "&#":
# character reference
try:
if text[:3] == "&#x":
return unichr(int(text[3:-1], 16)).encode("utf-8")
else:
return unichr(int(text[2:-1])).encode("utf-8")
except ValueError:
logger.info("error de valor")
pass
else:
# named entity
try:
'''
if text[1:-1] == "amp":
text = "&amp;"
elif text[1:-1] == "gt":
text = "&gt;"
elif text[1:-1] == "lt":
text = "&lt;"
else:
print text[1:-1]
text = unichr(htmlentitydefs.name2codepoint[text[1:-1]]).encode("utf-8")
'''
import htmlentitydefs
text = unichr(htmlentitydefs.name2codepoint[text[1:-1]]).encode("utf-8")
except KeyError:
logger.info("keyerror")
pass
except:
pass
return text # leave as is
return re.sub("&#?\w+;", fixup, text)
# Converts html entity codes (e.g. "&ntilde;") and replaces them with the corresponding utf-8 character ("ñ")
def decodeHtmlentities(string):
string = entitiesfix(string)
entity_re = re.compile("&(#?)(\d{1,5}|\w{1,8});")
def substitute_entity(match):
from htmlentitydefs import name2codepoint as n2cp
ent = match.group(2)
if match.group(1) == "#":
return unichr(int(ent)).encode('utf-8')
else:
cp = n2cp.get(ent)
if cp:
return unichr(cp).encode('utf-8')
else:
return match.group()
return entity_re.subn(substitute_entity, string)[0]
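# For example: decodeHtmlentities("Espa&ntilde;a") -> "España", and numeric
# references work too: decodeHtmlentities("&#241;") -> "ñ"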
def entitiesfix(string):
    # Entities always start with the & symbol and end with a semicolon ( ; ).
string = string.replace("á","á")
string = string.replace("é","é")
string = string.replace("í","í")
string = string.replace("ó","ó")
string = string.replace("ú","ú")
string = string.replace("Á","Á")
string = string.replace("É","É")
string = string.replace("Í","Í")
string = string.replace("Ó","Ó")
string = string.replace("Ú","Ú")
string = string.replace("ü" ,"ü")
string = string.replace("Ü" ,"Ü")
string = string.replace("ñ","ñ")
string = string.replace("¿" ,"¿")
string = string.replace("¡" ,"¡")
string = string.replace(";;" ,";")
return string
def htmlclean(cadena):
cadena = cadena.replace("<center>","")
cadena = cadena.replace("</center>","")
cadena = cadena.replace("<cite>","")
cadena = cadena.replace("</cite>","")
cadena = cadena.replace("<em>","")
cadena = cadena.replace("</em>","")
cadena = cadena.replace("<b>","")
cadena = cadena.replace("</b>","")
cadena = cadena.replace("<u>","")
cadena = cadena.replace("</u>","")
cadena = cadena.replace("<li>","")
cadena = cadena.replace("</li>","")
cadena = cadena.replace("<tbody>","")
cadena = cadena.replace("</tbody>","")
cadena = cadena.replace("<tr>","")
cadena = cadena.replace("</tr>","")
cadena = cadena.replace("<![CDATA[","")
cadena = cadena.replace("<Br />","")
cadena = cadena.replace("<BR />","")
cadena = cadena.replace("<Br>","")
cadena = re.compile("<script.*?</script>",re.DOTALL).sub("",cadena)
cadena = re.compile("<option[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</option>","")
cadena = re.compile("<i[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</iframe>","")
cadena = cadena.replace("</i>","")
cadena = re.compile("<table[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</table>","")
cadena = re.compile("<td[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</td>","")
cadena = re.compile("<div[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</div>","")
cadena = re.compile("<dd[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</dd>","")
cadena = re.compile("<font[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</font>","")
cadena = re.compile("<strong[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</strong>","")
cadena = re.compile("<small[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</small>","")
cadena = re.compile("<span[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</span>","")
cadena = re.compile("<a[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</a>","")
cadena = re.compile("<p[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</p>","")
cadena = re.compile("<ul[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</ul>","")
cadena = re.compile("<h1[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</h1>","")
cadena = re.compile("<h2[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</h2>","")
cadena = re.compile("<h3[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</h3>","")
cadena = re.compile("<h4[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</h4>","")
cadena = re.compile("<!--[^-]+-->",re.DOTALL).sub("",cadena)
cadena = re.compile("<img[^>]*>",re.DOTALL).sub("",cadena)
cadena = re.compile("<br[^>]*>",re.DOTALL).sub("",cadena)
cadena = re.compile("<object[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</object>","")
cadena = re.compile("<param[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</param>","")
cadena = re.compile("<embed[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</embed>","")
cadena = re.compile("<title[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</title>","")
cadena = re.compile("<link[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("\t","")
cadena = entityunescape(cadena)
return cadena
def slugify(title):
#print title
    # Replaces accented characters and eñes
title = title.replace("Á","a")
title = title.replace("É","e")
title = title.replace("Í","i")
title = title.replace("Ó","o")
title = title.replace("Ú","u")
title = title.replace("á","a")
title = title.replace("é","e")
title = title.replace("í","i")
title = title.replace("ó","o")
title = title.replace("ú","u")
title = title.replace("À","a")
title = title.replace("È","e")
title = title.replace("Ì","i")
title = title.replace("Ò","o")
title = title.replace("Ù","u")
title = title.replace("à","a")
title = title.replace("è","e")
title = title.replace("ì","i")
title = title.replace("ò","o")
title = title.replace("ù","u")
title = title.replace("ç","c")
title = title.replace("Ç","C")
title = title.replace("Ñ","n")
title = title.replace("ñ","n")
title = title.replace("/","-")
title = title.replace("&","&")
    # Converts to lowercase
title = title.lower().strip()
    # Removes invalid characters
validchars = "abcdefghijklmnopqrstuvwxyz1234567890- "
title = ''.join(c for c in title if c in validchars)
    # Collapses duplicated whitespace and line breaks
title = re.compile("\s+",re.DOTALL).sub(" ",title)
    # Replaces whitespace with hyphens
title = re.compile("\s",re.DOTALL).sub("-",title.strip())
    # Collapses duplicated hyphens
title = re.compile("\-+",re.DOTALL).sub("-",title)
    # Fixes special cases
if title.startswith("-"):
title = title [1:]
if title=="":
title = "-"+str(time.time())
return title
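# For example: slugify("El Señor de los Anillos") -> "el-senor-de-los-anillos"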
def remove_show_from_title(title,show):
#print slugify(title)+" == "+slugify(show)
    # Removes the show name from the title
if slugify(title).startswith(slugify(show)):
        # Converts to unicode first, otherwise the encoding gets lost
title = unicode(title,"utf-8","replace")
show = unicode(show,"utf-8","replace")
title = title[ len(show) : ].strip()
if title.startswith("-"):
title = title[ 1: ].strip()
if title=="":
title = str( time.time() )
        # Back to utf-8
title = title.encode("utf-8","ignore")
show = show.encode("utf-8","ignore")
return title
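# For example: remove_show_from_title("Breaking Bad - Pilot", "Breaking Bad") -> "Pilot"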
def getRandom(str):
return get_md5(str)
def getLocationHeaderFromResponse(url):
return get_header_from_response(url,header_to_get="location")
def get_header_from_response(url,header_to_get="",post=None,headers=[['User-Agent', 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; es-ES; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12']]):
header_to_get = header_to_get.lower()
logger.info("[scrapertools.py] get_header_from_response url="+url+", header_to_get="+header_to_get)
if post is not None:
logger.info("[scrapertools.py] post="+post)
else:
logger.info("[scrapertools.py] post=None")
    # Initialize the cookies library
ficherocookies = os.path.join( config.get_setting("cookies.dir"), 'cookies.dat' )
logger.info("[scrapertools.py] ficherocookies="+ficherocookies)
cj = None
ClientCookie = None
cookielib = None
import cookielib
# importing cookielib worked
urlopen = urllib2.urlopen
Request = urllib2.Request
cj = cookielib.MozillaCookieJar()
# This is a subclass of FileCookieJar
# that has useful load and save methods
if os.path.isfile(ficherocookies):
logger.info("[scrapertools.py] Leyendo fichero cookies")
# if we have a cookie file already saved
# then load the cookies into the Cookie Jar
try:
cj.load(ficherocookies)
except:
logger.info("[scrapertools.py] El fichero de cookies existe pero es ilegible, se borra")
os.remove(ficherocookies)
if header_to_get=="location":
opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=DEBUG_LEVEL),urllib2.HTTPCookieProcessor(cj),NoRedirectHandler())
else:
opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=DEBUG_LEVEL),urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
    # Counter
inicio = time.clock()
    # Dictionary for the headers
txheaders = {}
    # Traces the request
if post is None:
logger.info("[scrapertools.py] petición GET")
else:
logger.info("[scrapertools.py] petición POST")
    # Filenium login and password
# http://abcd%40gmail.com:[email protected]/get/Oi8vd3d3/LmZpbGVz/ZXJ2ZS5j/b20vZmls/ZS9kTnBL/dm11/b0/?.zip
if "filenium" in url:
from servers import filenium
url , authorization_header = filenium.extract_authorization_header(url)
headers.append( [ "Authorization",authorization_header ] )
    # Header array
logger.info("[scrapertools.py] ---------------------------")
for header in headers:
logger.info("[scrapertools.py] header=%s" % str(header[0]))
txheaders[header[0]]=header[1]
logger.info("[scrapertools.py] ---------------------------")
    # Builds the request
req = Request(url, post, txheaders)
handle = urlopen(req)
    # Updates the cookie store
cj.save(ficherocookies)
    # Reads the data and closes
#data=handle.read()
info = handle.info()
logger.info("[scrapertools.py] Respuesta")
logger.info("[scrapertools.py] ---------------------------")
location_header=""
for header in info:
logger.info("[scrapertools.py] "+header+"="+info[header])
if header==header_to_get:
location_header=info[header]
handle.close()
logger.info("[scrapertools.py] ---------------------------")
    # Elapsed time
fin = time.clock()
logger.info("[scrapertools.py] Descargado en %d segundos " % (fin-inicio+1))
return location_header
def get_headers_from_response(url,post=None,headers=[['User-Agent', 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; es-ES; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12']]):
return_headers = []
logger.info("[scrapertools.py] get_headers_from_response url="+url)
if post is not None:
logger.info("[scrapertools.py] post="+post)
else:
logger.info("[scrapertools.py] post=None")
    # Initialize the cookies library
ficherocookies = os.path.join( config.get_setting("cookies.dir"), 'cookies.dat' )
logger.info("[scrapertools.py] ficherocookies="+ficherocookies)
cj = None
ClientCookie = None
cookielib = None
import cookielib
# importing cookielib worked
urlopen = urllib2.urlopen
Request = urllib2.Request
cj = cookielib.MozillaCookieJar()
# This is a subclass of FileCookieJar
# that has useful load and save methods
if os.path.isfile(ficherocookies):
logger.info("[scrapertools.py] Leyendo fichero cookies")
# if we have a cookie file already saved
# then load the cookies into the Cookie Jar
try:
cj.load(ficherocookies)
except:
logger.info("[scrapertools.py] El fichero de cookies existe pero es ilegible, se borra")
os.remove(ficherocookies)
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj),NoRedirectHandler())
urllib2.install_opener(opener)
    # Timer
    inicio = time.clock()
    # Dictionary for the headers
    txheaders = {}
    # Log the request
    if post is None:
        logger.info("[scrapertools.py] GET request")
    else:
        logger.info("[scrapertools.py] POST request")
    # Header list
logger.info("[scrapertools.py] ---------------------------")
for header in headers:
logger.info("[scrapertools.py] header=%s" % str(header[0]))
txheaders[header[0]]=header[1]
logger.info("[scrapertools.py] ---------------------------")
    # Build the request
req = Request(url, post, txheaders)
handle = urlopen(req)
    # Update the cookie store
    cj.save(ficherocookies)
    # Read the data and close
    #data=handle.read()
info = handle.info()
logger.info("[scrapertools.py] Respuesta")
logger.info("[scrapertools.py] ---------------------------")
location_header=""
for header in info:
logger.info("[scrapertools.py] "+header+"="+info[header])
return_headers.append( [header,info[header]] )
handle.close()
logger.info("[scrapertools.py] ---------------------------")
    # Elapsed time
    fin = time.clock()
    logger.info("[scrapertools.py] Downloaded in %d seconds " % (fin-inicio+1))
return return_headers
def unseo(cadena):
if cadena.upper().startswith("VER GRATIS LA PELICULA "):
cadena = cadena[23:]
elif cadena.upper().startswith("VER GRATIS PELICULA "):
cadena = cadena[20:]
elif cadena.upper().startswith("VER ONLINE LA PELICULA "):
cadena = cadena[23:]
elif cadena.upper().startswith("VER GRATIS "):
cadena = cadena[11:]
elif cadena.upper().startswith("VER ONLINE "):
cadena = cadena[11:]
elif cadena.upper().startswith("DESCARGA DIRECTA "):
cadena = cadena[17:]
return cadena
#scrapertools.get_filename_from_url(media_url)[-4:]
def get_filename_from_url(url):
import urlparse
parsed_url = urlparse.urlparse(url)
try:
filename = parsed_url.path
except:
        # If this fails, the urlparse implementation does not expose attributes such as "path"
if len(parsed_url)>=4:
filename = parsed_url[2]
else:
filename = ""
return filename
def get_domain_from_url(url):
import urlparse
parsed_url = urlparse.urlparse(url)
try:
filename = parsed_url.netloc
except:
        # If this fails, the urlparse implementation does not expose attributes such as "netloc"
if len(parsed_url)>=4:
filename = parsed_url[1]
else:
filename = ""
return filename
# Parses the title of a tv show episode and returns the season id + episode id in format "1x01"
def get_season_and_episode(title):
logger.info("get_season_and_episode('"+title+"')")
    patron = "(\d+)[xX](\d+)"
matches = re.compile(patron).findall(title)
logger.info(str(matches))
filename=matches[0][0]+"x"+matches[0][1]
logger.info("get_season_and_episode('"+title+"') -> "+filename)
return filename
def get_sha1(cadena):
try:
import hashlib
devuelve = hashlib.sha1(cadena).hexdigest()
except:
import sha
import binascii
devuelve = binascii.hexlify(sha.new(cadena).digest())
return devuelve
def get_md5(cadena):
try:
import hashlib
devuelve = hashlib.md5(cadena).hexdigest()
except:
import md5
import binascii
devuelve = binascii.hexlify(md5.new(cadena).digest())
return devuelve
def read_body_and_headers(url, post=None, headers=[], follow_redirects=False, timeout=None):
logger.info("read_body_and_headers "+url)
if post is not None:
logger.info("read_body_and_headers post="+post)
if len(headers)==0:
headers.append(["User-Agent","Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:18.0) Gecko/20100101 Firefox/18.0"])
# Start cookie lib
ficherocookies = os.path.join( config.get_data_path(), 'cookies.dat' )
logger.info("read_body_and_headers cookies_file="+ficherocookies)
cj = None
ClientCookie = None
cookielib = None
# Let's see if cookielib is available
try:
logger.info("read_body_and_headers importing cookielib")
import cookielib
except ImportError:
logger.info("read_body_and_headers cookielib no disponible")
# If importing cookielib fails
# let's try ClientCookie
try:
logger.info("read_body_and_headers importing ClientCookie")
import ClientCookie
except ImportError:
logger.info("read_body_and_headers ClientCookie not available")
# ClientCookie isn't available either
urlopen = urllib2.urlopen
Request = urllib2.Request
else:
logger.info("read_body_and_headers ClientCookie available")
# imported ClientCookie
urlopen = ClientCookie.urlopen
Request = ClientCookie.Request
cj = ClientCookie.MozillaCookieJar()
else:
logger.info("read_body_and_headers cookielib available")
# importing cookielib worked
urlopen = urllib2.urlopen
Request = urllib2.Request
cj = cookielib.MozillaCookieJar()
# This is a subclass of FileCookieJar
# that has useful load and save methods
if cj is not None:
# we successfully imported
# one of the two cookie handling modules
logger.info("read_body_and_headers Cookies enabled")
if os.path.isfile(ficherocookies):
logger.info("read_body_and_headers Reading cookie file")
# if we have a cookie file already saved
# then load the cookies into the Cookie Jar
try:
cj.load(ficherocookies)
except:
logger.info("read_body_and_headers Wrong cookie file, deleting...")
os.remove(ficherocookies)
# Now we need to get our Cookie Jar
# installed in the opener;
# for fetching URLs
if cookielib is not None:
logger.info("read_body_and_headers opener using urllib2 (cookielib)")
# if we use cookielib
# then we get the HTTPCookieProcessor
# and install the opener in urllib2
if not follow_redirects:
opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=DEBUG_LEVEL),urllib2.HTTPCookieProcessor(cj),NoRedirectHandler())
else:
opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=DEBUG_LEVEL),urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
else:
logger.info("read_body_and_headers opener using ClientCookie")
# if we use ClientCookie
# then we get the HTTPCookieProcessor
# and install the opener in ClientCookie
opener = ClientCookie.build_opener(ClientCookie.HTTPCookieProcessor(cj))
ClientCookie.install_opener(opener)
# -------------------------------------------------
    # Cookies installed, issue the request
# -------------------------------------------------
    # Timer
inicio = time.clock()
    # Dictionary for the headers
txheaders = {}
    # Build the request
if post is None:
logger.info("read_body_and_headers GET request")
else:
logger.info("read_body_and_headers POST request")
    # Add the headers
logger.info("read_body_and_headers ---------------------------")
for header in headers:
logger.info("read_body_and_headers header %s=%s" % (str(header[0]),str(header[1])) )
txheaders[header[0]]=header[1]
logger.info("read_body_and_headers ---------------------------")
req = Request(url, post, txheaders)
if timeout is None:
handle=urlopen(req)
else:
        # Available from Python 2.6 onwards --> handle = urlopen(req, timeout=timeout)
        # For all versions:
try:
import socket
deftimeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(timeout)
handle=urlopen(req)
socket.setdefaulttimeout(deftimeout)
except:
import sys
for line in sys.exc_info():
logger.info( "%s" % line )
    # Update the cookie store
cj.save(ficherocookies)
    # Read the data and close
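    # Transparently decompress gzip-encoded responses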
if handle.info().get('Content-Encoding') == 'gzip':
buf = StringIO( handle.read())
f = gzip.GzipFile(fileobj=buf)
data = f.read()
else:
data=handle.read()
info = handle.info()
logger.info("read_body_and_headers Response")
returnheaders=[]
logger.info("read_body_and_headers ---------------------------")
for header in info:
logger.info("read_body_and_headers "+header+"="+info[header])
returnheaders.append([header,info[header]])
handle.close()
logger.info("read_body_and_headers ---------------------------")
    '''
    # Issue the request
    try:
        response = urllib2.urlopen(req)
    # If it fails, retry replacing special characters
    except:
        req = urllib2.Request(url.replace(" ","%20"))
        # Add the headers
        for header in headers:
            req.add_header(header[0],header[1])
        response = urllib2.urlopen(req)
    '''
    # Elapsed time
fin = time.clock()
logger.info("read_body_and_headers Downloaded in %d seconds " % (fin-inicio+1))
logger.info("read_body_and_headers body="+data)
return data,returnheaders<|fim▁end|> | fin = time.clock()
logger.info("[scrapertools.py] 'Gzipped data' descomprimido en %d segundos " % (fin-inicio+1))
return data1 |
<|file_name|>settings.js<|end_file_name|><|fim▁begin|>module.exports = {
token: 'TELEGRAM_BOT_TOKEN',
polling: {
timeout: 3,<|fim▁hole|><|fim▁end|> | limit: 100
}
}; |
<|file_name|>state.ts<|end_file_name|><|fim▁begin|>import { Derivable, DerivableAtom, ErrorWrapper, ReactorOptions, State, unresolved } from '@politie/sherlock';
export type StateObject<V> =
{ value: V, errored: false, resolved: true } |
{ error: any, errored: true, resolved: true } |
{ errored: false, resolved: false };
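/**
 * Reads the current state of a derivable (value, error or unresolved) as a
 * plain serializable object.
 */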
export function getStateObject<V>(from: Derivable<V>): StateObject<V> {
return toStateObject(from.getState());
}
export function toStateObject<V>(state: State<V>): StateObject<V> {
if (state === unresolved) {
return { errored: false, resolved: false };
}
if (state instanceof ErrorWrapper) {
const { error } = state;
return { error, errored: true, resolved: true };
}
return { value: state, errored: false, resolved: true };
}
export function fromStateObject<V>(state: StateObject<V>): State<V> {
if (state.errored) {
return new ErrorWrapper(state.error);
}
if (state.resolved) {
return state.value;
}
return unresolved;
}
<|fim▁hole|>export function dematerialize<V>(derivable: Derivable<StateObject<V>>): Derivable<V> {
return derivable.map(fromStateObject);
}
export function setStateObject<V>(to: DerivableAtom<V>, state: StateObject<V>) {
if (!state.resolved) {
to.unset();
} else if (state.errored) {
to.setError(state.error);
} else {
to.set(state.value);
}
}
export function syncState<V>(from: Derivable<V>, to: DerivableAtom<V>, opts?: Partial<ReactorOptions<StateObject<V>>>) {
return materialize(from).react(state => setStateObject(to, state), opts);
}
export function copyState<V>(from: Derivable<V>, to: DerivableAtom<V>) {
setStateObject(to, getStateObject(from));
}<|fim▁end|> | export function materialize<V>(derivable: Derivable<V>): Derivable<StateObject<V>> {
return derivable.mapState(toStateObject);
}
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | __package__ = 'archivebox.core' |
<|file_name|>PencilCodeEditor.ts<|end_file_name|><|fim▁begin|>// Copyright 2019 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Requires for PencilCodeEditor interaction.
*/
require(
'interactions/PencilCodeEditor/directives/' +
'pencil-code-editor-rules.service.ts');
require(
'interactions/PencilCodeEditor/directives/' +
'pencil-code-editor-validation.service.ts');<|fim▁hole|>require(
'interactions/PencilCodeEditor/directives/' +
'oppia-interactive-pencil-code-editor.component.ts');
require(
'interactions/PencilCodeEditor/directives/' +
'oppia-response-pencil-code-editor.component.ts');
require(
'interactions/PencilCodeEditor/directives/' +
'oppia-short-response-pencil-code-editor.component.ts');<|fim▁end|> | |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import numpy as np
import param
from ..core import (HoloMap, DynamicMap, CompositeOverlay, Layout,
GridSpace, NdLayout, Store)
from ..core.util import (match_spec, is_number, wrap_tuple, basestring,
get_overlay_spec, unique_iterator, safe_unicode)
def displayable(obj):
"""
Predicate that returns whether the object is displayable or not
(i.e whether the object obeys the nesting hierarchy
"""
if isinstance(obj, HoloMap):
return not (obj.type in [Layout, GridSpace, NdLayout])
if isinstance(obj, (GridSpace, Layout, NdLayout)):
for el in obj.values():
if not displayable(el):
return False
return True
return True
class Warning(param.Parameterized): pass
display_warning = Warning(name='Warning')
def collate(obj):
if isinstance(obj, HoloMap):
display_warning.warning("Nesting %ss within a HoloMap makes it difficult "
"to access your data or control how it appears; "
"we recommend calling .collate() on the HoloMap "
"in order to follow the recommended nesting "
"structure shown in the Composing Data tutorial"
"(http://git.io/vtIQh)" % obj.type.__name__)
return obj.collate()
elif isinstance(obj, (Layout, NdLayout)):
try:
display_warning.warning(
"Layout contains HoloMaps which are not nested in the "
"recommended format for accessing your data; calling "
".collate() on these objects will resolve any violations "
"of the recommended nesting presented in the Composing Data "
"tutorial (http://git.io/vqs03)")
expanded = []
for el in obj.values():
if isinstance(el, HoloMap) and not displayable(el):
collated_layout = Layout.from_values(el.collate())
expanded.extend(collated_layout.values())
return Layout(expanded)
except:
raise Exception(undisplayable_info(obj))
else:
raise Exception(undisplayable_info(obj))
def undisplayable_info(obj, html=False):
"Generate helpful message regarding an undisplayable object"
collate = '<tt>collate</tt>' if html else 'collate'
info = "For more information, please consult the Composing Data tutorial (http://git.io/vtIQh)"
if isinstance(obj, HoloMap):
error = "HoloMap of %s objects cannot be displayed." % obj.type.__name__
remedy = "Please call the %s method to generate a displayable object" % collate
elif isinstance(obj, Layout):
error = "Layout containing HoloMaps of Layout or GridSpace objects cannot be displayed."
remedy = "Please call the %s method on the appropriate elements." % collate
elif isinstance(obj, GridSpace):
error = "GridSpace containing HoloMaps of Layouts cannot be displayed."
remedy = "Please call the %s method on the appropriate elements." % collate
if not html:
return '\n'.join([error, remedy, info])
else:
return "<center>{msg}</center>".format(msg=('<br>'.join(
['<b>%s</b>' % error, remedy, '<i>%s</i>' % info])))
def compute_sizes(sizes, size_fn, scaling_factor, scaling_method, base_size):
"""
Scales point sizes according to a scaling factor,
base size and size_fn, which will be applied before
scaling.
"""
if scaling_method == 'area':
pass
elif scaling_method == 'width':
scaling_factor = scaling_factor**2
else:
raise ValueError(
'Invalid value for argument "scaling_method": "{}". '
'Valid values are: "width", "area".'.format(scaling_method))
sizes = size_fn(sizes)
return (base_size*scaling_factor*sizes)
def get_sideplot_ranges(plot, element, main, ranges):
"""
Utility to find the range for an adjoined
plot given the plot, the element, the
Element the plot is adjoined to and the
dictionary of ranges.
"""
key = plot.current_key
dims = element.dimensions(label=True)
dim = dims[1] if dims[1] != 'Frequency' else dims[0]
range_item = main
if isinstance(main, HoloMap):
if issubclass(main.type, CompositeOverlay):
range_item = [hm for hm in main.split_overlays()[1]
if dim in hm.dimensions('all', label=True)][0]
else:
range_item = HoloMap({0: main}, kdims=['Frame'])
ranges = match_spec(range_item.last, ranges)<|fim▁hole|> else:
framewise = plot.lookup_options(range_item.last, 'norm').options.get('framewise')
if framewise and range_item.get(key, False):
main_range = range_item[key].range(dim)
else:
main_range = range_item.range(dim)
# If .main is an NdOverlay or a HoloMap of Overlays get the correct style
if isinstance(range_item, HoloMap):
range_item = range_item.last
if isinstance(range_item, CompositeOverlay):
range_item = [ov for ov in range_item
if dim in ov.dimensions('all', label=True)][0]
return range_item, main_range, dim
def within_range(range1, range2):
"""Checks whether range1 is within the range specified by range2."""
return ((range1[0] is None or range2[0] is None or range1[0] >= range2[0]) and
(range1[1] is None or range2[1] is None or range1[1] <= range2[1]))
def validate_sampled_mode(holomaps, dynmaps):
composite = HoloMap(enumerate(holomaps), kdims=['testing_kdim'])
holomap_kdims = set(unique_iterator([kd.name for dm in holomaps for kd in dm.kdims]))
hmranges = {d: composite.range(d) for d in holomap_kdims}
if any(not set(d.name for d in dm.kdims) <= holomap_kdims
for dm in dynmaps):
raise Exception('In sampled mode DynamicMap key dimensions must be a '
'subset of dimensions of the HoloMap(s) defining the sampling.')
elif not all(within_range(hmrange, dm.range(d)) for dm in dynmaps
for d, hmrange in hmranges.items() if d in dm.kdims):
raise Exception('HoloMap(s) have keys outside the ranges specified on '
'the DynamicMap(s).')
def get_dynamic_mode(composite):
"Returns the common mode of the dynamic maps in given composite object"
dynmaps = composite.traverse(lambda x: x, [DynamicMap])
holomaps = composite.traverse(lambda x: x, ['HoloMap'])
dynamic_modes = [m.call_mode for m in dynmaps]
dynamic_sampled = any(m.sampled for m in dynmaps)
if holomaps:
validate_sampled_mode(holomaps, dynmaps)
elif dynamic_sampled and not holomaps:
raise Exception("DynamicMaps in sampled mode must be displayed alongside "
"a HoloMap to define the sampling.")
if len(set(dynamic_modes)) > 1:
raise Exception("Cannot display composites of DynamicMap objects "
"with different interval modes (i.e open or bounded mode).")
elif dynamic_modes and not holomaps:
return 'bounded' if dynamic_modes[0] == 'key' else 'open', dynamic_sampled
else:
return None, dynamic_sampled
def initialize_sampled(obj, dimensions, key):
"""
Initializes any DynamicMaps in sampled mode.
"""
select = dict(zip([d.name for d in dimensions], key))
try:
obj.select([DynamicMap], **select)
except KeyError:
pass
def save_frames(obj, filename, fmt=None, backend=None, options=None):
"""
Utility to export object to files frame by frame, numbered individually.
Will use default backend and figure format by default.
"""
backend = Store.current_backend if backend is None else backend
renderer = Store.renderers[backend]
fmt = renderer.params('fig').objects[0] if fmt is None else fmt
plot = renderer.get_plot(obj)
for i in range(len(plot)):
plot.update(i)
renderer.save(plot, '%s_%s' % (filename, i), fmt=fmt, options=options)
def dynamic_update(plot, subplot, key, overlay, items):
"""
Given a plot, subplot and dynamically generated (Nd)Overlay
find the closest matching Element for that plot.
"""
match_spec = get_overlay_spec(overlay,
wrap_tuple(key),
subplot.current_frame)
specs = [(i, get_overlay_spec(overlay, wrap_tuple(k), el))
for i, (k, el) in enumerate(items)]
return closest_match(match_spec, specs)
def closest_match(match, specs, depth=0):
"""
Recursively iterates over type, group, label and overlay key,
finding the closest matching spec.
"""
new_specs = []
match_lengths = []
for i, spec in specs:
if spec[0] == match[0]:
new_specs.append((i, spec[1:]))
else:
if is_number(match[0]) and is_number(spec[0]):
match_length = -abs(match[0]-spec[0])
elif all(isinstance(s[0], basestring) for s in [spec, match]):
match_length = max(i for i in range(len(match[0]))
if match[0].startswith(spec[0][:i]))
else:
match_length = 0
match_lengths.append((i, match_length, spec[0]))
if len(new_specs) == 1:
return new_specs[0][0]
elif new_specs:
depth = depth+1
return closest_match(match[1:], new_specs, depth)
else:
if depth == 0 or not match_lengths:
return None
else:
return sorted(match_lengths, key=lambda x: -x[1])[0][0]
def map_colors(arr, crange, cmap, hex=True):
"""
Maps an array of values to RGB hex strings, given
a color range and colormap.
"""
if crange:
cmin, cmax = crange
else:
cmin, cmax = np.nanmin(arr), np.nanmax(arr)
arr = (arr - cmin) / (cmax-cmin)
arr = np.ma.array(arr, mask=np.logical_not(np.isfinite(arr)))
arr = cmap(arr)
if hex:
arr *= 255
return ["#{0:02x}{1:02x}{2:02x}".format(*(int(v) for v in c[:-1]))
for c in arr]
else:
return arr
def dim_axis_label(dimensions, separator=', '):
"""
Returns an axis label for one or more dimensions.
"""
if not isinstance(dimensions, list): dimensions = [dimensions]
return separator.join([safe_unicode(d.pprint_label)
for d in dimensions])<|fim▁end|> |
if dim in ranges:
main_range = ranges[dim] |
<|file_name|>highlight.js<|end_file_name|><|fim▁begin|>/*
Syntax highlighting with language autodetection.
http://softwaremaniacs.org/soft/highlight/
*/
var DEFAULT_LANGUAGES = ['python', 'ruby', 'perl', 'php', 'css', 'xml', 'html', 'django', 'javascript', 'java', 'cpp', 'sql', 'smalltalk'];
var ALL_LANGUAGES = (DEFAULT_LANGUAGES.join(',') + ',' + ['1c', 'ada', 'elisp', 'axapta', 'delphi', 'rib', 'rsl', 'vbscript'].join(',')).split(',');
var LANGUAGE_GROUPS = {
'xml': 'www',
'html': 'www',
'css': 'www',
'django': 'www',
'python': 'dynamic',
'perl': 'dynamic',
'php': 'dynamic',
'ruby': 'dynamic',
'cpp': 'static',
'java': 'static',
'delphi': 'static'
}
var IDENT_RE = '[a-zA-Z][a-zA-Z0-9_]*';
var UNDERSCORE_IDENT_RE = '[a-zA-Z_][a-zA-Z0-9_]*';
var NUMBER_RE = '\\b\\d+(\\.\\d+)?';
var C_NUMBER_RE = '\\b(0x[A-Za-z0-9]+|\\d+(\\.\\d+)?)';
// Common modes
var APOS_STRING_MODE = {
className: 'string',
begin: '\'', end: '\'',
illegal: '\\n',
contains: ['escape'],
relevance: 0
}
var QUOTE_STRING_MODE = {
className: 'string',
begin: '"', end: '"',
illegal: '\\n',
contains: ['escape'],
relevance: 0
}
var BACKSLASH_ESCAPE = {
className: 'escape',
begin: '\\\\.', end: '^',
relevance: 0
}
var C_LINE_COMMENT_MODE = {
className: 'comment',
begin: '//', end: '$',
relevance: 0
}
var C_BLOCK_COMMENT_MODE = {
className: 'comment',
begin: '/\\*', end: '\\*/'
}
var HASH_COMMENT_MODE = {
className: 'comment',
begin: '#', end: '$'
}
var C_NUMBER_MODE = {
className: 'number',
begin: C_NUMBER_RE, end: '^',
relevance: 0
}
var LANGUAGES = {}
var selected_languages = {};
function Highlighter(language_name, value) {
function subMode(lexem) {
if (!modes[modes.length - 1].contains)
return null;
for (var i in modes[modes.length - 1].contains) {
var className = modes[modes.length - 1].contains[i];
for (var key in language.modes)
if (language.modes[key].className == className && language.modes[key].beginRe.test(lexem))
return language.modes[key];
}//for
return null;
}//subMode
function endOfMode(mode_index, lexem) {
if (modes[mode_index].end && modes[mode_index].endRe.test(lexem))
return 1;
if (modes[mode_index].endsWithParent) {
var level = endOfMode(mode_index - 1, lexem);
return level ? level + 1 : 0;
}//if
return 0;
}//endOfMode
function isIllegal(lexem) {
if (!modes[modes.length - 1].illegalRe)
return false;
return modes[modes.length - 1].illegalRe.test(lexem);
}//isIllegal
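  // Splits off the next chunk of text up to the nearest terminator (a
  // contained mode's begin, an enclosing mode's end or an illegal lexem),
  // caching the combined terminator regexp on the current mode.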
function eatModeChunk(value, index) {
if (!modes[modes.length - 1].terminators) {
var terminators = [];
if (modes[modes.length - 1].contains)
for (var key in language.modes) {
if (contains(modes[modes.length - 1].contains, language.modes[key].className) &&
!contains(terminators, language.modes[key].begin))
terminators[terminators.length] = language.modes[key].begin;
}//for
var mode_index = modes.length - 1;
do {
if (modes[mode_index].end && !contains(terminators, modes[mode_index].end))
terminators[terminators.length] = modes[mode_index].end;
mode_index--;
} while (modes[mode_index + 1].endsWithParent);
if (modes[modes.length - 1].illegal)
if (!contains(terminators, modes[modes.length - 1].illegal))
terminators[terminators.length] = modes[modes.length - 1].illegal;
var terminator_re = '(' + terminators[0];
      for (var i = 1; i < terminators.length; i++)
terminator_re += '|' + terminators[i];
terminator_re += ')';
modes[modes.length - 1].terminators = langRe(language, terminator_re);
}//if
value = value.substr(index);
var match = modes[modes.length - 1].terminators.exec(value);
if (!match)
return [value, '', true];
if (match.index == 0)
return ['', match[0], false];
else
return [value.substr(0, match.index), match[0], false];
}//eatModeChunk
function escape(value) {
return value.replace(/&/gm, '&').replace(/</gm, '<').replace(/>/gm, '>');
}//escape
function keywordMatch(mode, match) {
var match_str = language.case_insensitive ? match[0].toLowerCase() : match[0]
for (var className in mode.keywordGroups) {
var value = mode.keywordGroups[className].hasOwnProperty(match_str);
if (value)
return [className, value];
}//for
return false;
}//keywordMatch
function processKeywords(buffer) {
var mode = modes[modes.length - 1];
if (!mode.keywords || !mode.lexems)
return escape(buffer);
if (!mode.lexemsRe) {
var lexems = [];
for (var key in mode.lexems)
if (!contains(lexems, mode.lexems[key]))
lexems[lexems.length] = mode.lexems[key];
var lexems_re = '(' + lexems[0];
for (var i = 1; i < lexems.length; i++)
lexems_re += '|' + lexems[i];
lexems_re += ')';
mode.lexemsRe = langRe(language, lexems_re, true);
}//if
var result = '';
var last_index = 0;
mode.lexemsRe.lastIndex = 0;
var match = mode.lexemsRe.exec(buffer);
while (match) {
result += escape(buffer.substr(last_index, match.index - last_index));
      var keyword_match = keywordMatch(mode, match);
if (keyword_match) {
keyword_count += keyword_match[1];
result += '<span class="'+ keyword_match[0] +'">' + escape(match[0]) + '</span>';
} else {
result += escape(match[0]);
}//if
last_index = mode.lexemsRe.lastIndex;
match = mode.lexemsRe.exec(buffer);
}//while
result += escape(buffer.substr(last_index, buffer.length - last_index));
return result;
}//processKeywords
function processModeInfo(buffer, lexem, end) {
if (end) {
result += processKeywords(modes[modes.length - 1].buffer + buffer);
return;
}//if
if (isIllegal(lexem))
throw 'Illegal';
var new_mode = subMode(lexem);
if (new_mode) {
modes[modes.length - 1].buffer += buffer;
result += processKeywords(modes[modes.length - 1].buffer);
if (new_mode.excludeBegin) {
result += lexem + '<span class="' + new_mode.className + '">';
new_mode.buffer = '';
} else {
result += '<span class="' + new_mode.className + '">';
new_mode.buffer = lexem;
}//if
modes[modes.length] = new_mode;
relevance += modes[modes.length - 1].relevance != undefined ? modes[modes.length - 1].relevance : 1;
return;
}//if
var end_level = endOfMode(modes.length - 1, lexem);
if (end_level) {
modes[modes.length - 1].buffer += buffer;
if (modes[modes.length - 1].excludeEnd) {
result += processKeywords(modes[modes.length - 1].buffer) + '</span>' + lexem;
} else {
result += processKeywords(modes[modes.length - 1].buffer + lexem) + '</span>';
}
while (end_level > 1) {
result += '</span>';
end_level--;
modes.length--;
}//while
modes.length--;
modes[modes.length - 1].buffer = '';
return;
}//if
}//processModeInfo
function highlight(value) {
var index = 0;
language.defaultMode.buffer = '';
do {
var mode_info = eatModeChunk(value, index);
processModeInfo(mode_info[0], mode_info[1], mode_info[2]);
index += mode_info[0].length + mode_info[1].length;
} while (!mode_info[2]);
if(modes.length > 1)
throw 'Illegal';
}//highlight
this.language_name = language_name;
var language = LANGUAGES[language_name];
var modes = [language.defaultMode];
var relevance = 0;
var keyword_count = 0;
var result = '';
try {
highlight(value);
this.relevance = relevance;
this.keyword_count = keyword_count;
this.result = result;
} catch (e) {
if (e == 'Illegal') {
this.relevance = 0;
this.keyword_count = 0;
this.result = escape(value);
} else {
throw e;
}//if
}//try
}//Highlighter
function contains(array, item) {
if (!array)
return false;
for (var key in array)
if (array[key] == item)
return true;
return false;
}//contains
function blockText(block) {
var result = '';
for (var i = 0; i < block.childNodes.length; i++)
if (block.childNodes[i].nodeType == 3)
result += block.childNodes[i].nodeValue;
else if (block.childNodes[i].nodeName == 'BR')
result += '\n';
else
throw 'Complex markup';
return result;
}//blockText
function initHighlight(block) {
if (block.className.search(/\bno\-highlight\b/) != -1)
return;
try {
blockText(block);
} catch (e) {
if (e == 'Complex markup')
return;
}//try
var classes = block.className.split(/\s+/);
for (var i = 0; i < classes.length; i++) {
if (LANGUAGES[classes[i]]) {
highlightLanguage(block, classes[i]);
return;
}//if
}//for
highlightAuto(block);
}//initHighlight
function highlightLanguage(block, language) {
var highlight = new Highlighter(language, blockText(block));
// See these 4 lines? This is IE's notion of "block.innerHTML = result". Love this browser :-/
var container = document.createElement('div');
container.innerHTML = '<pre><code class="' + block.className + '">' + highlight.result + '</code></pre>';
var environment = block.parentNode.parentNode;
environment.replaceChild(container.firstChild, block.parentNode);
}//highlightLanguage
function highlightAuto(block) {
var result = null;
var language = '';
var max_relevance = 2;
var relevance = 0;
var block_text = blockText(block);
for (var key in selected_languages) {
var highlight = new Highlighter(key, block_text);
relevance = highlight.keyword_count + highlight.relevance;
if (relevance > max_relevance) {
max_relevance = relevance;
result = highlight;
}//if
}//for
if(result) {
// See these 4 lines? This is IE's notion of "block.innerHTML = result". Love this browser :-/
var container = document.createElement('div');
container.innerHTML = '<pre><code class="' + result.language_name + '">' + result.result + '</code></pre>';
var environment = block.parentNode.parentNode;
environment.replaceChild(container.firstChild, block.parentNode);
}//if
}//highlightAuto
function langRe(language, value, global) {
var mode = 'm' + (language.case_insensitive ? 'i' : '') + (global ? 'g' : '');
return new RegExp(value, mode);
}//re
function compileRes() {
for (var i in LANGUAGES) {
var language = LANGUAGES[i];
for (var key in language.modes) {
if (language.modes[key].begin)
language.modes[key].beginRe = langRe(language, '^' + language.modes[key].begin);
if (language.modes[key].end)
language.modes[key].endRe = langRe(language, '^' + language.modes[key].end);
if (language.modes[key].illegal)
language.modes[key].illegalRe = langRe(language, '^(?:' + language.modes[key].illegal + ')');
language.defaultMode.illegalRe = langRe(language, '^(?:' + language.defaultMode.illegal + ')');
}//for
}//for
}//compileRes
function compileKeywords() {
function compileModeKeywords(mode) {
if (!mode.keywordGroups) {
for (var key in mode.keywords) {
if (mode.keywords[key] instanceof Object)
mode.keywordGroups = mode.keywords;
else
mode.keywordGroups = {'keyword': mode.keywords};
break;
}//for
}//if
}//compileModeKeywords
for (var i in LANGUAGES) {
var language = LANGUAGES[i];
compileModeKeywords(language.defaultMode);
for (var key in language.modes) {
compileModeKeywords(language.modes[key]);
}//for
}//for
}//compileKeywords
function initHighlighting() {
if (initHighlighting.called)
return;
initHighlighting.called = true;
compileRes();
compileKeywords();
if (arguments.length) {
for (var i = 0; i < arguments.length; i++) {
if (LANGUAGES[arguments[i]]) {
selected_languages[arguments[i]] = LANGUAGES[arguments[i]];
}//if
}//for
} else
selected_languages = LANGUAGES;
var pres = document.getElementsByTagName('pre');
for (var i = 0; i < pres.length; i++) {
if (pres[i].firstChild && pres[i].firstChild.nodeName == 'CODE')
initHighlight(pres[i].firstChild);
}//for
}//initHighlighting
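// Derives the base path from highlight.js's own <script> tag and writes one
// <script> per requested language pack, deduplicated by language-group file.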
function injectScripts(languages) {
var scripts = document.getElementsByTagName('SCRIPT');
for (var i=0; i < scripts.length; i++) {
if (scripts[i].src.match(/highlight\.js(\?.+)?$/)) {
var path = scripts[i].src.replace(/highlight\.js(\?.+)?$/, '');
break;
}//if
}//for
if (languages.length == 0) {
languages = DEFAULT_LANGUAGES;
}//if
var injected = {}
for (var i=0; i < languages.length; i++) {
var filename = LANGUAGE_GROUPS[languages[i]] ? LANGUAGE_GROUPS[languages[i]] : languages[i];
if (!injected[filename]) {
document.write('<script type="text/javascript" src="' + path + 'languages/' + filename + '.js"></script>');
injected[filename] = true;
}//if
}//for
}//injectScripts
<|fim▁hole|>function initHighlightingOnLoad() {
var original_arguments = arguments;
injectScripts(arguments);
var handler = function(){initHighlighting.apply(null, original_arguments)};
if (window.addEventListener) {
window.addEventListener('DOMContentLoaded', handler, false);
window.addEventListener('load', handler, false);
} else if (window.attachEvent)
window.attachEvent('onload', handler);
else
window.onload = handler;
}//initHighlightingOnLoad<|fim▁end|> | |
<|file_name|>event.go<|end_file_name|><|fim▁begin|>package instana
import (
"time"
)
// EventData is the construct serialized for the host agent
type EventData struct {
Title string `json:"title"`
Text string `json:"text"`
// Duration in milliseconds
Duration int `json:"duration"`
// Severity with value of -1, 5, 10 : see type severity
Severity int `json:"severity"`
Plugin string `json:"plugin,omitempty"`
ID string `json:"id,omitempty"`
Host string `json:"host"`
}
type severity int
//Severity values for events sent to the instana agent
const (
SeverityChange severity = -1
SeverityWarning severity = 5
SeverityCritical severity = 10
)
// Defaults for the Event API
const (
ServicePlugin = "com.instana.forge.connection.http.logical.LogicalWebApp"
ServiceHost = ""
)
// SendDefaultServiceEvent sends a default event which already contains the service and host
func SendDefaultServiceEvent(title string, text string, sev severity, duration time.Duration) {
if sensor == nil {
// Since no sensor was initialized, there is no default service (as
// configured on the sensor) so we send blank.
SendServiceEvent("", title, text, sev, duration)
} else {
SendServiceEvent(sensor.serviceName, title, text, sev, duration)
}
}
// SendServiceEvent send an event on a specific service
func SendServiceEvent(service string, title string, text string, sev severity, duration time.Duration) {
sendEvent(&EventData{
Title: title,
Text: text,
Severity: int(sev),
Plugin: ServicePlugin,
ID: service,
Host: ServiceHost,
Duration: int(duration / time.Millisecond),
})
}
// SendHostEvent send an event on the current host
func SendHostEvent(title string, text string, sev severity, duration time.Duration) {
sendEvent(&EventData{
Title: title,<|fim▁hole|>}
func sendEvent(event *EventData) {
if sensor == nil {
// If the sensor hasn't initialized we do so here so that we properly
// discover where the host agent may be as it varies between a
// normal host, docker, kubernetes etc..
InitSensor(&Options{})
}
//we do fire & forget here, because the whole pid dance isn't necessary to send events
go sensor.agent.request(sensor.agent.makeURL(agentEventURL), "POST", event)
}<|fim▁end|> | Text: text,
Duration: int(duration / time.Millisecond),
Severity: int(sev),
}) |
<|file_name|>file_logger.rs<|end_file_name|><|fim▁begin|>use std::io::Write;
use std::fs::{File, OpenOptions};
use {Logger, MessageType, format_message};
/// Write log to text file.
pub struct FileLogger {
log_file: String
}
impl FileLogger {
pub fn new(file_path: &str) -> FileLogger {
File::create(file_path).unwrap();
FileLogger { log_file: file_path.to_string() }
}
}
impl Logger for FileLogger {
fn log(&self, msg_type:MessageType, message:&str) {
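        // The log file is re-opened in append mode on every call: simple and
        // safe across logger instances, at the cost of one open per message.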
let mut file = OpenOptions::new()
.append(true)<|fim▁hole|> }
}
#[test]
fn file_logger_works() {
let logger = FileLogger::new("test.log");
logger.info("Test info message.");
logger.warn("Test warn message.");
logger.error("Test error message.");
}<|fim▁end|> | .write(true)
.open(&self.log_file).unwrap();
        file.write_all(format_message(msg_type, message).as_bytes()).unwrap(); |
<|file_name|>0011_auto_20170211_1640.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-02-11 15:40
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('reports', '0010_auto_20170211_0306'),
]<|fim▁hole|> migrations.RemoveField(
model_name='listreport',
name='board',
),
migrations.RemoveField(
model_name='listreport',
name='list',
),
migrations.DeleteModel(
name='ListReport',
),
]<|fim▁end|> |
operations = [ |
<|file_name|>aiplatform_v1_generated_vizier_service_delete_study_sync.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for DeleteStudy
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_v1_generated_VizierService_DeleteStudy_sync]
from google.cloud import aiplatform_v1
def sample_delete_study():
# Create a client
client = aiplatform_v1.VizierServiceClient()
# Initialize request argument(s)
request = aiplatform_v1.DeleteStudyRequest(
name="name_value",
)
# Make the request<|fim▁hole|># [END aiplatform_v1_generated_VizierService_DeleteStudy_sync]<|fim▁end|> | client.delete_study(request=request)
|
<|file_name|>myDay.js<|end_file_name|><|fim▁begin|>import { Meteor } from 'meteor/meteor';
import { Template } from 'meteor/templating';
import { lodash } from 'meteor/stevezhu:lodash';
import { Bert } from 'meteor/themeteorchef:bert';
import { moment } from 'meteor/momentjs:moment';
import 'meteor/sacha:spin';
import { Workshops } from '../../../api/workshops/schema.js';
import { UserQuestions } from '../../../api/userQuestions/schema.js';
import './myDay.jade';
import '../../components/connect/connect.js';
Template.myDay.onCreated(function() {
this.autorun(() => {
this.subscribe('allWorkshopsForTheDay');
this.subscribe('resultForQuestionsAnswered', Meteor.userId());
});
});
Template.myDay.helpers({
workshopData() {
return Workshops.findOne({ _id: this._id }, {
fields: {
name: 1,
dateStart: 1,
dateEnd: 1,
color: 1,
peopleToGo: 1,
description: 1
}
});
},
workshopColor(index) {
if (index % 2 === 0) {
return 'grey2';
} else {
return false;
}
},
customWorkshops() {
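        // Average the user's answered-question results per workshop and
        // return them as percentages, sorted best match first.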
let questions = UserQuestions.find({ userId: Meteor.userId(), answered: true, deprecated: false }, { fields: { result: 1 } }).fetch();
let questionsObject = {};
let questionsArray = [];<|fim▁hole|> if (cur1.workshopId) {
if (questionsObject[cur1.workshopId]) {
questionsObject[cur1.workshopId].value += cur1.result;
questionsObject[cur1.workshopId].long += 1;
} else {
questionsObject[cur1.workshopId] = {
value: cur1.result,
long: 1
};
}
}
});
});
for (var prop in questionsObject) {
questionsArray.push({
_id: prop,
value: lodash.round(questionsObject[prop].value / questionsObject[prop].long * 100, 2)
});
}
questionsArray.sort((a, b) => {
if (a.value < b.value) {
return 1;
} else if (a.value > b.value) {
return -1;
} else {
return 0;
}
});
return questionsArray;
},
dateStart() {
return moment(this.dateStart).format('H:mm');
},
dateEnd() {
return moment(this.dateEnd).format('H:mm');
},
isUserAlreadyIn() {
if (lodash.findIndex(this.peopleToGo, ['userId', Meteor.userId()]) !== -1) {
return true;
} else {
return false;
}
}
});
Template.myDay.events({
'click .goToWorkshop': function(event) {
event.preventDefault();
const data = {
userId: Meteor.userId(),
workshopId: this._id
};
Meteor.call('addUserToWorkshop', data, (error) => {
if (error) {
return Bert.alert(error.message, 'danger', 'growl-top-right');
}
});
},
'click .removeFromWorkshop': function(event) {
event.preventDefault();
const data = {
userId: Meteor.userId(),
workshopId: this._id
};
Meteor.call('removeUserFromWorkshop', data, (error) => {
if (error) {
return Bert.alert(error.message, 'danger', 'growl-top-right');
}
});
}
});<|fim▁end|> | questions.map((cur) => {
cur.result.map((cur1) => { |
<|file_name|>sample_get_operations.py<|end_file_name|><|fim▁begin|># coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: sample_get_operations.py
DESCRIPTION:
This sample demonstrates how to list/get all document model operations (succeeded, in-progress, failed)
associated with the Form Recognizer resource. Kinds of operations returned are "documentModelBuild",
"documentModelCompose", and "documentModelCopyTo". Note that operation information only persists for
24 hours. If the operation was successful, the document model can be accessed using get_model or list_models APIs.
USAGE:
python sample_get_operations.py
Set the environment variables with your own values before running the sample:
1) AZURE_FORM_RECOGNIZER_ENDPOINT - the endpoint to your Cognitive Services resource.
2) AZURE_FORM_RECOGNIZER_KEY - your Form Recognizer API key
"""
import os
def sample_get_operations():
# [START list_operations]
from azure.core.credentials import AzureKeyCredential
from azure.ai.formrecognizer import DocumentModelAdministrationClient
endpoint = os.environ["AZURE_FORM_RECOGNIZER_ENDPOINT"]
key = os.environ["AZURE_FORM_RECOGNIZER_KEY"]
document_model_admin_client = DocumentModelAdministrationClient(endpoint=endpoint, credential=AzureKeyCredential(key))
operations = list(document_model_admin_client.list_operations())
print("The following document model operations exist under my resource:")
for operation in operations:
print("\nOperation ID: {}".format(operation.operation_id))
print("Operation kind: {}".format(operation.kind))
print("Operation status: {}".format(operation.status))
print("Operation percent completed: {}".format(operation.percent_completed))
print("Operation created on: {}".format(operation.created_on))
print("Operation last updated on: {}".format(operation.last_updated_on))
print("Resource location of successful operation: {}".format(operation.resource_location))
# [END list_operations]
# [START get_operation]
# Get an operation by ID
if operations:
print("\nGetting operation info by ID: {}".format(operations[0].operation_id))
operation_info = document_model_admin_client.get_operation(operations[0].operation_id)
if operation_info.status == "succeeded":<|fim▁hole|> print("My {} operation is completed.".format(operation_info.kind))
result = operation_info.result
print("Model ID: {}".format(result.model_id))
elif operation_info.status == "failed":
print("My {} operation failed.".format(operation_info.kind))
error = operation_info.error
print("{}: {}".format(error.code, error.message))
else:
print("My operation status is {}".format(operation_info.status))
else:
print("No operations found.")
# [END get_operation]
if __name__ == '__main__':
sample_get_operations()<|fim▁end|> | |
<|file_name|>jupyter_message.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 The Evcxr Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|>// limitations under the License.
use crate::connection::{Connection, HmacSha256};
use anyhow::{anyhow, bail, Result};
use chrono::Utc;
use generic_array::GenericArray;
use json::{self, JsonValue};
use std::{self, fmt};
use uuid::Uuid;
struct RawMessage {
zmq_identities: Vec<Vec<u8>>,
jparts: Vec<Vec<u8>>,
}
impl RawMessage {
pub(crate) fn read(connection: &Connection) -> Result<RawMessage> {
Self::from_multipart(connection.socket.recv_multipart(0)?, connection)
}
pub(crate) fn from_multipart(
mut multipart: Vec<Vec<u8>>,
connection: &Connection,
) -> Result<RawMessage> {
let delimiter_index = multipart
.iter()
.position(|part| &part[..] == DELIMITER)
.ok_or_else(|| anyhow!("Missing delimeter"))?;
let jparts: Vec<_> = multipart.drain(delimiter_index + 2..).collect();
let hmac = multipart.pop().unwrap();
// Remove delimiter, so that what's left is just the identities.
multipart.pop();
let zmq_identities = multipart;
let raw_message = RawMessage {
zmq_identities,
jparts,
};
if let Some(mac_template) = &connection.mac {
let mut mac = mac_template.clone();
raw_message.digest(&mut mac);
use hmac::Mac;
if let Err(error) = mac.verify(GenericArray::from_slice(&hex::decode(&hmac)?)) {
bail!("{}", error);
}
}
Ok(raw_message)
}
fn send(self, connection: &Connection) -> Result<()> {
use hmac::Mac;
let hmac = if let Some(mac_template) = &connection.mac {
let mut mac = mac_template.clone();
self.digest(&mut mac);
hex::encode(mac.finalize().into_bytes().as_slice())
} else {
String::new()
};
let mut parts: Vec<&[u8]> = Vec::new();
for part in &self.zmq_identities {
parts.push(part);
}
parts.push(DELIMITER);
parts.push(hmac.as_bytes());
for part in &self.jparts {
parts.push(part);
}
connection.socket.send_multipart(&parts, 0)?;
Ok(())
}
fn digest(&self, mac: &mut HmacSha256) {
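        // Only the JSON frames are signed; ZMQ routing identities are
        // excluded from the HMAC.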
use hmac::Mac;
for part in &self.jparts {
mac.update(part);
}
}
}
#[derive(Clone)]
pub(crate) struct JupyterMessage {
zmq_identities: Vec<Vec<u8>>,
header: JsonValue,
parent_header: JsonValue,
metadata: JsonValue,
content: JsonValue,
}
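// Frames before this delimiter are ZMQ routing identities; frames after it
// are the HMAC signature followed by the four JSON message parts.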
const DELIMITER: &[u8] = b"<IDS|MSG>";
impl JupyterMessage {
pub(crate) fn read(connection: &Connection) -> Result<JupyterMessage> {
Self::from_raw_message(RawMessage::read(connection)?)
}
fn from_raw_message(raw_message: RawMessage) -> Result<JupyterMessage> {
fn message_to_json(message: &[u8]) -> Result<JsonValue> {
Ok(json::parse(std::str::from_utf8(message)?)?)
}
if raw_message.jparts.len() < 4 {
bail!("Insufficient message parts {}", raw_message.jparts.len());
}
Ok(JupyterMessage {
zmq_identities: raw_message.zmq_identities,
header: message_to_json(&raw_message.jparts[0])?,
parent_header: message_to_json(&raw_message.jparts[1])?,
metadata: message_to_json(&raw_message.jparts[2])?,
content: message_to_json(&raw_message.jparts[3])?,
})
}
pub(crate) fn message_type(&self) -> &str {
self.header["msg_type"].as_str().unwrap_or("")
}
pub(crate) fn code(&self) -> &str {
self.content["code"].as_str().unwrap_or("")
}
pub(crate) fn cursor_pos(&self) -> usize {
self.content["cursor_pos"].as_usize().unwrap_or_default()
}
pub(crate) fn target_name(&self) -> &str {
self.content["target_name"].as_str().unwrap_or("")
}
pub(crate) fn data(&self) -> &JsonValue {
&self.content["data"]
}
pub(crate) fn comm_id(&self) -> &str {
self.content["comm_id"].as_str().unwrap_or("")
}
// Creates a new child message of this message. ZMQ identities are not transferred.
pub(crate) fn new_message(&self, msg_type: &str) -> JupyterMessage {
let mut header = self.header.clone();
header["msg_type"] = JsonValue::String(msg_type.to_owned());
header["username"] = JsonValue::String("kernel".to_owned());
header["msg_id"] = JsonValue::String(Uuid::new_v4().to_string());
header["date"] = JsonValue::String(Utc::now().to_rfc3339());
JupyterMessage {
zmq_identities: Vec::new(),
header,
parent_header: self.header.clone(),
metadata: JsonValue::new_object(),
content: JsonValue::new_object(),
}
}
// Creates a reply to this message. This is a child with the message type determined
// automatically by replacing "request" with "reply". ZMQ identities are transferred.
pub(crate) fn new_reply(&self) -> JupyterMessage {
let mut reply = self.new_message(&self.message_type().replace("_request", "_reply"));
reply.zmq_identities = self.zmq_identities.clone();
reply
}
#[must_use = "Need to send this message for it to have any effect"]
pub(crate) fn comm_close_message(&self) -> JupyterMessage {
self.new_message("comm_close").with_content(object! {
"comm_id" => self.comm_id()
})
}
pub(crate) fn get_content(&self) -> &JsonValue {
&self.content
}
pub(crate) fn with_content(mut self, content: JsonValue) -> JupyterMessage {
self.content = content;
self
}
pub(crate) fn with_message_type(mut self, msg_type: &str) -> JupyterMessage {
self.header["msg_type"] = JsonValue::String(msg_type.to_owned());
self
}
pub(crate) fn without_parent_header(mut self) -> JupyterMessage {
self.parent_header = object! {};
self
}
pub(crate) fn send(&self, connection: &Connection) -> Result<()> {
// If performance is a concern, we can probably avoid the clone and to_vec calls with a bit
// of refactoring.
let raw_message = RawMessage {
zmq_identities: self.zmq_identities.clone(),
jparts: vec![
self.header.dump().as_bytes().to_vec(),
self.parent_header.dump().as_bytes().to_vec(),
self.metadata.dump().as_bytes().to_vec(),
self.content.dump().as_bytes().to_vec(),
],
};
raw_message.send(connection)
}
}
impl fmt::Debug for JupyterMessage {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "\nHEADER {}", self.header.pretty(2))?;
writeln!(f, "PARENT_HEADER {}", self.parent_header.pretty(2))?;
writeln!(f, "METADATA {}", self.metadata.pretty(2))?;
writeln!(f, "CONTENT {}\n", self.content.pretty(2))?;
Ok(())
}
}<|fim▁end|> | // See the License for the specific language governing permissions and |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import bawebauth.apps.bawebauth.fields
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),<|fim▁hole|> operations = [
migrations.CreateModel(
name='Device',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100, verbose_name='name')),
('ident', models.CharField(max_length=40, verbose_name='ident')),
('crdate', models.DateTimeField(auto_now_add=True, verbose_name='date created')),
('tstamp', models.DateTimeField(auto_now=True, verbose_name='date edited')),
('active', models.BooleanField(default=False, verbose_name='active')),
('enabled', models.BooleanField(default=False, verbose_name='enabled')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Usage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('send', bawebauth.apps.bawebauth.fields.PositiveBigIntegerField(verbose_name='bytes send')),
('received', bawebauth.apps.bawebauth.fields.PositiveBigIntegerField(verbose_name='bytes received')),
('crdate', models.DateTimeField(auto_now_add=True, verbose_name='date created')),
('device', models.ForeignKey(to='bawebauth.Device')),
],
options={
},
bases=(models.Model,),
),
]<|fim▁end|> | ]
|
<|file_name|>schema.py<|end_file_name|><|fim▁begin|>from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.models import NOT_PROVIDED
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s"
sql_alter_column_null = "MODIFY %(column)s %(type)s NULL"
sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL"
sql_alter_column_type = "MODIFY %(column)s %(type)s"
sql_rename_column = "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s"
sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
<|fim▁hole|> sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s)"
sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"
sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
def quote_value(self, value):
return self.connection.escape(value)
def skip_default(self, field):
"""
MySQL doesn't accept default values for longtext and longblob
and implicitly treats these columns as nullable.
"""
return field.db_type(self.connection) in {'longtext', 'longblob'}
def add_field(self, model, field):
super(DatabaseSchemaEditor, self).add_field(model, field)
# Simulate the effect of a one-off default.
if self.skip_default(field) and field.default not in {None, NOT_PROVIDED}:
effective_default = self.effective_default(field)
self.execute('UPDATE %(table)s SET %(column)s = %%s' % {
'table': self.quote_name(model._meta.db_table),
'column': self.quote_name(field.column),
}, [effective_default])<|fim▁end|> | |
<|file_name|>general_steps.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Copyright 2015 Telefonica Investigacion y Desarrollo, S.A.U
This file is part of Orion Context Broker.
Orion Context Broker is free software: you can redistribute it and/or
modify it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
Orion Context Broker is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with Orion Context Broker. If not, see http://www.gnu.org/licenses/.
For those usages not covered by this license please contact with
iot_support at tid dot es
"""
__author__ = 'Iván Arias León (ivan dot ariasleon at telefonica dot com)'
import behave
from behave import step
from iotqatools.helpers_utils import *
from iotqatools.cb_v2_utils import CB
from iotqatools.mongo_utils import Mongo
from iotqatools.remote_log_utils import Remote_Log
from iotqatools.fabric_utils import FabricSupport
from tools.properties_config import Properties # methods in properties class
from tools.NGSI_v2 import NGSI
# constants
properties_class = Properties()
CONTEXT_BROKER_ENV = u'context_broker_env'
MONGO_ENV = u'mongo_env'
# HTTP status code
status_codes = {'OK': 200,
'Created': 201,
'No Content': 204,
'Moved Permanently': 301,
'Redirect': 307,
'Bad Request': 400,
'unauthorized': 401,
'Not Found': 404,
'Method Not Allowed': 405,
'Not Acceptable': 406,
'Conflict': 409,
'Content Length Required': 411,
'Request Entity Too Large': 413,
'Unsupported Media Type': 415,
'Unprocessable Entity': 422,
'Internal Server Error': 500}
behave.use_step_matcher("re")
__logger__ = logging.getLogger("steps")
# --------------- general_operations ----------------------
@step(u'send a API entry point request')
def send_a_base_request(context):<|fim▁hole|> """
__logger__.debug("Sending a API entry point request: /v2 ...")
props = properties_class.read_properties()[CONTEXT_BROKER_ENV]
context.cb = CB(protocol=props["CB_PROTOCOL"], host=props["CB_HOST"], port=props["CB_PORT"])
context.resp = context.cb.get_base_request()
__logger__.info("...Sent a API entry point request: /v2 correctly")
@step(u'send a version request')
def send_a_version_request(context):
"""
send a version request
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Sending a version request...")
context.props_cb_env = properties_class.read_properties()[CONTEXT_BROKER_ENV]
context.cb = CB(protocol=context.props_cb_env["CB_PROTOCOL"], host=context.props_cb_env["CB_HOST"], port=context.props_cb_env["CB_PORT"])
context.resp = context.cb.get_version_request()
__logger__.info("..Sent a version request correctly")
send_a_version_request = step(u'send a version request')(send_a_version_request)
@step(u'send a statistics request')
def send_a_statistics_request(context):
"""
send a statistics request
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Sending a statistics request...")
context.props_cb_env = properties_class.read_properties()[CONTEXT_BROKER_ENV]
context.cb = CB(protocol=context.props_cb_env["CB_PROTOCOL"], host=context.props_cb_env["CB_HOST"], port=context.props_cb_env["CB_PORT"])
context.resp = context.cb.get_statistics_request()
__logger__.info("..Sent a statistics request correctly")
@step(u'send a cache statistics request')
def send_a_cache_statistics_request(context):
"""
send a cache statistics request
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Sending a statistics request...")
context.props_cb_env = properties_class.read_properties()[CONTEXT_BROKER_ENV]
context.cb = CB(protocol=context.props_cb_env["CB_PROTOCOL"], host=context.props_cb_env["CB_HOST"], port=context.props_cb_env["CB_PORT"])
context.resp = context.cb.get_cache_statistics_request()
__logger__.info("..Sent a statistics request correctly")
@step(u'delete database in mongo')
def delete_database_in_mongo(context):
"""
Delete database used in mongo
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
fiware_service_header = u'Fiware-Service'
orion_prefix = u'orion'
database_name = orion_prefix
props_mongo = properties_class.read_properties()[MONGO_ENV] # mongo properties dict
mongo = Mongo(host=props_mongo["MONGO_HOST"], port=props_mongo["MONGO_PORT"], user=props_mongo["MONGO_USER"],
password=props_mongo["MONGO_PASS"])
headers = context.cb.get_headers()
if fiware_service_header in headers:
if headers[fiware_service_header] != EMPTY:
if headers[fiware_service_header].find(".") < 0:
database_name = "%s-%s" % (database_name, headers[fiware_service_header].lower())
else:
postfix = headers[fiware_service_header].lower()[0:headers[fiware_service_header].find(".")]
database_name = "%s-%s" % (database_name, postfix)
__logger__.debug("Deleting database \"%s\" in mongo..." % database_name)
mongo.connect(database_name)
mongo.drop_database()
mongo.disconnect()
__logger__.info("...Database \"%s\" is deleted" % database_name)
@step(u'check in log, label "([^"]*)" and message "([^"]*)"')
def check_in_log_label_and_text(context, label, text):
"""
Verify in log file if a label with a message exists
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param label: label to find
    :param text: text to find (the search starts from the end of the log)
"""
__logger__.debug("Looking for in log the \"%s\" label and the \"%s\" text..." % (label, text))
props_cb_env = properties_class.read_properties()[CONTEXT_BROKER_ENV]
remote_log = Remote_Log(file="%s/contextBroker.log" % props_cb_env["CB_LOG_FILE"], fabric=context.my_fab)
line = remote_log.find_line(label, text)
assert line is not None, " ERROR - the \"%s\" label and the \"%s\" text do not exist in the log" % (label, text)
__logger__.info("log line: \n%s" % line)
ngsi = NGSI()
ngsi.verify_log(context, line)
__logger__.info("...confirmed traces in log")
@step(u'delay for "([^"]*)" seconds')
def delay_for_seconds(context, seconds):
"""
delay for N seconds
:param seconds: seconds to delay
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.info("delay for \"%s\" seconds" % seconds)
time.sleep(int(seconds))
@step(u'retrieve the log level')
def retrieve_the_log_level(context):
"""
retrieve the log level in Context Broker
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.info("retrieving the log level in Context Broker")
context.props_cb_env = properties_class.read_properties()[CONTEXT_BROKER_ENV]
context.cb = CB(protocol=context.props_cb_env["CB_PROTOCOL"], host=context.props_cb_env["CB_HOST"], port=context.props_cb_env["CB_PORT"])
context.resp = context.cb.retrieve_the_log_level()
__logger__.info("..retrieved the log level in Context Broker")
@step(u'change the log level')
def change_the_log_level(context):
"""
change the log level
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.info("changing the log level in Context Broker")
query_param = {}
if context.table is not None:
for row in context.table:
query_param[row["parameter"]] = row["value"]
__logger__.info("query param: %s = %s" % (row["parameter"], row["value"]))
context.props_cb_env = properties_class.read_properties()[CONTEXT_BROKER_ENV]
context.cb = CB(protocol=context.props_cb_env["CB_PROTOCOL"], host=context.props_cb_env["CB_HOST"], port=context.props_cb_env["CB_PORT"])
context.resp = context.cb.change_the_log_level(query_param)
__logger__.info("..changed the log level in Context Broker")
# ------------------------------------- validations ----------------------------------------------
@step(u'verify that receive a.? "([^"]*)" http code')
def verify_that_receive_an_http_code(context, http_code):
"""
verify that receive an http code
:param http_code: http code expected
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("context: %s" % repr(context.resp.text))
__logger__.debug("Verifying that return an http codes...")
assert context.resp.status_code == status_codes[http_code], \
" ERROR - http code is wrong\n" \
" expected: %s \n" \
" received: %s" % (str(status_codes[http_code]), str(context.resp.status_code))
__logger__.info('...Verified that http code returned is "%s"' % http_code)
@step(u'verify "([^"]*)" url with "([^"]*)" value in response')
def verify_entry_point(context, url, value):
"""
verify API entry point response.
Ex:
{
"entities_url":"/v2/entities",
"types_url":"/v2/types",
"subscriptions_url":"/v2/subscriptions",
"registrations_url":"/v2/registrations"
}
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param url: url key to verify
:param value: value expected
"""
__logger__.debug("Verifying url in API entry point response...")
resp_dict = convert_str_to_dict(context.resp.text, "JSON")
assert resp_dict[url] == value, " ERROR - in \"%s\" url with \"%s\" value " % (url, value)
__logger__.info("...Verified url in API entry point response")
@step(u'verify statistics "([^"]*)" field does exists')
def verify_stat_fields(context, field_to_test):
"""
verify statistics and cache statistics fields in response.
Ex: /statistics
{
"uptime_in_secs":2,
"measuring_interval_in_secs":2
}
/cache/statistics
{
"ids":"",
"refresh":1,
"inserts":0,
"removes":0,
"updates":0,
"items":0
}
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param field_to_test: field to verify if it does exists
"""
__logger__.debug("Verifying statistics field: %s does exists..." % field_to_test)
resp_dict = convert_str_to_dict(context.resp.text, "JSON")
    assert field_to_test in resp_dict.keys(), "ERROR - \"%s\" field does not exist in statistics response" % field_to_test
__logger__.info("...Verified that statistics field %s is correct" % field_to_test)
@step(u'verify version "([^"]*)" field does exists')
def verify_version_fields(context, field):
"""
verify version fields in response.
Ex:
{
"orion" : {
"version" : "0.23.0_20150722131636",
"uptime" : "0 d, 0 h, 4 m, 46 s",
"git_hash" : "3c0767f91997a25925229b836dc48bba0f4801ba",
"compile_time" : "Wed Jul 22 13:18:54 CEST 2015",
"compiled_by" : "develenv",
"compiled_in" : "ci-fiware-01"
}
}
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param field: field to verify if it does exists
"""
__logger__.debug("Verifying version field: %s does exists..." % field)
resp_dict = convert_str_to_dict(context.resp.text, "JSON")
assert "orion" in resp_dict, "ERROR - orion field does no exist in version response"
assert field in resp_dict["orion"], "ERROR - %s field does no exist in version response" % field
__logger__.info("...Verified that version field %s is correct" % field)
@step(u'verify if version is the expected')
def verify_if_version_is_the_expected(context):
"""
verify if version is the expected
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
resp_dict = convert_str_to_dict(str(context.resp.text), "JSON")
assert resp_dict["orion"]["version"].find(context.props_cb_env["CB_VERSION"]) >= 0, \
" ERROR in context broker version value, \n" \
" expected: %s \n" \
" installed: %s" % (context.props_cb_env["CB_VERSION"], resp_dict["orion"]["version"])
__logger__.info("-- version %s is correct in version request" % context.props_cb_env["CB_VERSION"])
@step(u'verify that receive several "([^"]*)" http code')
def verify_that_receive_several_http_codes(context, http_code):
"""
verify that receive several http codes in multi entities
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param http_code: http code in all entities
"""
__logger__.debug("Verifying that return an http code in several entities...")
entities_context = context.cb.get_entity_context()
for i in range(int(entities_context["entities_number"])):
assert context.resp_list[i].status_code == status_codes[http_code], \
" ERROR - http code is wrong in position: %s \n" \
"expected: %s \n" \
" received: %s" % (str(i), str(status_codes[http_code]), str(context.resp_list[i].status_code))
__logger__.debug(" -- status code \"%s\" is the expected in position: %s" % (http_code, str(i)))
__logger__.info("...Verified that http code returned in all entities are %s" % http_code)
@step(u'verify an error response')
def verify_error_message(context):
"""
verify error response
:param context: parameters to evaluate. It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Verifying error message ...")
ngsi = NGSI()
ngsi.verify_error_response(context, context.resp)
__logger__.info("...Verified that error message is the expected")
@step(u'verify several error responses')
def verify_several_error_messages(context):
    """
    verify error responses in several entities
:param context: parameters to evaluate. It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Verifying error message in several entities...")
entities_context = context.cb.get_entity_context()
ngsi = NGSI()
for i in range(int(entities_context["entities_number"])):
ngsi.verify_error_response(context, context.resp_list[i])
__logger__.info("...Verified that error message is the expected in all entities ")
@step(u'verify headers in response')
def verify_headers_in_response(context):
"""
verify headers in response
Ex:
| parameter | value |
| fiware-total-count | 5 |
| location | /v2/subscriptions/.* |
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Verifying headers in response...")
ngsi = NGSI()
ngsi.verify_headers_response(context)
__logger__.info("...Verified headers in response")
@step(u'verify if the log level "([^"]*)" is the expected')
def verify_if_the_log_level_is_the_expected(context, level):
"""
verify if the log level is the expected
:param level: log level expected
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Verifying if the log level \"%s\" is the expected in response..." % level)
ngsi = NGSI()
ngsi.verify_log_level(context, level)
__logger__.info("...Verified log level in response")
@step(u'verify admin error "([^"]*)"')
def verify_admin_error(context, error):
"""
verify admin error message
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param error: error message expected
"""
__logger__.debug("Verifying the admin error message: %s..." % error)
ngsi = NGSI()
ngsi.verify_admin_error(context, error)
__logger__.info("...Verified that the admin error message is the expected")<|fim▁end|> | """
send a API entry point request
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave. |
<|file_name|>snake_oil_npv.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from ecl.summary import EclSum
OIL_PRICES = {
"2010-01-01": 78.33,
"2010-02-01": 76.39,
"2010-03-01": 81.20,
"2010-04-01": 84.29,
"2010-05-01": 73.74,
"2010-06-01": 75.34,
"2010-07-01": 76.32,
"2010-08-01": 76.60,
"2010-09-01": 75.24,
"2010-10-01": 81.89,
"2010-11-01": 84.25,
"2010-12-01": 89.15,
"2011-01-01": 89.17,
"2011-02-01": 88.58,
"2011-03-01": 102.86,
"2011-04-01": 109.53,
"2011-05-01": 100.90,
"2011-06-01": 96.26,
"2011-07-01": 97.30,
"2011-08-01": 86.33,
"2011-09-01": 85.52,
"2011-10-01": 86.32,
"2011-11-01": 97.16,
"2011-12-01": 98.56,
"2012-01-01": 100.27,
"2012-02-01": 102.20,
"2012-03-01": 106.16,
"2012-04-01": 103.32,
"2012-05-01": 94.65,
"2012-06-01": 82.30,
"2012-07-01": 87.90,
"2012-08-01": 94.13,
"2012-09-01": 94.51,
"2012-10-01": 89.49,
"2012-11-01": 86.53,
"2012-12-01": 87.86,
"2013-01-01": 94.76,<|fim▁hole|> "2013-04-01": 92.02,
"2013-05-01": 94.51,
"2013-06-01": 95.77,
"2013-07-01": 104.67,
"2013-08-01": 106.57,
"2013-09-01": 106.29,
"2013-10-01": 100.54,
"2013-11-01": 93.86,
"2013-12-01": 97.63,
"2014-01-01": 94.62,
"2014-02-01": 100.82,
"2014-03-01": 100.80,
"2014-04-01": 102.07,
"2014-05-01": 102.18,
"2014-06-01": 105.79,
"2014-07-01": 103.59,
"2014-08-01": 96.54,
"2014-09-01": 93.21,
"2014-10-01": 84.40,
"2014-11-01": 75.79,
"2014-12-01": 59.29,
"2015-01-01": 47.22,
"2015-02-01": 50.58,
"2015-03-01": 47.82,
"2015-04-01": 54.45,
"2015-05-01": 59.27,
"2015-06-01": 59.82,
"2015-07-01": 50.90,
"2015-08-01": 42.87,
"2015-09-01": 45.48,
}
if __name__ == "__main__":
ecl_sum = EclSum("SNAKE_OIL_FIELD")
start_time = ecl_sum.getStartTime()
date_ranges = ecl_sum.timeRange(start_time, interval="1M")
production_sums = ecl_sum.blockedProduction("FOPT", date_ranges)
npv = 0.0
for index in range(0, len(date_ranges) - 1):
date = date_ranges[index + 1] # end of period
production_sum = production_sums[index]
oil_price = OIL_PRICES[date.date().strftime("%Y-%m-%d")]
production_value = oil_price * production_sum
npv += production_value
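        # No discount factor is applied here, so the accumulated "NPV" is in fact
        # the undiscounted revenue summed over the monthly production periods.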
with open("snake_oil_npv.txt", "w") as output_file:
output_file.write("NPV %s\n" % npv)
if npv < 80000:
rating = "POOR"
elif 80000 <= npv < 100000:
rating = "AVERAGE"
elif 100000 <= npv < 120000:
rating = "GOOD"
else:
rating = "EXCELLENT"
output_file.write("RATING %s\n" % rating)<|fim▁end|> | "2013-02-01": 95.31,
"2013-03-01": 92.94, |
<|file_name|>embed.py<|end_file_name|><|fim▁begin|># Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
#
#
# Parts of this code is from IPyVolume (24.05.2017), used here under
# this copyright and license with permission from the author
# (see https://github.com/jupyter-widgets/ipywidgets/pull/1387)
"""
Functions for generating embeddable HTML/javascript of a widget.
"""
import json
import re
from .widgets import Widget, DOMWidget
from .widgets.widget_link import Link
from .widgets.docutils import doc_subst
from ._version import __html_manager_version__
snippet_template = u"""
{load}
<script type="application/vnd.jupyter.widget-state+json">
{json_data}
</script>
{widget_views}
"""
load_template = u"""<script src="{embed_url}"{use_cors}></script>"""
load_requirejs_template = u"""
<!-- Load require.js. Delete this if your page already loads require.js -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" crossorigin="anonymous"></script>
<script src="{embed_url}"{use_cors}></script>
"""
requirejs_snippet_template = u"""
<script type="application/vnd.jupyter.widget-state+json">
{json_data}
</script>
{widget_views}
"""
html_template = u"""<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>{title}</title>
</head>
<body>
{snippet}
</body>
</html>
"""
widget_view_template = u"""<script type="application/vnd.jupyter.widget-view+json">
{view_spec}
</script>"""
DEFAULT_EMBED_SCRIPT_URL = u'https://unpkg.com/@jupyter-widgets/html-manager@%s/dist/embed.js'%__html_manager_version__
DEFAULT_EMBED_REQUIREJS_URL = u'https://unpkg.com/@jupyter-widgets/html-manager@%s/dist/embed-amd.js'%__html_manager_version__
_doc_snippets = {}
_doc_snippets['views_attribute'] = """
views: widget or collection of widgets or None
The widgets to include views for. If None, all DOMWidgets are
included (not just the displayed ones).
"""
_doc_snippets['embed_kwargs'] = """
drop_defaults: boolean
Whether to drop default values from the widget states.
state: dict or None (default)
The state to include. When set to None, the state of all widgets
        known to the widget manager is included. Otherwise it uses the
passed state directly. This allows for end users to include a
smaller state, under the responsibility that this state is
sufficient to reconstruct the embedded views.
indent: integer, string or None
The indent to use for the JSON state dump. See `json.dumps` for
full description.
embed_url: string or None
Allows for overriding the URL used to fetch the widget manager
for the embedded code. This defaults (None) to an `unpkg` CDN url.
requirejs: boolean (True)
Enables the requirejs-based embedding, which allows for custom widgets.
If True, the embed_url should point to an AMD module.
cors: boolean (True)
If True avoids sending user credentials while requesting the scripts.
When opening an HTML file from disk, some browsers may refuse to load
the scripts.
"""
def _find_widget_refs_by_state(widget, state):
"""Find references to other widgets in a widget's state"""
# Copy keys to allow changes to state during iteration:
keys = tuple(state.keys())
for key in keys:
value = getattr(widget, key)
# Trivial case: Direct references to other widgets:
if isinstance(value, Widget):
yield value
# Also check for buried references in known, JSON-able structures
# Note: This might miss references buried in more esoteric structures
elif isinstance(value, (list, tuple)):
for item in value:
if isinstance(item, Widget):
yield item
elif isinstance(value, dict):
for item in value.values():
if isinstance(item, Widget):
yield item
def _get_recursive_state(widget, store=None, drop_defaults=False):
"""Gets the embed state of a widget, and all other widgets it refers to as well"""
if store is None:
store = dict()
state = widget._get_embed_state(drop_defaults=drop_defaults)
store[widget.model_id] = state
# Loop over all values included in state (i.e. don't consider excluded values):
for ref in _find_widget_refs_by_state(widget, state['state']):
if ref.model_id not in store:
_get_recursive_state(ref, store, drop_defaults=drop_defaults)
return store
def add_resolved_links(store, drop_defaults):
"""Adds the state of any link models between two models in store"""
for widget_id, widget in Widget.widgets.items(): # go over all widgets
if isinstance(widget, Link) and widget_id not in store:
if widget.source[0].model_id in store and widget.target[0].model_id in store:
store[widget.model_id] = widget._get_embed_state(drop_defaults=drop_defaults)
def dependency_state(widgets, drop_defaults=True):
"""Get the state of all widgets specified, and their dependencies.
This uses a simple dependency finder, including:
- any widget directly referenced in the state of an included widget
- any widget in a list/tuple attribute in the state of an included widget
- any widget in a dict attribute in the state of an included widget
- any jslink/jsdlink between two included widgets
    What this algorithm does not do:
- Find widget references in nested list/dict structures
- Find widget references in other types of attributes
Note that this searches the state of the widgets for references, so if
a widget reference is not included in the serialized state, it won't
be considered as a dependency.
Parameters
----------
widgets: single widget or list of widgets.
This function will return the state of every widget mentioned
and of all their dependencies.
drop_defaults: boolean
Whether to drop default values from the widget states.
Returns
-------
A dictionary with the state of the widgets and any widget they
depend on.
"""
# collect the state of all relevant widgets
if widgets is None:
# Get state of all widgets, no smart resolution needed.
state = Widget.get_manager_state(drop_defaults=drop_defaults, widgets=None)['state']
else:
try:
widgets[0]<|fim▁hole|> except (IndexError, TypeError):
widgets = [widgets]
state = {}
for widget in widgets:
_get_recursive_state(widget, state, drop_defaults)
# Add any links between included widgets:
add_resolved_links(state, drop_defaults)
return state
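# Illustrative usage (the widget names below are hypothetical):
#
#   state = dependency_state([slider, graph], drop_defaults=True)
#   # `state` now maps model_id -> embed state for slider and graph, for every
#   # widget reachable from their serialized state, and for any jslink/jsdlink
#   # connecting two included widgets.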
@doc_subst(_doc_snippets)
def embed_data(views, drop_defaults=True, state=None):
"""Gets data for embedding.
Use this to get the raw data for embedding if you have special
formatting needs.
Parameters
----------
{views_attribute}
drop_defaults: boolean
Whether to drop default values from the widget states.
state: dict or None (default)
The state to include. When set to None, the state of all widgets
        known to the widget manager is included. Otherwise it uses the
passed state directly. This allows for end users to include a
smaller state, under the responsibility that this state is
sufficient to reconstruct the embedded views.
Returns
-------
A dictionary with the following entries:
manager_state: dict of the widget manager state data
view_specs: a list of widget view specs
"""
if views is None:
views = [w for w in Widget.widgets.values() if isinstance(w, DOMWidget)]
else:
try:
views[0]
except (IndexError, TypeError):
views = [views]
if state is None:
# Get state of all known widgets
state = Widget.get_manager_state(drop_defaults=drop_defaults, widgets=None)['state']
# Rely on ipywidget to get the default values
json_data = Widget.get_manager_state(widgets=[])
# but plug in our own state
json_data['state'] = state
view_specs = [w.get_view_spec() for w in views]
return dict(manager_state=json_data, view_specs=view_specs)
script_escape_re = re.compile(r'<(script|/script|!--)', re.IGNORECASE)
def escape_script(s):
"""Escape a string that will be the content of an HTML script tag.
We replace the opening bracket of <script, </script, and <!-- with the unicode
equivalent. This is inspired by the documentation for the script tag at
https://html.spec.whatwg.org/multipage/scripting.html#restrictions-for-contents-of-script-elements
We only replace these three cases so that most html or other content
involving `<` is readable.
"""
return script_escape_re.sub(r'\u003c\1', s)
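# For example, escape_script('<script>x</script>') returns the literal text
#   \u003cscript>x\u003c/script>
# which browsers no longer parse as a real script tag boundary.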
@doc_subst(_doc_snippets)
def embed_snippet(views,
drop_defaults=True,
state=None,
indent=2,
embed_url=None,
requirejs=True,
cors=True
):
"""Return a snippet that can be embedded in an HTML file.
Parameters
----------
{views_attribute}
{embed_kwargs}
Returns
-------
A unicode string with an HTML snippet containing several `<script>` tags.
"""
data = embed_data(views, drop_defaults=drop_defaults, state=state)
widget_views = u'\n'.join(
widget_view_template.format(view_spec=escape_script(json.dumps(view_spec)))
for view_spec in data['view_specs']
)
if embed_url is None:
embed_url = DEFAULT_EMBED_REQUIREJS_URL if requirejs else DEFAULT_EMBED_SCRIPT_URL
load = load_requirejs_template if requirejs else load_template
use_cors = ' crossorigin="anonymous"' if cors else ''
values = {
'load': load.format(embed_url=embed_url, use_cors=use_cors),
'json_data': escape_script(json.dumps(data['manager_state'], indent=indent)),
'widget_views': widget_views,
}
return snippet_template.format(**values)
@doc_subst(_doc_snippets)
def embed_minimal_html(fp, views, title=u'IPyWidget export', template=None, **kwargs):
"""Write a minimal HTML file with widget views embedded.
Parameters
----------
fp: filename or file-like object
The file to write the HTML output to.
{views_attribute}
title: title of the html page.
template: Template in which to embed the widget state.
This should be a Python string with placeholders
`{{title}}` and `{{snippet}}`. The `{{snippet}}` placeholder
will be replaced by all the widgets.
{embed_kwargs}
"""
snippet = embed_snippet(views, **kwargs)
values = {
'title': title,
'snippet': snippet,
}
if template is None:
template = html_template
html_code = template.format(**values)
# Check if fp is writable:
if hasattr(fp, 'write'):
fp.write(html_code)
else:
# Assume fp is a filename:
with open(fp, "w") as f:
f.write(html_code)<|fim▁end|> | |
<|file_name|>place_order.go<|end_file_name|><|fim▁begin|>// Code generated by go-swagger; DO NOT EDIT.
package store
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the generate command
import (
"net/http"
"github.com/go-openapi/runtime/middleware"
)
// PlaceOrderHandlerFunc turns a function with the right signature into a place order handler
type PlaceOrderHandlerFunc func(PlaceOrderParams) middleware.Responder
// Handle executing the request and returning a response
func (fn PlaceOrderHandlerFunc) Handle(params PlaceOrderParams) middleware.Responder {
return fn(params)
}
// PlaceOrderHandler interface for that can handle valid place order params
type PlaceOrderHandler interface {
Handle(PlaceOrderParams) middleware.Responder
}
// NewPlaceOrder creates a new http.Handler for the place order operation
func NewPlaceOrder(ctx *middleware.Context, handler PlaceOrderHandler) *PlaceOrder {
return &PlaceOrder{Context: ctx, Handler: handler}
}
/* PlaceOrder swagger:route POST /stores/order store placeOrder
Place an order for a pet
*/
type PlaceOrder struct {
Context *middleware.Context
Handler PlaceOrderHandler
}
func (o *PlaceOrder) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
route, rCtx, _ := o.Context.RouteInfo(r)
if rCtx != nil {
*r = *rCtx
}
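	// A non-nil rCtx is the request as enriched by the router for the matched
	// route; adopting it lets the parameter binding below see that context.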
var Params = NewPlaceOrderParams()
if err := o.Context.BindValidRequest(r, route, &Params); err != nil { // bind params<|fim▁hole|> res := o.Handler.Handle(Params) // actually handle the request
o.Context.Respond(rw, r, route.Produces, route, res)
}<|fim▁end|> | o.Context.Respond(rw, r, route.Produces, route, err)
return
}
|
<|file_name|>hello-world.rs<|end_file_name|><|fim▁begin|>extern crate hello_world;
#[test]
fn test_no_name() {
assert_eq!("Hello, World!", hello_world::hello(None));
}
<|fim▁hole|> assert_eq!("Hello, Alice!", hello_world::hello(Some("Alice")));
}
#[test]
//#[ignore]
fn test_other_same_name() {
assert_eq!("Hello, Bob!", hello_world::hello(Some("Bob")));
}<|fim▁end|> | #[test]
//#[ignore]
fn test_sample_name() { |
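// A minimal implementation consistent with the assertions above, assuming the
// crate exposes `hello` from its library root (a sketch, not the actual source):
//
//   pub fn hello(name: Option<&str>) -> String {
//       format!("Hello, {}!", name.unwrap_or("World"))
//   }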
<|file_name|>sonification.src.js<|end_file_name|><|fim▁begin|>/**
* @license Highcharts JS v8.0.2 (2020-03-03)
* @module highcharts/modules/sonification<|fim▁hole|> *
* (c) 2012-2019 Øystein Moseng
*
* License: www.highcharts.com/license
*/
'use strict';
import '../../modules/sonification/sonification.js';<|fim▁end|> | * @requires highcharts
*
* Sonification module |
<|file_name|>metatype.py<|end_file_name|><|fim▁begin|>"""
mediatum - a multimedia content repository
Copyright (C) 2007 Arne Seifert <[email protected]>
Copyright (C) 2007 Matthias Kramm <[email protected]>
Copyright (C) 2013 Iryna Feuerstein <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from warnings import warn
class Context(object):
def __init__(self, field, value="", width=400, name="", lock=0, language=None, collection=None, container=None, user=None, ip=""):
if collection is not None:
warn("collections argument is deprecated, use container", DeprecationWarning)
if container is not None:
raise ValueError("container and collection cannot be used together")
container = collection
self.field = field
self.value = value
self.width = width
self.name = name
self.language = language
self.collection = container
self.container = container
self.ip = ip
self.user = user
self.lock = lock
class Metatype(object):
joiner = '\n'
@classmethod
def get_name4schema(cls):
name = cls.__name__
return name[2:] if name.startswith("m_") else name
def getEditorHTML(self, field, value="", width=400, lock=0, language=None, required=None):
return ""
def getSearchHTML(self, context):
        return None
def getFormattedValue(self, metafield, maskitem, mask, node, language, html):
        return None
def format_request_value_for_db(self, field, params, item, language=None):
"""Prepare value for the database from update request params.
:param field: associated field
:param params: dict which contains POST form values
:param item: field name prepended with language specifier. Is the same as field name for non-multilingual fields.
"""
        # just fetch the unmodified value from the params dict
return params.get(item)
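        # Illustrative call (the field and parameter names are hypothetical):
        #   params = {'title__en': 'A title', 'title__de': 'Ein Titel'}
        #   metatype.format_request_value_for_db(field, params, 'title__en')
        #   # -> 'A title'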
def getMaskEditorHTML(self, field, metadatatype=None, language=None):
return ""
@classmethod
def isContainer(cls):
return False
def isFieldType(self):
return True
def getName(self):
return ""
def getInformation(self):
return {"moduleversion": "1.0"}
''' events '''
def event_metafield_changed(self, node, field):
        return None
def get_input_pattern(self, field):
return ''
def get_input_title(self, field):
return ''
def get_input_placeholder(self, field):
return ''
def is_required(self, required):
"""
It's necessary to return different types in order for the template to render properly.
Since required='' or even required='False' is still interpreted as a required field,
it needs to be completely removed from the template where applicable. TAL attributes
are removed if they evaluate to None.
@param required: 0 or 1
@return: str True or None object
"""<|fim▁hole|> return None
charmap = [
[' ', '160', 'no-break space'],
['&', '38', 'ampersand'],
['"', '34', 'quotation mark'],
# finance
['¢', '162', 'cent sign'],
['€', '8364', 'euro sign'],
['£', '163', 'pound sign'],
['¥', '165', 'yen sign'],
# signs
['©', '169', 'copyright sign'],
['®', '174', 'registered sign'],
['™', '8482', 'trade mark sign'],
['‰', '8240', 'per mille sign'],
['µ', '181', 'micro sign'],
['·', '183', 'middle dot'],
['•', '8226', 'bullet'],
['…', '8230', 'three dot leader'],
['′', '8242', 'minutes / feet'],
['″', '8243', 'seconds / inches'],
['§', '167', 'section sign'],
['¶', '182', 'paragraph sign'],
['ß', '223', 'sharp s / ess-zed'],
# quotations
['‹', '8249', 'single left-pointing angle quotation mark'],
['›', '8250', 'single right-pointing angle quotation mark'],
['«', '171', 'left pointing guillemet'],
['»', '187', 'right pointing guillemet'],
['‘', '8216', 'left single quotation mark'],
['’', '8217', 'right single quotation mark'],
['“', '8220', 'left double quotation mark'],
['”', '8221', 'right double quotation mark'],
['‚', '8218', 'single low-9 quotation mark'],
['„', '8222', 'double low-9 quotation mark'],
['<', '60', 'less-than sign'],
['>', '62', 'greater-than sign'],
['≤', '8804', 'less-than or equal to'],
['≥', '8805', 'greater-than or equal to'],
['–', '8211', 'en dash'],
['—', '8212', 'em dash'],
['¯', '175', 'macron'],
['‾', '8254', 'overline'],
['¤', '164', 'currency sign'],
['¦', '166', 'broken bar'],
['¨', '168', 'diaeresis'],
['¡', '161', 'inverted exclamation mark'],
['¿', '191', 'turned question mark'],
['ˆ', '710', 'circumflex accent'],
['˜', '732', 'small tilde'],
['°', '176', 'degree sign'],
['−', '8722', 'minus sign'],
['±', '177', 'plus-minus sign'],
['÷', '247', 'division sign'],
['⁄', '8260', 'fraction slash'],
['×', '215', 'multiplication sign'],
['¹', '185', 'superscript one'],
['²', '178', 'superscript two'],
['³', '179', 'superscript three'],
['¼', '188', 'fraction one quarter'],
['½', '189', 'fraction one half'],
['¾', '190', 'fraction three quarters'],
# math / logical
['ƒ', '402', 'function / florin'],
['∫', '8747', 'integral'],
['∑', '8721', 'n-ary sumation'],
['∞', '8734', 'infinity'],
['√', '8730', 'square root'],
['∼', '8764', 'similar to'],
['≅', '8773', 'approximately equal to'],
['≈', '8776', 'almost equal to'],
['≠', '8800', 'not equal to'],
['≡', '8801', 'identical to'],
['∈', '8712', 'element of'],
['∉', '8713', 'not an element of'],
['∋', '8715', 'contains as member'],
['∏', '8719', 'n-ary product'],
['∧', '8743', 'logical and'],
['∨', '8744', 'logical or'],
['¬', '172', 'not sign'],
['∩', '8745', 'intersection'],
['∪', '8746', 'union'],
['∂', '8706', 'partial differential'],
['∀', '8704', 'for all'],
['∃', '8707', 'there exists'],
['∅', '8709', 'diameter'],
['∇', '8711', 'backward difference'],
['∗', '8727', 'asterisk operator'],
['∝', '8733', 'proportional to'],
['∠', '8736', 'angle'],
# undefined
['´', '180', 'acute accent'],
['¸', '184', 'cedilla'],
['ª', '170', 'feminine ordinal indicator'],
['º', '186', 'masculine ordinal indicator'],
['†', '8224', 'dagger'],
['‡', '8225', 'double dagger'],
# alphabetical special chars
['À', '192', 'A - grave'],
['Á', '193', 'A - acute'],
['Â', '194', 'A - circumflex'],
['Ã', '195', 'A - tilde'],
['Ä', '196', 'A - diaeresis'],
['Å', '197', 'A - ring above'],
['Æ', '198', 'ligature AE'],
['Ç', '199', 'C - cedilla'],
['È', '200', 'E - grave'],
['É', '201', 'E - acute'],
['Ê', '202', 'E - circumflex'],
['Ë', '203', 'E - diaeresis'],
['Ì', '204', 'I - grave'],
['Í', '205', 'I - acute'],
['Î', '206', 'I - circumflex'],
['Ï', '207', 'I - diaeresis'],
['Ð', '208', 'ETH'],
['Ñ', '209', 'N - tilde'],
['Ò', '210', 'O - grave'],
['Ó', '211', 'O - acute'],
['Ô', '212', 'O - circumflex'],
['Õ', '213', 'O - tilde'],
['Ö', '214', 'O - diaeresis'],
['Ø', '216', 'O - slash'],
['Œ', '338', 'ligature OE'],
['Š', '352', 'S - caron'],
['Ù', '217', 'U - grave'],
['Ú', '218', 'U - acute'],
['Û', '219', 'U - circumflex'],
['Ü', '220', 'U - diaeresis'],
['Ý', '221', 'Y - acute'],
['Ÿ', '376', 'Y - diaeresis'],
['Þ', '222', 'THORN'],
['à', '224', 'a - grave'],
['á', '225', 'a - acute'],
['â', '226', 'a - circumflex'],
['ã', '227', 'a - tilde'],
['ä', '228', 'a - diaeresis'],
['å', '229', 'a - ring above'],
['æ', '230', 'ligature ae'],
['ç', '231', 'c - cedilla'],
['è', '232', 'e - grave'],
['é', '233', 'e - acute'],
['ê', '234', 'e - circumflex'],
['ë', '235', 'e - diaeresis'],
['ì', '236', 'i - grave'],
['í', '237', 'i - acute'],
['î', '238', 'i - circumflex'],
['ï', '239', 'i - diaeresis'],
['ð', '240', 'eth'],
['ñ', '241', 'n - tilde'],
['ò', '242', 'o - grave'],
['ó', '243', 'o - acute'],
['ô', '244', 'o - circumflex'],
['õ', '245', 'o - tilde'],
['ö', '246', 'o - diaeresis'],
['ø', '248', 'o slash'],
['œ', '339', 'ligature oe'],
['š', '353', 's - caron'],
['ù', '249', 'u - grave'],
['ú', '250', 'u - acute'],
['û', '251', 'u - circumflex'],
['ü', '252', 'u - diaeresis'],
['ý', '253', 'y - acute'],
['þ', '254', 'thorn'],
['ÿ', '255', 'y - diaeresis'],
['Α', '913', 'Alpha'],
['Β', '914', 'Beta'],
['Γ', '915', 'Gamma'],
['Δ', '916', 'Delta'],
['Ε', '917', 'Epsilon'],
['Ζ', '918', 'Zeta'],
['Η', '919', 'Eta'],
['Θ', '920', 'Theta'],
['Ι', '921', 'Iota'],
['Κ', '922', 'Kappa'],
['Λ', '923', 'Lambda'],
['Μ', '924', 'Mu'],
['Ν', '925', 'Nu'],
['Ξ', '926', 'Xi'],
['Ο', '927', 'Omicron'],
['Π', '928', 'Pi'],
['Ρ', '929', 'Rho'],
['Σ', '931', 'Sigma'],
['Τ', '932', 'Tau'],
['Υ', '933', 'Upsilon'],
['Φ', '934', 'Phi'],
['Χ', '935', 'Chi'],
['Ψ', '936', 'Psi'],
['Ω', '937', 'Omega'],
['α', '945', 'alpha'],
['β', '946', 'beta'],
['γ', '947', 'gamma'],
['δ', '948', 'delta'],
['ε', '949', 'epsilon'],
['ζ', '950', 'zeta'],
['η', '951', 'eta'],
['θ', '952', 'theta'],
['ι', '953', 'iota'],
['κ', '954', 'kappa'],
['λ', '955', 'lambda'],
['μ', '956', 'mu'],
['ν', '957', 'nu'],
['ξ', '958', 'xi'],
['ο', '959', 'omicron'],
['π', '960', 'pi'],
['ρ', '961', 'rho'],
['ς', '962', 'final sigma'],
['σ', '963', 'sigma'],
['τ', '964', 'tau'],
['υ', '965', 'upsilon'],
['φ', '966', 'phi'],
['χ', '967', 'chi'],
['ψ', '968', 'psi'],
['ω', '969', 'omega'],
# symbols
['ℵ', '8501', 'alef symbol'],
['ϖ', '982', 'pi symbol'],
['ℜ', '8476', 'real part symbol'],
['ϑ', '977', 'theta symbol'],
['ϒ', '978', 'upsilon - hook symbol'],
['℘', '8472', 'Weierstrass p'],
['ℑ', '8465', 'imaginary part'],
# arrows
['←', '8592', 'leftwards arrow'],
['↑', '8593', 'upwards arrow'],
['→', '8594', 'rightwards arrow'],
['↓', '8595', 'downwards arrow'],
['↔', '8596', 'left right arrow'],
['↵', '8629', 'carriage return'],
['⇐', '8656', 'leftwards double arrow'],
['⇑', '8657', 'upwards double arrow'],
['⇒', '8658', 'rightwards double arrow'],
['⇓', '8659', 'downwards double arrow'],
['⇔', '8660', 'left right double arrow'],
['∴', '8756', 'therefore'],
['⊂', '8834', 'subset of'],
['⊃', '8835', 'superset of'],
['⊄', '8836', 'not a subset of'],
['⊆', '8838', 'subset of or equal to'],
['⊇', '8839', 'superset of or equal to'],
['⊕', '8853', 'circled plus'],
['⊗', '8855', 'circled times'],
['⊥', '8869', 'perpendicular'],
['⋅', '8901', 'dot operator'],
['⌈', '8968', 'left ceiling'],
['⌉', '8969', 'right ceiling'],
['⌊', '8970', 'left floor'],
['⌋', '8971', 'right floor'],
['⟨', '9001', 'left-pointing angle bracket'],
['⟩', '9002', 'right-pointing angle bracket'],
['◊', '9674', 'lozenge'],
['♠', '9824', 'black spade suit'],
['♣', '9827', 'black club suit'],
['♥', '9829', 'black heart suit'],
['♦', '9830', 'black diamond suit'],
[' ', '8194', 'en space'],
[' ', '8195', 'em space'],
[' ', '8201', 'thin space'],
['‌', '8204', 'zero width non-joiner'],
['‍', '8205', 'zero width joiner'],
['‎', '8206', 'left-to-right mark'],
['‏', '8207', 'right-to-left mark'],
['­', '173', 'soft hyphen']
]<|fim▁end|> | if required:
return 'True'
else: |
<|file_name|>inference.js<|end_file_name|><|fim▁begin|>import traverse from "../lib";
import assert from "assert";
import { parse } from "babylon";
import * as t from "babel-types";
function getPath(code) {
const ast = parse(code, { plugins: ["flow", "asyncGenerators"] });
let path;
traverse(ast, {
Program: function (_path) {
path = _path;
_path.stop();
},
});
return path;
}
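// getPath parses `code` and hands back the Program path, so for example
// getPath("var x = 1;").get("body")[0] addresses the first statement's path.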
describe("inference", function () {
describe("baseTypeStrictlyMatches", function () {
it("it should work with null", function () {
const path = getPath("var x = null; x === null").get("body")[1].get("expression");
const left = path.get("left");
const right = path.get("right");
const strictMatch = left.baseTypeStrictlyMatches(right);
assert.ok(strictMatch, "null should be equal to null");
});
it("it should work with numbers", function () {
const path = getPath("var x = 1; x === 2").get("body")[1].get("expression");
const left = path.get("left");
const right = path.get("right");
const strictMatch = left.baseTypeStrictlyMatches(right);
assert.ok(strictMatch, "number should be equal to number");
});
it("it should bail when type changes", function () {
const path = getPath("var x = 1; if (foo) x = null;else x = 3; x === 2")
.get("body")[2].get("expression");
const left = path.get("left");
const right = path.get("right");
const strictMatch = left.baseTypeStrictlyMatches(right);
assert.ok(!strictMatch, "type might change in if statement");
});
it("it should differentiate between null and undefined", function () {
const path = getPath("var x; x === null").get("body")[1].get("expression");
const left = path.get("left");
const right = path.get("right");
const strictMatch = left.baseTypeStrictlyMatches(right);
assert.ok(!strictMatch, "null should not match undefined");
});
});
describe("getTypeAnnotation", function () {
it("should infer from type cast", function () {
const path = getPath("(x: number)").get("body")[0].get("expression");
assert.ok(t.isNumberTypeAnnotation(path.getTypeAnnotation()), "should be number");
});
it("should infer string from template literal", function () {
const path = getPath("`hey`").get("body")[0].get("expression");
assert.ok(t.isStringTypeAnnotation(path.getTypeAnnotation()), "should be string");
});
it("should infer number from +x", function () {
const path = getPath("+x").get("body")[0].get("expression");
assert.ok(t.isNumberTypeAnnotation(path.getTypeAnnotation()), "should be number");
});
it("should infer T from new T", function () {
const path = getPath("new T").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isGenericTypeAnnotation(type) && type.id.name === "T", "should be T");
});
it("should infer number from ++x", function () {
const path = getPath("++x").get("body")[0].get("expression");
assert.ok(t.isNumberTypeAnnotation(path.getTypeAnnotation()), "should be number");
});<|fim▁hole|> const path = getPath("--x").get("body")[0].get("expression");
assert.ok(t.isNumberTypeAnnotation(path.getTypeAnnotation()), "should be number");
});
it("should infer void from void x", function () {
const path = getPath("void x").get("body")[0].get("expression");
assert.ok(t.isVoidTypeAnnotation(path.getTypeAnnotation()), "should be void");
});
it("should infer string from typeof x", function () {
const path = getPath("typeof x").get("body")[0].get("expression");
assert.ok(t.isStringTypeAnnotation(path.getTypeAnnotation()), "should be string");
});
it("should infer boolean from !x", function () {
const path = getPath("!x").get("body")[0].get("expression");
assert.ok(t.isBooleanTypeAnnotation(path.getTypeAnnotation()), "should be boolean");
});
it("should infer type of sequence expression", function () {
const path = getPath("a,1").get("body")[0].get("expression");
assert.ok(t.isNumberTypeAnnotation(path.getTypeAnnotation()), "should be number");
});
it("should infer type of logical expression", function () {
const path = getPath("'a' && 1").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isUnionTypeAnnotation(type), "should be a union");
assert.ok(t.isStringTypeAnnotation(type.types[0]), "first type in union should be string");
assert.ok(t.isNumberTypeAnnotation(type.types[1]), "second type in union should be number");
});
it("should infer type of conditional expression", function () {
const path = getPath("q ? true : 0").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isUnionTypeAnnotation(type), "should be a union");
assert.ok(t.isBooleanTypeAnnotation(type.types[0]), "first type in union should be boolean");
assert.ok(t.isNumberTypeAnnotation(type.types[1]), "second type in union should be number");
});
it("should infer RegExp from RegExp literal", function () {
const path = getPath("/.+/").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isGenericTypeAnnotation(type) && type.id.name === "RegExp", "should be RegExp");
});
it("should infer Object from object expression", function () {
const path = getPath("({ a: 5 })").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isGenericTypeAnnotation(type) && type.id.name === "Object", "should be Object");
});
it("should infer Array from array expression", function () {
const path = getPath("[ 5 ]").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isGenericTypeAnnotation(type) && type.id.name === "Array", "should be Array");
});
it("should infer Function from function", function () {
const path = getPath("(function (): string {})").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isGenericTypeAnnotation(type) && type.id.name === "Function", "should be Function");
});
it("should infer call return type using function", function () {
const path = getPath("(function (): string {})()").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isStringTypeAnnotation(type), "should be string");
});
it("should infer call return type using async function", function () {
const path = getPath("(async function (): string {})()").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isGenericTypeAnnotation(type) && type.id.name === "Promise", "should be Promise");
});
it("should infer call return type using async generator function", function () {
const path = getPath("(async function * (): string {})()").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isGenericTypeAnnotation(type) && type.id.name === "AsyncIterator",
"should be AsyncIterator");
});
it("should infer number from x/y", function () {
const path = getPath("x/y").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isNumberTypeAnnotation(type), "should be number");
});
it("should infer boolean from x instanceof y", function () {
const path = getPath("x instanceof y").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isBooleanTypeAnnotation(type), "should be boolean");
});
it("should infer number from 1 + 2", function () {
const path = getPath("1 + 2").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isNumberTypeAnnotation(type), "should be number");
});
it("should infer string|number from x + y", function () {
const path = getPath("x + y").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isUnionTypeAnnotation(type), "should be a union");
assert.ok(t.isStringTypeAnnotation(type.types[0]), "first type in union should be string");
assert.ok(t.isNumberTypeAnnotation(type.types[1]), "second type in union should be number");
});
it("should infer type of tagged template literal", function () {
const path = getPath("(function (): RegExp {}) `hey`").get("body")[0].get("expression");
const type = path.getTypeAnnotation();
assert.ok(t.isGenericTypeAnnotation(type) && type.id.name === "RegExp", "should be RegExp");
});
});
});<|fim▁end|> | it("should infer number from --x", function () { |
<|file_name|>OvhHourlyEnum.java<|end_file_name|><|fim▁begin|>package net.minidev.ovh.api.price.dedicatedcloud._2014v1.sbg1a.infrastructure.filer;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* Enum of Hourlys
*/
public enum OvhHourlyEnum {
@JsonProperty("iscsi-1200-GB")
iscsi_1200_GB("iscsi-1200-GB"),
@JsonProperty("iscsi-13200g-GB")
iscsi_13200g_GB("iscsi-13200g-GB"),
@JsonProperty("iscsi-3300-GB")
iscsi_3300_GB("iscsi-3300-GB"),
@JsonProperty("iscsi-6600-GB")
iscsi_6600_GB("iscsi-6600-GB"),
@JsonProperty("iscsi-800-GB")
iscsi_800_GB("iscsi-800-GB"),
@JsonProperty("nfs-100-GB")
nfs_100_GB("nfs-100-GB"),
@JsonProperty("nfs-1200-GB")
nfs_1200_GB("nfs-1200-GB"),
@JsonProperty("nfs-13200-GB")
nfs_13200_GB("nfs-13200-GB"),
@JsonProperty("nfs-1600-GB")
nfs_1600_GB("nfs-1600-GB"),
@JsonProperty("nfs-2400-GB")
nfs_2400_GB("nfs-2400-GB"),
@JsonProperty("nfs-3300-GB")
nfs_3300_GB("nfs-3300-GB"),
@JsonProperty("nfs-6600-GB")
nfs_6600_GB("nfs-6600-GB"),
@JsonProperty("nfs-800-GB")
nfs_800_GB("nfs-800-GB");
final String value;
OvhHourlyEnum(String s) {
this.value = s;<|fim▁hole|>
public String toString() {
return this.value;
}
}<|fim▁end|> | } |
<|file_name|>test_docx.py<|end_file_name|><|fim▁begin|>from datetime import datetime
from zipfile import ZipFile
from io import BytesIO
from lxml import etree
from docxgen import *
from docxgen import nsmap
from . import check_tag
def test_init():
doc = Document()
check_tag(doc.doc, ['document', 'body'])
check_tag(doc.body, ['body'])
def test_save():
doc = Document()
tmp = BytesIO()
doc.save(tmp)
with ZipFile(tmp) as zippy:
assert(zippy.testzip() is None)
assert(set(zippy.namelist()) == set([
'[Content_Types].xml', '_rels/.rels', 'docProps/app.xml',
'word/fontTable.xml', 'word/numbering.xml', 'word/settings.xml',
'word/styles.xml', 'word/stylesWithEffects.xml',
'word/webSettings.xml', 'word/theme/theme1.xml',
'word/_rels/document.xml.rels', 'word/document.xml',
'docProps/core.xml'
]))
zxf = zippy.open('word/document.xml')
root = etree.parse(zxf)
check_tag(root, 'document body'.split())
def test_load():
# assume save works
d = Document()
tmp = BytesIO()
d.save(tmp)
doc = Document.load(tmp)
check_tag(doc.doc, 'document body'.split())
check_tag(doc.body, 'body'.split())
def test_dumps():
doc = Document()
assert doc.dumps()
def test_core_props():
doc = Document()
attrs = dict(lastModifiedBy='Joe Smith',
keywords=['egg', 'spam'],
title='Testing Doc',
subject='A boilerplate document',
creator='Jill Smith',
description='Core properties test.',<|fim▁hole|> created=datetime.now()
)
doc.update(attrs)
core = doc.get_core_props()
for key in ('title', 'subject', 'creator', 'description'):
attr = core.find('.//dc:%s' % key, namespaces=nsmap)
assert attr is not None
assert attr.text == attrs[key]
attr = core.find('.//cp:keywords', namespaces=nsmap)
assert attr is not None
assert attr.text == ','.join(attrs['keywords'])
attr = core.find('.//dcterms:created', namespaces=nsmap)
assert attr is not None
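    # The comparison below truncates to whole seconds via timetuple()[:6],
    # matching the second-resolution '%Y-%m-%dT%H:%M:%SZ' string in dcterms:created.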
assert datetime.strptime(attr.text, '%Y-%m-%dT%H:%M:%SZ') == datetime(*attrs['created'].timetuple()[:6])<|fim▁end|> | |
<|file_name|>plotset.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import cm
import numpy as np  # reshape
from cstoolkit import drange
from matplotlib.colors import LinearSegmentedColormap
"""
cmap_cs_precp = [ (242, 242, 242), (191, 239, 255), (178, 223, 238),
(154, 192, 205), ( 0, 235, 235), ( 0, 163, 247),
(153, 255, 51),( 0, 255, 0), ( 0, 199, 0), ( 0, 143, 0),
( 0, 63, 0), (255, 255, 0),(255, 204, 0) , (255, 143, 0),
(255, 0, 0), (215, 0, 0),
(255, 0, 255) ] #, (155, 87, 203)]
"""
cmap_cs_precp = [ (242, 242, 242), (178, 223, 238), (154, 192, 205), (68, 176, 213),
( 0, 163, 247), ( 0, 235, 235), (153, 255, 51 ), ( 0, 255, 0),
( 0, 199, 0), ( 0, 143, 0), ( 0, 63, 0), (255, 255, 0),
( 255, 204, 0), (255, 143, 0), (255, 0, 0), (215, 0, 0),
(255, 0, 255) ] #, (155, 87, 203)]
WBGYR=[#(255,255,255),
#(252,254,255),
#(250,253,255),
#(247,252,254),
#(244,251,254),
#(242,250,254),
#(239,249,254),
#(236,248,253),
#(234,247,253),
#(231,246,253),
#(229,245,253),
#(226,244,253),
#(223,243,252),
#(221,242,252),
#(218,241,252),
#(215,240,252),
#(213,239,252),
#(210,238,251),
#(207,237,251),
#(205,236,251),
#(202,235,251),
#(199,234,250),
#(197,233,250),
#(194,232,250),
#(191,231,250),
#(189,230,250),
#(186,229,249),
(183,228,249),
(181,227,249),
(178,226,249),
(176,225,249),
(173,224,248),
(170,223,248),
(168,222,248),
(165,221,248),
(162,220,247),
(157,218,247),
(155,216,246),
(152,214,245),
(150,212,243),
(148,210,242),
(146,208,241),
(143,206,240),
(141,204,238),
(139,202,237),
(136,200,236),
(134,197,235),
(132,195,234),
(129,193,232),
(127,191,231),
(125,189,230),
(123,187,229),
(120,185,228),
(118,183,226),
(116,181,225),
(113,179,224),
(111,177,223),
(109,175,221),
(106,173,220),
(104,171,219),
(102,169,218),
(100,167,217),
(97,165,215),
(95,163,214),
(93,160,213),
(90,158,212),
(88,156,211),
(86,154,209),
(83,152,208),
(81,150,207),
(79,148,206),
(77,146,204),
(72,142,202),
(72,143,198),
(72,144,195),
(72,145,191),
(72,146,188),
(72,147,184),
(72,148,181),
(72,149,177),
(72,150,173),
(72,151,170),
(72,153,166),
(72,154,163),
(72,155,159),
(72,156,156),
(72,157,152),
(72,158,148),
(72,159,145),
(72,160,141),
(72,161,138),
(73,162,134),
(73,163,131),
(73,164,127),
(73,165,124),
(73,166,120),
(73,167,116),
(73,168,113),
(73,169,109),
(73,170,106),
(73,172,102),
(73,173,99),
(73,174,95),
(73,175,91),
(73,176,88),
(73,177,84),
(73,178,81),
(73,179,77),
(73,181,70),
(78,182,71),
(83,184,71),
(87,185,72),
(92,187,72),
(97,188,73),
(102,189,74),
(106,191,74),
(111,192,75),
(116,193,75),
(121,195,76),
(126,196,77),
(130,198,77),
(135,199,78),
(140,200,78),
(145,202,79),
(150,203,80),
(154,204,80),
(159,206,81),
(164,207,81),
(169,209,82),
(173,210,82),
(178,211,83),
(183,213,84),
(188,214,84),
(193,215,85),
(197,217,85),
(202,218,86),
(207,220,87),
(212,221,87),
(217,222,88),
(221,224,88),
(226,225,89),
(231,226,90),
(236,228,90),
(240,229,91),
(245,231,91),
(250,232,92),
(250,229,91),
(250,225,89),
(250,222,88),
(249,218,86),
(249,215,85),
(249,212,84),
(249,208,82),
(249,205,81),
(249,201,80),
(249,198,78),
(249,195,77),
(248,191,75),
(248,188,74),
(248,184,73),
(248,181,71),
(248,178,70),
(248,174,69),
(248,171,67),
(247,167,66),
(247,164,64),
(247,160,63),
(247,157,62),
(247,154,60),
(247,150,59),
(247,147,58),
(246,143,56),
(246,140,55),
(246,137,53),
(246,133,52),
(246,130,51),
(246,126,49),
(246,123,48),
(246,120,47),
(245,116,45),
(245,113,44),
(245,106,41),
(244,104,41),
(243,102,41),
(242,100,41),
(241,98,41),
(240,96,41),
(239,94,41),
(239,92,41),
(238,90,41),
(237,88,41),
(236,86,41),
(235,84,41),
(234,82,41),
(233,80,41),
(232,78,41),
(231,76,41),
(230,74,41),
(229,72,41),
(228,70,41),
(228,67,40),
(227,65,40),
(226,63,40),
(225,61,40),
(224,59,40),
(223,57,40),
(222,55,40),
(221,53,40),
(220,51,40),
(219,49,40),
(218,47,40),
(217,45,40),
(217,43,40),
(216,41,40),
(215,39,40),
(214,37,40),
(213,35,40),
(211,31,40),
(209,31,40),
(207,30,39),
(206,30,39),
(204,30,38),
(202,30,38),
(200,29,38),
(199,29,37),
(197,29,37),
(195,29,36),
(193,28,36),
(192,28,36),
(190,28,35),
(188,27,35),
(186,27,34),
(185,27,34),
(183,27,34),
(181,26,33),
(179,26,33),
(178,26,32),
(176,26,32),
(174,25,31),
(172,25,31),
(171,25,31),
(169,25,30),
(167,24,30),
(165,24,29),
(164,24,29),
(162,23,29),
(160,23,28),
(158,23,28),
(157,23,27),
(155,22,27),
(153,22,27),
(151,22,26),
(150,22,26),
(146,21,25)]
hotcold18= [( 24 , 24 ,112),
( 16 , 78 ,139),
( 23 ,116 ,205),
( 72 ,118 ,255),
( 91 ,172 ,237),
( 173 ,215 ,230),
( 209 ,237 ,237),
( 229 ,239 ,249),
#( 242 ,255 ,255),
( 255 ,255 ,255),
#( 253 ,245 ,230),
( 255 ,228 ,180),
( 243 ,164 , 96),
( 237 ,118 , 0),
( 205 ,102 , 29),
( 224 , 49 , 15),
#( 255, 0 , 0),
( 255, 0 , 255),
(183,75,243),
(183,75,243)]
#(255,0,255)] #,
#(81,9,121)]
"""
( 237 , 0 , 0),
( 205 , 0 , 0),
( 139 , 0 , 0)]
"""
haxby= [ (37,57,175) ,
(37,68,187) ,
(38,79,199) ,
(38,90,211) ,
(39,101,223) ,
(39,113,235) ,
(40,124,247) ,
(41,134,251) ,
(43,144,252) ,
(44,154,253) ,
(46,164,253) ,
(47,174,254) ,
(49,184,255) ,
(54,193,255) ,
(62,200,255) ,
(71,207,255) ,
(80,214,255) ,
(89,221,255) ,
(98,229,255) ,
(107,235,254) ,
(112,235,241) ,
(117,235,228) ,
(122,235,215) ,
(127,236,202) ,
(132,236,189) ,
(137,236,177) ,
(147,238,172) ,
(157,241,171) ,
(168,244,169) ,
(178,247,167) ,
(189,250,165) ,
(200,253,163) ,
(208,253,159) ,
(213,250,152) ,
(219,247,146) ,
(224,244,139) ,
(230,241,133) ,
(236,238,126) ,
(240,235,120) ,
(243,227,115) ,
(245,220,109) ,
(248,212,104) ,
(250,205,98) ,
(252,197,93) ,
(255,190,88) ,
(255,185,84) ,
(255,181,81) ,
(255,176,78) ,
(255,172,75) ,
(255,167,72) ,
(255,163,69) ,
(255,163,74) ,
(255,167,85) ,
(255,171,95) ,
(255,175,105) ,
(255,179,115) ,
(255,183,126) ,
(255,189,139) ,
(255,200,158) ,
(255,211,178) ,
(255,222,197) ,
(255,233,216) ,
(255,244,236) ,
(255,255,255) ]
BWR=[ ( 36 , 0 , 216),
( 24 , 28 , 247),
( 40 , 87 , 255),
( 61 , 135 , 255),
( 86 , 176 , 255),
( 117 , 211 , 255),
( 153 , 234 , 255),
( 188 , 249 , 255),
( 234 , 255 , 255),
( 255 , 255 , 255),
( 255 , 241 , 188),
( 255 , 214 , 153),
( 255 , 172 , 117),
( 255 , 120 , 86),
( 255 , 61 , 61),
#( 247 , 39 , 53),
( 165 , 0 , 33)]
"""
( 216 , 21 , 47),
( 165 , 0 , 33)]
"""
BWR=[ #( 0 , 0 , 0),
( 16 , 78 , 139),
#( 23 , 116 , 205),
#( 61 , 135 , 255),
( 86 , 176 , 255),
( 117 , 211 , 255),
( 153 , 234 , 255),
( 188 , 249 , 255),
( 234 , 255 , 255),
( 255 , 255 , 255),
( 255 , 241 , 188),
( 255 , 214 , 153),
( 255 , 172 , 117),
( 255 , 120 , 86),
( 255 , 61 , 61),
( 165 , 0 , 33)]
#( 247 , 39 , 53)]
tableau20 = [ (127, 127, 127),(174, 199, 232), (31, 119, 180), (255, 187, 120),
(214, 39, 40),(152, 223, 138), (44, 160, 44), (255, 152, 150),
(148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
(227, 119, 194), (247, 182, 210), (255, 127, 14),(199, 199, 199),
(188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229),(65,68,81),(0,0,0)]
def buildcmp(cmaplist):
    for i in range(len(cmaplist)):
        r, g, b = cmaplist[i]
        cmaplist[i] = (r / 255., g / 255., b / 255.)
    return LinearSegmentedColormap.from_list("precip", cmaplist, N=len(cmaplist)), cmaplist
cmap_cs_precp,cs_precp_list=buildcmp(cmap_cs_precp)
cmap_haxby,haxby_list=buildcmp(haxby[::5])
cmap_BWR,BWR_list=buildcmp(BWR)
cmap_BWR.set_over('purple')
cmap_BWR.set_under('blue')
cmap_cs_precp.set_over('purple')
cmap_tableau20,tableau20=buildcmp(tableau20)
cmap_hotcold18,hotcold18=buildcmp(hotcold18)
cmap_hotcold18.set_over('blueviolet')
cmap_hotcold18.set_under('black')
cmap_WBGYR,WBGYR=buildcmp(WBGYR)
sim_nicename={"ERI":"ERI",
"ERI_CAR":"CWRF-CAR",
"cor":"Inter-annual cor of",
"Xcor":"Cor between ori of",
"Xcorbias":"Cor between bias of",
"RegCM":"RegCM4.6",
"ConRatio":"Conv to Total Pr in PCT95 day",
"PCT":"Extreme Precipitation",
"RAINYDAYS":"Days with Precipitation",
"NX":"North Xinjiang",
"SX":"South Xinjiang",
"WT":"West Tibet",
"ET":"East Tibet",
"ST":"South Tibet",
"IM":"Inner Mongolia",
"SW":"Southwest",
"NE":"Northeast",
"NC":"North China",
"CC":"Central China",
"SC":"South China",
"T2MAX":"T2X",
"AT2M" :"T2M",
"T2MIN":"T2N",
"PRAVG":"PR",
"AT2M97":"A97",
"SDII":"DI",
"CN_OBS":"OBS",
#"RAINYDAYS":"RD",
"run_RegCM4.6":"RegCM\n4.6",
"run_RegCM4.5":"RegCM\n4.5",
"ERI_run_0":"old CWRF",
"new_ERI_run_0":"CWRF\nMor",
"new_ERI_gsfc":"CWRF",
"new_ERI_albedo":"CWRF",
# "new_ERI_gsfc":"CWRF\nGSFC",
"new_ERI_morr":"Mor",
"run_00":"CTL",
"xoml":"new_xoml",
"run_01":"BMJ",
"run_02":"NKF",
"run_03":"NSAS",
"run_04":"TDK",
"run_06":"MB",
"run_06":"THO",
"run_07":"MOR",
"run_08":"WD6",
"run_09":"AER",
"run_10": "XR", # "Radall",
"run_11":"CCCMA",
"run_12":"FLG",
"run_13":"RRTMG",
"run_14":"MYNN",
"run_15":"ACM",
"run_16":"UW",
"run_17":"NOAH",
"run_18":"XOML",
"run_19":"F-M",
"run_20":"FMBMJ",
"run_21":"FMNKF",
"run_22":"FMNSAS",
"run_23":"FMTDK",
"run_24":"FMMB",#"scheme_cst_2",
"run_25":"FMTHO",#"scheme_cst_3",
"run_26":"FMMOR",#"scheme_cst_3",
"run_27":"boulac",#"scheme_ccb_1",
"run_28":"gfs",#"scheme_ccb_4",
"run_29":"mynn2",#"scheme_ccb_5",
"run_30":"new cloud",#"scheme_ccs_3",
"run_31":"boulac", #"NewTHO",
"run_32":"gfs2", #"NewMOR",
"run_33":"", #"NewMOR",
"run_34":"New Melt", #"NewMOR",
"run_35":"old CAM", #"NewMOR",<|fim▁hole|> "run_39":"CF", #"NewMOR",
"run_40":"NewDrain V0", #"NewMOR",
"run_41":"Warm start V1", #"NewMOR",
"run_42":"Cold start V1", #"NewMOR",
"run_43":"inflx ", #"NewMOR",
"run_44":"om ", #"NewMOR",
"run_45":"New Soil Water", #"NewMOR",
"run_46":"New Reff", #"NewMOR",
"run_47":"OISST", #"NewMOR",
"run_48":"NOoml", #"NewMOR",
"run_49":"NOocean", #"NewMOR",
"run_50":"MSA_newSW", #"ERIsst"
"run_51":"NoMSA ipf0", #"NewMOR",
"run_52":"new UWCAM", #"NewMOR",
"run_53":"NoMSA ipf2", #"NewMOR",
"run_54":"AERO_MSAon", #"NewMOR",
"run_55":"AERO_MSAold", #"NewMOR",
"run_56":"noAERO", #"NewMOR",
"run_57":"OBC_V0", #"SVNcode", #"NewMOR",
"run_58":"OBClg100", #"NewMOR",
"run_59":"OBClg111", #"NewMOR",
"run_60":"WRF", #"NewMOR",
"run_61":"ALBfix", #"NewMOR",
"run_62":"PSFC4_NSW", #"NewMOR",
"run_63":"PSFC4_OSW", #"NewMOR",
"run_64":"psfc4_osw_CAMUW", #"NewMOR",
"run_65":"git558faed", #"NewMOR",
"run_66":"psfc4morr", #"NewMOR",
"run_67":"newsw_morr", #"NewMOR",
"run_68":"psfc4_osw_v2", #"NewMOR",
"run_69":"WRFRUN", #
"run_70":"PSFC4_NSW", #oldini0
"run_71":"PSFC4_V0", #"PSFC4_SVNCODE"
"run_72":"OBC_OSW" , #"oldBC_osw"
"run_73":"PSFC4_br_OSW" , #"oldBC_osw"
"run_74":"OLDini_br_NSW" , #"oldBC_osw"
"run_75":"OLDini_br_V0" , #"oldBC_osw"
"run_76":"OLDini_br_558faed" , #"oldBC_osw"
"run_77":"OVEG_NSW" , #"oldBC_osw"
"run_78":"OVEG_OSW" , #"oldBC_osw"
"run_79":"OVEG_V0" , #"oldBC_osw"
"run_80":"HydRED" , #"oldBC_osw"
"run_81":"CTL" , #"oldBC_osw"
"run_82":"newcam" , #"oldBC_osw"
"run_oldSW_flw8_new":"CWRF",
"ERI_run_1":"CWRF/CLM4.5",
"CESM_run_0":"CWRF/CSSP",
"CESM_run_1":"CWRF/CLM4.5",
"PCR85-CESM_run_0":"CWRF/CSSP",
"PCR85-CESM_run_1":"CWRF/CLM4.5",
"run_CTL":"CTL ",
"CESM":"CESM",
"run_CLM4.5":"CLM4.5Hyd ",
"run_Red":"HydRed ",
"run_noxoml":"NO xoml ",
"run_nolake":"NO lake ",
"run_oldrad" :"Old Alb ",
"run_oldveg":"Old LAI ",
"run_noforzen":"Old frozen ",
"Mean":"Mean",
"Mean_Sub":"Mean_Sub",
"Med":"Med",
"P85":"P85",
"P80":"P80",
"P70":"P70",
"P10":"P10",
"P20":"P20",
"Obs":"OBS",
"OBS":"OBS",
"Max":"Max",
"run_1":"MY/MO/W1.5/MC0.5/TD0",
"run_2":"CAM/GSFC/W1.5/MC0.75/TD0",
"run_3":"MY/MO/W1.5/MC0.75/TD0",
"run_4":"MY/MO/W1/MC0.75/TD0",
"run_5":"MY/MO/W1/MC0.75/TD0.5",
"run_6":"MY/MO/W1/MC1/TD0",
"run_7":"MY/MO/W1/MC1/TD1"}
#plotres={'PRAVG':{},'PCT':{},'CDD':{},'RAINYDAYS':{},'AT2M':{},'ASWUPT':{}}
from collections import defaultdict
plotres= defaultdict(dict)
##########################set the plot related parameters#####################
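# Illustrative consumer sketch (hypothetical; the actual plotting code lives
# elsewhere) of how a plotres entry could be applied to a field:
#   res = plotres['PRAVG']
#   plt.contourf(lon, lat, data * res.get('convertcoef', 1), cmap=res['cmp1'])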
#plotres['XRSUR']['cleve1']=[x*1e-6 for x in range(31)]
plotres['XRSUR']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['XRSUR']['cmp2']=cmp
#plotres['XRSUR']['convertcoef']=0.001
plotres['XRSUR']['unit']="kg/m2/day"
plotres['XRSUR']['mask']=True
plotres['XRSUR']['violion']=False
#plotres['XRBAS']['cleve1']=[x*1e-6 for x in range(31)]
plotres['XRBAS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['XRBAS']['cmp2']=cmp
plotres['XRBAS']['unit']="kg/m2/day"
plotres['XRBAS']['mask']=True
plotres['XRBAS']['violion']=False
#plotres['SFROFF']['cleve1']=[x*10000 for x in range(31)]
plotres['SFROFF']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['SFROFF']['cmp2']=cmp
#plotres['SFROFF']['convertcoef']=0.001
plotres['SFROFF']['unit']="kg/m2"
plotres['SFROFF']['mask']=True
plotres['SFROFF']['violion']=False
#plotres['XSMTg']['cleve1']=[x*20 for x in range(1,20)] #range(0, 1,0.05)
plotres['XSMTg']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['XSMTg']['cmp2']=cmp
plotres['XSMTg']['unit']="kg/m2"
plotres['XSMTg']['mask']=True
plotres['XSMTg']['violion']=False
plotres['XSMTg']['vlevel']=4
#plotres['AODNIR']['cleve0']=[x*0.05 for x in range(0,11)] #range(0, 1,0.05)
#plotres['AODNIR']['cleve1']=[x*0.05 for x in range(0,11)] #range(0, 1,0.05)
plotres['AODNIR']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['AODNIR']['cmp2']=cmp
#plotres['AODNIR']['convertcoef']=0.01
plotres['AODNIR']['unit']=""
plotres['AODNIR']['mask']=True
#plotres['AODVIS']['cleve0']=[x*0.05 for x in range(0,11)] #range(0, 1,0.05)
#plotres['AODVIS']['cleve1']=[x*0.05 for x in range(0,11)] #range(0, 1,0.05)
plotres['AODVIS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['AODVIS']['cmp2']=cmp
#plotres['AODVIS']['convertcoef']=0.01
plotres['AODVIS']['unit']=""
plotres['AODVIS']['mask']=True
#plotres['CLDFRAh']['cleve1']=[x*0.05 for x in range(0,21)] #range(0, 1,0.05)
plotres['CLDFRAh']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['CLDFRAh']['cmp2']=cmp
#plotres['CLDFRAh']['convertcoef']=0.01
plotres['CLDFRAh']['unit']=""
plotres['CLDFRAh']['mask']=True
plotres['CLDFRAh']['violion']=False
plotres['CLDFRAh']['vlevel']=3
#plotres['CLDFRAm']['cleve1']=[x*0.05 for x in range(0,21)] #range(0, 1,0.05)
plotres['CLDFRAm']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['CLDFRAm']['cmp2']=cmp
#plotres['CLDFRAm']['convertcoef']=0.01
plotres['CLDFRAm']['unit']=""
plotres['CLDFRAm']['mask']=True
plotres['CLDFRAm']['violion']=False
plotres['CLDFRAm']['vlevel']=2
#plotres['CLDFRAl']['cleve1']=[x*0.05 for x in range(0,21)] #range(0, 1,0.05)
plotres['CLDFRAl']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['CLDFRAl']['cmp2']=cmp
#plotres['CLDFRAl']['convertcoef']=0.01
plotres['CLDFRAl']['unit']=""
plotres['CLDFRAl']['mask']=True
plotres['CLDFRAl']['violion']=False
plotres['CLDFRAl']['vlevel']=1
#plotres['CLDFRA']['cleve1']=[x*0.05 for x in range(0,21)] #range(0, 1,0.05)
plotres['CLDFRA']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['CLDFRA']['cmp2']=cmp
#plotres['CLDFRA']['convertcoef']=0.01
plotres['CLDFRA']['unit']=""
plotres['CLDFRA']['mask']=True
plotres['CLDFRA']['violion']=False
plotres['CLDFRA']['vlevel']=0
#plotres['QVAPOR']['cleve1']=range(0, 20,1)
plotres['QVAPOR']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['QVAPOR']['cmp2']=cmp
plotres['QVAPOR']['convertcoef']=1000
plotres['QVAPOR']['unit']="$g/kg$"
plotres['QVAPOR']['mask']=False
plotres['QVAPOR']['violion']=False
plotres['QVAPOR']['vlevel']=21
#plotres['TCWPC']['cleve1']=range(0, 200,10)
plotres['TCWPC']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['TCWPC']['cmp2']=cmp
plotres['TCWPC']['unit']="$g/m^{2}$"
plotres['TCWPC']['mask']=True
plotres['TCWPC']['violion']=False
#plotres['V']['cleve1']=range(-10, 10,1)
plotres['V']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['V']['cmp2']=cmp
plotres['V']['unit']="$m/s$"
plotres['V']['mask']=False
plotres['V']['violion']=False
plotres['V']['vlevel']=21
#plotres['U']['cleve1']=range(-10, 10,1)
plotres['U']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['U']['cmp2']=cmp
plotres['U']['unit']="$m/s$"
plotres['U']['mask']=False
plotres['U']['violion']=False
plotres['U']['vlevel']=21
#plotres['PSL']['cleve1']=range(1000, 1024,1)
plotres['PSL']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['PSL']['cmp2']=cmp
plotres['PSL']['unit']="$\%$"
plotres['PSL']['convertcoef']=0.01
plotres['PSL']['mask']=False
plotres['PSL']['violion']=False
#plotres['PS']['cleve1']=range(700, 1030,5)
plotres['PS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['PS']['cmp2']=cmp
plotres['PS']['unit']="$\%$"
plotres['PS']['convertcoef']=0.01
plotres['PS']['mask']=False
plotres['PS']['violion']=False
#plotres['ALBEDO']['cleve1']=range(0, 60,5)
plotres['ALBEDO']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ALBEDO']['cmp2']=cmp
plotres['ALBEDO']['unit']="$\%$"
plotres['ALBEDO']['convertcoef']=100
plotres['ALBEDO']['mask']=False
plotres['ALBEDO']['violion']=False
#plotres['ASWUPT']['cleve1']=range(80,160,10)
plotres['ASWUPT']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ASWUPT']['cmp2']=cmp
plotres['ASWUPT']['unit']="$W m^{-2}$"
plotres['ASWUPT']['mask']=True
plotres['ASWUPT']['violion']=False
#plotres['ASWUPS']['cleve1']=range(0,210,10)
plotres['ASWUPS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ASWUPS']['cmp2']=cmp
plotres['ASWUPS']['unit']="$W m^{-2}$"
plotres['ASWUPS']['mask']=True
plotres['ASWUPS']['violion']=False
#plotres['ALWDNS']['cleve1']=range(20,410,50)
#plotres['ALWDNS']['cleve0']=range(20,410,10)
plotres['ALWDNS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ALWDNS']['cmp2']=cmp
plotres['ALWDNS']['unit']="$W m^{-2}$"
plotres['ALWDNS']['mask']=True
plotres['ALWDNS']['violion']=False
#plotres['ASWDNS']['cleve1']=range(20,410,50)
#plotres['ASWDNS']['cleve0']=range(20,410,10)
plotres['ASWDNS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ASWDNS']['cmp2']=cmp
plotres['ASWDNS']['unit']="$W m^{-2}$"
plotres['ASWDNS']['mask']=True
plotres['ASWDNS']['violion']=False
#plotres['ALWUPS']['cleve1']=range(200,510,10)
plotres['ALWUPS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ALWUPS']['cmp2']=cmp
plotres['ALWUPS']['unit']="$W m^{-2}$"
plotres['ALWUPS']['mask']=True
plotres['ALWUPS']['violion']=False
#plotres['ALWDNS']['cleve1']=range(150,450,10)
plotres['ALWDNS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ALWDNS']['cmp2']=cmp
plotres['ALWDNS']['unit']="$W m^{-2}$"
plotres['ALWDNS']['mask']=True
plotres['ALWDNS']['violion']=False
#plotres['ALWUPT']['cleve1']=range(150,360,10)
plotres['ALWUPT']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ALWUPT']['cmp2']=cmp
plotres['ALWUPT']['unit']="$W m^{-2}$"
plotres['ALWUPT']['mask']=True
plotres['ALWUPT']['violion']=False
#plotres['PrMAX']['cleve0']=range(1,35)
#plotres['PrMAX']['cleve1']=range(0,51,5)
# import colormaps as cmaps
# cmp=cmap=cmaps.viridis
plotres['PrMAX']['cmp1']=plt.get_cmap('jet')
#plotres['PrMAX']['cmp1']=cm.s3pcpn
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['PrMAX']['cmp2']=cmp
plotres['PrMAX']['unit']="mm/day"
plotres['PrMAX']['convertcoef']=60*60*24
plotres['PrMAX']['mask']=True
plotres['PrMAX']['violion']=True
#plotres['PRAVG']['cleve1']=[0.5,1.0,1.5,2.0,2.5,3.0,3.5,4.0,4.5,5,6,7,8,9,10,11,12,13,14]
#plotres['PRAVG']['cleve3']=range(10)
plotres['PRAVG']['cmp1']=cmap_cs_precp
cmp =plt.get_cmap('Spectral_r');cmp.set_over('maroon');cmp.set_under('w')
plotres['PRAVG']['cmp3']=plt.get_cmap('RdYlBu_r') #cmap_WBGYR #plt.get_cmap('jet')
cmp =cmap_BWR
plotres['PRAVG']['cmp2']=cmp
plotres['PRAVG']['unit']="mm/day"
plotres['PRAVG']['violion']=True
#plotres['R95T']['cleve1']=[x*0.04 for x in range(0,21)] #range(0, 1,0.05)
#plotres['R95T']['cleve0']=[x*0.04 for x in range(0,21)] #range(0, 1,0.05)
plotres['R95T']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['R95T']['cmp2']=cmp
plotres['R95T']['unit']=""
plotres['R95T']['convertcoef']=1
#plotres['PCT']['cleve0']=[0,2,4,6,8,10,15,20,25,30,40,50,60]
#plotres['PCT']['cleve1']=[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
plotres['PCT']['cmp1']=cmap_cs_precp
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('w')
plotres['PCT']['cmp2']=cmp
plotres['PCT']['unit']="mm/day"
plotres['PCT']['convertcoef']=1
plotres['ConRatio']['cmp1']=cmap_cs_precp
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('w')
plotres['ConRatio']['cmp2']=cmp
plotres['ConRatio']['unit']=""
#plotres['PCT99']['cleve0']=[0,2,4,6,8,10,15,20,25,30,40,50,60]
#plotres['PCT99']['cleve1']=[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
plotres['PCT99']['cmp1']=cmap_cs_precp
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('w')
plotres['PCT99']['cmp2']=cmp
plotres['PCT99']['unit']="mm/day"
plotres['PCT99']['convertcoef']=1
#plotres['CDD']['cleve0']=[-20,-18,-16,-14,-10,-8,-6,-4,-2,2,4,6,8,10,12,14,16,18,20,22]
#plotres['CDD']['cleve1']=[4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
plotres['CDD']['cmp1']=cmap_cs_precp
plotres['CDD']['cmp2']=None
plotres['CDD']['unit']="day"
plotres['CDD']['convertcoef']=1
plotres['CDD']['mask']=True
#plotres['SDII']['cleve0']=range(1,15)
#plotres['SDII']['cleve1']=range(1,20)
plotres['SDII']['cmp1']=cmap_cs_precp
plotres['SDII']['cmp2']=None
plotres['SDII']['unit']="mm/day"
plotres['SDII']['convertcoef']=1
plotres['SDII']['mask']=True
#plotres['R5D']['cleve0']=[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
#plotres['R5D']['cleve1']=[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
plotres['R5D']['cmp1']=cmap_cs_precp
plotres['R5D']['cmp2']=None
plotres['R5D']['unit']="mm/day"
plotres['R5D']['convertcoef']=1 # divided by 5 days
plotres['R5D']['mask']=True
#plotres['R10']['cleve0']=[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
#plotres['R10']['cleve1']=[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
plotres['R10']['cmp1']=cmap_cs_precp
plotres['R10']['cmp2']=None
plotres['R10']['unit']="day"
plotres['R10']['convertcoef']=1
plotres['R10']['mask']=True
#plotres['RAINYDAYS']['cleve0']=range(5,95,5)
#plotres['RAINYDAYS']['cleve1']=range(5,95,5)
plotres['RAINYDAYS']['cmp1']=cmap_cs_precp
plotres['RAINYDAYS']['cmp2']=None
plotres['RAINYDAYS']['unit']="day"
plotres['RAINYDAYS']['convertcoef']=1
plotres['RAINYDAYS']['mask']=True
#plotres['T2MAX']['cleve1']=range(-10,41)
#plotres['T2MAX']['cleve0']=[-9,-8,-7,-6,-5,-4,-3,-2,-1,1,2,3,4,5,6,7]
#plotres['T2MAX']['cmp1']=plt.get_cmap('jet')
#plotres['T2MAX']['cmp1']=cmap_cs_precp
plotres['T2MAX']['cmp1']=plt.get_cmap('jet')
cmp =cmap_BWR
plotres['T2MAX']['cmp2']=cmp
plotres['T2MAX']['unit']="$^\circ$C"
plotres['T2MAX']['convertcoef']=1
plotres['T2MAX']['mask']=True
plotres['T2MAX']['valuemask']=True
plotres['T2MAX']['shift']=-273.15
#plotres['T2MIN']['cleve1']=range(-10,41)
#plotres['T2MIN']['cleve0']=[-9,-8,-7,-6,-5,-4,-3,-2,-1,1,2,3,4,5,6,7]
#plotres['T2MIN']['cmp1']=plt.get_cmap('jet')
#plotres['T2MIN']['cmp1']=cmap_cs_precp
plotres['T2MIN']['cmp1']=plt.get_cmap('jet')
cmp =cmap_BWR
plotres['T2MIN']['cmp2']=cmp
plotres['T2MIN']['unit']="$^\circ$C"
plotres['T2MIN']['convertcoef']=1
plotres['T2MIN']['mask']=True
plotres['T2MIN']['valuemask']=True
plotres['T2MIN']['shift']=-273.15
#plotres['AT2M']['cleve0']=[-9,-8,-7,-6,-5,-4,-3,-2,-1,1,2,3,4,5,6,7]
#plotres['AT2M']['cleve1']=range(-10,31,2)
#plotres['AT2M']['cleve3']=range(10)
plotres['AT2M']['cmp1']=plt.get_cmap('jet')
cmp =cmap_BWR
plotres['AT2M']['cmp2']=cmp
plotres['AT2M']['unit']="$^\circ$C"
plotres['AT2M']['convertcoef']=1
plotres['AT2M']['valuemask']=True
plotres['AT2M']['shift']=-273.15
#plotres['AT2M97']['cleve0']=[-10, -9, -8, -7, -6, -5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
#plotres['AT2M97']['cleve1']=range(-15,35,2)
#plotres['AT2M97']['cleve3']=range(10)
plotres['AT2M97']['cmp1']=plt.get_cmap('gist_rainbow_r')
cmp = plt.get_cmap('PuOr_r') #plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['AT2M97']['cmp2']=cmp
plotres['AT2M97']['unit']="$^\circ$C"
plotres['AT2M97']['convertcoef']=1
plotres['AT2M97']['valuemask']=True
plotres['AT2M97']['shift']=-273.15
#plotres['DTR']['cmp1']=cmap_cs_precp
plotres['DTR']['cmp1']=plt.get_cmap('jet')
cmp =cmap_BWR
plotres['DTR']['cmp2']=cmp
plotres['DTR']['unit']="$^\circ$C"
plotres['DTR']['convertcoef']=1
plotres['DTR']['valuemask']=True
plotres['RH']['cmp1']=plt.get_cmap('viridis_r')
cmp =cmap_BWR
plotres['RH']['cmp2']=cmp
plotres['RH']['unit']="$\%$"
plotres['RH']['convertcoef']=1
plotres['RH']['valuemask']=True
plotres['WIN']['cmp1']=cmap_haxby
cmp =cmap_BWR
plotres['WIN']['cmp2']=cmp
plotres['WIN']['unit']="$m/s$"
plotres['WIN']['convertcoef']=1
plotres['WIN']['valuemask']=True
plotres['GUST']['cmp1']=cmap_haxby
cmp =cmap_BWR
plotres['GUST']['cmp2']=cmp
plotres['GUST']['unit']="$m/s$"
plotres['GUST']['convertcoef']=1
plotres['GUST']['valuemask']=True<|fim▁end|> | "run_36":"NewSW", #"NewMOR",
"run_37":"ACM", #"NewMOR",
"run_38":"bedrock", #"NewMOR", |
<|file_name|>unloadBleeps.ts<|end_file_name|><|fim▁begin|>import { BleepsGenerics } from '../../constants';
import { unloadBleep } from '../unloadBleep';
const unloadBleeps = (bleeps: BleepsGenerics): void => {
Object.keys(bleeps).forEach(bleepName => unloadBleep(bleeps, bleepName));<|fim▁hole|><|fim▁end|> | };
export { unloadBleeps }; |
<|file_name|>0010_auto_20160906_1442.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-09-06 09:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hackerspace', '0009_verbal_subcategory'),
]
operations = [
migrations.RemoveField(
model_name='programmingquestion',
name='op1',
),
migrations.RemoveField(
model_name='programmingquestion',
name='op2',
),
migrations.RemoveField(
model_name='programmingquestion',
name='op3',
),
migrations.RemoveField(
model_name='programmingquestion',
name='op4',<|fim▁hole|> ),
migrations.AddField(
model_name='programmingquestion',
name='Output',
field=models.CharField(default='1', max_length=200),
preserve_default=False,
),
migrations.AddField(
model_name='programmingquestion',
name='TestCases',
field=models.CharField(default='2', max_length=200, verbose_name='Test Cases'),
preserve_default=False,
),
migrations.AddField(
model_name='quiz',
name='Answer',
field=models.CharField(default='3', max_length=200),
preserve_default=False,
),
migrations.AddField(
model_name='verbal',
name='Answer',
field=models.CharField(default='3', max_length=200),
preserve_default=False,
),
migrations.AlterField(
model_name='programmingquestion',
name='subCategory',
field=models.CharField(choices=[(1, 'Strings'), (2, 'Dynamic Programming'), (3, 'Arrays'), (4, 'Data Structures')], max_length=200),
),
migrations.AlterField(
model_name='test',
name='ProgrammingTagName',
field=models.CharField(choices=[(1, 'Strings'), (2, 'Dynamic Programming'), (3, 'Arrays'), (4, 'Data Structures')], max_length=200, verbose_name='Programming Tags'),
),
migrations.AlterField(
model_name='verbal',
name='subCategory',
field=models.CharField(choices=[(1, 'Comprehension'), (2, 'Error Identification')], max_length=200),
),
]<|fim▁end|> | |
<|file_name|>toolbar.js<|end_file_name|><|fim▁begin|>'use strict';<|fim▁hole|><|fim▁end|> |
Editor.polymerElement({
}); |
<|file_name|>libtiff-prepare-test.py<|end_file_name|><|fim▁begin|># Copyright (C) 2016 Fan Long, Martin Rianrd and MIT CSAIL
# Prophet
#
# This file is part of Prophet.
#
# Prophet is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Prophet is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Prophet. If not, see <http://www.gnu.org/licenses/>.
#!/usr/bin/env python
from os import system, chdir, getcwd
from sys import argv
import subprocess
build_cmd = argv[1];<|fim▁hole|>dep_dir = argv[2];
src_dir = argv[3];
test_dir = argv[4];
rev = argv[5];
if (len(argv) < 7):
out_dir = test_dir + "-" + rev;
else:
out_dir = argv[6];
work_dir = "__tmp" + rev;
system("cp -rf " + src_dir + " " + work_dir);
ori_dir = getcwd();
chdir(work_dir);
system("git checkout -f " + rev);
system("git clean -f -d");
chdir(ori_dir);
system(build_cmd + " -p " + dep_dir + " " + work_dir);
system("mv " + work_dir + "/test " + work_dir+"/ori_test");
system("cp -rf " + test_dir + " " + work_dir + "/test");
chdir(work_dir + "/test");
system("GENEXPOUT=1 CMPEXPOUT=0 make check");
chdir(ori_dir);
print "Goint to generate testdir for revision " + rev + " case: " + out_dir;
system("cp -rf " + test_dir + " " + out_dir);
system("cp -rf " + work_dir + "/test/*.exp " + work_dir + "/test/*.tol " + out_dir+"/");
system("rm -rf " + work_dir);<|fim▁end|> | |
<|file_name|>external.go<|end_file_name|><|fim▁begin|>package main
import (
"bufio"
"crypto/tls"
"io"
"log"
"net"
"time"
"unicode/utf8"
)
const (
ONE_SECOND_NS = 1000 * 1000 * 1000 // One second in nanoseconds
)
/*******************
* ExternalManager *
*******************/
type ExternalManager struct {
connections map[string]*External
fromServer chan *Line
}
func NewExternalManager(fromServer chan *Line) *ExternalManager {
return &ExternalManager{make(map[string]*External), fromServer}
}
func (self *ExternalManager) Connect(addr string) {
server, pass := splitNetPass(addr)
if self.connections[server] == nil {
self.connections[server] = NewExternal(server, pass, self.fromServer)
go self.connections[server].Consume()
}
}
func (self *ExternalManager) Identify(network, password string) {
ext := self.connections[network]
if ext == nil {
log.Println("Error: no network for ", network)
return
}
ext.Identify(password)
}
func (self *ExternalManager) SendMessage(network, channel, msg string) {
ext := self.connections[network]
if ext == nil {
log.Println("Error: no network for ", network)
return
}
ext.SendMessage(channel, msg)
}
func (self *ExternalManager) SendAction(network, channel, msg string) {
ext := self.connections[network]
if ext == nil {
log.Println("Error: no network for ", network)
return
}
ext.SendAction(channel, msg)
}
func (self *ExternalManager) doCommand(network, content string) {
ext := self.connections[network]
if ext == nil {
log.Println("Error: no network for ", network)
return
}
ext.doCommand(content)
}
func (self *ExternalManager) Close() error {
for _, conn := range self.connections {
conn.Close()
}
self.connections = nil
return nil
}
/************
* External *
************/
type External struct {
network string
pass string
socket net.Conn
fromServer chan *Line
rawLog *log.Logger
isIdentified bool
}
func NewExternal(server string, pass string, fromServer chan *Line) *External {
logFilename := *logdir + "/server_raw.log"
logFile := openLogFile(logFilename)
rawLog := log.New(logFile, "", log.LstdFlags)
log.Println("Logging raw IRC messages to:", logFilename)
conn := &External{
network: server,
pass: pass,<|fim▁hole|>
return conn
}
func (self *External) connect() {
var err error
self.socket, err = sock(self.network, 5)
if err != nil {
log.Fatal("Error connecting to IRC server: ", err)
}
time.Sleep(ONE_SECOND_NS)
if self.pass != "" {
self.SendRaw("PASS " + self.pass)
}
}
/* A socket connection to the given network (ip:port). */
func sock(network string, tries int) (net.Conn, error) {
var socket net.Conn
var err error
for tries > 0 {
socket, err = tls.Dial("tcp", network, nil) // Always try TLS first
if err == nil {
log.Println("Secure TLS connection to", network)
return socket, nil
}
socket, err = net.Dial("tcp", network)
if err == nil {
log.Println("Insecure connection to", network)
return socket, nil
}
log.Println("Connection attempt failed:", err)
time.Sleep(ONE_SECOND_NS)
tries--
}
return socket, err
}
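// Note: sock always tries TLS first and silently falls back to plaintext
// TCP, retrying up to `tries` times with a one-second pause between
// attempts; callers that require a secure channel must verify it themselves.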
// Identify with NickServ. Must have already sent NICK.
func (self *External) Identify(password string) {
if !self.isIdentified {
log.Println("Identifying with NickServ")
self.SendMessage("NickServ", "identify "+password)
self.isIdentified = true
}
}
// Send a regular (non-system command) IRC message
func (self *External) SendMessage(channel, msg string) {
fullmsg := "PRIVMSG " + channel + " :" + msg
self.SendRaw(fullmsg)
}
// Send a /me action message
func (self *External) SendAction(channel, msg string) {
fullmsg := "PRIVMSG " + channel + " :\u0001ACTION " + msg + "\u0001"
self.SendRaw(fullmsg)
}
// Send message down socket. Add \n at end first.
func (self *External) SendRaw(msg string) {
var err error
msg = msg + "\n"
self.rawLog.Print(" -->", msg)
_, err = self.socket.Write([]byte(msg))
if err == io.EOF {
log.Println("SendRaw: IRC server closed connection.")
self.Close()
} else if err != nil {
log.Fatal("Error writing to socket: ", err)
}
}
// Process a slash command
func (self *External) doCommand(content string) {
content = content[1:]
self.SendRaw(content)
}
// Read IRC messages from the connection and act on them
func (self *External) Consume() {
defer logPanic()
var contentData []byte
var content string
var err error
bufRead := bufio.NewReader(self.socket)
for {
self.socket.SetReadDeadline(time.Now().Add(ONE_SECOND_NS))
contentData, err = bufRead.ReadBytes('\n')
if err != nil {
netErr, ok := err.(net.Error)
			if ok && netErr.Timeout() {
continue
} else if err == io.EOF {
log.Println("Consume: IRC server closed connection.")
self.Close()
// Reconnect
log.Println("Attempting to reconnect")
self.connect()
bufRead = bufio.NewReader(self.socket)
continue
} else {
log.Fatal("Consume Error:", err)
}
}
if len(contentData) == 0 {
continue
}
content = toUnicode(contentData)
self.rawLog.Println(content)
line, err := ParseLine(content)
if err == nil {
line.Network = self.network
self.act(line)
} else {
log.Println("Invalid line:", content)
}
}
}
// Converts an array of bytes to a string
// If the bytes are valid UTF-8, return those (as string),
// otherwise assume we have ISO-8859-1 (latin1, and kinda windows-1252),
// and use the bytes as unicode code points, because ISO-8859-1 is a
// subset of unicode
func toUnicode(data []byte) string {
var result string
if utf8.Valid(data) {
result = string(data)
} else {
runes := make([]rune, len(data))
for index, val := range data {
runes[index] = rune(val)
}
result = string(runes)
}
return result
}
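// Illustrative example: toUnicode([]byte{0xE9}) is not valid UTF-8 (a lone
// 0xE9 is an incomplete sequence), so the byte is read as the Latin-1 code
// point U+00E9 and the result is "é".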
// Do something with a line
func (self *External) act(line *Line) {
if line.Command == "PING" {
// Reply, and send message on to client
self.SendRaw("PONG " + line.Content)
} else if line.Command == "VERSION" {
versionMsg := "NOTICE " + line.User + " :\u0001VERSION " + VERSION + "\u0001\n"
self.SendRaw(versionMsg)
}
self.fromServer <- line
}
func (self *External) Close() error {
return self.socket.Close()
}<|fim▁end|> | fromServer: fromServer,
rawLog: rawLog,
}
conn.connect() |
<|file_name|>logic.js<|end_file_name|><|fim▁begin|>const fs = require('fs');
const util = require('../../util');
// test equality
function equal(a, b, options) {<|fim▁hole|> return options.inverse(this);
}
// greater than (note: the check below actually uses >=)
function gt(a, b, options) {
if (a >= b) {
return options.fn(this);
}
return options.inverse(this);
}
// between
function between(a, b, c, options) {
if (a >= b && a <= c) {
return options.fn(this);
}
return options.inverse(this);
}
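// Usage sketch (assuming these helpers are registered with Handlebars
// elsewhere): {{#between score 0 100}}in range{{else}}out of range{{/between}}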
// less than
function lt(a, b, options) {
if (a < b) {
return options.fn(this);
}
return options.inverse(this);
}
function ignoreNan(val, symbol) {
const isNumber = (val >= 0 || val < 0);
return isNumber ? `${util.roundDecimal(val, 2)} ${symbol}` : '';
}
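// e.g. ignoreNan(3.14159, 'kg') -> "3.14 kg" (assuming util.roundDecimal
// rounds to the given number of places), while ignoreNan(NaN, 'kg') -> "".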
// test inequality
function inequal(a, b, options) {
if (a !== b) {
return options.fn(this);
}
return options.inverse(this);
}
// test File Exit
function fileExist(a, b, options) {
try {
fs.statSync(`${a}${b}`);
return options.fn(this);
} catch (err) {
return options.inverse(this);
}
}
exports.equal = equal;
exports.gt = gt;
exports.lt = lt;
exports.between = between;
exports.ignoreNan = ignoreNan;
exports.inequal = inequal;
exports.fileExist = fileExist;<|fim▁end|> | if (a === b) {
return options.fn(this);
} |
<|file_name|>datatype-date-format_fr-CA.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:866863ba9feea81e907968a3bc09f6876511c89dbf5be91d1ed9358a2b6ad4a3<|fim▁hole|><|fim▁end|> | size 563 |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod xmobar;
use sensors::battery::{PowerInfo};<|fim▁hole|> pub load: u32,
pub cpus: u32,
pub memused: u32,
pub swpused: u32,
pub net_rx: u32,
pub net_tx: u32,
}
pub trait Output {
    fn refresh(status: &StatusData);
}<|fim▁end|> |
pub struct StatusData {
pub power_info: PowerInfo,
pub time: String, |
<|file_name|>formclasswizarddialog.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of Qt Creator.
**
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage<|fim▁hole|>** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
****************************************************************************/
#include "formclasswizarddialog.h"
#include "formclasswizardpage.h"
#include "formclasswizardparameters.h"
#include <designer/formtemplatewizardpage.h>
#include <coreplugin/basefilewizardfactory.h>
#include <QDebug>
enum { FormPageId, ClassPageId };
namespace Designer {
namespace Internal {
// ----------------- FormClassWizardDialog
FormClassWizardDialog::FormClassWizardDialog(const WizardPageList &extensionPages,
QWidget *parent) :
Core::BaseFileWizard(parent),
m_formPage(new FormTemplateWizardPage),
m_classPage(new FormClassWizardPage)
{
setWindowTitle(tr("Qt Designer Form Class"));
setPage(FormPageId, m_formPage);
setPage(ClassPageId, m_classPage);
foreach (QWizardPage *p, extensionPages)
addPage(p);
}
QString FormClassWizardDialog::path() const
{
return m_classPage->path();
}
void FormClassWizardDialog::setPath(const QString &p)
{
m_classPage->setPath(p);
}
bool FormClassWizardDialog::validateCurrentPage()
{
return QWizard::validateCurrentPage();
}
void FormClassWizardDialog::initializePage(int id)
{
QWizard::initializePage(id);
// Switching from form to class page: store XML template and set a suitable
// class name in the class page based on the form base class
if (id == ClassPageId) {
QString formBaseClass;
QString uiClassName;
m_rawFormTemplate = m_formPage->templateContents();
// Strip namespaces from the ui class and suggest it as a new class
// name
if (FormTemplateWizardPage::getUIXmlData(m_rawFormTemplate, &formBaseClass, &uiClassName))
m_classPage->setClassName(FormTemplateWizardPage::stripNamespaces(uiClassName));
}
}
FormClassWizardParameters FormClassWizardDialog::parameters() const
{
FormClassWizardParameters rc;
m_classPage->getParameters(&rc);
// Name the ui class in the Ui namespace after the class specified
rc.uiTemplate = FormTemplateWizardPage::changeUiClassName(m_rawFormTemplate, rc.className);
return rc;
}
} // namespace Internal
} // namespace Designer<|fim▁end|> | ** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the |
<|file_name|>test_duct.py<|end_file_name|><|fim▁begin|>import unittest
from openmdao.main.api import set_as_top
from openmdao.util.testutil import assert_rel_error
from pycycle import duct, flowstation
class DuctTestCase(unittest.TestCase):
def test_start(self):
comp = set_as_top(duct.Duct())
comp.dPqP = 0
comp.Q_dot = -237.8
comp.MNexit_des = .4<|fim▁hole|> fs.setTotalTP(1424.01, .34)
fs.Mach = .4
comp.Fl_I = fs
comp.design = True
comp.run()
assert_rel_error(self,comp.Fl_O.W, 1.080, .005)
assert_rel_error(self,comp.Fl_O.Pt, .34, .005)
assert_rel_error(self,comp.Fl_O.Tt, 540.00, .005)
assert_rel_error(self,comp.Fl_O.rhos, .001566, .005)
assert_rel_error(self,comp.Fl_O.Mach, 0.4, .005)
assert_rel_error(self,comp.Fl_O.area, 221.4, .005)
#check off design
comp.run()
assert_rel_error(self,comp.Fl_O.W, 1.080, .005)
assert_rel_error(self,comp.Fl_O.Pt, .34, .005)
assert_rel_error(self,comp.Fl_O.Tt, 540.00, .005)
assert_rel_error(self,comp.Fl_O.rhos, .001566, .005)
assert_rel_error(self,comp.Fl_O.Mach, 0.4, .005)
assert_rel_error(self,comp.Fl_O.area, 221.4, .005)
#vary something
comp.dPqP = .1
comp.run()
assert_rel_error(self,comp.Fl_O.W, 1.080, .005)
assert_rel_error(self,comp.Fl_O.Pt, .306, .005)
assert_rel_error(self,comp.Fl_O.Tt, 540.00, .005)
assert_rel_error(self,comp.Fl_O.rhos, .0013783, .005)
assert_rel_error(self,comp.Fl_O.Mach, 0.4572, .005)
assert_rel_error(self,comp.Fl_O.area, 221.4, .005)
if __name__ == "__main__":
unittest.main()<|fim▁end|> |
fs = flowstation.FlowStation()
fs.W = 1.080 |
<|file_name|>BrothComponents.tsx<|end_file_name|><|fim▁begin|>import { actions } from "common/actions";<|fim▁hole|>import Icon from "renderer/basics/Icon";
import { hook } from "renderer/hocs/hook";
import BrothComponent from "renderer/pages/PreferencesPage/BrothComponent";
import { T } from "renderer/t";
class BrothComponents extends React.Component<Props> {
render() {
const { packageNames } = this.props;
return (
<div className="section">
<Icon icon="list" /> {T(["preferences.advanced.components"])}
<span
className="button"
onClick={this.checkForUpdates}
style={{
marginLeft: "10px",
borderBottom: "1px solid",
}}
>
{T(["menu.help.check_for_update"])}
</span>
{packageNames.map((name) => (
          <BrothComponent key={name} name={name} />
))}
</div>
);
}
checkForUpdates = () => {
const { dispatch } = this.props;
dispatch(actions.checkForComponentUpdates({}));
};
}
interface Props {
dispatch: Dispatch;
packageNames: string[];
}
export default hook((map) => ({
packageNames: map((rs) => rs.broth.packageNames),
}))(BrothComponents);<|fim▁end|> | import { Dispatch } from "common/types";
import React from "react"; |
<|file_name|>NavTabs.js<|end_file_name|><|fim▁begin|><Card>
<Card.Header>
<Nav variant="tabs" defaultActiveKey="#first">
<Nav.Item>
<Nav.Link href="#first">Active</Nav.Link>
</Nav.Item>
<Nav.Item>
<Nav.Link href="#link">Link</Nav.Link>
</Nav.Item>
<Nav.Item>
<Nav.Link href="#disabled" disabled>
Disabled
</Nav.Link>
</Nav.Item>
</Nav>
</Card.Header>
<Card.Body>
<Card.Title>Special title treatment</Card.Title><|fim▁hole|> <Card.Text>
With supporting text below as a natural lead-in to additional content.
</Card.Text>
<Button variant="primary">Go somewhere</Button>
</Card.Body>
</Card>;<|fim▁end|> | |
<|file_name|>astronomical-object.ts<|end_file_name|><|fim▁begin|>/** Holds the base definition of a generic astronomical object **/
export abstract class AstronomicalObject {
/**
   * orbitTimeByEarthDays - how many earth days it takes for a full orbit
   * rotationTimeByEarthDays - how many earth days it takes for a full rotation
   * orbitRadius - radius of the orbit (units are the caller's choice)
   * glObject - webgl object representation
*/
constructor(private orbitTimeByEarthDays: number, private rotationTimeByEarthDays: number, private orbitRadius: number, private glObject: any) {}
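  // Usage sketch (hypothetical subclass): a Mars-like body would pass
  // roughly super(687, 1.03, 1.52, glMesh) — a 687-Earth-day orbit,
  // ~1.03-day rotation, and an orbit radius of 1.52 (here taken as AU).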
public getOrbitRadius(): number {
return this.orbitRadius;
}
public getRotationTimeByEarthDays(): number {
return this.rotationTimeByEarthDays;
}
public getOrbitTimeByEarthDays(): number {
return this.orbitTimeByEarthDays;
}
public getGLObject(): any {
return this.glObject;
}<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>gimbal.py<|end_file_name|><|fim▁begin|>from os.path import join
import os  # needed for os.path.splitext/basename in DataLoader.load_preprocessed
from math import pi
from math import sqrt
from math import radians
import cv2
import numpy as np
from numpy import dot
from scipy.optimize import minimize
from scipy.optimize import differential_evolution
import matplotlib.pyplot as plt
from prototype.utils.euler import euler2rot
from prototype.utils.filesystem import walkdir
from prototype.models.gimbal import GimbalModel
from prototype.vision.common import focal_length
from prototype.vision.common import camera_intrinsics
from prototype.vision.camera.camera_model import PinholeCameraModel
from prototype.vision.camera.distortion_model import project_pinhole_equi
from prototype.calibration.chessboard import Chessboard
from prototype.calibration.camera import CameraIntrinsics
from prototype.viz.common import axis_equal_3dplot
from prototype.viz.plot_gimbal import PlotGimbal
from prototype.viz.plot_chessboard import PlotChessboard
class PreprocessData:
""" Preprocess calibration data
Attributes
----------
image_dir : string
Image base directory
images : np.array
Calibration images
images_ud : np.array
Undistorted calibration images
chessboard : Chessboard
Chessboard
intrinsics : CameraIntrinsics
Camera intrinsics
corners2d : np.array
Image corners
corners3d : np.array
Image point location
corners2d_ud : np.array
Undistorted image corners
corners3d_ud : np.array
Undistorted image point location
"""
def __init__(self, data_type, **kwargs):
self.data_type = data_type
if self.data_type == "IMAGES":
self.images_dir = kwargs["images_dir"]
self.images = []
self.images_ud = []
self.chessboard = kwargs["chessboard"]
self.intrinsics = kwargs["intrinsics"]
elif self.data_type == "PREPROCESSED":
self.data_path = kwargs["data_path"]
# Result
self.target_points = []
self.corners2d = []
self.corners3d = []
self.corners2d_ud = []
self.corners3d_ud = []
def ideal2pixel(self, points, K):
""" Ideal points to pixel coordinates
Parameters
----------
cam_id : int
Camera ID
points : np.array
Points in ideal coordinates
Returns
-------
pixels : np.array
Points in pixel coordinates
"""
# Get camera intrinsics
fx = K[0, 0]
fy = K[1, 1]
cx = K[0, 2]
cy = K[1, 2]
# Convert ideal points to pixel coordinates
pixels = []
nb_points = len(points)
for p in points.reshape((nb_points, 2)):
px = (p[0] * fx) + cx
py = (p[1] * fy) + cy
pixels.append([px, py])
return np.array(pixels)
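    # Worked example (assumed intrinsics): with fx = fy = 500 and
    # (cx, cy) = (320, 240), the ideal point (0.1, -0.2) maps to the pixel
    # (0.1 * 500 + 320, -0.2 * 500 + 240) = (370.0, 140.0).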
def get_viz(self, i):
""" Return a visualization of the original and undistorted image with
detected chessboard corners and a 3D coordinate axis drawn on the
images. The original and undistorted image with the visualizations
will be stacked horizontally.
Parameters
----------
i : int
i-th Image frame
Returns
-------
image_viz : np.array
Image visualization
"""
# Visualize original image
image = self.images[i]
corners2d = self.corners2d[i]
K = self.intrinsics.K()
image = self.chessboard.draw_viz(image, corners2d, K)
# Visualize undistorted image
image_ud = self.images_ud[i]
corners2d_ud = self.corners2d_ud[i]
K_new = self.intrinsics.K_new
image_ud = self.chessboard.draw_viz(image_ud, corners2d_ud, K_new)
# Create visualization
image_viz = np.hstack((image, image_ud))
return image_viz
def preprocess(self):
""" Preprocess images """
image_files = walkdir(self.images_dir)
nb_images = len(image_files)
# Loop through calibration images
for i in range(nb_images):
# Load images and find chessboard corners
image = cv2.imread(image_files[i])
corners = self.chessboard.find_corners(image)
self.images.append(image)
# Calculate camera to chessboard transform
K = self.intrinsics.K()
P_c = self.chessboard.calc_corner_positions(corners, K)
nb_corners = corners.shape[0]
self.corners2d.append(corners.reshape((nb_corners, 2)))
self.corners3d.append(P_c)
# Undistort corners in camera 0
corners_ud = self.intrinsics.undistort_points(corners)
image_ud, K_new = self.intrinsics.undistort_image(image)
pixels_ud = self.ideal2pixel(corners_ud, K_new)
self.images_ud.append(image_ud)
# Calculate camera to chessboard transform
K_new = self.intrinsics.K_new
P_c = self.chessboard.calc_corner_positions(pixels_ud, K_new)
self.corners2d_ud.append(pixels_ud)
self.corners3d_ud.append(P_c)
self.corners2d = np.array(self.corners2d)
self.corners3d = np.array(self.corners3d)
self.corners2d_ud = np.array(self.corners2d_ud)
self.corners3d_ud = np.array(self.corners3d_ud)
def parse_gridpoints_line(self, line, data):
# Parse line
elements = line.strip().split(" ")
elements = [float(x) for x in elements]
x, y, z = elements[0:3]
u, v = elements[3:5]
# Form point 3d and 2d
point3d = [x, y, z]
point2d = [u, v]
# Add to storage
data["target_points"].append(point3d)
data["corners3d"].append(point3d)
data["corners2d"].append(point2d)
def parse_transform(self, line, data):
# Parse transform
elements = line.strip().split(" ")
elements = [float(x) for x in elements]
data["T_c_t"] += elements
def parse_gimbal_angles(self, line, data):
# Parse gimbal angles
elements = line.strip().split(" ")
data["gimbal_angles"] += [float(x) for x in elements]
def transform_corners(self, data):
data["T_c_t"] = np.array(data["T_c_t"]).reshape((4, 4))
data["corners3d"] = np.array(data["corners3d"])
data["corners2d"] = np.array(data["corners2d"])
# Transform the 3d points
# -- Convert 3d points to homogeneous coordinates
nb_corners = data["corners3d"].shape[0]
ones = np.ones((nb_corners, 1))
corners_homo = np.block([data["corners3d"], ones])
corners_homo = corners_homo.T
# -- Transform 3d points
X = np.dot(data["T_c_t"], corners_homo)
X = X.T
data["corners3d"] = X[:, 0:3]
def load_preprocessed_file(self, filepath):
# Setup
datafile = open(filepath, "r")
mode = None
# Data
data = {
"target_points": [],
"corners3d": [],
"corners2d": [],
"gimbal_angles": [],
"T_c_t": [] # Transform, target to camera
}
# Parse file
for line in datafile:
line = line.strip()
if line == "gridpoints:":
mode = "gridpoints"
elif line == "tmatrix:":
mode = "tmatrix"
elif line == "gimbalangles:":
mode = "gimbalangles"
elif line == "end:":
mode = None
else:
if mode == "gridpoints":
self.parse_gridpoints_line(line, data)
elif mode == "tmatrix":
self.parse_transform(line, data)
elif mode == "gimbalangles":
self.parse_gimbal_angles(line, data)
# Finish up
self.transform_corners(data)
data["target_points"] = np.array(data["target_points"])
data["corners2d_ud"] = data["corners2d"]
data["corners3d_ud"] = data["corners3d"]
datafile.close()
return data
def load_preprocessed(self):
files = walkdir(self.data_path)
files.sort(key=lambda f: int(os.path.splitext(os.path.basename(f))[0]))
if len(files) == 0:
err_msg = "No data files found in [%s]!" % (self.data_path)
raise RuntimeError(err_msg)
for f in files:
data = self.load_preprocessed_file(f)
self.target_points.append(data["target_points"])
self.corners2d.append(data["corners2d"])
self.corners3d.append(data["corners3d"])
self.target_points = np.array(self.target_points)
self.corners2d = np.array(self.corners2d)
self.corners3d = np.array(self.corners3d)
self.corners2d_ud = self.corners2d
self.corners3d_ud = self.corners3d
def load(self):
if self.data_type == "IMAGES":
self.preprocess()
elif self.data_type == "PREPROCESSED":
self.load_preprocessed()
class DataLoader:
""" Gimbal extrinsics calibration data loader
Attributes
----------
data_path : str
Data path
cam0_dir : str
Camera 0 image dir
cam1_dir : str
Camera 1 image dir
imu_filename : str
IMU data path
chessboard : Chessboard
Chessboard
imu_data : np.array
IMU data
"""
def __init__(self, **kwargs):
self.data_path = kwargs.get("data_path")
self.preprocessed = kwargs.get("preprocessed", False)
self.inspect_data = kwargs.get("inspect_data", False)
self.joint_file = kwargs["joint_file"]
if self.preprocessed is False:
self.image_dirs = kwargs["image_dirs"]
self.intrinsic_files = kwargs["intrinsic_files"]
self.chessboard = Chessboard(**kwargs)
else:
self.data_dirs = kwargs["data_dirs"]
self.intrinsic_files = kwargs["intrinsic_files"]
def load_joint_data(self):
""" Load joint data
Parameters
----------
joint_fpath : str
Joint data file path
Returns
-------
joint_data : np.array
IMU data
"""
joint_file = open(join(self.data_path, self.joint_file), "r")
joint_data = np.loadtxt(joint_file, delimiter=",")
joint_file.close()
return joint_data
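    # The joint file is plain CSV, one row per measurement set; downstream
    # code (see reprojection_error) reads column 0 as roll and column 1 as
    # pitch.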
def draw_corners(self, image, corners, color=(0, 255, 0)):
""" Draw corners
Parameters
----------
image : np.array
Image
corners : np.array
Corners
"""
image = np.copy(image)
for i in range(len(corners)):
corner = tuple(corners[i][0].astype(int).tolist())
image = cv2.circle(image, corner, 2, color, -1)
return image
def check_nb_images(self, data):
""" Check number of images in data """
nb_cameras = len(self.image_dirs)
nb_images = len(data[0].images)
for i in range(1, nb_cameras):
if len(data[i].images) != nb_images:
err = "Number of images mismatch! [{0}] - [{1}]".format(
self.image_dirs[0],
self.image_dirs[i]
)
raise RuntimeError(err)
return True
def preprocess_images(self):
""" Preprocess images """
# Load camera data
nb_cameras = len(self.image_dirs)
data = []
for i in range(nb_cameras):
image_dir = join(self.data_path, self.image_dirs[i])
intrinsics_file = join(self.data_path, self.intrinsic_files[i])
intrinsics = CameraIntrinsics(intrinsics_file)
data_entry = PreprocessData("IMAGES",
images_dir=image_dir,
chessboard=self.chessboard,
intrinsics=intrinsics)
data_entry.load()
data.append(data_entry)
# Inspect data
self.check_nb_images(data)
if self.inspect_data is False:
return data
nb_images = len(data[0].images)
for i in range(nb_images):
viz = data[0].get_viz(i)
for n in range(1, nb_cameras):
viz = np.vstack((viz, data[n].get_viz(i)))
cv2.imshow("Image", viz)
cv2.waitKey(0)
return data
def filter_common_observations(self, i, data):
cam0_idx = 0
cam1_idx = 0
P_s = []
P_d = []
Q_s = []
Q_d = []
# Find common target points and store the
# respective points in 3d and 2d
for pt_a in data[0].target_points[i]:
for pt_b in data[1].target_points[i]:
if np.array_equal(pt_a, pt_b):
# Corners 3d observed in both the static and dynamic cam
P_s.append(data[0].corners3d_ud[i][cam0_idx])
P_d.append(data[1].corners3d_ud[i][cam1_idx])
# Corners 2d observed in both the static and dynamic cam
Q_s.append(data[0].corners2d_ud[i][cam0_idx])
Q_d.append(data[1].corners2d_ud[i][cam1_idx])
break
else:
cam1_idx += 1
cam0_idx += 1
cam1_idx = 0
P_s = np.array(P_s)
P_d = np.array(P_d)
Q_s = np.array(Q_s)
Q_d = np.array(Q_d)
return [P_s, P_d, Q_s, Q_d]
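    # Note: correspondence is established by exact equality of the 3D target
    # points, so only chessboard corners observed by both cameras are kept.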
def load_preprocessed(self):
# Load data from each camera
data = []
for i in range(len(self.data_dirs)):
intrinsics_path = join(self.data_path, self.intrinsic_files[i])
intrinsics = CameraIntrinsics(intrinsics_path)
data_path = join(self.data_path, self.data_dirs[i])
data_entry = PreprocessData("PREPROCESSED",
data_path=data_path,
intrinsics=intrinsics)
data_entry.load()
data.append(data_entry)
# Find common measurements between cameras
Z = []
nb_measurements = len(data[0].target_points)
# -- Iterate through measurement sets
for i in range(nb_measurements):
Z_i = self.filter_common_observations(i, data)
Z.append(Z_i)
# Camera intrinsics
intrinsics_path = join(self.data_path, self.intrinsic_files[0])
# K_s = CameraIntrinsics(intrinsics_path).K()
C_s_intrinsics = CameraIntrinsics(intrinsics_path)
K_s = C_s_intrinsics.calc_Knew()
D_s = C_s_intrinsics.distortion_coeffs
intrinsics_path = join(self.data_path, self.intrinsic_files[1])
# K_d = CameraIntrinsics(intrinsics_path).K()
C_d_intrinsics = CameraIntrinsics(intrinsics_path)
K_d = C_d_intrinsics.calc_Knew()
D_d = C_d_intrinsics.distortion_coeffs
return Z, K_s, K_d, D_s, D_d
def load(self):
""" Load calibration data """
# Load joint data
joint_data = self.load_joint_data()
# Load data
if self.preprocessed is False:
data = self.preprocess_images()
K = len(data[0].corners2d_ud)
# Setup measurement sets
Z = []
for i in range(K):
# Corners 3d observed in both the static and dynamic cam
P_s = data[0].corners3d_ud[i]
P_d = data[1].corners3d_ud[i]
# Corners 2d observed in both the static and dynamic cam
Q_s = data[0].corners2d_ud[i]
Q_d = data[1].corners2d_ud[i]
Z_i = [P_s, P_d, Q_s, Q_d]
Z.append(Z_i)
K_s = data[0].intrinsics.K_new
K_d = data[1].intrinsics.K_new
D_s = data[0].intrinsics.distortion_coeffs
D_d = data[1].intrinsics.distortion_coeffs
return Z, K_s, K_d, D_s, D_d, joint_data
else:
Z, K_s, K_d, D_s, D_d = self.load_preprocessed()
return Z, K_s, K_d, D_s, D_d, joint_data
class GimbalCalibrator:
""" Gimbal Extrinsics Calibrator
Attributes
----------
gimbal_model : GimbalModel
Gimbal model
data : GECDataLoader
Calibration data
"""
def __init__(self, **kwargs):
self.gimbal_model = kwargs.get("gimbal_model", GimbalModel())
if kwargs.get("sim_mode", False):
# Load sim data
self.Z = kwargs["Z"]
self.K_s = kwargs["K_s"]
self.K_d = kwargs["K_d"]
self.D_s = kwargs["D_s"]
self.D_d = kwargs["D_d"]
self.joint_data = kwargs["joint_data"]
self.K = len(self.Z)
else:
# Load data
self.loader = DataLoader(**kwargs)
# -- Measurement set and joint data
data = self.loader.load()
self.Z, self.K_s, self.K_d, self.D_s, self.D_d, self.joint_data = data
# -- Number of measurement set
self.K = len(self.Z)
def setup_problem(self):
""" Setup the calibration optimization problem
Returns
-------
x : np.array
Vector of optimization parameters to be optimized
"""
print("Setting up optimization problem ...")
# Parameters to be optimized
# x_size = 6 + 5 + 3 + self.K * 2
x_size = 6 + 5 + 3
x = np.zeros(x_size)
# -- tau_s
x[0] = self.gimbal_model.tau_s[0]
x[1] = self.gimbal_model.tau_s[1]
x[2] = self.gimbal_model.tau_s[2]
x[3] = self.gimbal_model.tau_s[3]
x[4] = self.gimbal_model.tau_s[4]
x[5] = self.gimbal_model.tau_s[5]
# -- tau_d
x[6] = self.gimbal_model.tau_d[0]
x[7] = self.gimbal_model.tau_d[1]
x[8] = self.gimbal_model.tau_d[2]
x[9] = self.gimbal_model.tau_d[4]
x[10] = self.gimbal_model.tau_d[5]
# -- alpha, a, d
x[11] = self.gimbal_model.link[1]
x[12] = self.gimbal_model.link[2]
x[13] = self.gimbal_model.link[3]
# -- Joint angles
# x[14:] = self.joint_data[:, 0:2].ravel()
return x, self.Z, self.K_s, self.K_d, self.D_s, self.D_d
def reprojection_error(self, x, *args):
"""Reprojection Error
Parameters
----------
x : np.array
Parameters to be optimized
args : tuple of (Z, K_s, K_d)
Z: list of measurement sets
K_s: np.array static camera intrinsics matrix K
K_d: np.array dynamic camera intrinsics matrix K
Returns
-------
residual : np.array
Reprojection error
"""
# Map the optimization params back into the transforms
# -- tau_s
tau_s = x[0:6]
# -- tau_d
tau_d_tx = x[6]
tau_d_ty = x[7]
tau_d_tz = x[8]
tau_d_pitch = x[9]
tau_d_yaw = x[10]
# -- alpha, a, d
alpha, a, d = x[11:14]
# -- Joint angles
# roll_angles = []
# pitch_angles = []
# for i in range(self.K):
# roll_angles.append(x[14 + (2 * i)])
# pitch_angles.append(x[14 + (2 * i) + 1])
roll_angles = self.joint_data[:, 0]
pitch_angles = self.joint_data[:, 1]
# Set gimbal model
self.gimbal_model.tau_s = tau_s
self.gimbal_model.tau_d = [tau_d_tx, tau_d_ty, tau_d_tz,
None, tau_d_pitch, tau_d_yaw]
self.gimbal_model.link = [None, alpha, a, d]
# Loop through all measurement sets
Z, K_s, K_d, D_s, D_d = args
residuals = []
for k in range(int(self.K)):
# Get the k-th measurements
P_s, P_d, Q_s, Q_d = Z[k]
# Get joint angles
roll = roll_angles[k]
pitch = pitch_angles[k]
self.gimbal_model.set_attitude([roll, pitch])
# Get static to dynamic camera transform
T_sd = self.gimbal_model.calc_transforms()[2]
# Calculate reprojection error in the static camera
nb_P_d_corners = len(P_d)
err_s = np.zeros(nb_P_d_corners * 2)
for i in range(nb_P_d_corners):
# -- Transform 3D world point from dynamic to static camera
P_d_homo = np.append(P_d[i], 1.0)
P_s_cal = dot(T_sd, P_d_homo)[0:3]
# -- Project 3D world point to image plane
Q_s_cal = project_pinhole_equi(P_s_cal, K_s, D_s)
# -- Calculate reprojection error
err_s[(i * 2):(i * 2 + 2)] = Q_s[i] - Q_s_cal
# Calculate reprojection error in the dynamic camera
nb_P_s_corners = len(P_s)
err_d = np.zeros(nb_P_s_corners * 2)
for i in range(nb_P_s_corners):
# -- Transform 3D world point from dynamic to static camera
P_s_homo = np.append(P_s[i], 1.0)
P_d_cal = dot(np.linalg.inv(T_sd), P_s_homo)[0:3]
# -- Project 3D world point to image plane
Q_d_cal = project_pinhole_equi(P_d_cal, K_d, D_d)
# -- Calculate reprojection error
err_d[(i * 2):(i * 2 + 2)] = Q_d[i] - Q_d_cal
            # Stack residuals from both cameras
residuals += err_s.tolist() + err_d.tolist()
# Calculate Sum of Squared Differences (SSD)
cost = np.sum(np.array(residuals)**2)
return cost
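    # The stacked residual holds 2D reprojection errors for every corner in
    # both cameras; the scalar SSD returned here suits scipy's scalar
    # optimizers (e.g. the differential_evolution call in optimize below).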
def optimize(self):
""" Optimize Gimbal Extrinsics """
# Setup
x, Z, K_s, K_d, D_s, D_d = self.setup_problem()
args = (Z, K_s, K_d, D_s, D_d)
# Optimize
print("Optimizing!")
print("This can take a while...")
# result = least_squares(fun=self.reprojection_error,
# x0=x,
# args=args,
# method="Nelder-Mead",
# options={'disp': True})
tau_s_tx = self.gimbal_model.tau_s[0]
tau_s_ty = self.gimbal_model.tau_s[1]
tau_s_tz = self.gimbal_model.tau_s[2]
tau_s_roll = self.gimbal_model.tau_s[3]
tau_s_pitch = self.gimbal_model.tau_s[4]
tau_s_yaw = self.gimbal_model.tau_s[5]
# -- tau_d
tau_d_tx = self.gimbal_model.tau_d[0]
tau_d_ty = self.gimbal_model.tau_d[1]
tau_d_tz = self.gimbal_model.tau_d[2]
tau_d_pitch = self.gimbal_model.tau_d[4]
tau_d_yaw = self.gimbal_model.tau_d[5]
# -- alpha, a, d
alpha = self.gimbal_model.link[1]
a = self.gimbal_model.link[2]
d = self.gimbal_model.link[3]
bounds = [
(tau_s_tx - 0.2, tau_s_tx + 0.2),
(tau_s_ty - 0.2, tau_s_ty + 0.2),
(tau_s_tz - 0.2, tau_s_tz + 0.2),
(tau_s_roll - 0.2, tau_s_roll + 0.2),
(tau_s_pitch - 0.2, tau_s_pitch + 0.2),
(tau_s_yaw - 0.2, tau_s_yaw + 0.2),
(tau_d_tx - 0.2, tau_d_tx + 0.2),
(tau_d_ty - 0.2, tau_d_ty + 0.2),
(tau_d_tz - 0.2, tau_d_tz + 0.2),
(tau_d_pitch - 0.2, tau_d_pitch + 0.2),
(tau_d_yaw - 0.2, tau_d_yaw + 0.2),
(alpha - 0.1, alpha + 0.1),
(a - 0.1, a + 0.1),
(d - 0.1, d + 0.1)
]
result = differential_evolution(func=self.reprojection_error,
bounds=bounds,
maxiter=1000,
args=args,
disp=True)
# Parse results
tau_s = result.x[0:6]
tau_d_tx = result.x[6]
tau_d_ty = result.x[7]
tau_d_tz = result.x[8]
tau_d_roll = 0.0
tau_d_pitch = result.x[9]
tau_d_yaw = result.x[10]
tau_d = [tau_d_tx, tau_d_ty, tau_d_tz,
tau_d_roll, tau_d_pitch, tau_d_yaw]
alpha, a, d = result.x[11:14]
self.gimbal_model.tau_s = tau_s
self.gimbal_model.tau_d = tau_d
self.gimbal_model.link = [0.0, alpha, a, d]
print("Results:")
print("---------------------------------")
print("tau_s: ", self.gimbal_model.tau_s)
print("tau_d: ", self.gimbal_model.tau_d)
print("w1: ", self.gimbal_model.link)
# Plot gimbal
self.gimbal_model.set_attitude([0.0, 0.0])
plot_gimbal = PlotGimbal(gimbal=self.gimbal_model)
plot_gimbal.plot()
plt.show()
class GimbalDataGenerator:
def __init__(self, intrinsics_file):
self.intrinsics = CameraIntrinsics(intrinsics_file)
# Chessboard
self.chessboard = Chessboard(t_G=np.array([0.3, 0.1, 0.1]),
nb_rows=11,
nb_cols=11,
square_size=0.02)
self.plot_chessboard = PlotChessboard(chessboard=self.chessboard)
# Gimbal
self.gimbal = GimbalModel()
self.gimbal.set_attitude([0.0, 0.0])
self.plot_gimbal = PlotGimbal(gimbal=self.gimbal)
# Cameras
self.static_camera = self.setup_static_camera()
self.gimbal_camera = self.setup_gimbal_camera()
def setup_static_camera(self):
image_width = 640
image_height = 480
fov = 120
fx, fy = focal_length(image_width, image_height, fov)
cx, cy = (image_width / 2.0, image_height / 2.0)
K = camera_intrinsics(fx, fy, cx, cy)
cam_model = PinholeCameraModel(image_width, image_height, K)
return cam_model
def setup_gimbal_camera(self):
image_width = 640
image_height = 480
fov = 120
fx, fy = focal_length(image_width, image_height, fov)
cx, cy = (image_width / 2.0, image_height / 2.0)
K = camera_intrinsics(fx, fy, cx, cy)
cam_model = PinholeCameraModel(image_width, image_height, K)
return cam_model
def calc_static_camera_view(self):
# Transforming chessboard grid points in global to camera frame
R = np.eye(3)
t = np.zeros(3)
R_CG = euler2rot([-pi / 2.0, 0.0, -pi / 2.0], 123)
X = dot(R_CG, self.chessboard.grid_points3d.T)
x = self.static_camera.project(X, R, t).T[:, 0:2]
return x
def calc_gimbal_camera_view(self):
# Create transform from global to static camera frame
t_g_sg = np.array([0.0, 0.0, 0.0])
R_sg = euler2rot([-pi / 2.0, 0.0, -pi / 2.0], 321)
T_gs = np.array([[R_sg[0, 0], R_sg[0, 1], R_sg[0, 2], t_g_sg[0]],
[R_sg[1, 0], R_sg[1, 1], R_sg[1, 2], t_g_sg[1]],
[R_sg[2, 0], R_sg[2, 1], R_sg[2, 2], t_g_sg[2]],
[0.0, 0.0, 0.0, 1.0]])
# Calculate transform from global to dynamic camera frame
links = self.gimbal.calc_transforms()
T_sd = links[-1]
T_gd = dot(T_gs, T_sd)
# Project chessboard grid points in global to dynamic camera frame
# -- Convert 3D points to homogeneous coordinates
X = self.chessboard.grid_points3d.T
X = np.block([[X], [np.ones(X.shape[1])]])
# -- Project to dynamic camera image frame
X = dot(np.linalg.inv(T_gd), X)[0:3, :]
x = dot(self.gimbal_camera.K, X)
# -- Normalize points
x[0, :] = x[0, :] / x[2, :]
x[1, :] = x[1, :] / x[2, :]
x = x[0:2, :].T
return x, X.T
def plot_static_camera_view(self, ax):
x = self.calc_static_camera_view()
ax.scatter(x[:, 0], x[:, 1], marker="o", color="red")
def plot_gimbal_camera_view(self, ax):
x, X = self.calc_gimbal_camera_view()
ax.scatter(x[:, 0], x[:, 1], marker="o", color="red")
def plot_camera_views(self):
# Plot static camera view
ax = plt.subplot(211)
ax.axis('square')
self.plot_static_camera_view(ax)
ax.set_title("Static Camera View", y=1.08)
ax.set_xlim((0, self.static_camera.image_width))
ax.set_ylim((0, self.static_camera.image_height))
ax.invert_yaxis()
ax.xaxis.tick_top()
# Plot gimbal camera view
ax = plt.subplot(212)
ax.axis('square')
self.plot_gimbal_camera_view(ax)
ax.set_title("Gimbal Camera View", y=1.08)
ax.set_xlim((0, self.gimbal_camera.image_width))
ax.set_ylim((0, self.gimbal_camera.image_height))
ax.invert_yaxis()
ax.xaxis.tick_top()
# Overall plot settings
plt.tight_layout()
def plot(self):
# Plot camera views
self.plot_camera_views()
# Plot gimbal and chessboard
fig = plt.figure()
ax = fig.gca(projection='3d')
self.plot_gimbal.plot(ax)
self.plot_chessboard.plot(ax)
axis_equal_3dplot(ax)
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("z")
plt.show()
def calc_roll_pitch_combo(self, nb_images):
nb_combo = int(sqrt(nb_images))
roll_lim = [radians(-10), radians(10)]
pitch_lim = [radians(-10), radians(10)]
roll_vals = np.linspace(roll_lim[0], roll_lim[1], num=nb_combo)
pitch_vals = np.linspace(pitch_lim[0], pitch_lim[1], num=nb_combo)
<|fim▁hole|> # Setup
nb_images = 4
R_CG = euler2rot([-pi / 2.0, 0.0, -pi / 2.0], 123)
# Generate static camera data
self.intrinsics.K_new = self.intrinsics.K()
static_cam_data = PreprocessData("IMAGES",
images_dir=None,
intrinsics=self.intrinsics,
chessboard=self.chessboard)
x = self.calc_static_camera_view()
X = dot(R_CG, self.chessboard.grid_points3d.T).T
for i in range(nb_images):
static_cam_data.corners2d_ud.append(x)
static_cam_data.corners3d_ud.append(X)
static_cam_data.corners2d_ud = np.array(static_cam_data.corners2d_ud)
static_cam_data.corners3d_ud = np.array(static_cam_data.corners3d_ud)
# Generate gimbal data
roll_vals, pitch_vals = self.calc_roll_pitch_combo(nb_images)
gimbal_cam_data = PreprocessData("IMAGES",
images_dir=None,
intrinsics=self.intrinsics,
chessboard=self.chessboard)
joint_data = []
for roll in roll_vals:
for pitch in pitch_vals:
self.gimbal.set_attitude([roll, pitch])
x, X = self.calc_gimbal_camera_view()
gimbal_cam_data.corners2d_ud.append(x)
gimbal_cam_data.corners3d_ud.append(X)
joint_data.append([roll, pitch])
gimbal_cam_data.corners2d_ud = np.array(gimbal_cam_data.corners2d_ud)
gimbal_cam_data.corners3d_ud = np.array(gimbal_cam_data.corners3d_ud)
joint_data = np.array(joint_data)
# Setup measurement sets
Z = []
for i in range(nb_images):
# Corners 3d observed in both the static and dynamic cam
P_s = static_cam_data.corners3d_ud[i]
P_d = gimbal_cam_data.corners3d_ud[i]
# Corners 2d observed in both the static and dynamic cam
Q_s = static_cam_data.corners2d_ud[i]
Q_d = gimbal_cam_data.corners2d_ud[i]
Z_i = [P_s, P_d, Q_s, Q_d]
Z.append(Z_i)
K_s = static_cam_data.intrinsics.K_new
K_d = gimbal_cam_data.intrinsics.K_new
# Distortion - assume no distortion
D_s = np.zeros((4,))
D_d = np.zeros((4,))
return Z, K_s, K_d, D_s, D_d, joint_data<|fim▁end|> | return roll_vals, pitch_vals
def generate(self): |
<|file_name|>bencode.hpp<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2014, Arvid Norberg
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the distribution.
* Neither the name of the author nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TORRENT_BENCODE_HPP_INCLUDED
#define TORRENT_BENCODE_HPP_INCLUDED
// OVERVIEW
//
// Bencoding is a common representation in bittorrent used
// for dictionary, list, int and string hierarchies. It's used
// to encode .torrent files and some messages in the network
// protocol. libtorrent also uses it to store settings, resume
// data and other state between sessions.
//
// Strings in bencoded structures are not necessarily representing
// text. Strings are raw byte buffers of a certain length. If a
// string is meant to be interpreted as text, it is required to
// be UTF-8 encoded. See `BEP 3`_.
//
// There are two mechanisms to *decode* bencoded buffers in libtorrent.
//
// The most flexible one is bdecode(), which returns a structure
// represented by entry. When a buffer is decoded with this function,
// it can be discarded. The entry does not contain any references back
// to it. This means that bdecode() actually copies all the data out
// of the buffer and into its own hierarchy. This makes this
// function potentially expensive, if you're parsing large amounts
// of data.
//
// Another consideration is that bdecode() is a recursive parser.
// For this reason, in order to avoid DoS attacks by triggering
// a stack overflow, there is a recursion limit. This limit is
// a sanity check to make sure it doesn't run the risk of
// busting the stack.
//
// The second mechanism is lazy_bdecode(), which returns a
// bencoded structure represented by lazy_entry. This function
// builds a tree that points back into the original buffer.
// The returned lazy_entry will not be valid once the buffer
// it was parsed out of is discarded.
//
// Not only is this function more efficient because of less
// memory allocation and data copy, the parser is also not
// recursive, which means it probably performs a little bit
// better and can have a higher recursion limit on the structures
// it's parsing.
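//
// As an illustration (a sketch only -- lazy_bdecode()'s full signature
// lives in lazy_entry.hpp and is reproduced here from memory):
//
//	std::vector<char> buf = ...;
//	entry e = bdecode(buf.begin(), buf.end()); // copies into a tree
//
//	lazy_entry le;
//	error_code ec;
//	lazy_bdecode(&buf[0], &buf[0] + buf.size(), le, ec); // points into buf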
#include <stdlib.h>
#include <string>
#include <exception>
#include <iterator> // for distance
#ifdef _MSC_VER
#pragma warning(push, 1)
#endif
#include <boost/static_assert.hpp>
#ifdef _MSC_VER
#pragma warning(pop)
#endif
#include "libtorrent/entry.hpp"
#include "libtorrent/config.hpp"
#include "libtorrent/assert.hpp"
#include "libtorrent/escape_string.hpp"
#include "libtorrent/io.hpp" // for write_string
namespace libtorrent
{
#ifndef TORRENT_NO_DEPRECATE
// thrown by bdecode() if the provided bencoded buffer does not contain
// valid encoding.
struct TORRENT_EXPORT invalid_encoding: std::exception
{
// hidden
virtual const char* what() const throw() { return "invalid bencoding"; }
};
#endif
namespace detail
{
// this is used in the template, so it must be available to the client
TORRENT_EXPORT char const* integer_to_str(char* buf, int size
, entry::integer_type val);
template <class OutIt>
int write_integer(OutIt& out, entry::integer_type val)
{
// the stack allocated buffer for keeping the
// decimal representation of the number can
// not hold number bigger than this:
BOOST_STATIC_ASSERT(sizeof(entry::integer_type) <= 8);
char buf[21];
int ret = 0;
for (char const* str = integer_to_str(buf, 21, val);
*str != 0; ++str)
{
*out = *str;
++out;
++ret;
}
return ret;
}
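// e.g. write_integer(out, 42) appends the two characters "42" and
// returns 2 (the number of characters written)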
template <class OutIt>
void write_char(OutIt& out, char c)
{
*out = c;
++out;
}
template <class InIt>
std::string read_until(InIt& in, InIt end, char end_token, bool& err)
{
std::string ret;
if (in == end)
{
err = true;
return ret;
}
while (*in != end_token)
{
ret += *in;
++in;
if (in == end)
{
err = true;
return ret;
}
}
return ret;
}
template<class InIt>
void read_string(InIt& in, InIt end, int len, std::string& str, bool& err)
{
TORRENT_ASSERT(len >= 0);
for (int i = 0; i < len; ++i)
{
if (in == end)
{
err = true;
return;
}
str += *in;
++in;
}
}
template<class OutIt>
int bencode_recursive(OutIt& out, const entry& e)
{
int ret = 0;
switch(e.type())
{
case entry::int_t:
write_char(out, 'i');
ret += write_integer(out, e.integer());
write_char(out, 'e');
ret += 2;
break;
case entry::string_t:
ret += write_integer(out, e.string().length());
write_char(out, ':');
ret += write_string(e.string(), out);
ret += 1;
break;
case entry::list_t:
write_char(out, 'l');
for (entry::list_type::const_iterator i = e.list().begin(); i != e.list().end(); ++i)
ret += bencode_recursive(out, *i);
write_char(out, 'e');
ret += 2;
break;
case entry::dictionary_t:
write_char(out, 'd');
for (entry::dictionary_type::const_iterator i = e.dict().begin();
i != e.dict().end(); ++i)
{
// write key
ret += write_integer(out, i->first.length());
write_char(out, ':');
ret += write_string(i->first, out);
// write value
ret += bencode_recursive(out, i->second);
ret += 1;
}
write_char(out, 'e');
ret += 2;
break;
default:
// trying to encode a structure with uninitialized values!
TORRENT_ASSERT_VAL(false, e.type());
// do nothing
break;
}
return ret;
}
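// As an illustration, bencoding the dictionary {"a": 1} emits the
// eight characters "d1:ai1ee" and returns 8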
template<class InIt>
void bdecode_recursive(InIt& in, InIt end, entry& ret, bool& err, int depth)
{
if (depth >= 100)
{
err = true;
return;
}
if (in == end)
{
err = true;
#ifdef TORRENT_DEBUG
ret.m_type_queried = false;
#endif
return;
}
switch (*in)
{
// ----------------------------------------------
// integer
case 'i':
{
++in; // 'i'
std::string val = read_until(in, end, 'e', err);
if (err) return;
TORRENT_ASSERT(*in == 'e');
++in; // 'e'
ret = entry(entry::int_t);
char* end_pointer;
ret.integer() = strtoll(val.c_str(), &end_pointer, 10);
#ifdef TORRENT_DEBUG
ret.m_type_queried = false;
#endif
if (end_pointer == val.c_str())
{
err = true;
return;
}
} break;
// ----------------------------------------------
// list
case 'l':
{
ret = entry(entry::list_t);
++in; // 'l'
while (*in != 'e')
{
ret.list().push_back(entry());
entry& e = ret.list().back();
bdecode_recursive(in, end, e, err, depth + 1);
if (err)
{
#ifdef TORRENT_DEBUG
ret.m_type_queried = false;
#endif
return;
}
if (in == end)
{
err = true;
#ifdef TORRENT_DEBUG
ret.m_type_queried = false;
#endif
return;
}
}
#ifdef TORRENT_DEBUG
ret.m_type_queried = false;
#endif
TORRENT_ASSERT(*in == 'e');
++in; // 'e'
} break;
// ----------------------------------------------
// dictionary
case 'd':
{
ret = entry(entry::dictionary_t);
++in; // 'd'
while (*in != 'e')
{
entry key;
bdecode_recursive(in, end, key, err, depth + 1);
if (err || key.type() != entry::string_t)
{
#ifdef TORRENT_DEBUG
ret.m_type_queried = false;
#endif
return;
}
entry& e = ret[key.string()];
bdecode_recursive(in, end, e, err, depth + 1);
if (err)
{
#ifdef TORRENT_DEBUG
ret.m_type_queried = false;
#endif
return;
}
if (in == end)
{
err = true;
#ifdef TORRENT_DEBUG
ret.m_type_queried = false;
#endif
return;
}
}
#ifdef TORRENT_DEBUG
ret.m_type_queried = false;
#endif
TORRENT_ASSERT(*in == 'e');
++in; // 'e'
} break;
// ----------------------------------------------
// string
default:
if (is_digit((unsigned char)*in))
{
std::string len_s = read_until(in, end, ':', err);
if (err)
{
#ifdef TORRENT_DEBUG
ret.m_type_queried = false;
#endif
return;
}
TORRENT_ASSERT(*in == ':');
++in; // ':'
int len = atoi(len_s.c_str());
ret = entry(entry::string_t);
read_string(in, end, len, ret.string(), err);
if (err)
{
#ifdef TORRENT_DEBUG
ret.m_type_queried = false;
#endif
return;
}
}
else
{
err = true;
#ifdef TORRENT_DEBUG
ret.m_type_queried = false;
#endif
return;
}
#ifdef TORRENT_DEBUG
ret.m_type_queried = false;
#endif
}
}
}
// These functions will encode data to bencoded_ or decode bencoded_ data.
//
// If possible, lazy_bdecode() should be preferred over ``bdecode()``.
//
// The entry_ class is the internal representation of the bencoded data
// and it can be used to retrieve information, an entry_ can also be build by
// the program and given to ``bencode()`` to encode it into the ``OutIt``
// iterator.
//
// The ``OutIt`` and ``InIt`` are iterators
// (InputIterator_ and OutputIterator_ respectively). They
// are templates and are usually instantiated as ostream_iterator_,
// back_insert_iterator_ or istream_iterator_. These
// functions will assume that the iterator refers to a character
// (``char``). So, if you want to encode entry ``e`` into a buffer
// in memory, you can do it like this::
//
// std::vector<char> buffer;
// bencode(std::back_inserter(buf), e);
//
// .. _InputIterator: http://www.sgi.com/tech/stl/InputIterator.html
// .. _OutputIterator: http://www.sgi.com/tech/stl/OutputIterator.html
// .. _ostream_iterator: http://www.sgi.com/tech/stl/ostream_iterator.html
// .. _back_insert_iterator: http://www.sgi.com/tech/stl/back_insert_iterator.html
// .. _istream_iterator: http://www.sgi.com/tech/stl/istream_iterator.html
//
// If you want to decode a torrent file from a buffer in memory, you can do it like this::
//
// std::vector<char> buffer;
// // ...
// entry e = bdecode(buf.begin(), buf.end());<|fim▁hole|> //
// const char* buf;
// // ...
// entry e = bdecode(buf, buf + data_size);
//
// Now we just need to know how to retrieve information from the entry.
//
// If ``bdecode()`` encounters invalid encoded data in the range given to it
// it will return a default constructed ``entry`` object.
template<class OutIt> int bencode(OutIt out, const entry& e)
{
return detail::bencode_recursive(out, e);
}
template<class InIt> entry bdecode(InIt start, InIt end)
{
entry e;
bool err = false;
detail::bdecode_recursive(start, end, e, err, 0);
#ifdef TORRENT_DEBUG
TORRENT_ASSERT(e.m_type_queried == false);
#endif
if (err) return entry();
return e;
}
template<class InIt> entry bdecode(InIt start, InIt end, int& len)
{
entry e;
bool err = false;
InIt s = start;
detail::bdecode_recursive(start, end, e, err, 0);
len = std::distance(s, start);
TORRENT_ASSERT(len >= 0);
if (err) return entry();
return e;
}
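// Usage sketch for the overload above (buf and size are assumed to
// describe a raw input buffer):
//
//	int len = 0;
//	entry e = bdecode(buf, buf + size, len);
//	// buf + len now points one past the decoded structure; on invalid
//	// input, e is a default-constructed entry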
}
#endif // TORRENT_BENCODE_HPP_INCLUDED<|fim▁end|> | //
// Or, if you have a raw char buffer:: |
<|file_name|>server.rs<|end_file_name|><|fim▁begin|>use hyper::Uri;
<|fim▁hole|> /// Track whether the upstream server wants the client host or server host header
map_host: bool,
}
impl Server {
pub fn new(url: Uri, map_host: bool) -> Self {
Server {
url: url,
map_host: map_host,
}
}
pub fn url(&self) -> Uri {
self.url.clone()
}
pub fn map_host(&self) -> bool {
self.map_host
}
}<|fim▁end|> | #[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct Server {
url: Uri,
|
<|file_name|>calendar.js<|end_file_name|><|fim▁begin|>/* Copyright Mihai Bazon, 2002-2005 | www.bazon.net/mishoo
* -----------------------------------------------------------
*
* The DHTML Calendar, version 1.0 "It is happening again"
*
* Details and latest version at:
* www.dynarch.com/projects/calendar
*
* This script is developed by Dynarch.com. Visit us at www.dynarch.com.
*
* This script is distributed under the GNU Lesser General Public License.
* Read the entire license text here: http://www.gnu.org/licenses/lgpl.html
*/
// $Id: calendar.js,v 1.1 2010/05/21 13:49:25 a.zahner Exp $
/** The Calendar object constructor. */
Calendar = function (firstDayOfWeek, dateStr, onSelected, onClose) {
// member variables
this.activeDiv = null;
this.currentDateEl = null;
this.getDateStatus = null;
this.getDateToolTip = null;
this.getDateText = null;
this.timeout = null;
this.onSelected = onSelected || null;
this.onClose = onClose || null;
this.dragging = false;
this.hidden = false;
this.minYear = 1970;
this.maxYear = 2050;
this.dateFormat = Calendar._TT["DEF_DATE_FORMAT"];
this.ttDateFormat = Calendar._TT["TT_DATE_FORMAT"];
this.isPopup = true;
this.weekNumbers = true;
this.firstDayOfWeek = typeof firstDayOfWeek == "number" ? firstDayOfWeek : Calendar._FD; // 0 for Sunday, 1 for Monday, etc.
this.showsOtherMonths = false;
this.dateStr = dateStr;
this.ar_days = null;
this.showsTime = false;
this.time24 = true;
this.yearStep = 2;
this.hiliteToday = true;
this.multiple = null;
// HTML elements
this.table = null;
this.element = null;
this.tbody = null;
this.firstdayname = null;
// Combo boxes
this.monthsCombo = null;
this.yearsCombo = null;
this.hilitedMonth = null;
this.activeMonth = null;
this.hilitedYear = null;
this.activeYear = null;
// Information
this.dateClicked = false;
// one-time initializations
if (typeof Calendar._SDN == "undefined") {
// table of short day names
if (typeof Calendar._SDN_len == "undefined")
Calendar._SDN_len = 3;
var ar = new Array();
for (var i = 8; i > 0;) {
ar[--i] = Calendar._DN[i].substr(0, Calendar._SDN_len);
}
Calendar._SDN = ar;
// table of short month names
if (typeof Calendar._SMN_len == "undefined")
Calendar._SMN_len = 3;
ar = new Array();
for (var i = 12; i > 0;) {
ar[--i] = Calendar._MN[i].substr(0, Calendar._SMN_len);
}
Calendar._SMN = ar;
}
};
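// A minimal popup-usage sketch (calendar-setup.js in the distribution is
// the supported wrapper; this only illustrates the raw API):
//
//   var cal = new Calendar(1, null,                       // Monday first
//                          function (cal, date) { /* date picked */ },
//                          function (cal) { cal.hide(); });
//   cal.create();                       // no parent element => popup
//   cal.showAtElement(someInput, "Br"); // below the element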
// ** constants
/// "static", needed for event handlers.
Calendar._C = null;
/// detect a special case of "web browser"
Calendar.is_ie = ( /msie/i.test(navigator.userAgent) &&
!/opera/i.test(navigator.userAgent) );
Calendar.is_ie5 = ( Calendar.is_ie && /msie 5\.0/i.test(navigator.userAgent) );
/// detect Opera browser
Calendar.is_opera = /opera/i.test(navigator.userAgent);
/// detect KHTML-based browsers
Calendar.is_khtml = /Konqueror|Safari|KHTML/i.test(navigator.userAgent);
// BEGIN: UTILITY FUNCTIONS; beware that these might be moved into a separate
// library, at some point.
Calendar.getAbsolutePos = function(el) {
var SL = 0, ST = 0;
var is_div = /^div$/i.test(el.tagName);
if (is_div && el.scrollLeft)
SL = el.scrollLeft;
if (is_div && el.scrollTop)
ST = el.scrollTop;
var r = { x: el.offsetLeft - SL, y: el.offsetTop - ST };
if (el.offsetParent) {
var tmp = this.getAbsolutePos(el.offsetParent);
r.x += tmp.x;
r.y += tmp.y;
}
return r;
};
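// e.g. Calendar.getAbsolutePos(el) walks the offsetParent chain and
// returns page coordinates {x: ..., y: ...}, compensating for any
// scrolling inside intermediate DIVs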
Calendar.isRelated = function (el, evt) {
var related = evt.relatedTarget;
if (!related) {
var type = evt.type;
if (type == "mouseover") {
related = evt.fromElement;
} else if (type == "mouseout") {
related = evt.toElement;
}
}
while (related) {
if (related == el) {
return true;
}
related = related.parentNode;
}
return false;
};
Calendar.removeClass = function(el, className) {
if (!(el && el.className)) {
return;
}
var cls = el.className.split(" ");
var ar = new Array();
for (var i = cls.length; i > 0;) {
if (cls[--i] != className) {
ar[ar.length] = cls[i];
}
}
el.className = ar.join(" ");
};
Calendar.addClass = function(el, className) {
Calendar.removeClass(el, className);
el.className += " " + className;
};
// FIXME: the following 2 functions totally suck, are useless and should be replaced immediately.
Calendar.getElement = function(ev) {
var f = Calendar.is_ie ? window.event.srcElement : ev.currentTarget;
while (f.nodeType != 1 || /^div$/i.test(f.tagName))
f = f.parentNode;
return f;
};
Calendar.getTargetElement = function(ev) {
var f = Calendar.is_ie ? window.event.srcElement : ev.target;
while (f.nodeType != 1)
f = f.parentNode;
return f;
};
Calendar.stopEvent = function(ev) {
ev || (ev = window.event);
if (Calendar.is_ie) {
ev.cancelBubble = true;
ev.returnValue = false;
} else {
ev.preventDefault();
ev.stopPropagation();
}
return false;
};
Calendar.addEvent = function(el, evname, func) {
if (el.attachEvent) { // IE
el.attachEvent("on" + evname, func);
} else if (el.addEventListener) { // Gecko / W3C
el.addEventListener(evname, func, true);
} else {
el["on" + evname] = func;
}
};
Calendar.removeEvent = function(el, evname, func) {
if (el.detachEvent) { // IE
el.detachEvent("on" + evname, func);
} else if (el.removeEventListener) { // Gecko / W3C
el.removeEventListener(evname, func, true);
} else {
el["on" + evname] = null;
}
};
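// Usage sketch -- the pair hides IE's attachEvent and the W3C model
// behind one call site:
//
//   Calendar.addEvent(document, "mousedown", handler);
//   Calendar.removeEvent(document, "mousedown", handler);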
Calendar.createElement = function(type, parent) {
var el = null;
if (document.createElementNS) {
// use the XHTML namespace; IE won't normally get here unless
// _they_ "fix" the DOM2 implementation.
el = document.createElementNS("http://www.w3.org/1999/xhtml", type);
} else {
el = document.createElement(type);
}
if (typeof parent != "undefined") {
parent.appendChild(el);
}
return el;
};
// END: UTILITY FUNCTIONS
// BEGIN: CALENDAR STATIC FUNCTIONS
/** Internal -- adds a set of events to make some element behave like a button. */
Calendar._add_evs = function(el) {
with (Calendar) {
addEvent(el, "mouseover", dayMouseOver);
addEvent(el, "mousedown", dayMouseDown);
addEvent(el, "mouseout", dayMouseOut);
addEvent(el, "click", function(event){
event=event||window.event;
Calendar.stopEvent(event);
});
if (is_ie) {
addEvent(el, "dblclick", dayMouseDblClick);
el.setAttribute("unselectable", true);
}
}
};
Calendar.findMonth = function(el) {
if (typeof el.month != "undefined") {
return el;
} else if (typeof el.parentNode.month != "undefined") {
return el.parentNode;
}
return null;
};
Calendar.findYear = function(el) {
if (typeof el.year != "undefined") {
return el;
} else if (typeof el.parentNode.year != "undefined") {
return el.parentNode;
}
return null;
};
Calendar.showMonthsCombo = function () {
var cal = Calendar._C;
if (!cal) {
return false;
}
var cd = cal.activeDiv;
var mc = cal.monthsCombo;
if (cal.hilitedMonth) {
Calendar.removeClass(cal.hilitedMonth, "hilite");
}
if (cal.activeMonth) {
Calendar.removeClass(cal.activeMonth, "active");
}
var mon = cal.monthsCombo.getElementsByTagName("div")[cal.date.getMonth()];
Calendar.addClass(mon, "active");
cal.activeMonth = mon;
var s = mc.style;
s.display = "block";
if (cd.navtype < 0)
s.left = cd.offsetLeft + "px";
else {
var mcw = mc.offsetWidth;
if (typeof mcw == "undefined")
// Konqueror brain-dead techniques
mcw = 50;
s.left = (cd.offsetLeft + cd.offsetWidth - mcw) + "px";
}
s.top = (cd.offsetTop + cd.offsetHeight) + "px";
};
Calendar.showYearsCombo = function (fwd) {
var cal = Calendar._C;
if (!cal) {
return false;
}
var cd = cal.activeDiv;
var yc = cal.yearsCombo;
if (cal.hilitedYear) {
Calendar.removeClass(cal.hilitedYear, "hilite");
}
if (cal.activeYear) {
Calendar.removeClass(cal.activeYear, "active");
}
cal.activeYear = null;
var Y = cal.date.getFullYear() + (fwd ? 1 : -1);
var yr = yc.firstChild;
var show = false;
for (var i = 12; i > 0; --i) {
if (Y >= cal.minYear && Y <= cal.maxYear) {
yr.innerHTML = Y;
yr.year = Y;
yr.style.display = "block";
show = true;
} else {
yr.style.display = "none";
}
yr = yr.nextSibling;
Y += fwd ? cal.yearStep : -cal.yearStep;
}
if (show) {
var s = yc.style;
s.display = "block";
if (cd.navtype < 0)
s.left = cd.offsetLeft + "px";
else {
var ycw = yc.offsetWidth;
if (typeof ycw == "undefined")
// Konqueror brain-dead techniques
ycw = 50;
s.left = (cd.offsetLeft + cd.offsetWidth - ycw) + "px";
}
s.top = (cd.offsetTop + cd.offsetHeight) + "px";
}
};
// event handlers
Calendar.tableMouseUp = function(ev) {
var cal = Calendar._C;
if (!cal) {
return false;
}
if (cal.timeout) {
clearTimeout(cal.timeout);
}
var el = cal.activeDiv;
if (!el) {
return false;
}
var target = Calendar.getTargetElement(ev);
ev || (ev = window.event);
Calendar.removeClass(el, "active");
if (target == el || target.parentNode == el) {
Calendar.cellClick(el, ev);
}
var mon = Calendar.findMonth(target);
var date = null;
if (mon) {
date = new Date(cal.date);
if (mon.month != date.getMonth()) {
date.setMonth(mon.month);
cal.setDate(date);
cal.dateClicked = false;
cal.callHandler();
}
} else {
var year = Calendar.findYear(target);
if (year) {
date = new Date(cal.date);
if (year.year != date.getFullYear()) {
date.setFullYear(year.year);
cal.setDate(date);
cal.dateClicked = false;
cal.callHandler();
}
}
}
with (Calendar) {
removeEvent(document, "mouseup", tableMouseUp);
removeEvent(document, "mouseover", tableMouseOver);
removeEvent(document, "mousemove", tableMouseOver);
cal._hideCombos();
_C = null;
return stopEvent(ev);
}
};
Calendar.tableMouseOver = function (ev) {
var cal = Calendar._C;
if (!cal) {
return;
}
var el = cal.activeDiv;
var target = Calendar.getTargetElement(ev);
if (target == el || target.parentNode == el) {
Calendar.addClass(el, "hilite active");
Calendar.addClass(el.parentNode, "rowhilite");
} else {
if (typeof el.navtype == "undefined" || (el.navtype != 50 && (el.navtype == 0 || Math.abs(el.navtype) > 2)))
Calendar.removeClass(el, "active");
Calendar.removeClass(el, "hilite");
Calendar.removeClass(el.parentNode, "rowhilite");
}
ev || (ev = window.event);
if (el.navtype == 50 && target != el) {
var pos = Calendar.getAbsolutePos(el);
var w = el.offsetWidth;
var x = ev.clientX;
var dx;
var decrease = true;
if (x > pos.x + w) {
dx = x - pos.x - w;
decrease = false;
} else
dx = pos.x - x;
if (dx < 0) dx = 0;
var range = el._range;
var current = el._current;
var count = Math.floor(dx / 10) % range.length;
for (var i = range.length; --i >= 0;)
if (range[i] == current)
break;
while (count-- > 0)
if (decrease) {
if (--i < 0)
i = range.length - 1;
} else if ( ++i >= range.length )
i = 0;
var newval = range[i];
el.innerHTML = newval;
cal.onUpdateTime();
}
var mon = Calendar.findMonth(target);
if (mon) {
if (mon.month != cal.date.getMonth()) {
if (cal.hilitedMonth) {
Calendar.removeClass(cal.hilitedMonth, "hilite");
}
Calendar.addClass(mon, "hilite");
cal.hilitedMonth = mon;
} else if (cal.hilitedMonth) {
Calendar.removeClass(cal.hilitedMonth, "hilite");
}
} else {
if (cal.hilitedMonth) {
Calendar.removeClass(cal.hilitedMonth, "hilite");
}
var year = Calendar.findYear(target);
if (year) {
if (year.year != cal.date.getFullYear()) {
if (cal.hilitedYear) {
Calendar.removeClass(cal.hilitedYear, "hilite");
}
Calendar.addClass(year, "hilite");
cal.hilitedYear = year;
} else if (cal.hilitedYear) {
Calendar.removeClass(cal.hilitedYear, "hilite");
}
} else if (cal.hilitedYear) {
Calendar.removeClass(cal.hilitedYear, "hilite");
}
}
return Calendar.stopEvent(ev);
};
Calendar.tableMouseDown = function (ev) {
if (Calendar.getTargetElement(ev) == Calendar.getElement(ev)) {
return Calendar.stopEvent(ev);
}
};
Calendar.calDragIt = function (ev) {
var cal = Calendar._C;
if (!(cal && cal.dragging)) {
return false;
}
var posX;
var posY;
if (Calendar.is_ie) {
posY = window.event.clientY + document.body.scrollTop;
posX = window.event.clientX + document.body.scrollLeft;
} else {
posX = ev.pageX;
posY = ev.pageY;
}
cal.hideShowCovered();
var st = cal.element.style;
st.left = (posX - cal.xOffs) + "px";
st.top = (posY - cal.yOffs) + "px";
return Calendar.stopEvent(ev);
};
Calendar.calDragEnd = function (ev) {
var cal = Calendar._C;
if (!cal) {
return false;
}
cal.dragging = false;
with (Calendar) {
removeEvent(document, "mousemove", calDragIt);
removeEvent(document, "mouseup", calDragEnd);
tableMouseUp(ev);
}
cal.hideShowCovered();
};
Calendar.dayMouseDown = function(ev) {
var el = Calendar.getElement(ev);
if (el.disabled) {
return false;
}
var cal = el.calendar;
cal.activeDiv = el;
Calendar._C = cal;
if (el.navtype != 300) with (Calendar) {
if (el.navtype == 50) {
el._current = el.innerHTML;
addEvent(document, "mousemove", tableMouseOver);
} else
addEvent(document, Calendar.is_ie5 ? "mousemove" : "mouseover", tableMouseOver);
addClass(el, "hilite active");
addEvent(document, "mouseup", tableMouseUp);
} else if (cal.isPopup) {
cal._dragStart(ev);
}
if (el.navtype == -1 || el.navtype == 1) {
if (cal.timeout) clearTimeout(cal.timeout);
cal.timeout = setTimeout("Calendar.showMonthsCombo()", 250);
} else if (el.navtype == -2 || el.navtype == 2) {
if (cal.timeout) clearTimeout(cal.timeout);
cal.timeout = setTimeout((el.navtype > 0) ? "Calendar.showYearsCombo(true)" : "Calendar.showYearsCombo(false)", 250);
} else {
cal.timeout = null;
}
return Calendar.stopEvent(ev);
};
Calendar.dayMouseDblClick = function(ev) {
Calendar.cellClick(Calendar.getElement(ev), ev || window.event);
if (Calendar.is_ie) {
document.selection.empty();
}
};
Calendar.dayMouseOver = function(ev) {
var el = Calendar.getElement(ev);
if (Calendar.isRelated(el, ev) || Calendar._C || el.disabled) {
return false;
}
if (el.ttip) {
if (el.ttip.substr(0, 1) == "_") {
el.ttip = el.caldate.print(el.calendar.ttDateFormat) + el.ttip.substr(1);
}
el.calendar.tooltips.innerHTML = el.ttip;
}
if (el.navtype != 300) {
Calendar.addClass(el, "hilite");
if (el.caldate) {
Calendar.addClass(el.parentNode, "rowhilite");
}
}
return Calendar.stopEvent(ev);
};
Calendar.dayMouseOut = function(ev) {
with (Calendar) {
var el = getElement(ev);
if (isRelated(el, ev) || _C || el.disabled)
return false;
removeClass(el, "hilite");
if (el.caldate)
removeClass(el.parentNode, "rowhilite");
if (el.calendar)
el.calendar.tooltips.innerHTML = _TT["SEL_DATE"];
return stopEvent(ev);
}
};
/**
* A generic "click" handler :) handles all types of buttons defined in this
* calendar.
*/
Calendar.cellClick = function(el, ev) {
var cal = el.calendar;
var closing = false;
var newdate = false;
var date = null;
if (typeof el.navtype == "undefined") {
if (cal.currentDateEl) {
Calendar.removeClass(cal.currentDateEl, "selected");
Calendar.addClass(el, "selected");
closing = (cal.currentDateEl == el);
if (!closing) {
cal.currentDateEl = el;
}<|fim▁hole|> }
cal.date.setDateOnly(el.caldate);
date = cal.date;
var other_month = !(cal.dateClicked = !el.otherMonth);
if (!other_month && !cal.currentDateEl)
cal._toggleMultipleDate(new Date(date));
else
newdate = !el.disabled;
// a date was clicked
if (other_month)
cal._init(cal.firstDayOfWeek, date);
} else {
if (el.navtype == 200) {
Calendar.removeClass(el, "hilite");
cal.callCloseHandler();
return;
}
date = new Date(cal.date);
if (el.navtype == 0)
date.setDateOnly(new Date()); // TODAY
// unless "today" was clicked, we assume no date was clicked so
// the selected handler will know not to close the calenar when
// in single-click mode.
// cal.dateClicked = (el.navtype == 0);
cal.dateClicked = false;
var year = date.getFullYear();
var mon = date.getMonth();
function setMonth(m) {
var day = date.getDate();
var max = date.getMonthDays(m);
if (day > max) {
date.setDate(max);
}
date.setMonth(m);
};
switch (el.navtype) {
case 400:
Calendar.removeClass(el, "hilite");
var text = Calendar._TT["ABOUT"];
if (typeof text != "undefined") {
text += cal.showsTime ? Calendar._TT["ABOUT_TIME"] : "";
} else {
// FIXME: this should be removed as soon as lang files get updated!
text = "Help and about box text is not translated into this language.\n" +
"If you know this language and you feel generous please update\n" +
"the corresponding file in \"lang\" subdir to match calendar-en.js\n" +
"and send it back to <[email protected]> to get it into the distribution ;-)\n\n" +
"Thank you!\n" +
"http://dynarch.com/mishoo/calendar.epl\n";
}
alert(text);
return;
case -2:
if (year > cal.minYear) {
date.setFullYear(year - 1);
}
break;
case -1:
if (mon > 0) {
setMonth(mon - 1);
} else if (year-- > cal.minYear) {
date.setFullYear(year);
setMonth(11);
}
break;
case 1:
if (mon < 11) {
setMonth(mon + 1);
} else if (year < cal.maxYear) {
date.setFullYear(year + 1);
setMonth(0);
}
break;
case 2:
if (year < cal.maxYear) {
date.setFullYear(year + 1);
}
break;
case 100:
cal.setFirstDayOfWeek(el.fdow);
return;
case 50:
var range = el._range;
var current = el.innerHTML;
for (var i = range.length; --i >= 0;)
if (range[i] == current)
break;
if (ev && ev.shiftKey) {
if (--i < 0)
i = range.length - 1;
} else if ( ++i >= range.length )
i = 0;
var newval = range[i];
el.innerHTML = newval;
cal.onUpdateTime();
return;
case 0:
// TODAY will bring us here
if ((typeof cal.getDateStatus == "function") &&
cal.getDateStatus(date, date.getFullYear(), date.getMonth(), date.getDate())) {
return false;
}
break;
}
if (!date.equalsTo(cal.date)) {
cal.setDate(date);
newdate = true;
} else if (el.navtype == 0)
newdate = closing = true;
}
if (newdate) {
ev && cal.callHandler();
}
if (closing) {
Calendar.removeClass(el, "hilite");
ev && cal.callCloseHandler();
}
};
// END: CALENDAR STATIC FUNCTIONS
// BEGIN: CALENDAR OBJECT FUNCTIONS
/**
* This function creates the calendar inside the given parent. If _par is
* null than it creates a popup calendar inside the BODY element. If _par is
* an element, be it BODY, then it creates a non-popup calendar (still
* hidden). Some properties need to be set before calling this function.
*/
Calendar.prototype.create = function (_par) {
var parent = null;
if (! _par) {
// default parent is the document body, in which case we create
// a popup calendar.
parent = document.getElementsByTagName("body")[0];
this.isPopup = true;
} else {
parent = _par;
this.isPopup = false;
}
this.date = this.dateStr ? new Date(this.dateStr) : new Date();
var table = Calendar.createElement("table");
this.table = table;
table.cellSpacing = 0;
table.cellPadding = 0;
table.calendar = this;
Calendar.addEvent(table, "mousedown", Calendar.tableMouseDown);
var div = Calendar.createElement("div");
this.element = div;
div.className = "calendar";
if (this.isPopup) {
div.style.position = "absolute";
div.style.display = "none";
}
div.appendChild(table);
var thead = Calendar.createElement("thead", table);
var cell = null;
var row = null;
var cal = this;
var hh = function (text, cs, navtype) {
cell = Calendar.createElement("td", row);
cell.colSpan = cs;
cell.className = "button";
if (navtype != 0 && Math.abs(navtype) <= 2)
cell.className += " nav";
Calendar._add_evs(cell);
cell.calendar = cal;
cell.navtype = navtype;
cell.innerHTML = "<div unselectable='on'>" + text + "</div>";
return cell;
};
row = Calendar.createElement("tr", thead);
var title_length = 6;
(this.isPopup) && --title_length;
(this.weekNumbers) && ++title_length;
hh("?", 1, 400).ttip = Calendar._TT["INFO"];
this.title = hh("", title_length, 300);
this.title.className = "title";
if (this.isPopup) {
this.title.ttip = Calendar._TT["DRAG_TO_MOVE"];
this.title.style.cursor = "move";
hh("×", 1, 200).ttip = Calendar._TT["CLOSE"];
}
row = Calendar.createElement("tr", thead);
row.className = "headrow";
this._nav_py = hh("«", 1, -2);
this._nav_py.ttip = Calendar._TT["PREV_YEAR"];
this._nav_pm = hh("‹", 1, -1);
this._nav_pm.ttip = Calendar._TT["PREV_MONTH"];
this._nav_now = hh(Calendar._TT["TODAY"], this.weekNumbers ? 4 : 3, 0);
this._nav_now.ttip = Calendar._TT["GO_TODAY"];
this._nav_nm = hh("›", 1, 1);
this._nav_nm.ttip = Calendar._TT["NEXT_MONTH"];
this._nav_ny = hh("»", 1, 2);
this._nav_ny.ttip = Calendar._TT["NEXT_YEAR"];
// day names
row = Calendar.createElement("tr", thead);
row.className = "daynames";
if (this.weekNumbers) {
cell = Calendar.createElement("td", row);
cell.className = "name wn";
cell.innerHTML = Calendar._TT["WK"];
}
for (var i = 7; i > 0; --i) {
cell = Calendar.createElement("td", row);
if (!i) {
cell.navtype = 100;
cell.calendar = this;
Calendar._add_evs(cell);
}
}
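// (these header cells get their tooltips and click handlers later, in
// _displayWeekdays(); the "if (!i)" branch above never runs because i
// stays >= 1 inside this loop)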
this.firstdayname = (this.weekNumbers) ? row.firstChild.nextSibling : row.firstChild;
this._displayWeekdays();
var tbody = Calendar.createElement("tbody", table);
this.tbody = tbody;
for (i = 6; i > 0; --i) {
row = Calendar.createElement("tr", tbody);
if (this.weekNumbers) {
cell = Calendar.createElement("td", row);
}
for (var j = 7; j > 0; --j) {
cell = Calendar.createElement("td", row);
cell.calendar = this;
Calendar._add_evs(cell);
}
}
if (this.showsTime) {
row = Calendar.createElement("tr", tbody);
row.className = "time";
cell = Calendar.createElement("td", row);
cell.className = "time";
cell.colSpan = 2;
cell.innerHTML = Calendar._TT["TIME"] || " ";
cell = Calendar.createElement("td", row);
cell.className = "time";
cell.colSpan = this.weekNumbers ? 4 : 3;
(function(){
function makeTimePart(className, init, range_start, range_end) {
var part = Calendar.createElement("span", cell);
part.className = className;
part.innerHTML = init;
part.calendar = cal;
part.ttip = Calendar._TT["TIME_PART"];
part.navtype = 50;
part._range = [];
if (typeof range_start != "number")
part._range = range_start;
else {
for (var i = range_start; i <= range_end; ++i) {
var txt;
if (i < 10 && range_end >= 10) txt = '0' + i;
else txt = '' + i;
part._range[part._range.length] = txt;
}
}
Calendar._add_evs(part);
return part;
};
var hrs = cal.date.getHours();
var mins = cal.date.getMinutes();
var t12 = !cal.time24;
var pm = (hrs > 12);
if (t12 && pm) hrs -= 12;
var H = makeTimePart("hour", hrs, t12 ? 1 : 0, t12 ? 12 : 23);
var span = Calendar.createElement("span", cell);
span.innerHTML = ":";
span.className = "colon";
var M = makeTimePart("minute", mins, 0, 59);
var AP = null;
cell = Calendar.createElement("td", row);
cell.className = "time";
cell.colSpan = 2;
if (t12)
AP = makeTimePart("ampm", pm ? "pm" : "am", ["am", "pm"]);
else
cell.innerHTML = " ";
cal.onSetTime = function() {
var pm, hrs = this.date.getHours(),
mins = this.date.getMinutes();
if (t12) {
pm = (hrs >= 12);
if (pm) hrs -= 12;
if (hrs == 0) hrs = 12;
AP.innerHTML = pm ? "pm" : "am";
}
H.innerHTML = (hrs < 10) ? ("0" + hrs) : hrs;
M.innerHTML = (mins < 10) ? ("0" + mins) : mins;
};
cal.onUpdateTime = function() {
var date = this.date;
var h = parseInt(H.innerHTML, 10);
if (t12) {
if (/pm/i.test(AP.innerHTML) && h < 12)
h += 12;
else if (/am/i.test(AP.innerHTML) && h == 12)
h = 0;
}
var d = date.getDate();
var m = date.getMonth();
var y = date.getFullYear();
date.setHours(h);
date.setMinutes(parseInt(M.innerHTML, 10));
date.setFullYear(y);
date.setMonth(m);
date.setDate(d);
this.dateClicked = false;
this.callHandler();
};
})();
} else {
this.onSetTime = this.onUpdateTime = function() {};
}
var tfoot = Calendar.createElement("tfoot", table);
row = Calendar.createElement("tr", tfoot);
row.className = "footrow";
cell = hh(Calendar._TT["SEL_DATE"], this.weekNumbers ? 8 : 7, 300);
cell.className = "ttip";
if (this.isPopup) {
cell.ttip = Calendar._TT["DRAG_TO_MOVE"];
cell.style.cursor = "move";
}
this.tooltips = cell;
div = Calendar.createElement("div", this.element);
this.monthsCombo = div;
div.className = "combo";
for (i = 0; i < Calendar._MN.length; ++i) {
var mn = Calendar.createElement("div");
mn.className = Calendar.is_ie ? "label-IEfix" : "label";
mn.month = i;
mn.innerHTML = Calendar._SMN[i];
div.appendChild(mn);
}
div = Calendar.createElement("div", this.element);
this.yearsCombo = div;
div.className = "combo";
for (i = 12; i > 0; --i) {
var yr = Calendar.createElement("div");
yr.className = Calendar.is_ie ? "label-IEfix" : "label";
div.appendChild(yr);
}
this._init(this.firstDayOfWeek, this.date);
parent.appendChild(this.element);
};
/** keyboard navigation, only for popup calendars */
Calendar._keyEvent = function(ev) {
var cal = window._dynarch_popupCalendar;
if (!cal || cal.multiple)
return false;
(Calendar.is_ie) && (ev = window.event);
var act = (Calendar.is_ie || ev.type == "keypress"),
K = ev.keyCode;
if (ev.ctrlKey) {
switch (K) {
case 37: // KEY left
act && Calendar.cellClick(cal._nav_pm);
break;
case 38: // KEY up
act && Calendar.cellClick(cal._nav_py);
break;
case 39: // KEY right
act && Calendar.cellClick(cal._nav_nm);
break;
case 40: // KEY down
act && Calendar.cellClick(cal._nav_ny);
break;
default:
return false;
}
} else switch (K) {
case 32: // KEY space (now)
Calendar.cellClick(cal._nav_now);
break;
case 27: // KEY esc
act && cal.callCloseHandler();
break;
case 37: // KEY left
case 38: // KEY up
case 39: // KEY right
case 40: // KEY down
if (act) {
var prev, x, y, ne, el, step;
prev = K == 37 || K == 38;
step = (K == 37 || K == 39) ? 1 : 7;
function setVars() {
el = cal.currentDateEl;
var p = el.pos;
x = p & 15;
y = p >> 4;
ne = cal.ar_days[y][x];
};
setVars();
function prevMonth() {
var date = new Date(cal.date);
date.setDate(date.getDate() - step);
cal.setDate(date);
};
function nextMonth() {
var date = new Date(cal.date);
date.setDate(date.getDate() + step);
cal.setDate(date);
};
while (1) {
switch (K) {
case 37: // KEY left
if (--x >= 0)
ne = cal.ar_days[y][x];
else {
x = 6;
K = 38;
continue;
}
break;
case 38: // KEY up
if (--y >= 0)
ne = cal.ar_days[y][x];
else {
prevMonth();
setVars();
}
break;
case 39: // KEY right
if (++x < 7)
ne = cal.ar_days[y][x];
else {
x = 0;
K = 40;
continue;
}
break;
case 40: // KEY down
if (++y < cal.ar_days.length)
ne = cal.ar_days[y][x];
else {
nextMonth();
setVars();
}
break;
}
break;
}
if (ne) {
if (!ne.disabled)
Calendar.cellClick(ne);
else if (prev)
prevMonth();
else
nextMonth();
}
}
break;
case 13: // KEY enter
if (act)
Calendar.cellClick(cal.currentDateEl, ev);
break;
default:
return false;
}
return Calendar.stopEvent(ev);
};
/**
* (RE)Initializes the calendar to the given date and firstDayOfWeek
*/
Calendar.prototype._init = function (firstDayOfWeek, date) {
var today = new Date(),
TY = today.getFullYear(),
TM = today.getMonth(),
TD = today.getDate();
this.table.style.visibility = "hidden";
var year = date.getFullYear();
if (year < this.minYear) {
year = this.minYear;
date.setFullYear(year);
} else if (year > this.maxYear) {
year = this.maxYear;
date.setFullYear(year);
}
this.firstDayOfWeek = firstDayOfWeek;
this.date = new Date(date);
var month = date.getMonth();
var mday = date.getDate();
var no_days = date.getMonthDays();
// calendar voodoo for computing the first day that would actually be
// displayed in the calendar, even if it's from the previous month.
// WARNING: this is magic. ;-)
date.setDate(1);
var day1 = (date.getDay() - this.firstDayOfWeek) % 7;
if (day1 < 0)
day1 += 7;
date.setDate(-day1);
date.setDate(date.getDate() + 1);
var row = this.tbody.firstChild;
var MN = Calendar._SMN[month];
var ar_days = this.ar_days = new Array();
var weekend = Calendar._TT["WEEKEND"];
var dates = this.multiple ? (this.datesCells = {}) : null;
for (var i = 0; i < 6; ++i, row = row.nextSibling) {
var cell = row.firstChild;
if (this.weekNumbers) {
cell.className = "day wn";
cell.innerHTML = date.getWeekNumber();
cell = cell.nextSibling;
}
row.className = "daysrow";
var hasdays = false, iday, dpos = ar_days[i] = [];
for (var j = 0; j < 7; ++j, cell = cell.nextSibling, date.setDate(iday + 1)) {
iday = date.getDate();
var wday = date.getDay();
cell.className = "day";
cell.pos = i << 4 | j;
dpos[j] = cell;
var current_month = (date.getMonth() == month);
if (!current_month) {
if (this.showsOtherMonths) {
cell.className += " othermonth";
cell.otherMonth = true;
} else {
cell.className = "emptycell";
cell.innerHTML = " ";
cell.disabled = true;
continue;
}
} else {
cell.otherMonth = false;
hasdays = true;
}
cell.disabled = false;
cell.innerHTML = this.getDateText ? this.getDateText(date, iday) : iday;
if (dates)
dates[date.print("%Y%m%d")] = cell;
if (this.getDateStatus) {
var status = this.getDateStatus(date, year, month, iday);
if (this.getDateToolTip) {
var toolTip = this.getDateToolTip(date, year, month, iday);
if (toolTip)
cell.title = toolTip;
}
if (status === true) {
cell.className += " disabled";
cell.disabled = true;
} else {
if (/disabled/i.test(status))
cell.disabled = true;
cell.className += " " + status;
}
}
if (!cell.disabled) {
cell.caldate = new Date(date);
cell.ttip = "_";
if (!this.multiple && current_month
&& iday == mday && this.hiliteToday) {
cell.className += " selected";
this.currentDateEl = cell;
}
if (date.getFullYear() == TY &&
date.getMonth() == TM &&
iday == TD) {
cell.className += " today";
cell.ttip += Calendar._TT["PART_TODAY"];
}
if (weekend.indexOf(wday.toString()) != -1)
cell.className += cell.otherMonth ? " oweekend" : " weekend";
}
}
if (!(hasdays || this.showsOtherMonths))
row.className = "emptyrow";
}
this.title.innerHTML = Calendar._MN[month] + ", " + year;
this.onSetTime();
this.table.style.visibility = "visible";
this._initMultipleDates();
// PROFILE
// this.tooltips.innerHTML = "Generated in " + ((new Date()) - today) + " ms";
};
Calendar.prototype._initMultipleDates = function() {
if (this.multiple) {
for (var i in this.multiple) {
var cell = this.datesCells[i];
var d = this.multiple[i];
if (!d)
continue;
if (cell)
cell.className += " selected";
}
}
};
Calendar.prototype._toggleMultipleDate = function(date) {
if (this.multiple) {
var ds = date.print("%Y%m%d");
var cell = this.datesCells[ds];
if (cell) {
var d = this.multiple[ds];
if (!d) {
Calendar.addClass(cell, "selected");
this.multiple[ds] = date;
} else {
Calendar.removeClass(cell, "selected");
delete this.multiple[ds];
}
}
}
};
Calendar.prototype.setDateToolTipHandler = function (unaryFunction) {
this.getDateToolTip = unaryFunction;
};
/**
* Calls _init function above for going to a certain date (but only if the
* date is different than the currently selected one).
*/
Calendar.prototype.setDate = function (date) {
if (!date.equalsTo(this.date)) {
this._init(this.firstDayOfWeek, date);
}
};
/**
* Refreshes the calendar. Useful if the "disabledHandler" function is
* dynamic, meaning that the list of disabled dates can change at runtime.
* Just call this function if you think that the list of disabled dates
* should change.
*/
Calendar.prototype.refresh = function () {
this._init(this.firstDayOfWeek, this.date);
};
/** Modifies the "firstDayOfWeek" parameter (pass 0 for Synday, 1 for Monday, etc.). */
Calendar.prototype.setFirstDayOfWeek = function (firstDayOfWeek) {
this._init(firstDayOfWeek, this.date);
this._displayWeekdays();
};
/**
* Allows customization of what dates are enabled. The "unaryFunction"
* parameter must be a function object that receives the date (as a JS Date
* object) and returns a boolean value. If the returned value is true then
* the passed date will be marked as disabled.
*/
Calendar.prototype.setDateStatusHandler = Calendar.prototype.setDisabledHandler = function (unaryFunction) {
this.getDateStatus = unaryFunction;
};
/** Customization of allowed year range for the calendar. */
Calendar.prototype.setRange = function (a, z) {
this.minYear = a;
this.maxYear = z;
};
/** Calls the first user handler (selectedHandler). */
Calendar.prototype.callHandler = function () {
if (this.onSelected) {
this.onSelected(this, this.date.print(this.dateFormat));
}
};
/** Calls the second user handler (closeHandler). */
Calendar.prototype.callCloseHandler = function () {
if (this.onClose) {
this.onClose(this);
}
this.hideShowCovered();
};
/** Removes the calendar object from the DOM tree and destroys it. */
Calendar.prototype.destroy = function () {
var el = this.element.parentNode;
el.removeChild(this.element);
Calendar._C = null;
window._dynarch_popupCalendar = null;
};
/**
* Moves the calendar element to a different section in the DOM tree (changes
* its parent).
*/
Calendar.prototype.reparent = function (new_parent) {
var el = this.element;
el.parentNode.removeChild(el);
new_parent.appendChild(el);
};
// This gets called when the user presses a mouse button anywhere in the
// document, if the calendar is shown. If the click was outside the open
// calendar this function closes it.
Calendar._checkCalendar = function(ev) {
var calendar = window._dynarch_popupCalendar;
if (!calendar) {
return false;
}
var el = Calendar.is_ie ? Calendar.getElement(ev) : Calendar.getTargetElement(ev);
for (; el != null && el != calendar.element; el = el.parentNode);
if (el == null) {
// calls closeHandler which should hide the calendar.
window._dynarch_popupCalendar.callCloseHandler();
return Calendar.stopEvent(ev);
}
};
/** Shows the calendar. */
Calendar.prototype.show = function () {
var rows = this.table.getElementsByTagName("tr");
for (var i = rows.length; i > 0;) {
var row = rows[--i];
Calendar.removeClass(row, "rowhilite");
var cells = row.getElementsByTagName("td");
for (var j = cells.length; j > 0;) {
var cell = cells[--j];
Calendar.removeClass(cell, "hilite");
Calendar.removeClass(cell, "active");
}
}
this.element.style.display = "block";
this.hidden = false;
if (this.isPopup) {
window._dynarch_popupCalendar = this;
Calendar.addEvent(document, "keydown", Calendar._keyEvent);
Calendar.addEvent(document, "keypress", Calendar._keyEvent);
Calendar.addEvent(document, "mousedown", Calendar._checkCalendar);
}
this.hideShowCovered();
};
/**
* Hides the calendar. Also removes any "hilite" from the class of any TD
* element.
*/
Calendar.prototype.hide = function () {
if (this.isPopup) {
Calendar.removeEvent(document, "keydown", Calendar._keyEvent);
Calendar.removeEvent(document, "keypress", Calendar._keyEvent);
Calendar.removeEvent(document, "mousedown", Calendar._checkCalendar);
}
this.element.style.display = "none";
this.hidden = true;
this.hideShowCovered();
};
/**
* Shows the calendar at a given absolute position (beware that, depending on
* the calendar element style -- position property -- this might be relative
* to the parent's containing rectangle).
*/
Calendar.prototype.showAt = function (x, y) {
var s = this.element.style;
s.left = x + "px";
s.top = y + "px";
this.show();
};
/** Shows the calendar near a given element. */
Calendar.prototype.showAtElement = function (el, opts) {
var self = this;
var p = Calendar.getAbsolutePos(el);
if (!opts || typeof opts != "string") {
this.showAt(p.x, p.y + el.offsetHeight);
return true;
}
function fixPosition(box) {
if (box.x < 0)
box.x = 0;
if (box.y < 0)
box.y = 0;
var cp = document.createElement("div");
var s = cp.style;
s.position = "absolute";
s.right = s.bottom = s.width = s.height = "0px";
document.body.appendChild(cp);
var br = Calendar.getAbsolutePos(cp);
document.body.removeChild(cp);
if (Calendar.is_ie) {
br.y += document.body.scrollTop;
br.x += document.body.scrollLeft;
} else {
br.y += window.scrollY;
br.x += window.scrollX;
}
var tmp = box.x + box.width - br.x;
if (tmp > 0) box.x -= tmp;
tmp = box.y + box.height - br.y;
if (tmp > 0) box.y -= tmp;
};
this.element.style.display = "block";
Calendar.continuation_for_the_fucking_khtml_browser = function() {
var w = self.element.offsetWidth;
var h = self.element.offsetHeight;
self.element.style.display = "none";
var valign = opts.substr(0, 1);
var halign = "l";
if (opts.length > 1) {
halign = opts.substr(1, 1);
}
// vertical alignment
switch (valign) {
case "T": p.y -= h; break;
case "B": p.y += el.offsetHeight; break;
case "C": p.y += (el.offsetHeight - h) / 2; break;
case "t": p.y += el.offsetHeight - h; break;
case "b": break; // already there
}
// horizontal alignment
switch (halign) {
case "L": p.x -= w; break;
case "R": p.x += el.offsetWidth; break;
case "C": p.x += (el.offsetWidth - w) / 2; break;
case "l": p.x += el.offsetWidth - w; break;
case "r": break; // already there
}
p.width = w;
p.height = h + 40;
self.monthsCombo.style.display = "none";
fixPosition(p);
self.showAt(p.x, p.y);
};
if (Calendar.is_khtml)
setTimeout("Calendar.continuation_for_the_fucking_khtml_browser()", 10);
else
Calendar.continuation_for_the_fucking_khtml_browser();
};
/** Customizes the date format. */
Calendar.prototype.setDateFormat = function (str) {
this.dateFormat = str;
};
/** Customizes the tooltip date format. */
Calendar.prototype.setTtDateFormat = function (str) {
this.ttDateFormat = str;
};
/**
* Tries to identify the date represented in a string. If successful it also
* calls this.setDate which moves the calendar to the given date.
*/
Calendar.prototype.parseDate = function(str, fmt) {
if (!fmt)
fmt = this.dateFormat;
this.setDate(Date.parseDate(str, fmt));
};
Calendar.prototype.hideShowCovered = function () {
if (!Calendar.is_ie && !Calendar.is_opera)
return;
function getVisib(obj){
var value = obj.style.visibility;
if (!value) {
if (document.defaultView && typeof (document.defaultView.getComputedStyle) == "function") { // Gecko, W3C
if (!Calendar.is_khtml)
value = document.defaultView.
getComputedStyle(obj, "").getPropertyValue("visibility");
else
value = '';
} else if (obj.currentStyle) { // IE
value = obj.currentStyle.visibility;
} else
value = '';
}
return value;
};
var tags = new Array("applet", "iframe", "select");
var el = this.element;
var p = Calendar.getAbsolutePos(el);
var EX1 = p.x;
var EX2 = el.offsetWidth + EX1;
var EY1 = p.y;
var EY2 = el.offsetHeight + EY1;
for (var k = tags.length; k > 0; ) {
var ar = document.getElementsByTagName(tags[--k]);
var cc = null;
for (var i = ar.length; i > 0;) {
cc = ar[--i];
p = Calendar.getAbsolutePos(cc);
var CX1 = p.x;
var CX2 = cc.offsetWidth + CX1;
var CY1 = p.y;
var CY2 = cc.offsetHeight + CY1;
if (this.hidden || (CX1 > EX2) || (CX2 < EX1) || (CY1 > EY2) || (CY2 < EY1)) {
if (!cc.__msh_save_visibility) {
cc.__msh_save_visibility = getVisib(cc);
}
cc.style.visibility = cc.__msh_save_visibility;
} else {
if (!cc.__msh_save_visibility) {
cc.__msh_save_visibility = getVisib(cc);
}
cc.style.visibility = "hidden";
}
}
}
};
/** Internal function; it displays the bar with the names of the weekday. */
Calendar.prototype._displayWeekdays = function () {
var fdow = this.firstDayOfWeek;
var cell = this.firstdayname;
var weekend = Calendar._TT["WEEKEND"];
for (var i = 0; i < 7; ++i) {
cell.className = "day name";
var realday = (i + fdow) % 7;
if (i) {
cell.ttip = Calendar._TT["DAY_FIRST"].replace("%s", Calendar._DN[realday]);
cell.navtype = 100;
cell.calendar = this;
cell.fdow = realday;
Calendar._add_evs(cell);
}
if (weekend.indexOf(realday.toString()) != -1) {
Calendar.addClass(cell, "weekend");
}
cell.innerHTML = Calendar._SDN[(i + fdow) % 7];
cell = cell.nextSibling;
}
};
/** Internal function. Hides all combo boxes that might be displayed. */
Calendar.prototype._hideCombos = function () {
this.monthsCombo.style.display = "none";
this.yearsCombo.style.display = "none";
};
/** Internal function. Starts dragging the element. */
Calendar.prototype._dragStart = function (ev) {
if (this.dragging) {
return;
}
this.dragging = true;
var posX;
var posY;
if (Calendar.is_ie) {
posY = window.event.clientY + document.body.scrollTop;
posX = window.event.clientX + document.body.scrollLeft;
} else {
posY = ev.clientY + window.scrollY;
posX = ev.clientX + window.scrollX;
}
var st = this.element.style;
this.xOffs = posX - parseInt(st.left);
this.yOffs = posY - parseInt(st.top);
with (Calendar) {
addEvent(document, "mousemove", calDragIt);
addEvent(document, "mouseup", calDragEnd);
}
};
// BEGIN: DATE OBJECT PATCHES
/** Adds the number of days array to the Date object. */
Date._MD = new Array(31,28,31,30,31,30,31,31,30,31,30,31);
/** Constants used for time computations */
Date.SECOND = 1000 /* milliseconds */;
Date.MINUTE = 60 * Date.SECOND;
Date.HOUR = 60 * Date.MINUTE;
Date.DAY = 24 * Date.HOUR;
Date.WEEK = 7 * Date.DAY;
Date.parseDate = function(str, fmt) {
var today = new Date();
var y = 0;
var m = -1;
var d = 0;
var a = str.split(/\W+/);
var b = fmt.match(/%./g);
var i = 0, j = 0;
var hr = 0;
var min = 0;
for (i = 0; i < a.length; ++i) {
if (!a[i])
continue;
switch (b[i]) {
case "%d":
case "%e":
d = parseInt(a[i], 10);
break;
case "%m":
m = parseInt(a[i], 10) - 1;
break;
case "%Y":
case "%y":
y = parseInt(a[i], 10);
(y < 100) && (y += (y > 29) ? 1900 : 2000);
break;
case "%b":
case "%B":
for (j = 0; j < 12; ++j) {
if (Calendar._MN[j].substr(0, a[i].length).toLowerCase() == a[i].toLowerCase()) { m = j; break; }
}
break;
case "%H":
case "%I":
case "%k":
case "%l":
hr = parseInt(a[i], 10);
break;
case "%P":
case "%p":
if (/pm/i.test(a[i]) && hr < 12)
hr += 12;
else if (/am/i.test(a[i]) && hr >= 12)
hr -= 12;
break;
case "%M":
min = parseInt(a[i], 10);
break;
}
}
if (isNaN(y)) y = today.getFullYear();
if (isNaN(m)) m = today.getMonth();
if (isNaN(d)) d = today.getDate();
if (isNaN(hr)) hr = today.getHours();
if (isNaN(min)) min = today.getMinutes();
if (y != 0 && m != -1 && d != 0)
return new Date(y, m, d, hr, min, 0);
y = 0; m = -1; d = 0;
for (i = 0; i < a.length; ++i) {
if (a[i].search(/[a-zA-Z]+/) != -1) {
var t = -1;
for (j = 0; j < 12; ++j) {
if (Calendar._MN[j].substr(0, a[i].length).toLowerCase() == a[i].toLowerCase()) { t = j; break; }
}
if (t != -1) {
if (m != -1) {
d = m+1;
}
m = t;
}
} else if (parseInt(a[i], 10) <= 12 && m == -1) {
m = a[i]-1;
} else if (parseInt(a[i], 10) > 31 && y == 0) {
y = parseInt(a[i], 10);
(y < 100) && (y += (y > 29) ? 1900 : 2000);
} else if (d == 0) {
d = a[i];
}
}
if (y == 0)
y = today.getFullYear();
if (m != -1 && d != 0)
return new Date(y, m, d, hr, min, 0);
return today;
};
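// Illustrative example:
//   Date.parseDate("07/11/2016 14:30", "%d/%m/%Y %H:%M")
// yields Nov 7 2016, 14:30. Fields absent from the format fall back to
// today's values, and a loose token scan is attempted when the strict pass
// cannot determine a full date.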
/** Returns the number of days in the given month (defaults to the current month) */
Date.prototype.getMonthDays = function(month) {
var year = this.getFullYear();
if (typeof month == "undefined") {
month = this.getMonth();
}
if (((0 == (year%4)) && ( (0 != (year%100)) || (0 == (year%400)))) && month == 1) {
return 29;
} else {
return Date._MD[month];
}
};
/** Returns the day number within the year. */
Date.prototype.getDayOfYear = function() {
var now = new Date(this.getFullYear(), this.getMonth(), this.getDate(), 0, 0, 0);
var then = new Date(this.getFullYear(), 0, 0, 0, 0, 0);
var time = now - then;
return Math.floor(time / Date.DAY);
};
/** Returns the number of the week in year, as defined in ISO 8601. */
Date.prototype.getWeekNumber = function() {
var d = new Date(this.getFullYear(), this.getMonth(), this.getDate(), 0, 0, 0);
var DoW = d.getDay();
d.setDate(d.getDate() - (DoW + 6) % 7 + 3); // Nearest Thu
var ms = d.valueOf(); // GMT
d.setMonth(0);
d.setDate(4); // Thu in Week 1
return Math.round((ms - d.valueOf()) / (7 * 864e5)) + 1;
};
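// The ISO-8601 rule implemented above: a week belongs to the year that
// contains its Thursday, so the date is shifted to the nearest Thursday and
// whole weeks are counted from the Thursday of week 1 (the week of January 4).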
/** Checks date and time equality */
Date.prototype.equalsTo = function(date) {
return ((this.getFullYear() == date.getFullYear()) &&
(this.getMonth() == date.getMonth()) &&
(this.getDate() == date.getDate()) &&
(this.getHours() == date.getHours()) &&
(this.getMinutes() == date.getMinutes()));
};
/** Set only the year, month, date parts (keep existing time) */
Date.prototype.setDateOnly = function(date) {
var tmp = new Date(date);
this.setDate(1);
this.setFullYear(tmp.getFullYear());
this.setMonth(tmp.getMonth());
this.setDate(tmp.getDate());
};
/** Prints the date in a string according to the given format. */
Date.prototype.print = function (str) {
var m = this.getMonth();
var d = this.getDate();
var y = this.getFullYear();
var wn = this.getWeekNumber();
var w = this.getDay();
var s = {};
var hr = this.getHours();
var pm = (hr >= 12);
var ir = (pm) ? (hr - 12) : hr;
var dy = this.getDayOfYear();
if (ir == 0)
ir = 12;
var min = this.getMinutes();
var sec = this.getSeconds();
s["%a"] = Calendar._SDN[w]; // abbreviated weekday name [FIXME: I18N]
s["%A"] = Calendar._DN[w]; // full weekday name
s["%b"] = Calendar._SMN[m]; // abbreviated month name [FIXME: I18N]
s["%B"] = Calendar._MN[m]; // full month name
// FIXME: %c : preferred date and time representation for the current locale
s["%C"] = 1 + Math.floor(y / 100); // the century number
s["%d"] = (d < 10) ? ("0" + d) : d; // the day of the month (range 01 to 31)
s["%e"] = d; // the day of the month (range 1 to 31)
// FIXME: %D : american date style: %m/%d/%y
// FIXME: %E, %F, %G, %g, %h (man strftime)
s["%H"] = (hr < 10) ? ("0" + hr) : hr; // hour, range 00 to 23 (24h format)
s["%I"] = (ir < 10) ? ("0" + ir) : ir; // hour, range 01 to 12 (12h format)
s["%j"] = (dy < 100) ? ((dy < 10) ? ("00" + dy) : ("0" + dy)) : dy; // day of the year (range 001 to 366)
s["%k"] = hr; // hour, range 0 to 23 (24h format)
s["%l"] = ir; // hour, range 1 to 12 (12h format)
s["%m"] = (m < 9) ? ("0" + (1+m)) : (1+m); // month, range 01 to 12
s["%M"] = (min < 10) ? ("0" + min) : min; // minute, range 00 to 59
s["%n"] = "\n"; // a newline character
s["%p"] = pm ? "PM" : "AM";
s["%P"] = pm ? "pm" : "am";
// FIXME: %r : the time in am/pm notation %I:%M:%S %p
// FIXME: %R : the time in 24-hour notation %H:%M
s["%s"] = Math.floor(this.getTime() / 1000);
s["%S"] = (sec < 10) ? ("0" + sec) : sec; // seconds, range 00 to 59
s["%t"] = "\t"; // a tab character
// FIXME: %T : the time in 24-hour notation (%H:%M:%S)
s["%U"] = s["%W"] = s["%V"] = (wn < 10) ? ("0" + wn) : wn;
s["%u"] = w + 1; // the day of the week (range 1 to 7, 1 = MON)
s["%w"] = w; // the day of the week (range 0 to 6, 0 = SUN)
// FIXME: %x : preferred date representation for the current locale without the time
// FIXME: %X : preferred time representation for the current locale without the date
s["%y"] = ('' + y).substr(2, 2); // year without the century (range 00 to 99)
s["%Y"] = y; // year with the century
s["%%"] = "%"; // a literal '%' character
var re = /%./g;
if (!Calendar.is_ie5 && !Calendar.is_khtml)
return str.replace(re, function (par) { return s[par] || par; });
var a = str.match(re);
for (var i = 0; i < a.length; i++) {
var tmp = s[a[i]];
if (tmp) {
re = new RegExp(a[i], 'g');
str = str.replace(re, tmp);
}
}
return str;
};
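// Illustrative example:
//   new Date(2016, 10, 7, 9, 5).print("%Y-%m-%d %H:%M")  ->  "2016-11-07 09:05"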
Date.prototype.__msh_oldSetFullYear = Date.prototype.setFullYear;
Date.prototype.setFullYear = function(y) {
var d = new Date(this);
d.__msh_oldSetFullYear(y);
if (d.getMonth() != this.getMonth())
this.setDate(28);
this.__msh_oldSetFullYear(y);
};
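// The override above guards against Feb 29: calling setFullYear on Feb 29 of
// a leap year would otherwise roll the date into March, so the day is first
// clamped to 28 whenever the trial assignment changes the month.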
// END: DATE OBJECT PATCHES
// global object that remembers the calendar
window._dynarch_popupCalendar = null;<|fim▁end|> | |
<|file_name|>0c431867c679_pets_now_have_a_description.py<|end_file_name|><|fim▁begin|>"""Pets now have a description
Revision ID: 0c431867c679<|fim▁hole|>Create Date: 2016-11-07 18:36:25.912155
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0c431867c679'
down_revision = '5b1bdc1f3125'
branch_labels = None
depends_on = None
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('pet', sa.Column('description', sa.Text(), nullable=False))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('pet', 'description')
### end Alembic commands ###<|fim▁end|> | Revises: 5b1bdc1f3125 |
<|file_name|>stream.rs<|end_file_name|><|fim▁begin|>use crate::future::poll_fn;
use crate::io::{AsyncRead, AsyncWrite, Interest, PollEvented, ReadBuf, Ready};
use crate::net::unix::split::{split, ReadHalf, WriteHalf};
use crate::net::unix::split_owned::{split_owned, OwnedReadHalf, OwnedWriteHalf};
use crate::net::unix::ucred::{self, UCred};
use crate::net::unix::SocketAddr;
use std::convert::TryFrom;
use std::fmt;
use std::io::{self, Read, Write};
use std::net::Shutdown;
use std::os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd};
use std::os::unix::net;
use std::path::Path;
use std::pin::Pin;
use std::task::{Context, Poll};
cfg_io_util! {
use bytes::BufMut;
}
cfg_net_unix! {
/// A structure representing a connected Unix socket.
///
/// This socket can be connected directly with `UnixStream::connect` or accepted
/// from a listener with `UnixListener::incoming`. Additionally, a pair of
/// anonymous Unix sockets can be created with `UnixStream::pair`.
///
/// To shut down the stream in the write direction, you can call the
/// [`shutdown()`] method. This will cause the other peer to receive a read of
/// length 0, indicating that no more data will be sent. This only closes
/// the stream in one direction.
///
/// [`shutdown()`]: fn@crate::io::AsyncWriteExt::shutdown
pub struct UnixStream {
io: PollEvented<mio::net::UnixStream>,
}
}<|fim▁hole|>impl UnixStream {
/// Connects to the socket named by `path`.
///
/// This function will create a new Unix socket and connect to the path
/// specified, associating the returned stream with the default event loop's
/// handle.
pub async fn connect<P>(path: P) -> io::Result<UnixStream>
where
P: AsRef<Path>,
{
let stream = mio::net::UnixStream::connect(path)?;
let stream = UnixStream::new(stream)?;
poll_fn(|cx| stream.io.registration().poll_write_ready(cx)).await?;
if let Some(e) = stream.io.take_error()? {
return Err(e);
}
Ok(stream)
}
/// Waits for any of the requested ready states.
///
/// This function is usually paired with `try_read()` or `try_write()`. It
/// can be used to concurrently read / write to the same socket on a single
/// task without splitting the socket.
///
/// # Cancel safety
///
/// This method is cancel safe. Once a readiness event occurs, the method
/// will continue to return immediately until the readiness event is
/// consumed by an attempt to read or write that fails with `WouldBlock` or
/// `Poll::Pending`.
///
/// # Examples
///
/// Concurrently read and write to the stream on the same task without
/// splitting.
///
/// ```no_run
/// use tokio::io::Interest;
/// use tokio::net::UnixStream;
/// use std::error::Error;
/// use std::io;
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn Error>> {
/// let dir = tempfile::tempdir().unwrap();
/// let bind_path = dir.path().join("bind_path");
/// let stream = UnixStream::connect(bind_path).await?;
///
/// loop {
/// let ready = stream.ready(Interest::READABLE | Interest::WRITABLE).await?;
///
/// if ready.is_readable() {
/// let mut data = vec![0; 1024];
/// // Try to read data, this may still fail with `WouldBlock`
/// // if the readiness event is a false positive.
/// match stream.try_read(&mut data) {
/// Ok(n) => {
/// println!("read {} bytes", n);
/// }
/// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
/// continue;
/// }
/// Err(e) => {
/// return Err(e.into());
/// }
/// }
///
/// }
///
/// if ready.is_writable() {
/// // Try to write data, this may still fail with `WouldBlock`
/// // if the readiness event is a false positive.
/// match stream.try_write(b"hello world") {
/// Ok(n) => {
/// println!("write {} bytes", n);
/// }
/// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
/// continue;
/// }
/// Err(e) => {
/// return Err(e.into());
/// }
/// }
/// }
/// }
/// }
/// ```
pub async fn ready(&self, interest: Interest) -> io::Result<Ready> {
let event = self.io.registration().readiness(interest).await?;
Ok(event.ready)
}
/// Waits for the socket to become readable.
///
/// This function is equivalent to `ready(Interest::READABLE)` and is usually
/// paired with `try_read()`.
///
/// # Cancel safety
///
/// This method is cancel safe. Once a readiness event occurs, the method
/// will continue to return immediately until the readiness event is
/// consumed by an attempt to read that fails with `WouldBlock` or
/// `Poll::Pending`.
///
/// # Examples
///
/// ```no_run
/// use tokio::net::UnixStream;
/// use std::error::Error;
/// use std::io;
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn Error>> {
/// // Connect to a peer
/// let dir = tempfile::tempdir().unwrap();
/// let bind_path = dir.path().join("bind_path");
/// let stream = UnixStream::connect(bind_path).await?;
///
/// let mut msg = vec![0; 1024];
///
/// loop {
/// // Wait for the socket to be readable
/// stream.readable().await?;
///
/// // Try to read data, this may still fail with `WouldBlock`
/// // if the readiness event is a false positive.
/// match stream.try_read(&mut msg) {
/// Ok(n) => {
/// msg.truncate(n);
/// break;
/// }
/// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
/// continue;
/// }
/// Err(e) => {
/// return Err(e.into());
/// }
/// }
/// }
///
/// println!("GOT = {:?}", msg);
/// Ok(())
/// }
/// ```
pub async fn readable(&self) -> io::Result<()> {
self.ready(Interest::READABLE).await?;
Ok(())
}
/// Polls for read readiness.
///
/// If the unix stream is not currently ready for reading, this method will
/// store a clone of the `Waker` from the provided `Context`. When the unix
/// stream becomes ready for reading, `Waker::wake` will be called on the
/// waker.
///
/// Note that on multiple calls to `poll_read_ready` or `poll_read`, only
/// the `Waker` from the `Context` passed to the most recent call is
/// scheduled to receive a wakeup. (However, `poll_write_ready` retains a
/// second, independent waker.)
///
/// This function is intended for cases where creating and pinning a future
/// via [`readable`] is not feasible. Where possible, using [`readable`] is
/// preferred, as this supports polling from multiple tasks at once.
///
/// # Return value
///
/// The function returns:
///
/// * `Poll::Pending` if the unix stream is not ready for reading.
/// * `Poll::Ready(Ok(()))` if the unix stream is ready for reading.
/// * `Poll::Ready(Err(e))` if an error is encountered.
///
/// # Errors
///
/// This function may encounter any standard I/O error except `WouldBlock`.
///
/// [`readable`]: method@Self::readable
pub fn poll_read_ready(&self, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
self.io.registration().poll_read_ready(cx).map_ok(|_| ())
}
    /// Tries to read data from the stream into the provided buffer, returning how
/// many bytes were read.
///
/// Receives any pending data from the socket but does not wait for new data
/// to arrive. On success, returns the number of bytes read. Because
/// `try_read()` is non-blocking, the buffer does not have to be stored by
/// the async task and can exist entirely on the stack.
///
/// Usually, [`readable()`] or [`ready()`] is used with this function.
///
/// [`readable()`]: UnixStream::readable()
/// [`ready()`]: UnixStream::ready()
///
/// # Return
///
/// If data is successfully read, `Ok(n)` is returned, where `n` is the
/// number of bytes read. `Ok(0)` indicates the stream's read half is closed
/// and will no longer yield data. If the stream is not ready to read data
/// `Err(io::ErrorKind::WouldBlock)` is returned.
///
/// # Examples
///
/// ```no_run
/// use tokio::net::UnixStream;
/// use std::error::Error;
/// use std::io;
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn Error>> {
/// // Connect to a peer
/// let dir = tempfile::tempdir().unwrap();
/// let bind_path = dir.path().join("bind_path");
/// let stream = UnixStream::connect(bind_path).await?;
///
/// loop {
/// // Wait for the socket to be readable
/// stream.readable().await?;
///
/// // Creating the buffer **after** the `await` prevents it from
/// // being stored in the async task.
/// let mut buf = [0; 4096];
///
/// // Try to read data, this may still fail with `WouldBlock`
/// // if the readiness event is a false positive.
/// match stream.try_read(&mut buf) {
/// Ok(0) => break,
/// Ok(n) => {
/// println!("read {} bytes", n);
/// }
/// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
/// continue;
/// }
/// Err(e) => {
/// return Err(e.into());
/// }
/// }
/// }
///
/// Ok(())
/// }
/// ```
pub fn try_read(&self, buf: &mut [u8]) -> io::Result<usize> {
self.io
.registration()
.try_io(Interest::READABLE, || (&*self.io).read(buf))
}
/// Tries to read data from the stream into the provided buffers, returning
/// how many bytes were read.
///
/// Data is copied to fill each buffer in order, with the final buffer
/// written to possibly being only partially filled. This method behaves
/// equivalently to a single call to [`try_read()`] with concatenated
/// buffers.
///
/// Receives any pending data from the socket but does not wait for new data
/// to arrive. On success, returns the number of bytes read. Because
/// `try_read_vectored()` is non-blocking, the buffer does not have to be
/// stored by the async task and can exist entirely on the stack.
///
/// Usually, [`readable()`] or [`ready()`] is used with this function.
///
/// [`try_read()`]: UnixStream::try_read()
/// [`readable()`]: UnixStream::readable()
/// [`ready()`]: UnixStream::ready()
///
/// # Return
///
/// If data is successfully read, `Ok(n)` is returned, where `n` is the
/// number of bytes read. `Ok(0)` indicates the stream's read half is closed
/// and will no longer yield data. If the stream is not ready to read data
/// `Err(io::ErrorKind::WouldBlock)` is returned.
///
/// # Examples
///
/// ```no_run
/// use tokio::net::UnixStream;
/// use std::error::Error;
/// use std::io::{self, IoSliceMut};
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn Error>> {
/// // Connect to a peer
/// let dir = tempfile::tempdir().unwrap();
/// let bind_path = dir.path().join("bind_path");
/// let stream = UnixStream::connect(bind_path).await?;
///
/// loop {
/// // Wait for the socket to be readable
/// stream.readable().await?;
///
/// // Creating the buffer **after** the `await` prevents it from
/// // being stored in the async task.
/// let mut buf_a = [0; 512];
/// let mut buf_b = [0; 1024];
/// let mut bufs = [
/// IoSliceMut::new(&mut buf_a),
/// IoSliceMut::new(&mut buf_b),
/// ];
///
/// // Try to read data, this may still fail with `WouldBlock`
/// // if the readiness event is a false positive.
/// match stream.try_read_vectored(&mut bufs) {
/// Ok(0) => break,
/// Ok(n) => {
/// println!("read {} bytes", n);
/// }
/// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
/// continue;
/// }
/// Err(e) => {
/// return Err(e.into());
/// }
/// }
/// }
///
/// Ok(())
/// }
/// ```
pub fn try_read_vectored(&self, bufs: &mut [io::IoSliceMut<'_>]) -> io::Result<usize> {
self.io
.registration()
.try_io(Interest::READABLE, || (&*self.io).read_vectored(bufs))
}
cfg_io_util! {
/// Tries to read data from the stream into the provided buffer, advancing the
/// buffer's internal cursor, returning how many bytes were read.
///
/// Receives any pending data from the socket but does not wait for new data
/// to arrive. On success, returns the number of bytes read. Because
/// `try_read_buf()` is non-blocking, the buffer does not have to be stored by
/// the async task and can exist entirely on the stack.
///
/// Usually, [`readable()`] or [`ready()`] is used with this function.
///
/// [`readable()`]: UnixStream::readable()
/// [`ready()`]: UnixStream::ready()
///
/// # Return
///
/// If data is successfully read, `Ok(n)` is returned, where `n` is the
/// number of bytes read. `Ok(0)` indicates the stream's read half is closed
/// and will no longer yield data. If the stream is not ready to read data
/// `Err(io::ErrorKind::WouldBlock)` is returned.
///
/// # Examples
///
/// ```no_run
/// use tokio::net::UnixStream;
/// use std::error::Error;
/// use std::io;
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn Error>> {
/// // Connect to a peer
/// let dir = tempfile::tempdir().unwrap();
/// let bind_path = dir.path().join("bind_path");
/// let stream = UnixStream::connect(bind_path).await?;
///
/// loop {
/// // Wait for the socket to be readable
/// stream.readable().await?;
///
/// let mut buf = Vec::with_capacity(4096);
///
/// // Try to read data, this may still fail with `WouldBlock`
/// // if the readiness event is a false positive.
/// match stream.try_read_buf(&mut buf) {
/// Ok(0) => break,
/// Ok(n) => {
/// println!("read {} bytes", n);
/// }
/// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
/// continue;
/// }
/// Err(e) => {
/// return Err(e.into());
/// }
/// }
/// }
///
/// Ok(())
/// }
/// ```
pub fn try_read_buf<B: BufMut>(&self, buf: &mut B) -> io::Result<usize> {
self.io.registration().try_io(Interest::READABLE, || {
use std::io::Read;
let dst = buf.chunk_mut();
let dst =
unsafe { &mut *(dst as *mut _ as *mut [std::mem::MaybeUninit<u8>] as *mut [u8]) };
// Safety: We trust `UnixStream::read` to have filled up `n` bytes in the
// buffer.
let n = (&*self.io).read(dst)?;
unsafe {
buf.advance_mut(n);
}
Ok(n)
})
}
}
/// Waits for the socket to become writable.
///
/// This function is equivalent to `ready(Interest::WRITABLE)` and is usually
/// paired with `try_write()`.
///
/// # Cancel safety
///
/// This method is cancel safe. Once a readiness event occurs, the method
/// will continue to return immediately until the readiness event is
/// consumed by an attempt to write that fails with `WouldBlock` or
/// `Poll::Pending`.
///
/// # Examples
///
/// ```no_run
/// use tokio::net::UnixStream;
/// use std::error::Error;
/// use std::io;
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn Error>> {
/// // Connect to a peer
/// let dir = tempfile::tempdir().unwrap();
/// let bind_path = dir.path().join("bind_path");
/// let stream = UnixStream::connect(bind_path).await?;
///
/// loop {
/// // Wait for the socket to be writable
/// stream.writable().await?;
///
/// // Try to write data, this may still fail with `WouldBlock`
/// // if the readiness event is a false positive.
/// match stream.try_write(b"hello world") {
/// Ok(n) => {
/// break;
/// }
/// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
/// continue;
/// }
/// Err(e) => {
/// return Err(e.into());
/// }
/// }
/// }
///
/// Ok(())
/// }
/// ```
pub async fn writable(&self) -> io::Result<()> {
self.ready(Interest::WRITABLE).await?;
Ok(())
}
/// Polls for write readiness.
///
/// If the unix stream is not currently ready for writing, this method will
/// store a clone of the `Waker` from the provided `Context`. When the unix
/// stream becomes ready for writing, `Waker::wake` will be called on the
/// waker.
///
/// Note that on multiple calls to `poll_write_ready` or `poll_write`, only
/// the `Waker` from the `Context` passed to the most recent call is
/// scheduled to receive a wakeup. (However, `poll_read_ready` retains a
/// second, independent waker.)
///
/// This function is intended for cases where creating and pinning a future
/// via [`writable`] is not feasible. Where possible, using [`writable`] is
/// preferred, as this supports polling from multiple tasks at once.
///
/// # Return value
///
/// The function returns:
///
/// * `Poll::Pending` if the unix stream is not ready for writing.
/// * `Poll::Ready(Ok(()))` if the unix stream is ready for writing.
/// * `Poll::Ready(Err(e))` if an error is encountered.
///
/// # Errors
///
/// This function may encounter any standard I/O error except `WouldBlock`.
///
/// [`writable`]: method@Self::writable
pub fn poll_write_ready(&self, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
self.io.registration().poll_write_ready(cx).map_ok(|_| ())
}
/// Tries to write a buffer to the stream, returning how many bytes were
/// written.
///
/// The function will attempt to write the entire contents of `buf`, but
/// only part of the buffer may be written.
///
/// This function is usually paired with `writable()`.
///
/// # Return
///
/// If data is successfully written, `Ok(n)` is returned, where `n` is the
/// number of bytes written. If the stream is not ready to write data,
/// `Err(io::ErrorKind::WouldBlock)` is returned.
///
/// # Examples
///
/// ```no_run
/// use tokio::net::UnixStream;
/// use std::error::Error;
/// use std::io;
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn Error>> {
/// // Connect to a peer
/// let dir = tempfile::tempdir().unwrap();
/// let bind_path = dir.path().join("bind_path");
/// let stream = UnixStream::connect(bind_path).await?;
///
/// loop {
/// // Wait for the socket to be writable
/// stream.writable().await?;
///
/// // Try to write data, this may still fail with `WouldBlock`
/// // if the readiness event is a false positive.
/// match stream.try_write(b"hello world") {
/// Ok(n) => {
/// break;
/// }
/// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
/// continue;
/// }
/// Err(e) => {
/// return Err(e.into());
/// }
/// }
/// }
///
/// Ok(())
/// }
/// ```
pub fn try_write(&self, buf: &[u8]) -> io::Result<usize> {
self.io
.registration()
.try_io(Interest::WRITABLE, || (&*self.io).write(buf))
}
/// Tries to write several buffers to the stream, returning how many bytes
/// were written.
///
    /// Data is written from each buffer in order, with the final buffer read
    /// from possibly being only partially consumed. This method behaves
/// equivalently to a single call to [`try_write()`] with concatenated
/// buffers.
///
/// This function is usually paired with `writable()`.
///
/// [`try_write()`]: UnixStream::try_write()
///
/// # Return
///
/// If data is successfully written, `Ok(n)` is returned, where `n` is the
/// number of bytes written. If the stream is not ready to write data,
/// `Err(io::ErrorKind::WouldBlock)` is returned.
///
/// # Examples
///
/// ```no_run
/// use tokio::net::UnixStream;
/// use std::error::Error;
/// use std::io;
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn Error>> {
/// // Connect to a peer
/// let dir = tempfile::tempdir().unwrap();
/// let bind_path = dir.path().join("bind_path");
/// let stream = UnixStream::connect(bind_path).await?;
///
/// let bufs = [io::IoSlice::new(b"hello "), io::IoSlice::new(b"world")];
///
/// loop {
/// // Wait for the socket to be writable
/// stream.writable().await?;
///
/// // Try to write data, this may still fail with `WouldBlock`
/// // if the readiness event is a false positive.
/// match stream.try_write_vectored(&bufs) {
/// Ok(n) => {
/// break;
/// }
/// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
/// continue;
/// }
/// Err(e) => {
/// return Err(e.into());
/// }
/// }
/// }
///
/// Ok(())
/// }
/// ```
pub fn try_write_vectored(&self, buf: &[io::IoSlice<'_>]) -> io::Result<usize> {
self.io
.registration()
.try_io(Interest::WRITABLE, || (&*self.io).write_vectored(buf))
}
/// Tries to read or write from the socket using a user-provided IO operation.
///
/// If the socket is ready, the provided closure is called. The closure
/// should attempt to perform IO operation from the socket by manually
/// calling the appropriate syscall. If the operation fails because the
/// socket is not actually ready, then the closure should return a
/// `WouldBlock` error and the readiness flag is cleared. The return value
/// of the closure is then returned by `try_io`.
///
/// If the socket is not ready, then the closure is not called
/// and a `WouldBlock` error is returned.
///
/// The closure should only return a `WouldBlock` error if it has performed
/// an IO operation on the socket that failed due to the socket not being
/// ready. Returning a `WouldBlock` error in any other situation will
/// incorrectly clear the readiness flag, which can cause the socket to
/// behave incorrectly.
///
/// The closure should not perform the IO operation using any of the methods
/// defined on the Tokio `UnixStream` type, as this will mess with the
/// readiness flag and can cause the socket to behave incorrectly.
///
/// Usually, [`readable()`], [`writable()`] or [`ready()`] is used with this function.
///
/// [`readable()`]: UnixStream::readable()
/// [`writable()`]: UnixStream::writable()
/// [`ready()`]: UnixStream::ready()
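    ///
    /// # Examples
    ///
    /// A schematic sketch; the closure body is where a manual syscall on the
    /// raw file descriptor (obtained via `AsRawFd`) would go:
    ///
    /// ```no_run
    /// use tokio::io::Interest;
    /// use tokio::net::UnixStream;
    /// use std::io;
    ///
    /// # async fn dox(stream: UnixStream) -> io::Result<usize> {
    /// stream.readable().await?;
    /// stream.try_io(Interest::READABLE, || {
    ///     // Perform the raw read here, mapping EWOULDBLOCK/EAGAIN to
    ///     // io::ErrorKind::WouldBlock so the readiness flag is cleared.
    ///     # let bytes_read: usize = 0;
    ///     Ok(bytes_read)
    /// })
    /// # }
    /// ```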
pub fn try_io<R>(
&self,
interest: Interest,
f: impl FnOnce() -> io::Result<R>,
) -> io::Result<R> {
self.io.registration().try_io(interest, f)
}
    /// Creates a new `UnixStream` from a `std::os::unix::net::UnixStream`.
///
/// This function is intended to be used to wrap a UnixStream from the
/// standard library in the Tokio equivalent. The conversion assumes
/// nothing about the underlying stream; it is left up to the user to set
/// it in non-blocking mode.
///
/// # Panics
///
/// This function panics if thread-local runtime is not set.
///
/// The runtime is usually set implicitly when this function is called
/// from a future driven by a tokio runtime, otherwise runtime can be set
/// explicitly with [`Runtime::enter`](crate::runtime::Runtime::enter) function.
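    ///
    /// # Examples
    ///
    /// A minimal sketch (the socket path is hypothetical); the std stream
    /// should be put into non-blocking mode before wrapping:
    ///
    /// ```no_run
    /// use tokio::net::UnixStream;
    /// use std::os::unix::net::UnixStream as StdUnixStream;
    ///
    /// # fn dox() -> std::io::Result<()> {
    /// let std_stream = StdUnixStream::connect("/tmp/sock")?;
    /// std_stream.set_nonblocking(true)?;
    /// let stream = UnixStream::from_std(std_stream)?;
    /// # Ok(())
    /// # }
    /// ```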
pub fn from_std(stream: net::UnixStream) -> io::Result<UnixStream> {
let stream = mio::net::UnixStream::from_std(stream);
let io = PollEvented::new(stream)?;
Ok(UnixStream { io })
}
/// Turns a [`tokio::net::UnixStream`] into a [`std::os::unix::net::UnixStream`].
///
/// The returned [`std::os::unix::net::UnixStream`] will have nonblocking
/// mode set as `true`. Use [`set_nonblocking`] to change the blocking
/// mode if needed.
///
/// # Examples
///
/// ```
/// use std::error::Error;
/// use std::io::Read;
/// use tokio::net::UnixListener;
/// # use tokio::net::UnixStream;
/// # use tokio::io::AsyncWriteExt;
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn Error>> {
/// let dir = tempfile::tempdir().unwrap();
/// let bind_path = dir.path().join("bind_path");
///
/// let mut data = [0u8; 12];
/// let listener = UnixListener::bind(&bind_path)?;
/// # let handle = tokio::spawn(async {
/// # let mut stream = UnixStream::connect(bind_path).await.unwrap();
/// # stream.write(b"Hello world!").await.unwrap();
/// # });
/// let (tokio_unix_stream, _) = listener.accept().await?;
/// let mut std_unix_stream = tokio_unix_stream.into_std()?;
/// # handle.await.expect("The task being joined has panicked");
/// std_unix_stream.set_nonblocking(false)?;
/// std_unix_stream.read_exact(&mut data)?;
/// # assert_eq!(b"Hello world!", &data);
/// Ok(())
/// }
/// ```
/// [`tokio::net::UnixStream`]: UnixStream
/// [`std::os::unix::net::UnixStream`]: std::os::unix::net::UnixStream
/// [`set_nonblocking`]: fn@std::os::unix::net::UnixStream::set_nonblocking
pub fn into_std(self) -> io::Result<std::os::unix::net::UnixStream> {
self.io
.into_inner()
.map(|io| io.into_raw_fd())
.map(|raw_fd| unsafe { std::os::unix::net::UnixStream::from_raw_fd(raw_fd) })
}
/// Creates an unnamed pair of connected sockets.
///
/// This function will create a pair of interconnected Unix sockets for
/// communicating back and forth between one another. Each socket will
/// be associated with the default event loop's handle.
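    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```no_run
    /// use tokio::net::UnixStream;
    /// use tokio::io::{AsyncReadExt, AsyncWriteExt};
    ///
    /// # async fn dox() -> std::io::Result<()> {
    /// let (mut a, mut b) = UnixStream::pair()?;
    /// a.write_all(b"ping").await?;
    ///
    /// let mut buf = [0u8; 4];
    /// b.read_exact(&mut buf).await?;
    /// assert_eq!(&buf, b"ping");
    /// # Ok(())
    /// # }
    /// ```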
pub fn pair() -> io::Result<(UnixStream, UnixStream)> {
let (a, b) = mio::net::UnixStream::pair()?;
let a = UnixStream::new(a)?;
let b = UnixStream::new(b)?;
Ok((a, b))
}
pub(crate) fn new(stream: mio::net::UnixStream) -> io::Result<UnixStream> {
let io = PollEvented::new(stream)?;
Ok(UnixStream { io })
}
/// Returns the socket address of the local half of this connection.
///
/// # Examples
///
/// ```no_run
/// use tokio::net::UnixStream;
///
/// # async fn dox() -> Result<(), Box<dyn std::error::Error>> {
/// let dir = tempfile::tempdir().unwrap();
/// let bind_path = dir.path().join("bind_path");
/// let stream = UnixStream::connect(bind_path).await?;
///
/// println!("{:?}", stream.local_addr()?);
/// # Ok(())
/// # }
/// ```
pub fn local_addr(&self) -> io::Result<SocketAddr> {
self.io.local_addr().map(SocketAddr)
}
/// Returns the socket address of the remote half of this connection.
///
/// # Examples
///
/// ```no_run
/// use tokio::net::UnixStream;
///
/// # async fn dox() -> Result<(), Box<dyn std::error::Error>> {
/// let dir = tempfile::tempdir().unwrap();
/// let bind_path = dir.path().join("bind_path");
/// let stream = UnixStream::connect(bind_path).await?;
///
/// println!("{:?}", stream.peer_addr()?);
/// # Ok(())
/// # }
/// ```
pub fn peer_addr(&self) -> io::Result<SocketAddr> {
self.io.peer_addr().map(SocketAddr)
}
/// Returns effective credentials of the process which called `connect` or `pair`.
pub fn peer_cred(&self) -> io::Result<UCred> {
ucred::get_peer_cred(self)
}
/// Returns the value of the `SO_ERROR` option.
pub fn take_error(&self) -> io::Result<Option<io::Error>> {
self.io.take_error()
}
/// Shuts down the read, write, or both halves of this connection.
///
/// This function will cause all pending and future I/O calls on the
/// specified portions to immediately return with an appropriate value
/// (see the documentation of `Shutdown`).
pub(super) fn shutdown_std(&self, how: Shutdown) -> io::Result<()> {
self.io.shutdown(how)
}
// These lifetime markers also appear in the generated documentation, and make
// it more clear that this is a *borrowed* split.
#[allow(clippy::needless_lifetimes)]
/// Splits a `UnixStream` into a read half and a write half, which can be used
/// to read and write the stream concurrently.
///
/// This method is more efficient than [`into_split`], but the halves cannot be
/// moved into independently spawned tasks.
///
/// [`into_split`]: Self::into_split()
pub fn split<'a>(&'a mut self) -> (ReadHalf<'a>, WriteHalf<'a>) {
split(self)
}
/// Splits a `UnixStream` into a read half and a write half, which can be used
/// to read and write the stream concurrently.
///
/// Unlike [`split`], the owned halves can be moved to separate tasks, however
/// this comes at the cost of a heap allocation.
///
/// **Note:** Dropping the write half will shut down the write half of the
/// stream. This is equivalent to calling [`shutdown()`] on the `UnixStream`.
///
/// [`split`]: Self::split()
/// [`shutdown()`]: fn@crate::io::AsyncWriteExt::shutdown
pub fn into_split(self) -> (OwnedReadHalf, OwnedWriteHalf) {
split_owned(self)
}
}
impl TryFrom<net::UnixStream> for UnixStream {
type Error = io::Error;
/// Consumes stream, returning the tokio I/O object.
///
/// This is equivalent to
/// [`UnixStream::from_std(stream)`](UnixStream::from_std).
fn try_from(stream: net::UnixStream) -> io::Result<Self> {
Self::from_std(stream)
}
}
impl AsyncRead for UnixStream {
fn poll_read(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<io::Result<()>> {
self.poll_read_priv(cx, buf)
}
}
impl AsyncWrite for UnixStream {
fn poll_write(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &[u8],
) -> Poll<io::Result<usize>> {
self.poll_write_priv(cx, buf)
}
fn poll_write_vectored(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
bufs: &[io::IoSlice<'_>],
) -> Poll<io::Result<usize>> {
self.poll_write_vectored_priv(cx, bufs)
}
fn is_write_vectored(&self) -> bool {
true
}
fn poll_flush(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<io::Result<()>> {
Poll::Ready(Ok(()))
}
fn poll_shutdown(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<io::Result<()>> {
self.shutdown_std(std::net::Shutdown::Write)?;
Poll::Ready(Ok(()))
}
}
impl UnixStream {
// == Poll IO functions that takes `&self` ==
//
// To read or write without mutable access to the `UnixStream`, combine the
// `poll_read_ready` or `poll_write_ready` methods with the `try_read` or
// `try_write` methods.
pub(crate) fn poll_read_priv(
&self,
cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<io::Result<()>> {
// Safety: `UnixStream::read` correctly handles reads into uninitialized memory
unsafe { self.io.poll_read(cx, buf) }
}
pub(crate) fn poll_write_priv(
&self,
cx: &mut Context<'_>,
buf: &[u8],
) -> Poll<io::Result<usize>> {
self.io.poll_write(cx, buf)
}
pub(super) fn poll_write_vectored_priv(
&self,
cx: &mut Context<'_>,
bufs: &[io::IoSlice<'_>],
) -> Poll<io::Result<usize>> {
self.io.poll_write_vectored(cx, bufs)
}
}
impl fmt::Debug for UnixStream {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.io.fmt(f)
}
}
impl AsRawFd for UnixStream {
fn as_raw_fd(&self) -> RawFd {
self.io.as_raw_fd()
}
}<|fim▁end|> | |
<|file_name|>resources.rs<|end_file_name|><|fim▁begin|>// +--------------------------------------------------------------------------+
// | Copyright 2016 Matthew D. Steele <[email protected]> |
// | |
// | This file is part of System Syzygy. |
// | |
// | System Syzygy is free software: you can redistribute it and/or modify it |
// | under the terms of the GNU General Public License as published by the |
// | Free Software Foundation, either version 3 of the License, or (at your |
// | option) any later version. |
// | |
// | System Syzygy is distributed in the hope that it will be useful, but |
// | WITHOUT ANY WARRANTY; without even the implied warranty of |
// | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
// | General Public License for details. |
// | |
// | You should have received a copy of the GNU General Public License along |
// | with System Syzygy. If not, see <http://www.gnu.org/licenses/>. |
// +--------------------------------------------------------------------------+
use ahi;
use sdl2::render::Canvas as SdlCanvas;
use sdl2::video::Window as SdlWindow;
use std::collections::HashMap;
use std::io;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use super::background::Background;
use super::font::Font;
use super::loader::ResourceLoader;
use super::sprite::Sprite;
// ========================================================================= //
pub struct Resources<'a> {
renderer: &'a SdlCanvas<SdlWindow>,
cache: &'a mut ResourceCache,
}
impl<'a> Resources<'a> {
pub fn new(
renderer: &'a SdlCanvas<SdlWindow>,
cache: &'a mut ResourceCache,
) -> Resources<'a> {
Resources { renderer, cache }
}
pub fn get_background(&mut self, name: &str) -> Rc<Background> {
self.cache.get_background(self.renderer, name)
}
pub fn get_font(&mut self, name: &str) -> Rc<Font> {
self.cache.get_font(self.renderer, name)
}
pub fn get_sprites(&mut self, name: &str) -> Vec<Sprite> {
self.cache.get_sprites(self.renderer, name)
}
}
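// A minimal usage sketch (asset names hypothetical): callers construct a
// short-lived `Resources` around the renderer and cache, then fetch the
// `Rc`-shared assets they need:
//
//     let mut resources = Resources::new(&canvas, &mut cache);
//     let font = resources.get_font("roman");
//     let tiles = resources.get_sprites("tiles/floor");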
// ========================================================================= //
pub struct ResourceCache {
backgrounds: HashMap<String, Rc<Background>>,
fonts: HashMap<String, Rc<Font>>,
sprites: HashMap<String, Vec<Sprite>>,
loader: ResourceLoader,
}
impl ResourceCache {
pub fn new() -> ResourceCache {
ResourceCache {
backgrounds: HashMap::new(),
fonts: HashMap::new(),
sprites: HashMap::new(),
loader: ResourceLoader::new(),
}
}
fn get_background(
&mut self,
renderer: &SdlCanvas<SdlWindow>,
name: &str,
) -> Rc<Background> {
if let Some(background) = self.backgrounds.get(name) {
return background.clone();
}
if cfg!(debug_assertions) {
println!("Loading background: {}", name);
}
let path =
PathBuf::from("backgrounds").join(name).with_extension("bg");
let file = self.loader.load(&path).expect(name);
let background = Rc::new(
Background::load(&path, file, |name| {
self.get_sprites(renderer, &format!("tiles/{}", name))
})
.expect(name),
);
self.backgrounds.insert(name.to_string(), background.clone());
background
}
fn get_font(
&mut self,
renderer: &SdlCanvas<SdlWindow>,
name: &str,
) -> Rc<Font> {
if let Some(font) = self.fonts.get(name) {
return font.clone();
}
if cfg!(debug_assertions) {
println!("Loading font: {}", name);
}
let path = PathBuf::from("fonts").join(name).with_extension("ahf");
let ahf = load_ahf_from_file(&self.loader, &path).expect(name);
let font = Rc::new(Font::new(renderer, &ahf));
self.fonts.insert(name.to_string(), font.clone());<|fim▁hole|> fn get_sprites(
&mut self,
renderer: &SdlCanvas<SdlWindow>,
name: &str,
) -> Vec<Sprite> {
if let Some(vec) = self.sprites.get(name) {
return vec.clone();
}
if cfg!(debug_assertions) {
println!("Loading sprites: {}", name);
}
let path = PathBuf::from("sprites").join(name).with_extension("ahi");
let ahi = load_ahi_from_file(&self.loader, &path).expect(name);
let vec: Vec<Sprite> =
ahi.iter().map(|image| Sprite::new(renderer, image)).collect();
self.sprites.insert(name.to_string(), vec.clone());
vec
}
}
// ========================================================================= //
fn load_ahf_from_file(
loader: &ResourceLoader,
path: &Path,
) -> io::Result<ahi::Font> {
let mut file = loader.load(path)?;
ahi::Font::read(&mut file)
}
fn load_ahi_from_file(
loader: &ResourceLoader,
path: &Path,
) -> io::Result<Vec<ahi::Image>> {
let mut file = loader.load(path)?;
ahi::Image::read_all(&mut file)
}
// ========================================================================= //<|fim▁end|> | font
}
|
<|file_name|>bootstrap-collapse.js<|end_file_name|><|fim▁begin|>/* =============================================================
* bootstrap-collapse.js v2.2.2
* http://twitter.github.com/bootstrap/javascript.html#collapse
* =============================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============================================================ */
!function ($) {
"use strict"; // jshint ;_;
/* COLLAPSE PUBLIC CLASS DEFINITION
* ================================ */
var Collapse = function (element, options) {
this.$element = $(element)
this.options = $.extend({}, $.fn.collapse.defaults, options)
if (this.options.parent) {
this.$parent = $(this.options.parent)
}
this.options.toggle && this.toggle()
}
Collapse.prototype = {
constructor: Collapse
, dimension: function () {
var hasWidth = this.$element.hasClass('width')
return hasWidth ? 'width' : 'height'
}
, show: function () {
var dimension
, scroll
, actives
, hasData
if (this.transitioning) return
dimension = this.dimension()
scroll = $.camelCase(['scroll', dimension].join('-'))
actives = this.$parent && this.$parent.find('> .accordion-group > .in')
if (actives && actives.length) {
hasData = actives.data('collapse')
if (hasData && hasData.transitioning) return
actives.collapse('hide')
hasData || actives.data('collapse', null)
}
this.$element[dimension](0)
this.transition('addClass', $.Event('show'), 'shown')
$.support.transition && this.$element[dimension](this.$element[0][scroll])
}
, hide: function () {<|fim▁hole|> this.transition('removeClass', $.Event('hide'), 'hidden')
this.$element[dimension](0)
}
, reset: function (size) {
var dimension = this.dimension()
this.$element
.removeClass('collapse')
[dimension](size || 'auto')
[0].offsetWidth
this.$element[size !== null ? 'addClass' : 'removeClass']('collapse')
return this
}
, transition: function (method, startEvent, completeEvent) {
var that = this
, complete = function () {
if (startEvent.type == 'show') that.reset()
that.transitioning = 0
that.$element.trigger(completeEvent)
}
this.$element.trigger(startEvent)
if (startEvent.isDefaultPrevented()) return
this.transitioning = 1
this.$element[method]('in')
$.support.transition && this.$element.hasClass('collapse') ?
this.$element.one($.support.transition.end, complete) :
complete()
}
, toggle: function () {
this[this.$element.hasClass('in') ? 'hide' : 'show']()
}
}
/* COLLAPSE PLUGIN DEFINITION
* ========================== */
var old = $.fn.collapse
$.fn.collapse = function (option) {
return this.each(function () {
var $this = $(this)
, data = $this.data('collapse')
, options = typeof option == 'object' && option
if (!data) $this.data('collapse', (data = new Collapse(this, options)))
if (typeof option == 'string') data[option]()
})
}
$.fn.collapse.defaults = {
toggle: true
}
$.fn.collapse.Constructor = Collapse
/* COLLAPSE NO CONFLICT
* ==================== */
$.fn.collapse.noConflict = function () {
$.fn.collapse = old
return this
}
/* COLLAPSE DATA-API
* ================= */
$(document).on('click.collapse.data-api', '[data-toggle=collapse]', function (e) {
var $this = $(this), href
, target = $this.attr('data-target')
|| e.preventDefault()
|| (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') //strip for ie7
, option = $(target).data('collapse') ? 'toggle' : $this.data()
$this[$(target).hasClass('in') ? 'addClass' : 'removeClass']('collapsed')
$(target).collapse(option)
})
}(window.jQuery);<|fim▁end|> | var dimension
if (this.transitioning) return
dimension = this.dimension()
this.reset(this.$element[dimension]()) |
<|file_name|>common.go<|end_file_name|><|fim▁begin|>// Copyright 2016 Keybase Inc. All rights reserved.
// Use of this source code is governed by a BSD
// license that can be found in the LICENSE file.
package libdokan
import (
"strings"
"time"
"github.com/keybase/client/go/kbfs/data"
"github.com/keybase/client/go/kbfs/dokan"
"github.com/keybase/client/go/kbfs/idutil"
"github.com/keybase/client/go/kbfs/kbfsmd"
"golang.org/x/net/context"
)
const (
// PublicName is the name of the parent of all public top-level folders.
PublicName = "public"
// PrivateName is the name of the parent of all private top-level folders.
PrivateName = "private"
// TeamName is the name of the parent of all team top-level folders.
TeamName = "team"
// CtxOpID is the display name for the unique operation Dokan ID tag.
CtxOpID = "DID"
// WrongUserErrorFileName is the name of error directory for other users.
WrongUserErrorFileName = `kbfs.access.denied.for.other.windows.users.txt`
// WrongUserErrorContents is the contents of the file.
WrongUserErrorContents = `Access to KBFS is limited to the windows user (sid) running KBFS.`
)
// CtxTagKey is the type used for unique context tags
type CtxTagKey int
const (
// CtxIDKey is the type of the tag for unique operation IDs.
CtxIDKey CtxTagKey = iota
)
// eiToStat converts a data.EntryInfo and error to a *dokan.Stat and error.
// Note that handling symlinks to directories requires extra processing not done here.
func eiToStat(ei data.EntryInfo, err error) (*dokan.Stat, error) {
if err != nil {
return nil, errToDokan(err)
}
st := &dokan.Stat{}
fillStat(st, &ei)
return st, nil
}
// fillStat fill a dokan.Stat from a libkbfs.DirEntry.
// Note that handling symlinks to directories requires extra processing not done here.
func fillStat(a *dokan.Stat, de *data.EntryInfo) {
a.FileSize = int64(de.Size)
a.LastWrite = time.Unix(0, de.Mtime)
a.LastAccess = a.LastWrite
a.Creation = time.Unix(0, de.Ctime)
switch de.Type {
case data.File, data.Exec:
a.FileAttributes = dokan.FileAttributeNormal
case data.Dir:
a.FileAttributes = dokan.FileAttributeDirectory
case data.Sym:
a.FileAttributes = dokan.FileAttributeReparsePoint
a.ReparsePointTag = dokan.IOReparseTagSymlink
}
}
// addFileAttribute adds a file attribute to the stat struct.
func addFileAttribute(a *dokan.Stat, fa dokan.FileAttribute) {
// FileAttributeNormal is valid only if no other attribute is set.
// Thus clear the normal flag (if set) from the attributes and or
// the new flag.
a.FileAttributes = (a.FileAttributes &^ dokan.FileAttributeNormal) | fa
}
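// Illustrative example: after
//
//	st, _ := defaultFileInformation()
//	addFileAttribute(st, dokan.FileAttributeReadonly)
//
// st.FileAttributes equals dokan.FileAttributeReadonly alone, because the
// normal bit is cleared before the new attribute is or'ed in.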
// errToDokan makes some libkbfs errors easier to digest in dokan. Not needed in most places.
func errToDokan(err error) error {
switch err.(type) {
case idutil.NoSuchNameError:
return dokan.ErrObjectNameNotFound
case idutil.NoSuchUserError:
return dokan.ErrObjectNameNotFound
case kbfsmd.ServerErrorUnauthorized:
return dokan.ErrAccessDenied
case nil:
return nil
}
return err
}
// defaultDirectoryInformation returns default directory information.
func defaultDirectoryInformation() (*dokan.Stat, error) {
var st dokan.Stat
st.FileAttributes = dokan.FileAttributeDirectory
return &st, nil
}
// defaultFileInformation returns default file information.
func defaultFileInformation() (*dokan.Stat, error) {
var st dokan.Stat
st.FileAttributes = dokan.FileAttributeNormal
return &st, nil
}
// defaultSymlinkFileInformation returns default symlink to file information.
func defaultSymlinkFileInformation() (*dokan.Stat, error) {
var st dokan.Stat
st.FileAttributes = dokan.FileAttributeReparsePoint
st.ReparsePointTag = dokan.IOReparseTagSymlink
return &st, nil
}
// defaultSymlinkDirInformation returns default symlink to directory information.
func defaultSymlinkDirInformation() (*dokan.Stat, error) {
var st dokan.Stat
st.FileAttributes = dokan.FileAttributeReparsePoint | dokan.FileAttributeDirectory
st.ReparsePointTag = dokan.IOReparseTagSymlink
return &st, nil
}
// lowerTranslateCandidate returns whether a path components
// has a (different) lowercase translation.
func lowerTranslateCandidate(oc *openContext, s string) string {
if !oc.isUppercasePath {
return ""
}
c := strings.ToLower(s)
if c == s {
return ""
}
return c
}<|fim▁hole|>}
type stringFile struct {
emptyFile
data string
}
// GetFileInformation does stats for dokan.
func (s *stringFile) GetFileInformation(ctx context.Context, fi *dokan.FileInfo) (*dokan.Stat, error) {
a, err := defaultFileInformation()
if err != nil {
return nil, err
}
a.FileAttributes |= dokan.FileAttributeReadonly
a.FileSize = int64(len(s.data))
t := time.Now()
a.LastWrite = t
a.LastAccess = t
a.Creation = t
return a, nil
}
// ReadFile does reads for dokan.
func (s *stringFile) ReadFile(ctx context.Context, fi *dokan.FileInfo, bs []byte, offset int64) (int, error) {
data := s.data
if offset >= int64(len(data)) {
return 0, nil
}
data = data[int(offset):]
return copy(bs, data), nil
}<|fim▁end|> |
func stringReadFile(contents string) dokan.File {
return &stringFile{data: contents} |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>import itertools
import os.path
import sys
import time
from . import core
from . import file_io
from . import geometry
from . import stringconv
from . import version
#
# Functions
#
def save_output(profileli, opt):
""" Save a summary of results of evaluated profiles
"""
def m(x, pixelwidth):
return geometry.to_metric_units(x, pixelwidth)
def m2(x, pixelwidth):
# For area units...
return geometry.to_metric_units(x, pixelwidth**2)
def na(x):
if x in (None, -1):
return "N/A"
else:
return x
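    # Sentinel handling: na(None) and na(-1) both yield "N/A"; any other
    # value passes through unchanged.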
def write_session_summary():
with file_io.FileWriter("session.summary", opt) as f:
f.writerow(["%s version:" % version.title,
"%s (Last modified %s %s, %s)"
% ((version.version,) + version.date)])
f.writerow(["Number of evaluated profiles:", len(eval_proli)])
if err_fli:
f.writerow(["Number of non-evaluated profiles:", len(err_fli)])
f.writerow(["Metric unit:", eval_proli[0].metric_unit])
f.writerow(["Spatial resolution:", opt.spatial_resolution, eval_proli[0].metric_unit])
f.writerow(["Shell width:", opt.shell_width, eval_proli[0].metric_unit])
f.writerow(["Interpoint distances calculated:",
stringconv.yes_or_no(opt.determine_interpoint_dists)])
if opt.determine_interpoint_dists:
f.writerow(["Interpoint distance mode:", opt.interpoint_dist_mode])
f.writerow(["Shortest interpoint distances:",
stringconv.yes_or_no(opt.interpoint_shortest_dist)])
f.writerow(["Lateral interpoint distances:",
stringconv.yes_or_no(opt.interpoint_lateral_dist)])
f.writerow(["Monte Carlo simulations performed:",
stringconv.yes_or_no(opt.run_monte_carlo)])
if opt.run_monte_carlo:
f.writerow(["Number of Monte Carlo runs:", opt.monte_carlo_runs])
f.writerow(["Monte Carlo simulation window:", opt.monte_carlo_simulation_window])
f.writerow(["Strict localization in simulation window:",
stringconv.yes_or_no(opt.monte_carlo_strict_location)])
f.writerow(["Clusters determined:", stringconv.yes_or_no(opt.determine_clusters)])
if opt.determine_clusters:
f.writerow(["Within-cluster distance:",
opt.within_cluster_dist, eval_proli[0].metric_unit])
if clean_fli:
f.writerow(["Input files processed cleanly:"])
f.writerows([[fn] for fn in clean_fli])
if nop_fli:
f.writerow(["Input files processed but which generated no point distances:"])
f.writerows([[fn] for fn in nop_fli])
if warn_fli:
f.writerow(["Input files processed but which generated "
"warnings (see log for details):"])
f.writerows([[fn] for fn in warn_fli])
if err_fli:
f.writerow(["Input files not processed or not included in "
"summary (see log for details):"])
f.writerows([[fn] for fn in err_fli])
def write_profile_summary():
with file_io.FileWriter("profile.summary", opt) as f:
f.writerow(["Postsynaptic element length",
"Presynaptic element length",
"Number of PSDs:",
"Total postsynaptic membrane length incl perforations:",
"Total postsynaptic membrane length excl perforations:",
"Total PSD area:",
"Particles (total)",
"Particles in PSD",
"Particles within %s %s of PSD"
% (opt.spatial_resolution, eval_proli[0].metric_unit),
"Shell particles strictly synaptic and postsynaptic",
"Shell particles strictly synaptic and postsynaptic "
"or associated with postsynaptic membrane",
"Synaptic particles associated w/ postsynaptic "
"membrane",
"Synaptic particles associated w/ presynaptic membrane",
"Perisynaptic particles associated w/ postsynaptic "
"membrane",
"Perisynaptic particles associated w/ presynaptic "
"membrane",
"Within-perforation particles associated w/ "
"postsynaptic membrane",
"Within-perforation particles associated w/ "
"presynaptic membrane",
"Presynaptic profile",
"Postsynaptic profile",
"ID",
"Input file",
"Comment"])
f.writerows([[m(pro.posel.length(), pro.pixelwidth),
m(pro.prsel.length(), pro.pixelwidth),
len(pro.psdli),
m(pro.total_posm.length(), pro.pixelwidth),
sum([m(psd.posm.length(), pro.pixelwidth)
for psd in pro.psdli]),
sum([m2(psd.psdposm.area(), pro.pixelwidth)
for psd in pro.psdli]),
len(pro.pli),
len([p for p in pro.pli if p.is_within_psd]),
len([p for p in pro.pli if p.is_associated_with_psd]),
len([p for p in pro.pli
if p.strict_lateral_location == "synaptic" and
p.axodendritic_location == "postsynaptic" and
p.is_within_postsynaptic_membrane_shell]),
len([p for p in pro.pli
if p.strict_lateral_location == "synaptic" and
(p.axodendritic_location == "postsynaptic" and
p.is_within_postsynaptic_membrane_shell) or
p.is_postsynaptic_membrane_associated]),
len([p for p in pro.pli
if p.lateral_location == "synaptic" and
p.is_postsynaptic_membrane_associated]),
len([p for p in pro.pli
if p.lateral_location == "synaptic" and
p.is_presynaptic_membrane_associated]),
len([p for p in pro.pli
if p.lateral_location == "perisynaptic" and
p.is_postsynaptic_membrane_associated]),
len([p for p in pro.pli
if p.lateral_location == "perisynaptic" and
p.is_presynaptic_membrane_associated]),
len([p for p in pro.pli
if p.lateral_location == "within perforation"
and p.is_postsynaptic_membrane_associated]),
len([p for p in pro.pli
if p.lateral_location == "within perforation"
and p.is_presynaptic_membrane_associated]),
pro.presyn_profile,
pro.postsyn_profile,
pro.id,
                          os.path.basename(pro.inputfn),
                          pro.comment] for pro in eval_proli])
def write_point_summary(ptype):
if ptype == "particle":
pli = "pli"
pstr = "particle"
elif ptype == "random":
if not opt.use_random:
return
else:
pli = "randomli"
pstr = "point"
else:
return
with file_io.FileWriter("%s.summary" % ptype, opt) as f:
f.writerow(["%s number (as appearing in input file)" % pstr.capitalize(),
"Coordinates (in pixels)",
"Axodendritic location",
"Distance to postsynaptic element membrane",
"Distance to presynaptic element membrane",
"Lateral location",
"Strict lateral location",
"Lateral distance to nearest PSD center",
"Normalized lateral distance to nearest PSD center",
"Within PSD",
"Within %s %s of PSD" % (opt.spatial_resolution, eval_proli[0].metric_unit),
"Total postsynaptic membrane length incl perforations",
"Total postsynaptic membrane length excl perforations",
"Length of laterally closest PSD",
"Presynaptic profile",
"Postsynaptic profile",
"Profile ID",
"Input file",
"Comment"])
f.writerows([[n+1,
p,
p.axodendritic_location,
m(p.dist_to_posel, pro.pixelwidth),
m(p.dist_to_prsel, pro.pixelwidth),
p.lateral_location,
p.strict_lateral_location,
m(p.lateral_dist_psd, pro.pixelwidth),
p.norm_lateral_dist_psd,
stringconv.yes_or_no(p.is_within_psd),
stringconv.yes_or_no(p.is_associated_with_psd),
m(pro.total_posm.length(), pro.pixelwidth),
m(sum([psd.posm.length() for psd in pro.psdli]),
pro.pixelwidth),
m(p.nearest_psd.posm.length(), pro.pixelwidth),
pro.presyn_profile,
pro.postsyn_profile,
pro.id,
os.path.basename(pro.inputfn),
pro.comment] for pro in eval_proli for n, p in
enumerate(pro.__dict__[pli])])
def write_cluster_summary():
if not opt.determine_clusters:
return
with file_io.FileWriter("cluster.summary", opt) as f:
f.writerow(["Cluster number",
"Number of particles in cluster",
"Distance to postsynaptic membrane of centroid",
"Distance to nearest cluster along postsynaptic element membrane",
"Profile ID",
"Input file",
"Comment"])
f.writerows([[n + 1,
len(c),
m(c.dist_to_posel, pro.pixelwidth),
m(na(c.dist_to_nearest_cluster), pro.pixelwidth),
pro.id,
os.path.basename(pro.inputfn),
                          pro.comment] for pro in eval_proli for n, c in
enumerate(pro.clusterli)])
def write_interpoint_summaries():
if not opt.determine_interpoint_dists:
return
ip_rels = dict([(key, val)
for key, val in opt.interpoint_relations.items()
if val and 'simulated' not in key])
if not opt.use_random:
for key, val in opt.interpoint_relations.items():
if 'random' in key and val:
del ip_rels[key]
if (len(ip_rels) == 0 or not
(opt.interpoint_shortest_dist or opt.interpoint_lateral_dist)):
return
table = []
if opt.interpoint_dist_mode == 'all':
s = "all distances"
else:
s = "nearest neighbour distances"
table.append(["Mode: " + s])
headerli = list(ip_rels.keys())
prefixli = []
for key, val in ip_rels.items():
prefix = key[0] + key[key.index("- ") + 2] + "_"
prefixli.append(prefix)
if opt.interpoint_shortest_dist and opt.interpoint_lateral_dist:
headerli.extend(headerli)
prefixli.extend([t + 'lat' for t in prefixli])
topheaderli = []
if opt.interpoint_shortest_dist:
topheaderli.append("Shortest distances")
if opt.interpoint_lateral_dist:
topheaderli.extend([""] * (len(ip_rels) - 1))
if opt.interpoint_lateral_dist:
topheaderli.append("Lateral distances along postsynaptic element "
"membrane")
table.extend([topheaderli, headerli])
cols = [[] for _ in prefixli]
for pro in eval_proli:
for n, li in enumerate([pro.__dict__[prefix + "distli"]
for prefix in prefixli]):
cols[n].extend([m(e, pro.pixelwidth) for e in li])
# transpose cols and append to table
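        # (zip_longest(*cols) transposes: the per-relation columns become
        # rows, padded with "" where columns have unequal lengths)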
table.extend(list(itertools.zip_longest(*cols, fillvalue="")))
with file_io.FileWriter("interpoint.summary", opt) as f:
f.writerows(table)
def write_mc_dist_to_psd(dtype):
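        """Write simulated lateral distances to the nearest PSD ('metric' or 'normalized')."""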
if not opt.run_monte_carlo:
return
table = []
if dtype == 'metric':
table.append(["Lateral distances in %s to center of the nearest PSD"
% eval_proli[0].metric_unit])
elif dtype == 'normalized':
table.append(["Normalized lateral distances to the center of the nearest PSD"])
table.append(["Run %d" % (n + 1) for n in range(0, opt.monte_carlo_runs)])
for pro in eval_proli:
if dtype == 'metric':
table.extend(zip(*[[m(p.lateral_dist_psd, pro.pixelwidth) for p in li["pli"]]
for li in pro.mcli]))
elif dtype == 'normalized':
table.extend(zip(*[[p.norm_lateral_dist_psd for p in li["pli"]]
for li in pro.mcli]))
with file_io.FileWriter("simulated.PSD.%s.lateral.distances" % dtype, opt) as f:
f.writerows(table)
def write_mc_dist_to_posel():
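        """Write simulated distances to the postsynaptic element membrane."""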
if not opt.run_monte_carlo:
return
table = [["Run %d" % (n + 1) for n in range(0, opt.monte_carlo_runs)]]
for pro in eval_proli:
table.extend(itertools.zip_longest(*[[m(p.dist_to_posel, pro.pixelwidth)
for p in li['pli']] for li in pro.mcli]))
with file_io.FileWriter(
"simulated.postsynaptic.element.membrane.distances", opt) as f:
f.writerows(table)
def write_mc_ip_dists(dist_type):<|fim▁hole|>
def m_li(*_li):
return [m(x, pro.pixelwidth) for x in _li]
if not (opt.run_monte_carlo and opt.determine_interpoint_dists):
return
for ip_type in [key for key, val in opt.interpoint_relations.items()
if 'simulated' in key and val]:
if ((dist_type == 'shortest' and not opt.interpoint_shortest_dist) or
(dist_type == 'lateral' and not opt.interpoint_lateral_dist)):
return
if dist_type == 'lateral':
short_dist_type = 'lat'
else:
short_dist_type = ''
table = [["Run %d" % (n + 1) for n in range(0, opt.monte_carlo_runs)]]
for pro in eval_proli:
            table.extend(itertools.zip_longest(
                *[[m(p, pro.pixelwidth)
                   for p in li[ip_type]["%sdist" % short_dist_type]]
                  for li in pro.mcli]))
with file_io.FileWriter("%s.interpoint.%s.distances"
% (ip_type.replace(" ", ""), dist_type), opt) as f:
f.writerows(table)
def write_mc_cluster_summary():
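        """Write a summary of clusters found in the Monte Carlo simulations."""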
if not (opt.determine_clusters and opt.run_monte_carlo):
return
table = [["N particles in cluster", "Run",
"Distance to postsynaptic element membrane from centroid",
"Distance to nearest cluster along postsynaptic element membrane",
"Profile ID",
"Input file",
"Comment"]]
for pro in eval_proli:
for n in range(0, opt.monte_carlo_runs):
for c in pro.mcli[n]["clusterli"]:
table.append([len(c), n + 1,
m(c.dist_to_posel, pro.pixelwidth),
m(na(c.dist_to_nearest_cluster),
pro.pixelwidth),
pro.id,
os.path.basename(pro.inputfn),
pro.comment])
with file_io.FileWriter("simulated.clusters", opt) as f:
f.writerows(table)
sys.stdout.write("\nSaving summaries to %s:\n" % opt.output_dir)
opt.save_result = {'any_saved': False, 'any_err': False}
eval_proli = [profile for profile in profileli if not profile.errflag]
clean_fli = [profile.inputfn for profile in profileli
if not (profile.errflag or profile.warnflag)]
warn_fli = [profile.inputfn for profile in profileli if profile.warnflag]
err_fli = [profile.inputfn for profile in profileli if profile.errflag]
nop_fli = [profile.inputfn for profile in eval_proli if not profile.pli]
write_session_summary()
write_profile_summary()
write_point_summary('particle')
write_point_summary('random')
write_interpoint_summaries()
write_cluster_summary()
write_mc_dist_to_posel()
write_mc_dist_to_psd('metric')
write_mc_dist_to_psd('normalized')
write_mc_ip_dists('shortest')
write_mc_ip_dists('lateral')
write_mc_cluster_summary()
if opt.save_result['any_err']:
sys.stdout.write("Note: One or more summaries could not be saved.\n")
if opt.save_result['any_saved']:
sys.stdout.write("Done.\n")
else:
sys.stdout.write("No summaries saved.\n")
def reset_options(opt):
""" Deletes certain options that should always be set anew for each run
(each time the "Start" button is pressed)
"""
for optstr in ('metric_unit', 'use_random'):
if hasattr(opt, optstr):
delattr(opt, optstr)
def show_options(opt):
sys.stdout.write("{} version: {} (Last modified {} {}, {})\n".format(
version.title, version.version, *version.date))
sys.stdout.write("Output file format: %s\n" % opt.output_file_format)
sys.stdout.write("Suffix of output files: %s\n" % opt.output_filename_suffix)
sys.stdout.write("Output directory: %s\n" % opt.output_dir)
sys.stdout.write("Spatial resolution: %d\n" % opt.spatial_resolution)
sys.stdout.write("Shell width: %d metric units\n" % opt.shell_width)
sys.stdout.write("Interpoint distances calculated: %s\n"
% stringconv.yes_or_no(opt.determine_interpoint_dists))
if opt.determine_interpoint_dists:
sys.stdout.write("Interpoint distance mode: %s\n" % opt.interpoint_dist_mode.capitalize())
sys.stdout.write("Shortest interpoint distances: %s\n"
% stringconv.yes_or_no(opt.interpoint_shortest_dist))
sys.stdout.write("Lateral interpoint distances: %s\n"
% stringconv.yes_or_no(opt.interpoint_lateral_dist))
sys.stdout.write("Monte Carlo simulations performed: %s\n"
% stringconv.yes_or_no(opt.run_monte_carlo))
if opt.run_monte_carlo:
sys.stdout.write("Number of Monte Carlo runs: %d\n"
% opt.monte_carlo_runs)
sys.stdout.write("Monte Carlo simulation window: %s\n"
% opt.monte_carlo_simulation_window)
sys.stdout.write("Strict localization in simulation window: %s\n"
% stringconv.yes_or_no(opt.monte_carlo_strict_location))
sys.stdout.write("Clusters determined: %s\n" % stringconv.yes_or_no(opt.determine_clusters))
if opt.determine_clusters:
sys.stdout.write("Within-cluster distance: %d\n" % opt.within_cluster_dist)
def get_output_format(opt):
if opt.output_file_format == 'excel':
try:
import openpyxl
except ImportError:
sys.stdout.write("Unable to write Excel files: resorting to csv format.\n")
opt.output_file_format = 'csv'
if opt.output_file_format == 'csv':
opt.output_filename_ext = '.csv'
opt.csv_format = {'dialect': 'excel', 'lineterminator': '\n'}
if opt.csv_delimiter == 'tab':
opt.csv_format['delimiter'] = '\t'
if opt.output_filename_date_suffix:
from datetime import date
opt.output_filename_suffix = "." + date.today().isoformat()
if opt.output_filename_other_suffix != '':
opt.output_filename_suffix += "." + opt.output_filename_other_suffix
def main_proc(parent):
""" Process profile data files
"""
opt = parent.opt
if not opt.input_file_list:
sys.stdout.write("No input files.\n")
return 0
i, n = 0, 0
profileli = []
sys.stdout.write("--- Session started %s local time ---\n" % time.ctime())
    for f in opt.input_file_list[:]:  # iterate over a copy; items may be removed
if opt.input_file_list.count(f) > 1:
sys.stdout.write("Duplicate input filename %s:\n => removing first occurrence in "
"list\n" % f)
opt.input_file_list.remove(f)
get_output_format(opt)
reset_options(opt)
show_options(opt)
while True:
if i < len(opt.input_file_list):
inputfn = opt.input_file_list[i]
i += 1
else:
sys.stdout.write("\nNo more input files...\n")
break
parent.process_queue.put(("new_file", inputfn))
profileli.append(core.ProfileData(inputfn, opt))
profileli[-1].process()
if opt.stop_requested:
sys.stdout.write("\n--- Session aborted by user %s local time ---\n" % time.ctime())
return 3
if not profileli[-1].errflag:
n += 1
if profileli[-1].warnflag:
sys.stdout.write("Warning(s) found while processing input file.\n")
continue
else:
sys.stdout.write("Error(s) found while processing input file =>\n"
" => No distances could be determined.\n")
continue
# no more input files
errfli = [pro.inputfn for pro in profileli if pro.errflag]
warnfli = [pro.inputfn for pro in profileli if pro.warnflag]
if errfli:
sys.stdout.write("\n%s input %s generated one or more errors:\n"
% (stringconv.plurality("This", len(errfli)),
stringconv.plurality("file", len(errfli))))
sys.stdout.write("%s\n" % "\n".join([fn for fn in errfli]))
if warnfli:
sys.stdout.write("\n%s input %s generated one or more warnings:\n"
% (stringconv.plurality("This", len(warnfli)),
stringconv.plurality("file", len(warnfli))))
sys.stdout.write("%s\n" % "\n".join([fn for fn in warnfli]))
if n > 0:
parent.process_queue.put(("saving_summaries", ""))
save_output(profileli, opt)
else:
sys.stdout.write("\nNo files processed.\n")
sys.stdout.write("--- Session ended %s local time ---\n" % time.ctime())
parent.process_queue.put(("done", ""))
if errfli:
return 0
elif warnfli:
return 2
else:
return 1
# End of main.py<|fim▁end|> | |
<|file_name|>state.rs<|end_file_name|><|fim▁begin|>use nalgebra::{Point2, Scalar, Vector2};
use std::collections::HashSet;
use std::hash::Hash;
use event::{ElementState, React};
/// An atomic state of an input element.
pub trait State: Copy + Eq {
// TODO: Use a default type (`Self`) here once that feature stabilizes.
/// Representation of a difference between states.
type Difference;
/// Gets the transition between a live and snapshot state. If no transition
/// has occurred, returns `None`.
fn transition(live: Self, snapshot: Self) -> Option<Self> {
if live == snapshot {
None
}
else {
Some(live)
}
}
}
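// A quick sketch of the semantics: a change from a snapshot of `Released`
// to a live state of `Pressed` yields the new live state, while identical
// states yield `None`:
//
//     assert_eq!(
//         <ElementState as State>::transition(ElementState::Pressed,
//                                             ElementState::Released),
//         Some(ElementState::Pressed),
//     );
//     assert_eq!(<bool as State>::transition(true, true), None);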
impl State for bool {
type Difference = Self;
}
impl State for ElementState {
type Difference = Self;
}
impl<T> State for Point2<T>
where
T: Eq + Scalar,
{
type Difference = Vector2<T>;
}
/// An input element, such as a button, key, or position.
pub trait Element: Copy + Sized {
/// Representation of the state of the element.
type State: State;
}
/// A state with a composite representation. This is used for input elements
/// which have a cardinality greater than one. For example, a mouse may have
/// more than one button.
pub trait CompositeState<E>
where
E: Element,
{
// TODO: Use a default type (`E::State`) here once that feature stabilizes.
/// Representation of the composite state.
type Composite;
/// Gets the composite state.
fn composite(&self) -> &Self::Composite;
}
/// Provides a state for an input element.
pub trait InputState<E>
where
E: Element,
{
/// Gets the state of an input element.
fn state(&self, element: E) -> E::State;
}
// Blanket implementation for `InputState` for composite states represented by
// a `HashSet`, such as keys and buttons.
impl<E, T> InputState<E> for T
where
T: CompositeState<E, Composite = HashSet<E>>,
E: Element<State = ElementState> + Eq + Hash,
{
fn state(&self, element: E) -> E::State {
if self.composite().contains(&element) {
ElementState::Pressed
}
else {
ElementState::Released
}
}
}
/// Provides a transition state for an input element.
pub trait InputTransition<E>
where
E: Element,
{
/// Gets the transition state of an input element.
fn transition(&self, element: E) -> Option<E::State>;
}
impl<E, T> InputTransition<E> for T
where<|fim▁hole|> fn transition(&self, element: E) -> Option<E::State> {
E::State::transition(self.live().state(element), self.snapshot().state(element))
}
}
/// Determines the difference in state for an input element.
pub trait InputDifference<E>
where
E: Element,
{
/// Iterable representation of differences in state.
type Difference: IntoIterator<Item = (E, <E::State as State>::Difference)>;
/// Gets the difference in state for an input element.
fn difference(&self) -> Self::Difference;
}
// Blanket implementation for `InputDifference` for composite states
// represented by a `HashSet`, such as keys and buttons.
impl<E, S, T> InputDifference<E> for T
where
T: Input,
T::State: CompositeState<E, Composite = HashSet<E>> + InputState<E>,
E: Element<State = S> + Eq + Hash,
S: State<Difference = S>,
{
type Difference = Vec<(E, <E::State as State>::Difference)>;
fn difference(&self) -> Self::Difference {
self.live()
.composite()
.symmetric_difference(self.snapshot().composite())
.map(|element| (*element, self.live().state(*element)))
.collect()
}
}
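// Taken together, these blanket impls mean that a state type backed by a
// `HashSet` of pressed elements gets `InputState` for free, and any device
// (`Input`) exposing such a state gets `InputTransition` and
// `InputDifference` for free. A hypothetical sketch (`Key` is assumed to be
// an `Element<State = ElementState>`; it is not defined in this module):
//
//     struct KeyboardState { keys: HashSet<Key> }
//
//     impl CompositeState<Key> for KeyboardState {
//         type Composite = HashSet<Key>;
//         fn composite(&self) -> &HashSet<Key> { &self.keys }
//     }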
/// An input device with a live state and snapshot state. These are updated via
/// `React` and `Snapshot` and provide information about the live state and
/// changes based on the snapshot state.
pub trait Input: React + Snapshot {
/// Aggregate state for the input device.
type State;
/// Gets the live state.
fn live(&self) -> &Self::State;
// TODO: The term "snapshot" is ambiguous. Here, it refers to the snapshot
// of the state of an input device. In the `Snapshot` trait, it is
// used as a verb for the operation of taking a snapshot (copying the
// live state into the snapshot state). However, the `Input` trait is
// not exposed outside of this module, so this shouldn't affect
// client code.
/// Gets the snapshot state.
fn snapshot(&self) -> &Self::State;
}
/// Provides snapshotting for an input device. Input devices maintain a live
/// state and snapshot state, which are updated via `React` and this trait,
/// respectively.
pub trait Snapshot {
/// Snapshots the live state.
fn snapshot(&mut self);
}<|fim▁end|> | T: Input,
T::State: InputState<E>,
E: Element,
{ |
<|file_name|>webcore.py<|end_file_name|><|fim▁begin|># Copyright 2011,2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
Webcore is a basic web server framework based on the SocketServer-based
BaseHTTPServer that comes with Python. The big difference is that this
one can carve up URL-space by prefix, such that "/foo/*" gets handled by
a different request handler than "/bar/*". I refer to this as "splitting".
You should also be able to make a request handler written without splitting
run under Webcore. This may not work for all request handlers, but it
definitely works for some. :) The easiest way to do this is with the
wrapRequestHandler() function, like so:
from CGIHTTPServer import CGIHTTPRequestHandler as CHRH
core.WebServer.set_handler("/foo", wrapRequestHandler(CHRH))
.. now URLs under the /foo/ directory will let you browse through the
filesystem next to pox.py. If you create a cgi-bin directory next to
pox.py, you'll be able to run executables in it.
For this specific purpose, there's actually a SplitCGIRequestHandler
which demonstrates wrapping a normal request handler while also
customizing it a bit -- SplitCGIRequestHandler shoehorns in functionality
to use arbitrary base paths.
BaseHTTPServer is not very fast and needs to run on its own thread.
It'd actually be great to have a version of this written against, say,
CherryPy, but I did want to include a simple, dependency-free web solution.
"""
from SocketServer import ThreadingMixIn
from BaseHTTPServer import *
from time import sleep
import select
import threading
import random
import hashlib
import base64
from pox.core import core
import os
import posixpath
import urllib
import cgi
import errno
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
log = core.getLogger()
weblog = log.getChild("server")
def _setAttribs (parent, child):
attrs = ['command', 'request_version', 'close_connection',
'raw_requestline', 'requestline', 'path', 'headers', 'wfile',
'rfile', 'server', 'client_address']
for a in attrs:
setattr(child, a, getattr(parent, a))
setattr(child, 'parent', parent)
import SimpleHTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
class SplitRequestHandler (BaseHTTPRequestHandler):
"""
To write HTTP handlers for POX, inherit from this class instead of
BaseHTTPRequestHandler. The interface should be the same -- the same
variables should be set, and the same do_GET(), etc. methods should
be called.
In addition, there will be a self.args which can be specified
when you set_handler() on the server.
"""
# Also a StreamRequestHandler
def __init__ (self, parent, prefix, args):
_setAttribs(parent, self)
self.parent = parent
self.args = args
self.prefix = prefix
self._init()
def _init (self):
"""
This is called by __init__ during initialization. You can
override it to, for example, parse .args.
"""
pass
def handle_one_request (self):
raise RuntimeError("Not supported")
def handle(self):
raise RuntimeError("Not supported")
def _split_dispatch (self, command, handler = None):
if handler is None: handler = self
mname = 'do_' + self.command
if not hasattr(handler, mname):
self.send_error(501, "Unsupported method (%r)" % self.command)
return
method = getattr(handler, mname)
return method()
def log_request (self, code = '-', size = '-'):
weblog.debug(self.prefix + (':"%s" %s %s' %
(self.requestline, str(code), str(size))))
def log_error (self, fmt, *args):
weblog.error(self.prefix + ':' + (fmt % args))
def log_message (self, fmt, *args):
weblog.info(self.prefix + ':' + (fmt % args))
_favicon = ("47494638396110001000c206006a5797927bc18f83ada9a1bfb49ceabda"
+ "4f4ffffffffffff21f904010a0007002c000000001000100000034578badcfe30b20"
+ "1c038d4e27a0f2004e081e2172a4051942abba260309ea6b805ab501581ae3129d90"
+ "1275c6404b80a72f5abcd4a2454cb334dbd9e58e74693b97425e07002003b")
_favicon = ''.join([chr(int(_favicon[n:n+2],16))
for n in xrange(0,len(_favicon),2)])
<|fim▁hole|> """
A default page to say hi from POX.
"""
def do_GET (self):
"""Serve a GET request."""
self.do_content(True)
def do_HEAD (self):
"""Serve a HEAD request."""
self.do_content(False)
def do_content (self, is_get):
if self.path == "/":
self.send_info(is_get)
elif self.path.startswith("/favicon."):
self.send_favicon(is_get)
else:
self.send_error(404, "File not found on CoreHandler")
def send_favicon (self, is_get = False):
self.send_response(200)
self.send_header("Content-type", "image/gif")
self.send_header("Content-Length", str(len(_favicon)))
self.end_headers()
if is_get:
self.wfile.write(_favicon)
def send_info (self, is_get = False):
r = "<html><head><title>POX</title></head>\n"
r += "<body>\n<h1>POX Webserver</h1>\n<h2>Components</h2>\n"
r += "<ul>"
for k in sorted(core.components):
v = core.components[k]
r += "<li>%s - %s</li>\n" % (cgi.escape(str(k)), cgi.escape(str(v)))
r += "</ul>\n\n<h2>Web Prefixes</h2>"
r += "<ul>"
m = [map(cgi.escape, map(str, [x[0],x[1],x[3]]))
for x in self.args.matches]
m.sort()
for v in m:
r += "<li><a href='{0}'>{0}</a> - {1} {2}</li>\n".format(*v)
r += "</ul></body></html>\n"
self.send_response(200)
self.send_header("Content-type", "text/html")
self.send_header("Content-Length", str(len(r)))
self.end_headers()
if is_get:
self.wfile.write(r)
class StaticContentHandler (SplitRequestHandler, SimpleHTTPRequestHandler):
  # Based on Python's standard SimpleHTTPRequestHandler, but modified to
  # serve from given directories and to inherit from SplitRequestHandler.
"""
A SplitRequestHandler for serving static content
This is largely the same as the Python SimpleHTTPRequestHandler, but
we modify it to serve from arbitrary directories at arbitrary
positions in the URL space.
"""
server_version = "StaticContentHandler/1.0"
def send_head (self):
# We override this and handle the directory redirection case because
# we want to include the per-split prefix.
path = self.translate_path(self.path)
if os.path.isdir(path):
if not self.path.endswith('/'):
self.send_response(301)
self.send_header("Location", self.prefix + self.path + "/")
self.end_headers()
return None
return SimpleHTTPRequestHandler.send_head(self)
def list_directory (self, dirpath):
# dirpath is an OS path
try:
d = os.listdir(dirpath)
except OSError as e:
if e.errno == errno.EACCES:
self.send_error(403, "This directory is not listable")
elif e.errno == errno.ENOENT:
self.send_error(404, "This directory does not exist")
else:
self.send_error(400, "Unknown error")
return None
d.sort(key=str.lower)
r = StringIO()
r.write("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n")
path = posixpath.join(self.prefix, cgi.escape(self.path).lstrip("/"))
r.write("<html><head><title>" + path + "</title></head>\n")
r.write("<body><pre>")
parts = path.rstrip("/").split("/")
r.write('<a href="/">/</a>')
for i,part in enumerate(parts):
link = urllib.quote("/".join(parts[:i+1]))
if i > 0: part += "/"
r.write('<a href="%s">%s</a>' % (link, cgi.escape(part)))
r.write("\n" + "-" * (0+len(path)) + "\n")
dirs = []
files = []
for f in d:
if f.startswith("."): continue
if os.path.isdir(os.path.join(dirpath, f)):
dirs.append(f)
else:
files.append(f)
def entry (n, rest=''):
link = urllib.quote(n)
name = cgi.escape(n)
r.write('<a href="%s">%s</a>\n' % (link,name+rest))
for f in dirs:
entry(f, "/")
for f in files:
entry(f)
r.write("</pre></body></html>")
r.seek(0)
self.send_response(200)
self.send_header("Content-Type", "text/html")
self.send_header("Content-Length", str(len(r.getvalue())))
self.end_headers()
return r
def translate_path (self, path, include_prefix = True):
"""
Translate a web-path to a local filesystem path
Odd path elements (e.g., ones that contain local filesystem path
separators) are stripped.
"""
def fixpath (p):
o = []
skip = 0
while True:
p,tail = posixpath.split(p)
if p in ('/','') and tail == '': break
if tail in ('','.', os.path.curdir, os.path.pardir): continue
if os.path.sep in tail: continue
if os.path.altsep and os.path.altsep in tail: continue
if os.path.splitdrive(tail)[0] != '': continue
if tail == '..':
skip += 1
continue
if skip:
skip -= 1
continue
o.append(tail)
o.reverse()
return o
# Remove query string / fragment
if "?" in path: path = path[:path.index("?")]
if "#" in path: path = path[:path.index("#")]
path = fixpath(path)
if path:
path = os.path.join(*path)
else:
path = ''
if include_prefix:
path = os.path.join(os.path.abspath(self.args['root']), path)
return path
def wrapRequestHandler (handlerClass):
return type("Split" + handlerClass.__name__,
(SplitRequestHandler, handlerClass, object), {})
from CGIHTTPServer import CGIHTTPRequestHandler
class SplitCGIRequestHandler (SplitRequestHandler,
CGIHTTPRequestHandler, object):
"""
Runs CGIRequestHandler serving from an arbitrary path.
This really should be a feature of CGIRequestHandler and the way of
implementing it here is scary and awful, but it at least sort of works.
"""
__lock = threading.Lock()
def _split_dispatch (self, command):
with self.__lock:
olddir = os.getcwd()
try:
os.chdir(self.args)
return SplitRequestHandler._split_dispatch(self, command)
finally:
os.chdir(olddir)
class SplitterRequestHandler (BaseHTTPRequestHandler):
def __init__ (self, *args, **kw):
#self.rec = Recording(args[0])
#self.args = args
#self.matches = self.matches.sort(key=lambda e:len(e[0]),reverse=True)
#BaseHTTPRequestHandler.__init__(self, self.rec, *args[1:], **kw)
BaseHTTPRequestHandler.__init__(self, *args, **kw)
def log_request (self, code = '-', size = '-'):
weblog.debug('splitter:"%s" %s %s',
self.requestline, str(code), str(size))
def log_error (self, fmt, *args):
weblog.error('splitter:' + fmt % args)
def log_message (self, fmt, *args):
weblog.info('splitter:' + fmt % args)
def handle_one_request(self):
self.raw_requestline = self.rfile.readline()
if not self.raw_requestline:
self.close_connection = 1
return
if not self.parse_request(): # An error code has been sent, just exit
return
handler = None
while True:
for m in self.server.matches:
if self.path.startswith(m[0]):
#print m,self.path
handler = m[1](self, m[0], m[3])
#pb = self.rec.getPlayback()
#handler = m[1](pb, *self.args[1:])
_setAttribs(self, handler)
if m[2]:
# Trim. Behavior is not "perfect"
handler.path = self.path[len(m[0]):]
if m[0].endswith('/'):
handler.path = '/' + handler.path
break
if handler is None:
handler = self
if not self.path.endswith('/'):
# Handle splits like directories
self.send_response(301)
self.send_header("Location", self.path + "/")
self.end_headers()
break
break
return handler._split_dispatch(self.command)
class SplitThreadedServer(ThreadingMixIn, HTTPServer):
matches = [] # Tuples of (Prefix, TrimPrefix, Handler)
# def __init__ (self, *args, **kw):
# BaseHTTPRequestHandler.__init__(self, *args, **kw)
# self.matches = self.matches.sort(key=lambda e:len(e[0]),reverse=True)
def set_handler (self, prefix, handler, args = None, trim_prefix = True):
# Not very efficient
assert (handler is None) or (issubclass(handler, SplitRequestHandler))
self.matches = [m for m in self.matches if m[0] != prefix]
if handler is None: return
self.matches.append((prefix, handler, trim_prefix, args))
self.matches.sort(key=lambda e:len(e[0]),reverse=True)
def add_static_dir (self, www_path, local_path=None, relative=False):
"""
Serves a directory of static content.
www_path is the prefix of the URL that maps to this directory.
local_path is the directory to serve content from. If it's not
    specified, it is assumed to be a directory with the same name as
www_path.
relative, if True, means that the local path is to be a sibling
of the calling module.
For an example, see the launch() function in this module.
"""
if not www_path.startswith('/'): www_path = '/' + www_path
if local_path is None:
local_path = www_path[1:]
if relative:
local_path = os.path.basename(local_path)
if relative:
import inspect
path = inspect.stack()[1][1]
path = os.path.dirname(path)
local_path = os.path.join(path, local_path)
local_path = os.path.abspath(local_path)
log.debug("Serving %s at %s", local_path, www_path)
self.set_handler(www_path, StaticContentHandler,
                     {'root':local_path}, True)
def launch (address='', port=8000, static=False):
httpd = SplitThreadedServer((address, int(port)), SplitterRequestHandler)
core.register("WebServer", httpd)
httpd.set_handler("/", CoreHandler, httpd, True)
#httpd.set_handler("/foo", StaticContentHandler, {'root':'.'}, True)
#httpd.set_handler("/f", StaticContentHandler, {'root':'pox'}, True)
#httpd.set_handler("/cgis", SplitCGIRequestHandler, "pox/web/www_root")
if static is True:
httpd.add_static_dir('static', 'www_root', relative=True)
elif static is False:
pass
else:
static = static.split(",")
for entry in static:
if entry.lower() == "":
httpd.add_static_dir('static', 'www_root', relative=True)
continue
if ':' not in entry:
directory = entry
prefix = os.path.split(directory)
if prefix[1] == '':
prefix = os.path.split(prefix[0])
prefix = prefix[1]
assert prefix != ''
else:
prefix,directory = entry.split(":")
directory = os.path.expanduser(directory)
httpd.add_static_dir(prefix, directory, relative=False)
def run ():
try:
log.debug("Listening on %s:%i" % httpd.socket.getsockname())
httpd.serve_forever()
except:
pass
log.info("Server quit")
thread = threading.Thread(target=run)
thread.daemon = True
thread.start()<|fim▁end|> | class CoreHandler (SplitRequestHandler): |
<|file_name|>dynamic_module_resolver_test.cc<|end_file_name|><|fim▁begin|>// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/core/script/dynamic_module_resolver.h"
#include "base/test/scoped_feature_list.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/common/features.h"
#include "third_party/blink/renderer/bindings/core/v8/referrer_script_info.h"
#include "third_party/blink/renderer/bindings/core/v8/script_function.h"
#include "third_party/blink/renderer/bindings/core/v8/script_promise_resolver.h"
#include "third_party/blink/renderer/bindings/core/v8/script_value.h"
#include "third_party/blink/renderer/bindings/core/v8/v8_binding_for_core.h"
#include "third_party/blink/renderer/bindings/core/v8/v8_binding_for_testing.h"
#include "third_party/blink/renderer/core/dom/document.h"
#include "third_party/blink/renderer/core/loader/modulescript/module_script_fetch_request.h"
#include "third_party/blink/renderer/core/script/js_module_script.h"
#include "third_party/blink/renderer/core/testing/dummy_modulator.h"
#include "third_party/blink/renderer/core/testing/module_test_base.h"
#include "v8/include/v8.h"
namespace blink {
namespace {
constexpr const char* kTestReferrerURL = "https://example.com/referrer.js";
constexpr const char* kTestDependencyURL = "https://example.com/dependency.js";
const KURL TestReferrerURL() {
return KURL(kTestReferrerURL);
}
const KURL TestDependencyURL() {
return KURL(kTestDependencyURL);
}
class DynamicModuleResolverTestModulator final : public DummyModulator {
public:
explicit DynamicModuleResolverTestModulator(ScriptState* script_state)
: script_state_(script_state) {}
~DynamicModuleResolverTestModulator() override = default;
void ResolveTreeFetch(ModuleScript* module_script) {
ASSERT_TRUE(pending_client_);
pending_client_->NotifyModuleTreeLoadFinished(module_script);
pending_client_ = nullptr;
}
void SetExpectedFetchTreeURL(const KURL& url) {
expected_fetch_tree_url_ = url;
}
bool fetch_tree_was_called() const { return fetch_tree_was_called_; }
void Trace(Visitor*) override;
private:
// Implements Modulator:
ScriptState* GetScriptState() final { return script_state_; }
ModuleScript* GetFetchedModuleScript(const KURL& url) final {
EXPECT_EQ(TestReferrerURL(), url);
ModuleScript* module_script =
JSModuleScript::CreateForTest(this, v8::Local<v8::Module>(), url);
return module_script;
}
KURL ResolveModuleSpecifier(const String& module_request,
const KURL& base_url,
String* failure_reason) final {
if (module_request == "invalid-specifier")
return KURL();
return KURL(base_url, module_request);
}
void ClearIsAcquiringImportMaps() final {}
void FetchTree(const KURL& url,
ResourceFetcher*,
mojom::RequestContextType,
network::mojom::RequestDestination,
const ScriptFetchOptions&,
ModuleScriptCustomFetchType custom_fetch_type,
ModuleTreeClient* client) final {
EXPECT_EQ(expected_fetch_tree_url_, url);
// Currently there are no usage of custom fetch hooks for dynamic import in
// web specifications.
EXPECT_EQ(ModuleScriptCustomFetchType::kNone, custom_fetch_type);
pending_client_ = client;
fetch_tree_was_called_ = true;
}
ModuleEvaluationResult ExecuteModule(
ModuleScript* module_script,
CaptureEvalErrorFlag capture_error) final {
EXPECT_EQ(CaptureEvalErrorFlag::kCapture, capture_error);
ScriptState::EscapableScope scope(script_state_);
ModuleEvaluationResult result = ModuleRecord::Evaluate(
script_state_, module_script->V8Module(), module_script->SourceURL());
return result.Escape(&scope);
}
Member<ScriptState> script_state_;
Member<ModuleTreeClient> pending_client_;
KURL expected_fetch_tree_url_;
bool fetch_tree_was_called_ = false;
};
void DynamicModuleResolverTestModulator::Trace(Visitor* visitor) {
visitor->Trace(script_state_);
visitor->Trace(pending_client_);
DummyModulator::Trace(visitor);
}
// CaptureExportedStringFunction implements a javascript function
// with a single argument of type module namespace.
// CaptureExportedStringFunction captures the exported string value
// from the module namespace as a WTF::String, exposed via CapturedValue().
class CaptureExportedStringFunction final : public ScriptFunction {
public:
CaptureExportedStringFunction(ScriptState* script_state,
const String& export_name)
: ScriptFunction(script_state), export_name_(export_name) {}
v8::Local<v8::Function> Bind() { return BindToV8Function(); }
bool WasCalled() const { return was_called_; }
const String& CapturedValue() const { return captured_value_; }
private:
ScriptValue Call(ScriptValue value) override {
was_called_ = true;<|fim▁hole|>
v8::Isolate* isolate = GetScriptState()->GetIsolate();
v8::Local<v8::Context> context = GetScriptState()->GetContext();
v8::Local<v8::Object> module_namespace =
value.V8Value()->ToObject(context).ToLocalChecked();
v8::Local<v8::Value> exported_value =
module_namespace->Get(context, V8String(isolate, export_name_))
.ToLocalChecked();
captured_value_ =
ToCoreString(exported_value->ToString(context).ToLocalChecked());
return ScriptValue();
}
const String export_name_;
bool was_called_ = false;
String captured_value_;
};
// CaptureErrorFunction implements a javascript function which captures
// name and error of the exception passed as its argument.
class CaptureErrorFunction final : public ScriptFunction {
public:
explicit CaptureErrorFunction(ScriptState* script_state)
: ScriptFunction(script_state) {}
v8::Local<v8::Function> Bind() { return BindToV8Function(); }
bool WasCalled() const { return was_called_; }
const String& Name() const { return name_; }
const String& Message() const { return message_; }
private:
ScriptValue Call(ScriptValue value) override {
was_called_ = true;
v8::Isolate* isolate = GetScriptState()->GetIsolate();
v8::Local<v8::Context> context = GetScriptState()->GetContext();
v8::Local<v8::Object> error_object =
value.V8Value()->ToObject(context).ToLocalChecked();
v8::Local<v8::Value> name =
error_object->Get(context, V8String(isolate, "name")).ToLocalChecked();
name_ = ToCoreString(name->ToString(context).ToLocalChecked());
v8::Local<v8::Value> message =
error_object->Get(context, V8String(isolate, "message"))
.ToLocalChecked();
message_ = ToCoreString(message->ToString(context).ToLocalChecked());
return ScriptValue();
}
bool was_called_ = false;
String name_;
String message_;
};
class DynamicModuleResolverTestNotReached final : public ScriptFunction {
public:
static v8::Local<v8::Function> CreateFunction(ScriptState* script_state) {
auto* not_reached =
MakeGarbageCollected<DynamicModuleResolverTestNotReached>(script_state);
return not_reached->BindToV8Function();
}
explicit DynamicModuleResolverTestNotReached(ScriptState* script_state)
: ScriptFunction(script_state) {}
private:
ScriptValue Call(ScriptValue) override {
ADD_FAILURE();
return ScriptValue();
}
};
class DynamicModuleResolverTest : public testing::Test,
public ParametrizedModuleTest {
public:
void SetUp() override { ParametrizedModuleTest::SetUp(); }
void TearDown() override { ParametrizedModuleTest::TearDown(); }
};
} // namespace
TEST_P(DynamicModuleResolverTest, ResolveSuccess) {
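  // End-to-end happy path: start a dynamic import, then satisfy the pending
  // module tree fetch with a module exporting `foo`, and verify that the
  // promise resolves with the module namespace object.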
V8TestingScope scope;
auto* modulator = MakeGarbageCollected<DynamicModuleResolverTestModulator>(
scope.GetScriptState());
modulator->SetExpectedFetchTreeURL(TestDependencyURL());
auto* promise_resolver =
MakeGarbageCollected<ScriptPromiseResolver>(scope.GetScriptState());
ScriptPromise promise = promise_resolver->Promise();
auto* capture = MakeGarbageCollected<CaptureExportedStringFunction>(
scope.GetScriptState(), "foo");
promise.Then(capture->Bind(),
DynamicModuleResolverTestNotReached::CreateFunction(
scope.GetScriptState()));
auto* resolver = MakeGarbageCollected<DynamicModuleResolver>(modulator);
resolver->ResolveDynamically("./dependency.js", TestReferrerURL(),
ReferrerScriptInfo(), promise_resolver);
v8::MicrotasksScope::PerformCheckpoint(scope.GetIsolate());
EXPECT_FALSE(capture->WasCalled());
v8::Local<v8::Module> record = ModuleRecord::Compile(
scope.GetIsolate(), "export const foo = 'hello';", TestReferrerURL(),
TestReferrerURL(), ScriptFetchOptions(), TextPosition::MinimumPosition(),
ASSERT_NO_EXCEPTION);
ModuleScript* module_script =
JSModuleScript::CreateForTest(modulator, record, TestDependencyURL());
EXPECT_TRUE(ModuleRecord::Instantiate(scope.GetScriptState(), record,
TestReferrerURL())
.IsEmpty());
modulator->ResolveTreeFetch(module_script);
v8::MicrotasksScope::PerformCheckpoint(scope.GetIsolate());
EXPECT_TRUE(capture->WasCalled());
EXPECT_EQ("hello", capture->CapturedValue());
}
TEST_P(DynamicModuleResolverTest, ResolveSpecifierFailure) {
V8TestingScope scope;
auto* modulator = MakeGarbageCollected<DynamicModuleResolverTestModulator>(
scope.GetScriptState());
modulator->SetExpectedFetchTreeURL(TestDependencyURL());
auto* promise_resolver =
MakeGarbageCollected<ScriptPromiseResolver>(scope.GetScriptState());
ScriptPromise promise = promise_resolver->Promise();
auto* capture =
MakeGarbageCollected<CaptureErrorFunction>(scope.GetScriptState());
promise.Then(DynamicModuleResolverTestNotReached::CreateFunction(
scope.GetScriptState()),
capture->Bind());
auto* resolver = MakeGarbageCollected<DynamicModuleResolver>(modulator);
resolver->ResolveDynamically("invalid-specifier", TestReferrerURL(),
ReferrerScriptInfo(), promise_resolver);
v8::MicrotasksScope::PerformCheckpoint(scope.GetIsolate());
EXPECT_TRUE(capture->WasCalled());
EXPECT_EQ("TypeError", capture->Name());
EXPECT_TRUE(capture->Message().StartsWith("Failed to resolve"));
}
TEST_P(DynamicModuleResolverTest, FetchFailure) {
V8TestingScope scope;
auto* modulator = MakeGarbageCollected<DynamicModuleResolverTestModulator>(
scope.GetScriptState());
modulator->SetExpectedFetchTreeURL(TestDependencyURL());
auto* promise_resolver =
MakeGarbageCollected<ScriptPromiseResolver>(scope.GetScriptState());
ScriptPromise promise = promise_resolver->Promise();
auto* capture =
MakeGarbageCollected<CaptureErrorFunction>(scope.GetScriptState());
promise.Then(DynamicModuleResolverTestNotReached::CreateFunction(
scope.GetScriptState()),
capture->Bind());
auto* resolver = MakeGarbageCollected<DynamicModuleResolver>(modulator);
resolver->ResolveDynamically("./dependency.js", TestReferrerURL(),
ReferrerScriptInfo(), promise_resolver);
EXPECT_FALSE(capture->WasCalled());
modulator->ResolveTreeFetch(nullptr);
v8::MicrotasksScope::PerformCheckpoint(scope.GetIsolate());
EXPECT_TRUE(capture->WasCalled());
EXPECT_EQ("TypeError", capture->Name());
EXPECT_TRUE(capture->Message().StartsWith("Failed to fetch"));
}
TEST_P(DynamicModuleResolverTest, ExceptionThrown) {
V8TestingScope scope;
auto* modulator = MakeGarbageCollected<DynamicModuleResolverTestModulator>(
scope.GetScriptState());
modulator->SetExpectedFetchTreeURL(TestDependencyURL());
auto* promise_resolver =
MakeGarbageCollected<ScriptPromiseResolver>(scope.GetScriptState());
ScriptPromise promise = promise_resolver->Promise();
auto* capture =
MakeGarbageCollected<CaptureErrorFunction>(scope.GetScriptState());
promise.Then(DynamicModuleResolverTestNotReached::CreateFunction(
scope.GetScriptState()),
capture->Bind());
auto* resolver = MakeGarbageCollected<DynamicModuleResolver>(modulator);
resolver->ResolveDynamically("./dependency.js", TestReferrerURL(),
ReferrerScriptInfo(), promise_resolver);
EXPECT_FALSE(capture->WasCalled());
v8::Local<v8::Module> record = ModuleRecord::Compile(
scope.GetIsolate(), "throw Error('bar')", TestReferrerURL(),
TestReferrerURL(), ScriptFetchOptions(), TextPosition::MinimumPosition(),
ASSERT_NO_EXCEPTION);
ModuleScript* module_script =
JSModuleScript::CreateForTest(modulator, record, TestDependencyURL());
EXPECT_TRUE(ModuleRecord::Instantiate(scope.GetScriptState(), record,
TestReferrerURL())
.IsEmpty());
modulator->ResolveTreeFetch(module_script);
v8::MicrotasksScope::PerformCheckpoint(scope.GetIsolate());
EXPECT_TRUE(capture->WasCalled());
EXPECT_EQ("Error", capture->Name());
EXPECT_EQ("bar", capture->Message());
}
TEST_P(DynamicModuleResolverTest, ResolveWithNullReferrerScriptSuccess) {
V8TestingScope scope;
scope.GetDocument().SetURL(KURL("https://example.com"));
auto* modulator = MakeGarbageCollected<DynamicModuleResolverTestModulator>(
scope.GetScriptState());
modulator->SetExpectedFetchTreeURL(TestDependencyURL());
auto* promise_resolver =
MakeGarbageCollected<ScriptPromiseResolver>(scope.GetScriptState());
ScriptPromise promise = promise_resolver->Promise();
auto* capture = MakeGarbageCollected<CaptureExportedStringFunction>(
scope.GetScriptState(), "foo");
promise.Then(capture->Bind(),
DynamicModuleResolverTestNotReached::CreateFunction(
scope.GetScriptState()));
auto* resolver = MakeGarbageCollected<DynamicModuleResolver>(modulator);
resolver->ResolveDynamically("./dependency.js", /* null referrer */ KURL(),
ReferrerScriptInfo(), promise_resolver);
v8::MicrotasksScope::PerformCheckpoint(scope.GetIsolate());
EXPECT_FALSE(capture->WasCalled());
v8::Local<v8::Module> record = ModuleRecord::Compile(
scope.GetIsolate(), "export const foo = 'hello';", TestDependencyURL(),
TestDependencyURL(), ScriptFetchOptions(),
TextPosition::MinimumPosition(), ASSERT_NO_EXCEPTION);
ModuleScript* module_script =
JSModuleScript::CreateForTest(modulator, record, TestDependencyURL());
EXPECT_TRUE(ModuleRecord::Instantiate(scope.GetScriptState(), record,
TestDependencyURL())
.IsEmpty());
modulator->ResolveTreeFetch(module_script);
v8::MicrotasksScope::PerformCheckpoint(scope.GetIsolate());
EXPECT_TRUE(capture->WasCalled());
EXPECT_EQ("hello", capture->CapturedValue());
}
TEST_P(DynamicModuleResolverTest, ResolveWithReferrerScriptInfoBaseURL) {
V8TestingScope scope;
scope.GetDocument().SetURL(KURL("https://example.com"));
auto* modulator = MakeGarbageCollected<DynamicModuleResolverTestModulator>(
scope.GetScriptState());
modulator->SetExpectedFetchTreeURL(
KURL("https://example.com/correct/dependency.js"));
auto* promise_resolver =
MakeGarbageCollected<ScriptPromiseResolver>(scope.GetScriptState());
auto* resolver = MakeGarbageCollected<DynamicModuleResolver>(modulator);
KURL wrong_base_url("https://example.com/wrong/bar.js");
KURL correct_base_url("https://example.com/correct/baz.js");
resolver->ResolveDynamically(
"./dependency.js", wrong_base_url,
ReferrerScriptInfo(correct_base_url, ScriptFetchOptions()),
promise_resolver);
v8::MicrotasksScope::PerformCheckpoint(scope.GetIsolate());
EXPECT_TRUE(modulator->fetch_tree_was_called());
}
// Instantiate tests once with TLA and once without:
INSTANTIATE_TEST_SUITE_P(DynamicModuleResolverTestGroup,
DynamicModuleResolverTest,
testing::Bool(),
ParametrizedModuleTestParamName());
} // namespace blink<|fim▁end|> | |
<|file_name|>asignaturas_actuales.py<|end_file_name|><|fim▁begin|>import time
t1=.3
t2=.1
path="~/Dropbox/Ingenieria/asignaturas_actuales"<|fim▁hole|>keyboard.send_keys(path)
time.sleep(t1)
keyboard.send_key("<enter>")<|fim▁end|> |
time.sleep(t2)
keyboard.send_key("<f6>")
time.sleep(t2) |
<|file_name|>completerlib.py<|end_file_name|><|fim▁begin|># encoding: utf-8
"""Implementations for various useful completers.
These are all loaded by default by IPython.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2010-2011 The IPython Development Team.
#
# Distributed under the terms of the BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import print_function
# Stdlib imports
import glob
import inspect
import os
import re
import sys
try:
# Python >= 3.3
from importlib.machinery import all_suffixes
_suffixes = all_suffixes()
except ImportError:
from imp import get_suffixes
_suffixes = [ s[0] for s in get_suffixes() ]
# Third-party imports
from time import time
from zipimport import zipimporter
# Our own imports
from IPython.core.completer import expand_user, compress_user
from IPython.core.error import TryNext
from IPython.utils._process_common import arg_split
from IPython.utils.py3compat import string_types
# FIXME: this should be pulled in with the right call via the component system
from IPython import get_ipython
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
# Time in seconds after which the rootmodules will be stored permanently in the
# ipython ip.db database (kept in the user's .ipython dir).
TIMEOUT_STORAGE = 2
# Time in seconds after which we give up
TIMEOUT_GIVEUP = 20
# Regular expression for the python import statement
import_re = re.compile(r'(?P<name>[a-zA-Z_][a-zA-Z0-9_]*?)'
r'(?P<package>[/\\]__init__)?'
r'(?P<suffix>%s)$' %
r'|'.join(re.escape(s) for s in _suffixes))
# RE for the ipython %run command (python + ipython scripts)
magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$')
#-----------------------------------------------------------------------------
# Local utilities
#-----------------------------------------------------------------------------
def module_list(path):
"""
Return the list containing the names of the modules available in the given
folder.
"""
# sys.path has the cwd as an empty string, but isdir/listdir need it as '.'
if path == '':
path = '.'
# A few local constants to be used in loops below
pjoin = os.path.join
if os.path.isdir(path):
# Build a list of all files in the directory and all files
# in its subdirectories. For performance reasons, do not
# recurse more than one level into subdirectories.
files = []
for root, dirs, nondirs in os.walk(path, followlinks=True):
subdir = root[len(path)+1:]
if subdir:
files.extend(pjoin(subdir, f) for f in nondirs)
dirs[:] = [] # Do not recurse into additional subdirectories.
else:
files.extend(nondirs)
else:
try:
files = list(zipimporter(path)._files.keys())
except:
files = []
# Build a list of modules which match the import_re regex.
modules = []
for f in files:
m = import_re.match(f)
if m:
modules.append(m.group('name'))
return list(set(modules))
def get_root_modules():
"""
Returns a list containing the names of all the modules available in the
folders of the pythonpath.
ip.db['rootmodules_cache'] maps sys.path entries to list of modules.
"""
ip = get_ipython()
rootmodules_cache = ip.db.get('rootmodules_cache', {})
rootmodules = list(sys.builtin_module_names)
start_time = time()
store = False
for path in sys.path:
try:
modules = rootmodules_cache[path]
except KeyError:
modules = module_list(path)
try:
modules.remove('__init__')
except ValueError:
pass
if path not in ('', '.'): # cwd modules should not be cached
rootmodules_cache[path] = modules
if time() - start_time > TIMEOUT_STORAGE and not store:
store = True
print("\nCaching the list of root modules, please wait!")
print("(This will only be done once - type '%rehashx' to "
"reset cache!)\n")
sys.stdout.flush()
if time() - start_time > TIMEOUT_GIVEUP:
print("This is taking too long, we give up.\n")
return []
rootmodules.extend(modules)
if store:
ip.db['rootmodules_cache'] = rootmodules_cache
rootmodules = list(set(rootmodules))
return rootmodules
def is_importable(module, attr, only_modules):
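    """Return True if `attr` of `module` should be offered as a completion."""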
if only_modules:
return inspect.ismodule(getattr(module, attr))
else:
return not(attr[:2] == '__' and attr[-2:] == '__')
def try_import(mod, only_modules=False):
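    """Import `mod` and return the list of potential completions within it."""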
try:
m = __import__(mod)
except:
return []
mods = mod.split('.')
for module in mods[1:]:
m = getattr(m, module)<|fim▁hole|> if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init:
completions.extend( [attr for attr in dir(m) if
is_importable(m, attr, only_modules)])
completions.extend(getattr(m, '__all__', []))
if m_is_init:
completions.extend(module_list(os.path.dirname(m.__file__)))
completions = {c for c in completions if isinstance(c, string_types)}
completions.discard('__init__')
return list(completions)
#-----------------------------------------------------------------------------
# Completion-related functions.
#-----------------------------------------------------------------------------
def quick_completer(cmd, completions):
""" Easily create a trivial completer for a command.
    Takes either a list of completions, or all completions in a string (that will
be split on whitespace).
Example::
[d:\ipython]|1> import ipy_completers
[d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz'])
[d:\ipython]|3> foo b<TAB>
bar baz
[d:\ipython]|3> foo ba
"""
if isinstance(completions, string_types):
completions = completions.split()
def do_complete(self, event):
return completions
get_ipython().set_hook('complete_command',do_complete, str_key = cmd)
def module_completion(line):
"""
Returns a list containing the completion possibilities for an import line.
The line looks like this :
'import xml.d'
'from xml.dom import'
"""
words = line.split(' ')
nwords = len(words)
# from whatever <tab> -> 'import '
if nwords == 3 and words[0] == 'from':
return ['import ']
# 'from xy<tab>' or 'import xy<tab>'
    if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}):
if nwords == 1:
return get_root_modules()
mod = words[1].split('.')
if len(mod) < 2:
return get_root_modules()
completion_list = try_import('.'.join(mod[:-1]), True)
return ['.'.join(mod[:-1] + [el]) for el in completion_list]
# 'from xyz import abc<tab>'
if nwords >= 3 and words[0] == 'from':
mod = words[1]
return try_import(mod)
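# For instance (illustrative only -- results depend on what is importable on
# sys.path, and filtering on the typed prefix happens in the completer
# machinery, not here):
#
#   module_completion('import xml.d')          # -> ['xml.dom', 'xml.sax', ...]
#   module_completion('from xml.dom import p') # -> names defined in xml.dom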
#-----------------------------------------------------------------------------
# Completers
#-----------------------------------------------------------------------------
# These all have the func(self, event) signature to be used as custom
# completers
def module_completer(self,event):
"""Give completions after user has typed 'import ...' or 'from ...'"""
# This works in all versions of python. While 2.5 has
# pkgutil.walk_packages(), that particular routine is fairly dangerous,
# since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full
# of possibly problematic side effects.
    # This searches the folders in sys.path for available modules.
return module_completion(event.line)
# FIXME: there's a lot of logic common to the run, cd and builtin file
# completers, that is currently reimplemented in each.
def magic_run_completer(self, event):
"""Complete files that end in .py or .ipy or .ipynb for the %run command.
"""
comps = arg_split(event.line, strict=False)
# relpath should be the current token that we need to complete.
if (len(comps) > 1) and (not event.line.endswith(' ')):
relpath = comps[-1].strip("'\"")
else:
relpath = ''
#print("\nev=", event) # dbg
#print("rp=", relpath) # dbg
#print('comps=', comps) # dbg
lglob = glob.glob
isdir = os.path.isdir
relpath, tilde_expand, tilde_val = expand_user(relpath)
# Find if the user has already typed the first filename, after which we
# should complete on all files, since after the first one other files may
# be arguments to the input script.
if any(magic_run_re.match(c) for c in comps):
matches = [f.replace('\\','/') + ('/' if isdir(f) else '')
for f in lglob(relpath+'*')]
else:
dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)]
pys = [f.replace('\\','/')
for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') +
lglob(relpath+'*.ipynb') + lglob(relpath + '*.pyw')]
matches = dirs + pys
#print('run comp:', dirs+pys) # dbg
return [compress_user(p, tilde_expand, tilde_val) for p in matches]
def cd_completer(self, event):
"""Completer function for cd, which only returns directories."""
ip = get_ipython()
relpath = event.symbol
#print(event) # dbg
if event.line.endswith('-b') or ' -b ' in event.line:
# return only bookmark completions
bkms = self.db.get('bookmarks', None)
if bkms:
return bkms.keys()
else:
return []
if event.symbol == '-':
width_dh = str(len(str(len(ip.user_ns['_dh']) + 1)))
# jump in directory history by number
fmt = '-%0' + width_dh +'d [%s]'
ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])]
if len(ents) > 1:
return ents
return []
if event.symbol.startswith('--'):
return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']]
# Expand ~ in path and normalize directory separators.
relpath, tilde_expand, tilde_val = expand_user(relpath)
relpath = relpath.replace('\\','/')
found = []
for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*')
if os.path.isdir(f)]:
if ' ' in d:
            # We don't want to deal with spaces here; the complex code
            # for that lives elsewhere.
raise TryNext
found.append(d)
if not found:
if os.path.isdir(relpath):
return [compress_user(relpath, tilde_expand, tilde_val)]
# if no completions so far, try bookmarks
bks = self.db.get('bookmarks',{})
bkmatches = [s for s in bks if s.startswith(event.symbol)]
if bkmatches:
return bkmatches
raise TryNext
return [compress_user(p, tilde_expand, tilde_val) for p in found]
def reset_completer(self, event):
"A completer for %reset magic"
return '-f -s in out array dhist'.split()<|fim▁end|> |
m_is_init = hasattr(m, '__file__') and '__init__' in m.__file__
completions = [] |
<|file_name|>restyle_damage.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The restyle damage is a hint that tells layout which kind of operations may
//! be needed in presence of incremental style changes.
#![deny(missing_docs)]
use computed_values::display;
use heapsize::HeapSizeOf;
use properties::ServoComputedValues;
use std::fmt;
use std::sync::Arc;
bitflags! {
#[doc = "Individual layout actions that may be necessary after restyling."]
pub flags ServoRestyleDamage: u8 {
#[doc = "Repaint the node itself."]
#[doc = "Currently unused; need to decide how this propagates."]
const REPAINT = 0x01,
#[doc = "The stacking-context-relative position of this node or its descendants has \
changed."]
#[doc = "Propagates both up and down the flow tree."]
const REPOSITION = 0x02,
#[doc = "Recompute the overflow regions (bounding box of object and all descendants)."]
#[doc = "Propagates down the flow tree because the computation is bottom-up."]
const STORE_OVERFLOW = 0x04,
#[doc = "Recompute intrinsic inline_sizes (minimum and preferred)."]
#[doc = "Propagates down the flow tree because the computation is"]
#[doc = "bottom-up."]
const BUBBLE_ISIZES = 0x08,
#[doc = "Recompute actual inline-sizes and block-sizes, only taking out-of-flow children \
into account. \
Propagates up the flow tree because the computation is top-down."]
const REFLOW_OUT_OF_FLOW = 0x10,
#[doc = "Recompute actual inline_sizes and block_sizes."]
#[doc = "Propagates up the flow tree because the computation is"]
#[doc = "top-down."]
const REFLOW = 0x20,
#[doc = "Re-resolve generated content. \
Propagates up the flow tree because the computation is inorder."]
const RESOLVE_GENERATED_CONTENT = 0x40,
#[doc = "The entire flow needs to be reconstructed."]
const RECONSTRUCT_FLOW = 0x80
}
}
impl HeapSizeOf for ServoRestyleDamage {
fn heap_size_of_children(&self) -> usize { 0 }
}
impl ServoRestyleDamage {
/// Compute the appropriate restyle damage for a given style change between
/// `old` and `new`.
pub fn compute(old: &Arc<ServoComputedValues>,
new: &Arc<ServoComputedValues>) -> ServoRestyleDamage {
compute_damage(old, new)
}
/// Returns a bitmask that represents a flow that needs to be rebuilt and
/// reflowed.
///
/// FIXME(bholley): Do we ever actually need this? Shouldn't RECONSTRUCT_FLOW
/// imply everything else?
pub fn rebuild_and_reflow() -> ServoRestyleDamage {
REPAINT | REPOSITION | STORE_OVERFLOW | BUBBLE_ISIZES | REFLOW_OUT_OF_FLOW | REFLOW |
RECONSTRUCT_FLOW
}
/// Returns a bitmask indicating that the frame needs to be reconstructed.
pub fn reconstruct() -> ServoRestyleDamage {
RECONSTRUCT_FLOW
}
/// Supposing a flow has the given `position` property and this damage,
/// returns the damage that we should add to the *parent* of this flow.
pub fn damage_for_parent(self, child_is_absolutely_positioned: bool) -> ServoRestyleDamage {
if child_is_absolutely_positioned {
self & (REPAINT | REPOSITION | STORE_OVERFLOW | REFLOW_OUT_OF_FLOW |
RESOLVE_GENERATED_CONTENT)
} else {
self & (REPAINT | REPOSITION | STORE_OVERFLOW | REFLOW | REFLOW_OUT_OF_FLOW |
RESOLVE_GENERATED_CONTENT)
}
}
/// Supposing the *parent* of a flow with the given `position` property has
/// this damage, returns the damage that we should add to this flow.
pub fn damage_for_child(self,
parent_is_absolutely_positioned: bool,
child_is_absolutely_positioned: bool)
-> ServoRestyleDamage {
match (parent_is_absolutely_positioned, child_is_absolutely_positioned) {
(false, true) => {
// Absolute children are out-of-flow and therefore insulated from changes.
//
// FIXME(pcwalton): Au contraire, if the containing block dimensions change!
self & (REPAINT | REPOSITION)
}
(true, false) => {
// Changing the position of an absolutely-positioned block requires us to reflow
// its kids.
if self.contains(REFLOW_OUT_OF_FLOW) {
self | REFLOW
} else {
self
}
}
_ => {
// TODO(pcwalton): Take floatedness into account.
self & (REPAINT | REPOSITION | REFLOW)
}
}
}
/// Servo doesn't implement this optimization.
pub fn handled_for_descendants(self) -> Self {
Self::empty()
}
}
impl Default for ServoRestyleDamage {
fn default() -> Self {
Self::empty()
}
}
impl fmt::Display for ServoRestyleDamage {<|fim▁hole|> let to_iter =
[ (REPAINT, "Repaint")
, (REPOSITION, "Reposition")
, (STORE_OVERFLOW, "StoreOverflow")
, (BUBBLE_ISIZES, "BubbleISizes")
, (REFLOW_OUT_OF_FLOW, "ReflowOutOfFlow")
, (REFLOW, "Reflow")
, (RESOLVE_GENERATED_CONTENT, "ResolveGeneratedContent")
, (RECONSTRUCT_FLOW, "ReconstructFlow")
];
for &(damage, damage_str) in &to_iter {
if self.contains(damage) {
if !first_elem { try!(write!(f, " | ")); }
try!(write!(f, "{}", damage_str));
first_elem = false;
}
}
if first_elem {
try!(write!(f, "NoDamage"));
}
Ok(())
}
}
// NB: We need the braces inside the RHS due to Rust #8012. This particular
// version of this macro might be safe anyway, but we want to avoid silent
// breakage on modifications.
macro_rules! add_if_not_equal(
($old:ident, $new:ident, $damage:ident,
[ $($effect:ident),* ], [ $($style_struct_getter:ident.$name:ident),* ]) => ({
if $( ($old.$style_struct_getter().$name != $new.$style_struct_getter().$name) )||* {
$damage.insert($($effect)|*);
true
} else {
false
}
})
);
fn compute_damage(old: &ServoComputedValues, new: &ServoComputedValues) -> ServoRestyleDamage {
let mut damage = ServoRestyleDamage::empty();
// This should check every CSS property, as enumerated in the fields of
// http://doc.servo.org/style/properties/struct.ServoComputedValues.html
// FIXME: Test somehow that every property is included.
add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, BUBBLE_ISIZES, REFLOW_OUT_OF_FLOW,
REFLOW, RECONSTRUCT_FLOW], [
get_box.float, get_box.display, get_box.position, get_counters.content,
get_counters.counter_reset, get_counters.counter_increment,
get_inheritedbox._servo_under_display_none,
get_list.quotes, get_list.list_style_type,
// If these text or font properties change, we need to reconstruct the flow so that
// text shaping is re-run.
get_inheritedtext.letter_spacing, get_inheritedtext.text_rendering,
get_inheritedtext.text_transform, get_inheritedtext.word_spacing,
get_inheritedtext.overflow_wrap, get_inheritedtext.text_justify,
get_inheritedtext.white_space, get_inheritedtext.word_break, get_text.text_overflow,
get_font.font_family, get_font.font_style, get_font.font_variant, get_font.font_weight,
get_font.font_size, get_font.font_stretch,
get_inheritedbox.direction, get_inheritedbox.writing_mode,
get_text.text_decoration_line, get_text.unicode_bidi,
get_inheritedtable.empty_cells, get_inheritedtable.caption_side,
get_column.column_width, get_column.column_count
]) || (new.get_box().display == display::T::inline &&
add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, BUBBLE_ISIZES,
REFLOW_OUT_OF_FLOW, REFLOW, RECONSTRUCT_FLOW], [
// For inline boxes only, border/padding styles are used in flow construction (to decide
// whether to create fragments for empty flows).
get_border.border_top_width, get_border.border_right_width,
get_border.border_bottom_width, get_border.border_left_width,
get_padding.padding_top, get_padding.padding_right,
get_padding.padding_bottom, get_padding.padding_left
])) || add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, BUBBLE_ISIZES,
REFLOW_OUT_OF_FLOW, REFLOW],
[get_border.border_top_width, get_border.border_right_width,
get_border.border_bottom_width, get_border.border_left_width,
get_margin.margin_top, get_margin.margin_right,
get_margin.margin_bottom, get_margin.margin_left,
get_padding.padding_top, get_padding.padding_right,
get_padding.padding_bottom, get_padding.padding_left,
get_position.width, get_position.height,
get_inheritedtext.line_height,
get_inheritedtext.text_align, get_inheritedtext.text_indent,
get_table.table_layout,
get_inheritedtable.border_collapse,
get_inheritedtable.border_spacing,
get_column.column_gap,
get_position.flex_direction,
get_position.flex_wrap,
get_position.justify_content,
get_position.align_items,
get_position.align_content,
get_position.order,
get_position.flex_basis,
get_position.flex_grow,
get_position.flex_shrink,
get_position.align_self
]) || add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, REFLOW_OUT_OF_FLOW], [
get_position.top, get_position.left,
get_position.right, get_position.bottom,
get_effects.opacity,
get_box.transform, get_box.transform_style, get_box.transform_origin,
get_box.perspective, get_box.perspective_origin
]) || add_if_not_equal!(old, new, damage,
[REPAINT], [
get_color.color, get_background.background_color,
get_background.background_image, get_background.background_position_x,
get_background.background_position_y, get_background.background_repeat,
get_background.background_attachment, get_background.background_clip,
get_background.background_origin, get_background.background_size,
get_border.border_top_color, get_border.border_right_color,
get_border.border_bottom_color, get_border.border_left_color,
get_border.border_top_style, get_border.border_right_style,
get_border.border_bottom_style, get_border.border_left_style,
get_border.border_top_left_radius, get_border.border_top_right_radius,
get_border.border_bottom_left_radius, get_border.border_bottom_right_radius,
get_position.z_index, get_box._servo_overflow_clip_box,
get_inheritedtext._servo_text_decorations_in_effect,
get_pointing.cursor, get_pointing.pointer_events,
get_effects.box_shadow, get_effects.clip, get_inheritedtext.text_shadow, get_effects.filter,
get_effects.mix_blend_mode, get_inheritedbox.image_rendering,
// Note: May require REFLOW et al. if `visibility: collapse` is implemented.
get_inheritedbox.visibility
]);
// If the layer requirements of this flow have changed due to the value
// of the transform, then reflow is required to rebuild the layers.
if old.transform_requires_layer() != new.transform_requires_layer() {
damage.insert(ServoRestyleDamage::rebuild_and_reflow());
}
damage
}<|fim▁end|> | fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
let mut first_elem = true;
|
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from django import forms
from django.contrib.auth.models import Group
from common.forms import ModelFormWithHelper
from common.helpers import SubmitCancelFormHelper
from community.constants import COMMUNITY_ADMIN
from community.models import Community, CommunityPage
from community.utils import get_groups
from users.models import SystersUser
class CommunityForm(ModelFormWithHelper):
"""Form to edit Community profile"""
class Meta:
model = Community
fields = ('name', 'slug', 'order', 'email', 'mailing_list',
'parent_community', 'website', 'facebook', 'googleplus',
'twitter')
helper_class = SubmitCancelFormHelper
helper_cancel_href = "{% url 'view_community_profile' " \
"community.slug %}"
class AddCommunityPageForm(ModelFormWithHelper):
"""Form to create new CommunityPage. The author and the community of the
page are expected to be provided when initializing the form:
* author - currently logged in user, aka the author of the page
* community - to which Community the CommunityPage belongs
"""
class Meta:
model = CommunityPage
fields = ('title', 'slug', 'order', 'content')
helper_class = SubmitCancelFormHelper
helper_cancel_href = "{% url 'view_community_landing' " \
"community.slug %}"
def __init__(self, *args, **kwargs):
self.author = kwargs.pop('author')
self.community = kwargs.pop('community')
super(AddCommunityPageForm, self).__init__(*args, **kwargs)
def save(self, commit=True):
"""Override save to add author and community to the instance"""
instance = super(AddCommunityPageForm, self).save(commit=False)
instance.author = SystersUser.objects.get(user=self.author)
instance.community = self.community
if commit:
instance.save()
return instance
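# Illustrative sketch only: how AddCommunityPageForm is expected to be
# driven from a view. The `request` and `community` values are assumed to
# be supplied by the surrounding Django URL/view machinery; this helper is
# not part of the application itself.
def _example_add_page(request, community):
    form = AddCommunityPageForm(request.POST or None,
                                author=request.user,
                                community=community)
    if form.is_valid():
        # save() fills in the author and community before committing
        return form.save()
    return None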
class EditCommunityPageForm(ModelFormWithHelper):
"""Form to edit a CommunityPage."""
class Meta:
model = CommunityPage
fields = ('slug', 'title', 'order', 'content')
helper_class = SubmitCancelFormHelper
helper_cancel_href = "{% url 'view_community_page' community.slug " \
"object.slug %}"
class PermissionGroupsForm(forms.Form):
"""Form to manage (select/deselect) user permission groups"""
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
community = kwargs.pop('community')
super(PermissionGroupsForm, self).__init__(*args, **kwargs)
# get all community groups and remove community admin group
# from the list of choices
self.groups = list(get_groups(community.name))
admin_group = Group.objects.get(
name=COMMUNITY_ADMIN.format(community.name))
self.groups.remove(admin_group)
choices = [(group.pk, group.name) for group in self.groups]
self.fields['groups'] = forms.\
MultipleChoiceField(choices=choices, label="", required=False,
widget=forms.CheckboxSelectMultiple)
self.member_groups = self.user.get_member_groups(self.groups)
self.fields['groups'].initial = [group.pk for group in
self.member_groups]
self.helper = SubmitCancelFormHelper(
self, cancel_href="{% url 'community_users' community.slug %}")
def save(self):
"""Update the groups of which the user is member of"""
group_pks = [int(pk) for pk in self.cleaned_data['groups']]
for member_group in self.member_groups:
if member_group.pk not in group_pks:
self.user.leave_group(member_group)
<|fim▁hole|><|fim▁end|> | for pk in group_pks:
group = Group.objects.get(pk=pk)
if not self.user.is_group_member(group.name):
self.user.join_group(group) |
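# Illustrative sketch only: driving PermissionGroupsForm from a view.
# `request`, `community` and `systersuser` are assumed to be provided by
# the caller; save() above diffs the selected groups against the user's
# current memberships.
def _example_manage_groups(request, community, systersuser):
    form = PermissionGroupsForm(request.POST or None,
                                user=systersuser,
                                community=community)
    if form.is_valid():
        form.save()
    return form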
<|file_name|>major_minor_demo1.py<|end_file_name|><|fim▁begin|>"""
Demonstrate how to use major and minor tickers.
The two relevant userland classes are Locators and Formatters.
Locators determine where the ticks are and formatters control the
formatting of ticks.
Minor ticks are off by default (NullLocator and NullFormatter). You
can turn minor ticks on w/o labels by setting the minor locator. You
can also turn labeling on for the minor ticker by setting the minor
formatter
Make a plot with major ticks that are multiples of 20 and minor ticks
that are multiples of 5. Label major ticks with %d formatting but
don't label minor ticks
The MultipleLocator ticker class is used to place ticks on multiples of
some base. The FormatStrFormatter uses a string format string (e.g.,
'%d' or '%1.2f' or '%1.1f cm' ) to format the tick
The pyplot interface grid command changes the grid settings of the
major ticks of the x and y axis together. If you want to control the
grid of the minor ticks for a given axis, use for example
ax.xaxis.grid(True, which='minor')
Note that you should not share the same locator between different Axis
instances, because the locator stores references to the Axis data and
view limits.
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
majorLocator = MultipleLocator(20)
majorFormatter = FormatStrFormatter('%d')
minorLocator = MultipleLocator(5)
t = np.arange(0.0, 100.0, 0.1)
s = np.sin(0.1*np.pi*t)*np.exp(-t*0.01)
fig, ax = plt.subplots()<|fim▁hole|>
# for the minor ticks, use no labels; default NullFormatter
ax.xaxis.set_minor_locator(minorLocator)
plt.show()<|fim▁end|> | plt.plot(t, s)
ax.xaxis.set_major_locator(majorLocator)
ax.xaxis.set_major_formatter(majorFormatter) |
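# Follow-up sketch (assuming the same `ax` as above): the docstring also
# mentions per-axis minor-tick grids; enabling one looks like this.
ax.xaxis.grid(True, which='minor')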
<|file_name|>driver.py<|end_file_name|><|fim▁begin|>"""
HSA driver bridge implementation
"""
from collections.abc import Sequence
import sys
import atexit
import os
import ctypes
import struct
import traceback
import weakref
import logging
from contextlib import contextmanager
from collections import defaultdict, deque
from functools import total_ordering
from numba import mviewbuf
from numba.core import utils, config
from .error import HsaSupportError, HsaDriverError, HsaApiError
from numba.roc.hsadrv import enums, enums_ext, drvapi
import numpy as np
_logger = logging.getLogger(__name__)
class HsaKernelTimedOut(HsaDriverError):
pass
def _device_type_to_string(device):
try:
return ['CPU', 'GPU', 'DSP'][device]
except IndexError:
return 'Unknown'
DEFAULT_HSA_DRIVER = '/opt/rocm/lib/libhsa-runtime64.so'
def _find_driver():
envpath = os.environ.get('NUMBA_HSA_DRIVER', DEFAULT_HSA_DRIVER)
if envpath == '0':
# Force fail
_raise_driver_not_found()
# Determine DLL type
if (struct.calcsize('P') != 8
or sys.platform == 'win32'
or sys.platform == 'darwin'):
_raise_platform_not_supported()
else:
# Assume to be *nix like and 64 bit
dlloader = ctypes.CDLL
dldir = ['/usr/lib', '/usr/lib64']
dlname = 'libhsa-runtime64.so'
if envpath is not None:
try:
envpath = os.path.abspath(envpath)
except ValueError:
raise HsaSupportError("NUMBA_HSA_DRIVER %s is not a valid path" %
envpath)
if not os.path.isfile(envpath):
raise HsaSupportError("NUMBA_HSA_DRIVER %s is not a valid file "
"path. Note it must be a filepath of the .so/"
".dll/.dylib or the driver" % envpath)
candidates = [envpath]
else:
# First search for the name in the default library path.
# If that is not found, try the specific path.
candidates = [dlname] + [os.path.join(x, dlname) for x in dldir]
# Load the driver; Collect driver error information
path_not_exist = []
driver_load_error = []
for path in candidates:
try:
dll = dlloader(path)
except OSError as e:
# Problem opening the DLL
path_not_exist.append(not os.path.isfile(path))
driver_load_error.append(e)
else:
return dll
# Problem loading driver
if all(path_not_exist):
_raise_driver_not_found()
else:
errmsg = '\n'.join(str(e) for e in driver_load_error)
_raise_driver_error(errmsg)
PLATFORM_NOT_SUPPORTED_ERROR = """
HSA is not currently supported on this platform ({0}).
"""
def _raise_platform_not_supported():
raise HsaSupportError(PLATFORM_NOT_SUPPORTED_ERROR.format(sys.platform))
DRIVER_NOT_FOUND_MSG = """
The HSA runtime library cannot be found.
If you are sure that the HSA is installed, try setting environment
variable NUMBA_HSA_DRIVER with the file path of the HSA runtime shared
library.
"""
def _raise_driver_not_found():
raise HsaSupportError(DRIVER_NOT_FOUND_MSG)
DRIVER_LOAD_ERROR_MSG = """
A HSA runtime library was found, but failed to load with error:
%s
"""
def _raise_driver_error(e):
raise HsaSupportError(DRIVER_LOAD_ERROR_MSG % e)
MISSING_FUNCTION_ERRMSG = """driver missing function: %s.
"""
class Recycler(object):
def __init__(self):
self._garbage = []
self.enabled = True
def free(self, obj):
self._garbage.append(obj)
self.service()
def _cleanup(self):
for obj in self._garbage:
obj._finalizer(obj)
del self._garbage[:]
def service(self):
if self.enabled:
if len(self._garbage) > 10:
self._cleanup()
def drain(self):
self._cleanup()
self.enabled = False
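# Minimal usage sketch of the Recycler contract (illustrative only):
# objects handed to free() must expose a `_finalizer` callable taking the
# object itself; cleanup runs once more than 10 items accumulate, or when
# drain() is called at teardown.
def _recycler_sketch():
    class _Dummy(object):
        _finalizer = staticmethod(lambda obj: None)
    r = Recycler()
    r.free(_Dummy())   # deferred until the garbage threshold is hit
    r.drain()          # force cleanup and disable further recycling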
# The Driver ###########################################################
class Driver(object):
"""
Driver API functions are lazily bound.
"""
_singleton = None
_agent_map = None
_api_prototypes = drvapi.API_PROTOTYPES # avoid premature GC at exit
_hsa_properties = {
'version_major': (enums.HSA_SYSTEM_INFO_VERSION_MAJOR, ctypes.c_uint16),
'version_minor': (enums.HSA_SYSTEM_INFO_VERSION_MINOR, ctypes.c_uint16),
'timestamp': (enums.HSA_SYSTEM_INFO_TIMESTAMP, ctypes.c_uint64),
'timestamp_frequency': (enums.HSA_SYSTEM_INFO_TIMESTAMP_FREQUENCY, ctypes.c_uint16),
'signal_max_wait': (enums.HSA_SYSTEM_INFO_SIGNAL_MAX_WAIT, ctypes.c_uint64),
}
def __new__(cls):
obj = cls._singleton
if obj is not None:
return obj
else:
obj = object.__new__(cls)
cls._singleton = obj
return obj
def __init__(self):
try:
if config.DISABLE_HSA:
raise HsaSupportError("HSA disabled by user")
self.lib = _find_driver()
self.is_initialized = False
self.initialization_error = None
except HsaSupportError as e:
self.is_initialized = True
self.initialization_error = e
self._agent_map = None
self._programs = {}
self._recycler = Recycler()
self._active_streams = weakref.WeakSet()
def _initialize_api(self):
if self.is_initialized:
return
self.is_initialized = True
try:
self.hsa_init()
except HsaApiError as e:
self.initialization_error = e
raise HsaDriverError("Error at driver init: \n%s:" % e)
else:
@atexit.register
def shutdown():
try:
for agent in self.agents:
agent.release()
except AttributeError:
                    # this happens when no agents were initialised,
                    # so self.agents isn't present
pass
else:
self._recycler.drain()
def _initialize_agents(self):
if self._agent_map is not None:
return
self._initialize_api()
agent_ids = []
def on_agent(agent_id, ctxt):
agent_ids.append(agent_id)
return enums.HSA_STATUS_SUCCESS
callback = drvapi.HSA_ITER_AGENT_CALLBACK_FUNC(on_agent)
self.hsa_iterate_agents(callback, None)
agent_map = dict((agent_id, Agent(agent_id)) for agent_id in agent_ids)
self._agent_map = agent_map
@property
def is_available(self):
self._initialize_api()
return self.initialization_error is None
@property
def agents(self):
self._initialize_agents()
return self._agent_map.values()
def create_program(self, model=enums.HSA_MACHINE_MODEL_LARGE,
profile=enums.HSA_PROFILE_FULL,
rounding_mode=enums.HSA_DEFAULT_FLOAT_ROUNDING_MODE_DEFAULT,
options=None):
program = drvapi.hsa_ext_program_t()
assert options is None
self.hsa_ext_program_create(model, profile, rounding_mode,
options, ctypes.byref(program))
return Program(program)
def create_signal(self, initial_value, consumers=None):
if consumers is None:
consumers = tuple(self.agents)
consumers_len = len(consumers)
consumers_type = drvapi.hsa_agent_t * consumers_len
consumers = consumers_type(*[c._id for c in consumers])
result = drvapi.hsa_signal_t()
self.hsa_signal_create(initial_value, consumers_len, consumers,
ctypes.byref(result))
return Signal(result.value)
def __getattr__(self, fname):
# Initialize driver
self._initialize_api()
# First try if it is an hsa property
try:
enum, typ = self._hsa_properties[fname]
result = typ()
self.hsa_system_get_info(enum, ctypes.byref(result))
return result.value
except KeyError:
pass
# if not a property... try if it is an api call
try:
proto = self._api_prototypes[fname]
except KeyError:
raise AttributeError(fname)
if self.initialization_error is not None:
raise HsaSupportError("Error at driver init: \n%s:" %
self.initialization_error)
# Find function in driver library
libfn = self._find_api(fname)
for key, val in proto.items():
setattr(libfn, key, val)
def driver_wrapper(fn):
def wrapped(*args, **kwargs):
_logger.debug('call driver api: %s', fname)
return fn(*args, **kwargs)
return wrapped
retval = driver_wrapper(libfn)
setattr(self, fname, retval)
return retval
def _find_api(self, fname):
# Try regular
try:
return getattr(self.lib, fname)
except AttributeError:
pass
# Not found.
# Delay missing function error to use
def absent_function(*args, **kws):
raise HsaDriverError(MISSING_FUNCTION_ERRMSG % fname)
setattr(self, fname, absent_function)
return absent_function
@property
def components(self):
"""Returns a ordered list of components
The first device should be picked first
"""
return list(filter(lambda a: a.is_component, reversed(sorted(
self.agents))))
def create_stream(self):
st = Stream()
self._active_streams.add(st)
return st
def implicit_sync(self):
"""
Implicit synchronization for all asynchronous streams
across all devices.
"""
_logger.info("implicit sync")
for st in self._active_streams:
st.synchronize()
hsa = Driver()
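# Illustrative sketch only: attribute access on the `hsa` singleton is
# resolved lazily by Driver.__getattr__. Names listed in _hsa_properties
# come back as plain values; any other known name is bound to the shared
# library on first use and cached on the instance.
def _driver_usage_sketch():
    if hsa.is_available:
        _logger.info("HSA runtime %s.%s",
                     hsa.version_major, hsa.version_minor)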
class HsaWrapper(object):
def __getattr__(self, fname):
try:
enum, typ = self._hsa_properties[fname]
except KeyError:
raise AttributeError(
"%r object has no attribute %r" % (self.__class__, fname))
func = getattr(hsa, self._hsa_info_function)
result = typ()
is_array_type = hasattr(typ, '_length_')
        # if the result is not a ctypes array, get a reference to it
result_buff = result if is_array_type else ctypes.byref(result)
func(self._id, enum, result_buff)
if not is_array_type or typ._type_ == ctypes.c_char:
return result.value
else:
return list(result)
def __dir__(self):
        return sorted(set(dir(type(self)) +
                          list(self.__dict__.keys()) +
                          list(self._hsa_properties.keys())))
@total_ordering
class Agent(HsaWrapper):
"""Abstracts a HSA compute agent.
This will wrap and provide an OO interface for hsa_agent_t C-API elements
"""
# Note this will be handled in a rather unconventional way. When agents get
# initialized by the driver, a set of instances for all the available agents
# will be created. After that creation, the __new__ and __init__ methods will
# be replaced, and the constructor will act as a mapping from an agent_id to
# the equivalent Agent object. Any attempt to create an Agent with a non
# existing agent_id will result in an error.
#
# the logic for this resides in Driver._initialize_agents
_hsa_info_function = 'hsa_agent_get_info'
_hsa_properties = {
'name': (enums.HSA_AGENT_INFO_NAME, ctypes.c_char * 64),
'vendor_name': (enums.HSA_AGENT_INFO_VENDOR_NAME, ctypes.c_char * 64),
'feature': (enums.HSA_AGENT_INFO_FEATURE, drvapi.hsa_agent_feature_t),
'wavefront_size': (
enums.HSA_AGENT_INFO_WAVEFRONT_SIZE, ctypes.c_uint32),
'workgroup_max_dim': (
enums.HSA_AGENT_INFO_WORKGROUP_MAX_DIM, ctypes.c_uint16 * 3),
'grid_max_dim': (enums.HSA_AGENT_INFO_GRID_MAX_DIM, drvapi.hsa_dim3_t),
'grid_max_size': (enums.HSA_AGENT_INFO_GRID_MAX_SIZE, ctypes.c_uint32),
'fbarrier_max_size': (
enums.HSA_AGENT_INFO_FBARRIER_MAX_SIZE, ctypes.c_uint32),
'queues_max': (enums.HSA_AGENT_INFO_QUEUES_MAX, ctypes.c_uint32),
'queue_max_size': (
enums.HSA_AGENT_INFO_QUEUE_MAX_SIZE, ctypes.c_uint32),
'queue_type': (
enums.HSA_AGENT_INFO_QUEUE_TYPE, drvapi.hsa_queue_type_t),
'node': (enums.HSA_AGENT_INFO_NODE, ctypes.c_uint32),
'_device': (enums.HSA_AGENT_INFO_DEVICE, drvapi.hsa_device_type_t),
'cache_size': (enums.HSA_AGENT_INFO_CACHE_SIZE, ctypes.c_uint32 * 4),
'isa': (enums.HSA_AGENT_INFO_ISA, drvapi.hsa_isa_t),
}
def __init__(self, agent_id):
# This init will only happen when initializing the agents. After
# the agent initialization the instances of this class are considered
# initialized and locked, so this method will be removed.
self._id = agent_id
self._recycler = hsa._recycler
self._queues = set()
self._initialize_regions()
self._initialize_mempools()
@property
def device(self):
return _device_type_to_string(self._device)
@property
def is_component(self):
return (self.feature & enums.HSA_AGENT_FEATURE_KERNEL_DISPATCH) != 0
@property
def regions(self):
return self._regions
@property
def mempools(self):
return self._mempools
@property
def wavebits(self):
"""
log2(wavefront_size)
"""
# assume wavefront_size will always be a power of 2
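        # e.g. wavefront_size == 64 -> bin(64) == '0b1000000'; reversed, the
        # single '1' sits at index 6, which is exactly log2(64)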
return bin(self.wavefront_size)[::-1].index('1')
def _initialize_regions(self):
region_ids = []
def on_region(region_id, ctxt):
region_ids.append(region_id)
return enums.HSA_STATUS_SUCCESS
callback = drvapi.HSA_AGENT_ITERATE_REGIONS_CALLBACK_FUNC(on_region)
hsa.hsa_agent_iterate_regions(self._id, callback, None)
self._regions = _RegionList([MemRegion.instance_for(self, region_id)
for region_id in region_ids])
def _initialize_mempools(self):
mempool_ids = []
def on_region(_id, ctxt=None):
mempool_ids.append(_id)
return enums.HSA_STATUS_SUCCESS
callback = drvapi.HSA_AMD_AGENT_ITERATE_MEMORY_POOLS_CALLBACK(on_region)
hsa.hsa_amd_agent_iterate_memory_pools(self._id, callback, None)
self._mempools = _RegionList([MemPool.instance_for(self, mempool_id)
for mempool_id in mempool_ids])
def _create_queue(self, size, callback=None, data=None,
private_segment_size=None, group_segment_size=None,
queue_type=None):
assert queue_type is not None
assert size <= self.queue_max_size
cb_typ = drvapi.HSA_QUEUE_CALLBACK_FUNC
cb = ctypes.cast(None, cb_typ) if callback is None else cb_typ(callback)
result = ctypes.POINTER(drvapi.hsa_queue_t)()
private_segment_size = (ctypes.c_uint32(-1)
if private_segment_size is None
else private_segment_size)
group_segment_size = (ctypes.c_uint32(-1)
if group_segment_size is None
else group_segment_size)
hsa.hsa_queue_create(self._id, size, queue_type, cb, data,
private_segment_size, group_segment_size,
ctypes.byref(result))
q = Queue(self, result)
self._queues.add(q)
return weakref.proxy(q)
def create_queue_single(self, *args, **kwargs):
kwargs['queue_type'] = enums.HSA_QUEUE_TYPE_SINGLE
return self._create_queue(*args, **kwargs)
def create_queue_multi(self, *args, **kwargs):
kwargs['queue_type'] = enums.HSA_QUEUE_TYPE_MULTI
return self._create_queue(*args, **kwargs)
def release(self):
"""
Release all resources
Called at system teardown
"""
for q in list(self._queues):
q.release()
def release_queue(self, queue):
self._queues.remove(queue)
self._recycler.free(queue)
def __repr__(self):
return "<HSA agent ({0}): {1} {2} '{3}'{4}>".format(self._id,
self.device,
self.vendor_name,
self.name,
" (component)" if self.is_component else "")
def _rank(self):
return (self.is_component, self.grid_max_size, self._device)
def __lt__(self, other):
if isinstance(self, Agent):
return self._rank() < other._rank()
else:
return NotImplemented
def __eq__(self, other):
if isinstance(self, Agent):
return self._rank() == other._rank()
else:
return NotImplemented
def __hash__(self):
return hash(self._rank())
def create_context(self):
return Context(self)
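# Illustrative sketch only: agents are ordered via _rank(), so
# hsa.components already lists kernel-dispatch capable agents with the
# preferred device first.
def _agent_enumeration_sketch():
    for agent in hsa.components:
        _logger.info("component: %r (wavefront=%d)",
                     agent, agent.wavefront_size)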
class _RegionList(Sequence):
__slots__ = '_all', 'globals', 'readonlys', 'privates', 'groups'
def __init__(self, lst):
self._all = tuple(lst)
self.globals = tuple(x for x in lst if x.kind == 'global')
self.readonlys = tuple(x for x in lst if x.kind == 'readonly')
self.privates = tuple(x for x in lst if x.kind == 'private')
self.groups = tuple(x for x in lst if x.kind == 'group')
def __len__(self):
return len(self._all)
def __contains__(self, item):
return item in self._all
def __reversed__(self):
return reversed(self._all)
def __getitem__(self, idx):
return self._all[idx]
class MemPool(HsaWrapper):
"""Abstracts a HSA mem pool.
This will wrap and provide an OO interface for hsa_amd_memory_pool_t
C-API elements
"""
_hsa_info_function = 'hsa_amd_memory_pool_get_info'
_hsa_properties = {
'segment': (
enums_ext.HSA_AMD_MEMORY_POOL_INFO_SEGMENT,
drvapi.hsa_amd_segment_t
),
'_flags': (
enums_ext.HSA_AMD_MEMORY_POOL_INFO_GLOBAL_FLAGS,
ctypes.c_uint32
),
'size': (enums_ext.HSA_AMD_MEMORY_POOL_INFO_SIZE,
ctypes.c_size_t),
'alloc_allowed': (enums_ext.HSA_AMD_MEMORY_POOL_INFO_RUNTIME_ALLOC_ALLOWED,
ctypes.c_bool),
'alloc_granule': (enums_ext.HSA_AMD_MEMORY_POOL_INFO_RUNTIME_ALLOC_GRANULE,
ctypes.c_size_t),
'alloc_alignment': (enums_ext.HSA_AMD_MEMORY_POOL_INFO_RUNTIME_ALLOC_ALIGNMENT,
ctypes.c_size_t),
'accessible_by_all': (enums_ext.HSA_AMD_MEMORY_POOL_INFO_ACCESSIBLE_BY_ALL,
ctypes.c_bool),
}
_segment_name_map = {
enums_ext.HSA_AMD_SEGMENT_GLOBAL: 'global',
enums_ext.HSA_AMD_SEGMENT_READONLY: 'readonly',
enums_ext.HSA_AMD_SEGMENT_PRIVATE: 'private',
enums_ext.HSA_AMD_SEGMENT_GROUP: 'group',
}
def __init__(self, agent, pool):
"""Do not instantiate MemPool objects directly, use the factory class
method 'instance_for' to ensure MemPool identity"""
self._id = pool
self._owner_agent = agent
self._as_parameter_ = self._id
@property
def kind(self):
return self._segment_name_map[self.segment]
@property
def agent(self):
return self._owner_agent
def supports(self, check_flag):
"""
Determines if a given feature is supported by this MemRegion.
        Feature flags are found in "./enums_ext.py" under:
* hsa_amd_memory_pool_global_flag_t
Params:
check_flag: Feature flag to test
"""
if self.kind == 'global':
return self._flags & check_flag
else:
return False
def allocate(self, nbytes):
assert self.alloc_allowed
assert nbytes >= 0
buff = ctypes.c_void_p()
flags = ctypes.c_uint32(0) # From API docs "Must be 0"!
hsa.hsa_amd_memory_pool_allocate(self._id, nbytes, flags, ctypes.byref(buff))
if buff.value is None:
raise HsaDriverError("Failed to allocate from {}".format(self))
return buff
_instance_dict = {}
@classmethod
def instance_for(cls, owner, _id):
try:
return cls._instance_dict[_id]
except KeyError:
new_instance = cls(owner, _id)
cls._instance_dict[_id] = new_instance
return new_instance
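# Illustrative sketch only: picking an allocation-capable global pool from
# an agent and allocating raw bytes from it; the pointer is released
# through the pool-free entry point used elsewhere in this module.
def _mempool_alloc_sketch(agent, nbytes=256):
    pools = [mp for mp in agent.mempools.globals if mp.alloc_allowed]
    if not pools:
        raise HsaDriverError("no allocatable global memory pool")
    buff = pools[0].allocate(nbytes)
    hsa.hsa_amd_memory_pool_free(buff)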
class MemRegion(HsaWrapper):
"""Abstracts a HSA memory region.
This will wrap and provide an OO interface for hsa_region_t C-API elements
"""
_hsa_info_function = 'hsa_region_get_info'
_hsa_properties = {
'segment': (
enums.HSA_REGION_INFO_SEGMENT,
drvapi.hsa_region_segment_t
),
'_flags': (
enums.HSA_REGION_INFO_GLOBAL_FLAGS,
drvapi.hsa_region_global_flag_t
),
'host_accessible': (enums_ext.HSA_AMD_REGION_INFO_HOST_ACCESSIBLE,
ctypes.c_bool),
'size': (enums.HSA_REGION_INFO_SIZE,
ctypes.c_size_t),
'alloc_max_size': (enums.HSA_REGION_INFO_ALLOC_MAX_SIZE,
ctypes.c_size_t),
'alloc_alignment': (enums.HSA_REGION_INFO_RUNTIME_ALLOC_ALIGNMENT,
ctypes.c_size_t),
'alloc_granule': (enums.HSA_REGION_INFO_RUNTIME_ALLOC_GRANULE,
ctypes.c_size_t),
'alloc_allowed': (enums.HSA_REGION_INFO_RUNTIME_ALLOC_ALLOWED,
ctypes.c_bool),
}
_segment_name_map = {
enums.HSA_REGION_SEGMENT_GLOBAL: 'global',
enums.HSA_REGION_SEGMENT_READONLY: 'readonly',
enums.HSA_REGION_SEGMENT_PRIVATE: 'private',
enums.HSA_REGION_SEGMENT_GROUP: 'group',
}
def __init__(self, agent, region_id):
"""Do not instantiate MemRegion objects directly, use the factory class
method 'instance_for' to ensure MemRegion identity"""
self._id = region_id
self._owner_agent = agent
self._as_parameter_ = self._id
@property
def kind(self):
return self._segment_name_map[self.segment]
@property
def agent(self):
return self._owner_agent
def supports(self, check_flag):
"""
Determines if a given feature is supported by this MemRegion.
Feature flags are found in "./enums.py" under:
* hsa_region_global_flag_t
Params:
check_flag: Feature flag to test
"""
if self.kind == 'global':
return self._flags & check_flag
else:
return False
def allocate(self, nbytes):
assert self.alloc_allowed
assert nbytes <= self.alloc_max_size
assert nbytes >= 0
buff = ctypes.c_void_p()
hsa.hsa_memory_allocate(self._id, nbytes, ctypes.byref(buff))
return buff
def free(self, ptr):
hsa.hsa_memory_free(ptr)
_instance_dict = {}
@classmethod
def instance_for(cls, owner, _id):
try:
return cls._instance_dict[_id]
except KeyError:
new_instance = cls(owner, _id)
cls._instance_dict[_id] = new_instance
return new_instance
class Queue(object):
def __init__(self, agent, queue_ptr):
"""The id in a queue is a pointer to the queue object returned by hsa_queue_create.
        The Queue object has ownership of that queue object"""
self._agent = weakref.proxy(agent)
self._id = queue_ptr
self._as_parameter_ = self._id
self._finalizer = hsa.hsa_queue_destroy
def release(self):
self._agent.release_queue(self)
def __getattr__(self, fname):
return getattr(self._id.contents, fname)
@contextmanager
def _get_packet(self, packet_type):
# Write AQL packet at the calculated queue index address
queue_struct = self._id.contents
queue_mask = queue_struct.size - 1
assert (ctypes.sizeof(packet_type) ==
ctypes.sizeof(drvapi.hsa_kernel_dispatch_packet_t))
packet_array_t = (packet_type * queue_struct.size)
# Obtain the current queue write index
index = hsa.hsa_queue_add_write_index_acq_rel(self._id, 1)
while True:
read_offset = hsa.hsa_queue_load_read_index_acquire(self._id)
if read_offset <= index < read_offset + queue_struct.size:
break
queue_offset = index & queue_mask
queue = packet_array_t.from_address(queue_struct.base_address)
packet = queue[queue_offset]
# zero init
ctypes.memset(ctypes.addressof(packet), 0, ctypes.sizeof(packet_type))
yield packet
# Increment write index
# Ring the doorbell
hsa.hsa_signal_store_release(self._id.contents.doorbell_signal, index)
def insert_barrier(self, dep_signal):
with self._get_packet(drvapi.hsa_barrier_and_packet_t) as packet:
# Populate packet
packet.dep_signal0 = dep_signal._id
header = 0
header |= enums.HSA_FENCE_SCOPE_SYSTEM << enums.HSA_PACKET_HEADER_ACQUIRE_FENCE_SCOPE
header |= enums.HSA_FENCE_SCOPE_SYSTEM << enums.HSA_PACKET_HEADER_RELEASE_FENCE_SCOPE
header |= enums.HSA_PACKET_TYPE_BARRIER_AND << enums.HSA_PACKET_HEADER_TYPE
header |= 1 << enums.HSA_PACKET_HEADER_BARRIER
# Original example calls for an atomic store.
# Since we are on x86, store of aligned 16 bit is atomic.
# The C code is
# __atomic_store_n((uint16_t*)(&dispatch_packet->header), header, __ATOMIC_RELEASE);
packet.header = header
def dispatch(self, symbol, kernargs,
workgroup_size=None,
grid_size=None,
signal=None):
_logger.info("dispatch %s", symbol.name)
dims = len(workgroup_size)
assert dims == len(grid_size)
assert 0 < dims <= 3
assert grid_size >= workgroup_size
if workgroup_size > tuple(self._agent.workgroup_max_dim)[:dims]:
msg = "workgroupsize is too big {0} > {1}"
raise HsaDriverError(msg.format(workgroup_size,
tuple(self._agent.workgroup_max_dim)[:dims]))
s = signal if signal is not None else hsa.create_signal(1)
# Note: following vector_copy.c
with self._get_packet(drvapi.hsa_kernel_dispatch_packet_t) as packet:
# Populate packet
packet.setup |= dims << enums.HSA_KERNEL_DISPATCH_PACKET_SETUP_DIMENSIONS
packet.workgroup_size_x = workgroup_size[0]
packet.workgroup_size_y = workgroup_size[1] if dims > 1 else 1
packet.workgroup_size_z = workgroup_size[2] if dims > 2 else 1
packet.grid_size_x = grid_size[0]
packet.grid_size_y = grid_size[1] if dims > 1 else 1
packet.grid_size_z = grid_size[2] if dims > 2 else 1
packet.completion_signal = s._id
packet.kernel_object = symbol.kernel_object
packet.kernarg_address = (0 if kernargs is None
else kernargs.value)
packet.private_segment_size = symbol.private_segment_size
packet.group_segment_size = symbol.group_segment_size
header = 0
header |= enums.HSA_FENCE_SCOPE_SYSTEM << enums.HSA_PACKET_HEADER_ACQUIRE_FENCE_SCOPE
header |= enums.HSA_FENCE_SCOPE_SYSTEM << enums.HSA_PACKET_HEADER_RELEASE_FENCE_SCOPE
header |= enums.HSA_PACKET_TYPE_KERNEL_DISPATCH << enums.HSA_PACKET_HEADER_TYPE
# Original example calls for an atomic store.
# Since we are on x86, store of aligned 16 bit is atomic.
# The C code is
# __atomic_store_n((uint16_t*)(&dispatch_packet->header), header, __ATOMIC_RELEASE);
packet.header = header
# Wait on the dispatch completion signal
# synchronous if no signal was provided
if signal is None:
_logger.info('wait for synchronous kernel to complete')
timeout = 10
if not s.wait_until_ne_one(timeout=timeout):
msg = "Kernel timed out after {timeout} second"
raise HsaKernelTimedOut(msg.format(timeout=timeout))
def __dir__(self):
        return sorted(set(dir(self._id.contents) +
                          list(self.__dict__.keys())))
def owned(self):
return ManagedQueueProxy(self)
class ManagedQueueProxy(object):
def __init__(self, queue):
self._queue = weakref.ref(queue)
def __getattr__(self, item):
return getattr(self._queue(), item)
class Signal(object):
"""The id for the signal is going to be the hsa_signal_t returned by create_signal.
Lifetime of the underlying signal will be tied with this object".
Note that it is likely signals will have lifetime issues."""
def __init__(self, signal_id):
self._id = signal_id
self._as_parameter_ = self._id
weakref.finalize(self, hsa.hsa_signal_destroy, self._id)
def load_relaxed(self):
return hsa.hsa_signal_load_relaxed(self._id)
def load_acquire(self):
return hsa.hsa_signal_load_acquire(self._id)
def wait_until_ne_one(self, timeout=None):
"""
        Returns a boolean to indicate whether the wait has timed out
"""
one = 1
mhz = 10 ** 6
if timeout is None:
# Infinite
expire = -1 # UINT_MAX
else:
# timeout as seconds
expire = timeout * hsa.timestamp_frequency * mhz
        # XXX: an active wait instead of a blocked one seems to avoid hangs in docker
hsa.hsa_signal_wait_acquire(self._id, enums.HSA_SIGNAL_CONDITION_NE,
one, expire,
enums.HSA_WAIT_STATE_ACTIVE)
return self.load_relaxed() != one
class BrigModule(object):
def __init__(self, brig_buffer):
"""
Take a byte buffer of a Brig module
"""
buf = ctypes.create_string_buffer(brig_buffer)
self._buffer = buf
self._id = ctypes.cast(ctypes.addressof(buf),
drvapi.hsa_ext_module_t)
@classmethod
def from_file(cls, file_name):
with open(file_name, 'rb') as fin:
buf = fin.read()
return BrigModule(buf)
def __len__(self):
return len(self._buffer)
def __repr__(self):
return "<BrigModule id={0} size={1}bytes>".format(hex(id(self)),
len(self))
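# Illustrative sketch only (`path` is a placeholder for a real .brig
# file): a BrigModule is loaded from disk and handed to a finalizer
# Program defined below.
def _brig_program_sketch(path):
    brig = BrigModule.from_file(path)
    prog = Program()
    prog.add_module(brig)
    return prog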
class Program(object):
def __init__(self, model=enums.HSA_MACHINE_MODEL_LARGE,
profile=enums.HSA_PROFILE_FULL,
rounding_mode=enums.HSA_DEFAULT_FLOAT_ROUNDING_MODE_DEFAULT,
options=None, version_major=1, version_minor=0):
self._id = drvapi.hsa_ext_program_t()
assert options is None
def check_fptr_return(hsa_status):
if hsa_status is not enums.HSA_STATUS_SUCCESS:
msg = ctypes.c_char_p()
hsa.hsa_status_string(hsa_status, ctypes.byref(msg))
_logger.info(msg.value.decode("utf-8"))
exit(-hsa_status)
support = ctypes.c_bool(0)
hsa.hsa_system_extension_supported(enums.HSA_EXTENSION_FINALIZER,
version_major,
version_minor,
ctypes.byref(support))
assert support.value, ('HSA system extension %s.%s not supported' %
(version_major, version_minor))
# struct of function pointers
self._ftabl = drvapi.hsa_ext_finalizer_1_00_pfn_t()
# populate struct
hsa.hsa_system_get_extension_table(enums.HSA_EXTENSION_FINALIZER,
version_major,
version_minor,
ctypes.byref(self._ftabl))
ret = self._ftabl.hsa_ext_program_create(model, profile,
rounding_mode, options,
ctypes.byref(self._id))
check_fptr_return(ret)
self._as_parameter_ = self._id
weakref.finalize(self, self._ftabl.hsa_ext_program_destroy,
self._id)
def add_module(self, module):
self._ftabl.hsa_ext_program_add_module(self._id, module._id)
def finalize(self, isa, callconv=0, options=None):
"""
The program object is safe to be deleted after ``finalize``.
"""
code_object = drvapi.hsa_code_object_t()
control_directives = drvapi.hsa_ext_control_directives_t()
ctypes.memset(ctypes.byref(control_directives), 0,
ctypes.sizeof(control_directives))
self._ftabl.hsa_ext_program_finalize(self._id,
isa,
callconv,
control_directives,
options,
enums.HSA_CODE_OBJECT_TYPE_PROGRAM,
ctypes.byref(code_object))
return CodeObject(code_object)
class CodeObject(object):
def __init__(self, code_object):
self._id = code_object
self._as_parameter_ = self._id
weakref.finalize(self, hsa.hsa_code_object_destroy, self._id)
class Executable(object):
def __init__(self):
ex = drvapi.hsa_executable_t()
hsa.hsa_executable_create(enums.HSA_PROFILE_FULL,
enums.HSA_EXECUTABLE_STATE_UNFROZEN,
None,
ctypes.byref(ex))
self._id = ex
self._as_parameter_ = self._id
weakref.finalize(self, hsa.hsa_executable_destroy, self._id)
def load(self, agent, code_object):
hsa.hsa_executable_load_code_object(self._id, agent._id,
code_object._id, None)
def freeze(self):
"""Freeze executable before we can query for symbol"""
hsa.hsa_executable_freeze(self._id, None)
def get_symbol(self, agent, name):
symbol = drvapi.hsa_executable_symbol_t()
hsa.hsa_executable_get_symbol(self._id, None,
ctypes.create_string_buffer(
name.encode('ascii')),
agent._id, 0,
ctypes.byref(symbol))
return Symbol(name, symbol)
class Symbol(HsaWrapper):
_hsa_info_function = 'hsa_executable_symbol_get_info'
_hsa_properties = {
'kernel_object': (
enums.HSA_EXECUTABLE_SYMBOL_INFO_KERNEL_OBJECT,
ctypes.c_uint64,
),
'kernarg_segment_size': (
enums.HSA_EXECUTABLE_SYMBOL_INFO_KERNEL_KERNARG_SEGMENT_SIZE,
ctypes.c_uint32,
),
'group_segment_size': (
enums.HSA_EXECUTABLE_SYMBOL_INFO_KERNEL_GROUP_SEGMENT_SIZE,
ctypes.c_uint32,
),
'private_segment_size': (
enums.HSA_EXECUTABLE_SYMBOL_INFO_KERNEL_PRIVATE_SEGMENT_SIZE,
ctypes.c_uint32,
),
}
def __init__(self, name, symbol_id):
self._id = symbol_id
self.name = name
class MemoryPointer(object):
__hsa_memory__ = True
def __init__(self, context, pointer, size, finalizer=None):
assert isinstance(context, Context)
self.context = context
self.device_pointer = pointer
self.size = size
self._hsa_memsize_ = size
self.finalizer = finalizer
self.is_managed = finalizer is not None
self.is_alive = True
self.refct = 0
def __del__(self):
try:
if self.is_managed and self.is_alive:
self.finalizer()
except:
traceback.print_exc()
def own(self):
return OwnedPointer(weakref.proxy(self))
def free(self):
"""
        Forces the device memory to be freed immediately.
"""
if self.is_managed:
if not self.is_alive:
raise RuntimeError("Freeing dead memory")
self.finalizer()
self.is_alive = False
def view(self):
pointer = self.device_pointer.value
view = MemoryPointer(self.context, pointer, self.size)
return OwnedPointer(weakref.proxy(self), view)
@property
def device_ctypes_pointer(self):
return self.device_pointer
def allow_access_to(self, *agents):
"""
Grant access to given *agents*.
        Upon return, only the listed agents and the owner agent have direct
access to this pointer.
"""
ct = len(agents)
if ct == 0:
return
agent_array = (ct * drvapi.hsa_agent_t)(*[a._id for a in agents])
hsa.hsa_amd_agents_allow_access(ct, agent_array, None,
self.device_pointer)
class HostMemory(mviewbuf.MemAlloc):
def __init__(self, context, owner, pointer, size):
self.context = context
self.owned = owner
self.size = size
self.host_pointer = pointer
self.handle = self.host_pointer
# For buffer interface
self._buflen_ = self.size
self._bufptr_ = self.host_pointer.value
def own(self):
return self
class OwnedPointer(object):
def __init__(self, memptr, view=None):
self._mem = memptr
self._mem.refct += 1
if view is None:
self._view = self._mem
else:
assert not view.is_managed
self._view = view
def __del__(self):
try:
self._mem.refct -= 1
assert self._mem.refct >= 0
if self._mem.refct == 0:
self._mem.free()
except ReferenceError:
pass
except:
traceback.print_exc()
def __getattr__(self, fname):
"""Proxy MemoryPointer methods
"""
return getattr(self._view, fname)
class Context(object):
"""
A context is associated with a component
"""
"""
Parameters:
agent the agent, and instance of the class Agent
"""
# a weak set of active Stream objects
_active_streams = weakref.WeakSet()
def __init__(self, agent):
self._agent = weakref.proxy(agent)
if self._agent.is_component: # only components have queues
qs = agent.queue_max_size
defq = self._agent.create_queue_multi(qs, callback=self._callback)
self._defaultqueue = defq.owned()
self.allocations = utils.UniqueDict()
# get pools
coarse_flag = enums_ext.HSA_AMD_MEMORY_POOL_GLOBAL_FLAG_COARSE_GRAINED
fine_flag = enums_ext.HSA_AMD_MEMORY_POOL_GLOBAL_FLAG_FINE_GRAINED
alloc_mps = [mp for mp in agent.mempools.globals if mp.alloc_allowed]
self._coarsegrain_mempool = None
self._finegrain_mempool = None
for mp in alloc_mps:
if mp.supports(coarse_flag):
self._coarsegrain_mempool = mp
if mp.supports(fine_flag):
self._finegrain_mempool = mp
def _callback(self, status, queue):
drvapi._check_error(status, queue)
sys.exit(1)
@property
def unproxy(self):
# This is a trick to help handle weakproxy comparison with actual
# instance.
# See https://stackoverflow.com/a/49319989 for inspiration and the
# whole page for more general discussion.
return self
@property
def default_queue(self):
return self._defaultqueue
@property
def agent(self):
return self._agent
@property
def coarsegrain_mempool(self):
if self._coarsegrain_mempool is None:
msg = 'coarsegrain mempool is not available in {}'.format(self._agent)
raise ValueError(msg)
return self._coarsegrain_mempool
@property
def finegrain_mempool(self):
if self._finegrain_mempool is None:
msg = 'finegrain mempool is not available in {}'.format(self._agent)
raise ValueError(msg)
return self._finegrain_mempool
def memalloc(self, nbytes, memTypeFlags=None, hostAccessible=True):
"""
Allocates memory.
Parameters:
        nbytes          the number of bytes to allocate.
        memTypeFlags    the flags for which the memory region must have
                        support; due to the inherent rawness of the
                        underlying call, the validity of the flags is not
                        checked (cf. C language).
        hostAccessible  boolean as to whether the region in which the
                        allocation takes place should be host accessible.
"""
hw = self._agent.device
all_reg = self._agent.regions
flag_ok_r = list() # regions which pass the memTypeFlags test
regions = list()
# don't support DSP
if hw == "GPU" or hw == "CPU":
# check user requested flags
if memTypeFlags is not None:
for r in all_reg:
count = 0
for flags in memTypeFlags:
if r.supports(flags):
count += 1
if count == len(memTypeFlags):
flag_ok_r.append(r)
else:
flag_ok_r = all_reg
# check system required flags for allocation
for r in flag_ok_r:
# check the mem region is coarse grained if dGPU present
# TODO: this probably ought to explicitly check for a dGPU.
if (hw == "GPU" and
not r.supports(enums.HSA_REGION_GLOBAL_FLAG_COARSE_GRAINED)):
continue
# check accessibility criteria
if hostAccessible:
if r.host_accessible:
regions.append(r)
else:
if not r.host_accessible:
regions.append(r)
else:
raise RuntimeError("Unknown device type string \"%s\"" % hw)
assert len(regions) > 0, "No suitable memory regions found."
# walk though valid regions trying to malloc until there's none left
mem = None
for region_id in regions:
try:
mem = MemRegion.instance_for(self._agent, region_id)\
.allocate(nbytes)
except HsaApiError: # try next memory region if an allocation fails
pass
else: # allocation succeeded, stop looking for memory
break
if mem is None:
raise RuntimeError("Memory allocation failed. No agent/region \
combination could meet allocation restraints \
(hardware = %s, size = %s, flags = %s)." % \
( hw, nbytes, memTypeFlags))
fin = _make_mem_finalizer(hsa.hsa_memory_free)
ret = MemoryPointer(weakref.proxy(self), mem, nbytes,
finalizer=fin(self, mem))
if mem.value is None:
raise RuntimeError("MemoryPointer has no value")
self.allocations[mem.value] = ret
return ret.own()
def mempoolalloc(self, nbytes, allow_access_to=(), finegrain=False):
"""
Allocates memory in a memory pool.
Parameters:
*nbytes* the number of bytes to allocate.
        *allow_access_to* agents that should be granted access to the buffer.
        *finegrain* when True, allocate from the fine-grained pool instead
        of the coarse-grained one.
"""
mempool = (self.finegrain_mempool
if finegrain
else self.coarsegrain_mempool)
buff = mempool.allocate(nbytes)
fin = _make_mem_finalizer(hsa.hsa_amd_memory_pool_free)
mp = MemoryPointer(weakref.proxy(self), buff, nbytes,
finalizer=fin(self, buff))
mp.allow_access_to(*allow_access_to)
self.allocations[buff.value] = mp
return mp.own()
def memhostalloc(self, size, finegrain, allow_access_to):
mem = self.mempoolalloc(size, allow_access_to=allow_access_to,
finegrain=finegrain)
return HostMemory(weakref.proxy(self), owner=mem,
pointer=mem.device_pointer, size=mem.size)
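# Illustrative sketch only: the two allocation paths exposed by Context.
# memalloc() walks the agent's memory regions, mempoolalloc() goes through
# the AMD memory-pool API; both return an OwnedPointer whose refcount
# drives the finalizer.
def _context_alloc_sketch(context, n=1024):
    dev_ptr = context.memalloc(n, hostAccessible=False)
    pool_ptr = context.mempoolalloc(n, finegrain=True)
    return dev_ptr, pool_ptr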
class Stream(object):
"""
An asynchronous stream for async API
"""
def __init__(self):
self._signals = deque()
self._callbacks = defaultdict(list)
def _add_signal(self, signal):
"""
Add a signal that corresponds to an async task.
"""
# XXX: too many pending signals seem to cause async copy to hang
if len(self._signals) > 100:
self._sync(50)
self._signals.append(signal)
def _add_callback(self, callback):
assert callable(callback)
self._callbacks[self._get_last_signal()].append(callback)
def _get_last_signal(self):
"""
Get the last signal.
"""
return self._signals[-1] if self._signals else None
def synchronize(self):
"""
Synchronize the stream.
"""
self._sync(len(self._signals))
def _sync(self, limit):
ct = 0<|fim▁hole|> if ct >= limit:
break
sig = self._signals.popleft()
if sig.load_relaxed() == 1:
sig.wait_until_ne_one()
for cb in self._callbacks[sig]:
cb()
del self._callbacks[sig]
ct += 1
@contextmanager
def auto_synchronize(self):
'''
A context manager that waits for all commands in this stream to execute
and commits any pending memory transfers upon exiting the context.
'''
yield self
self.synchronize()
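# Illustrative sketch only: typical stream usage. Asynchronous copies
# append completion signals to the stream; leaving the context manager
# below waits for all of them.
def _stream_sketch():
    stream = hsa.create_stream()
    with stream.auto_synchronize():
        pass  # enqueue async_host_to_dGPU(...) etc. here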
def _make_mem_finalizer(dtor):
"""
finalises memory
Parameters:
dtor a function that will delete/free held memory from a reference
Returns:
Finalising function
"""
def mem_finalize(context, handle):
allocations = context.allocations
sync = hsa.implicit_sync
def core():
_logger.info("Current allocations: %s", allocations)
if allocations:
_logger.info("Attempting delete on %s" % handle.value)
del allocations[handle.value]
sync() # implicit sync
dtor(handle)
return core
return mem_finalize
def device_pointer(obj):
"Get the device pointer as an integer"
return device_ctypes_pointer(obj).value
def device_ctypes_pointer(obj):
"Get the ctypes object for the device pointer"
if obj is None:
return c_void_p(0)
require_device_memory(obj)
return obj.device_ctypes_pointer
def is_device_memory(obj):
"""All HSA dGPU memory object is recognized as an instance with the
attribute "__hsa_memory__" defined and its value evaluated to True.
All HSA memory object should also define an attribute named
"device_pointer" which value is an int(or long) object carrying the pointer
value of the device memory address. This is not tested in this method.
"""
return getattr(obj, '__hsa_memory__', False)
def require_device_memory(obj):
"""A sentry for methods that accept HSA memory object.
"""
if not is_device_memory(obj):
raise Exception("Not a HSA memory object.")
def host_pointer(obj):
"""
NOTE: The underlying data pointer from the host data buffer is used and
it should not be changed until the operation which can be asynchronous
completes.
"""
if isinstance(obj, int):
return obj
forcewritable = isinstance(obj, np.void)
return mviewbuf.memoryview_get_buffer(obj, forcewritable)
def host_to_dGPU(context, dst, src, size):
"""
Copy data from a host memory region to a dGPU.
Parameters:
context the dGPU context
dst a pointer to the destination location in dGPU memory
src a pointer to the source location in host memory
size the size (in bytes) of data to transfer
"""
_logger.info("CPU->dGPU")
if size < 0:
raise ValueError("Invalid size given: %s" % size)
hsa.hsa_memory_copy(device_pointer(dst), host_pointer(src), size)
def dGPU_to_host(context, dst, src, size):
"""
    Copy data from a dGPU memory region to the host.
    Parameters:
    context the dGPU context
    dst a pointer to the destination location in host memory
    src a pointer to the source location in dGPU memory
size the size (in bytes) of data to transfer
"""
_logger.info("dGPU->CPU")
if size < 0:
raise ValueError("Invalid size given: %s" % size)
hsa.hsa_memory_copy(host_pointer(dst), device_pointer(src), size)
def dGPU_to_dGPU(context, dst, src, size):
_logger.info("dGPU->dGPU")
if size < 0:
raise ValueError("Invalid size given: %s" % size)
hsa.hsa_memory_copy(device_pointer(dst), device_pointer(src), size)
def async_host_to_dGPU(dst_ctx, src_ctx, dst, src, size, stream):
_logger.info("Async CPU->dGPU")
async_copy_dgpu(dst_ctx=dst_ctx, src_ctx=src_ctx,
src=host_pointer(src), dst=device_pointer(dst),
size=size, stream=stream)
def async_dGPU_to_host(dst_ctx, src_ctx, dst, src, size, stream):
_logger.info("Async dGPU->CPU")
async_copy_dgpu(dst_ctx=dst_ctx, src_ctx=src_ctx,
dst=host_pointer(dst), src=device_pointer(src),
size=size, stream=stream)
def async_dGPU_to_dGPU(dst_ctx, src_ctx, dst, src, size, stream):
_logger.info("Async dGPU->dGPU")
async_copy_dgpu(dst_ctx=dst_ctx, src_ctx=src_ctx,
dst=device_pointer(dst), src=device_pointer(src),
size=size, stream=stream)
def async_copy_dgpu(dst_ctx, src_ctx, dst, src, size, stream):
if size < 0:
raise ValueError("Invalid size given: %s" % size)
completion_signal = hsa.create_signal(1)
dependent_signal = stream._get_last_signal()
if dependent_signal is not None:
dsignal = drvapi.hsa_signal_t(dependent_signal._id)
signals = (1, ctypes.byref(dsignal), completion_signal)
else:
signals = (0, None, completion_signal)
hsa.hsa_amd_memory_async_copy(dst, dst_ctx._agent._id,
src, src_ctx._agent._id,
size, *signals)
stream._add_signal(completion_signal)
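# Ordering note (illustrative): each async copy takes the stream's last
# completion signal as a dependency, so back-to-back calls form a chain:
#
#   async_host_to_dGPU(gpu_ctx, cpu_ctx, dst, src, n, stream)
#   async_dGPU_to_host(cpu_ctx, gpu_ctx, out, dst, n, stream)
#   stream.synchronize()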
def dgpu_count():
"""
Returns the number of discrete GPUs present on the current machine.
"""
ngpus = 0
try:
for a in hsa.agents:
if a.is_component and a.device == 'GPU':
ngpus += 1
except:
pass
return ngpus
"""
True if a dGPU is present in the current machine.
"""
dgpu_present = dgpu_count() > 0<|fim▁end|> | while self._signals: |
<|file_name|>graph.py<|end_file_name|><|fim▁begin|># This file is part of Androguard.
#
# Copyright (c) 2012 Geoffroy Gueguen <[email protected]>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from collections import defaultdict
from androguard.decompiler.dad.basic_blocks import (build_node_from_block,
StatementBlock, CondBlock)
from androguard.decompiler.dad.instruction import Variable
logger = logging.getLogger('dad.graph')
class Graph(object):
def __init__(self):
self.entry = None
self.exit = None
self.nodes = list()
self.rpo = []
self.edges = defaultdict(list)
self.catch_edges = defaultdict(list)
self.reverse_edges = defaultdict(list)
self.reverse_catch_edges = defaultdict(list)
self.loc_to_ins = None
self.loc_to_node = None
def sucs(self, node):
return self.edges.get(node, [])
def all_sucs(self, node):
return self.edges.get(node, []) + self.catch_edges.get(node, [])
def preds(self, node):
return [n for n in self.reverse_edges.get(node, [])
if not n.in_catch]
def all_preds(self, node):
return (self.reverse_edges.get(node, []) +
self.reverse_catch_edges.get(node, []))
def add_node(self, node):
self.nodes.append(node)
def add_edge(self, e1, e2):
lsucs = self.edges[e1]
if e2 not in lsucs:
lsucs.append(e2)
lpreds = self.reverse_edges[e2]
if e1 not in lpreds:
lpreds.append(e1)
def add_catch_edge(self, e1, e2):
lsucs = self.catch_edges[e1]
if e2 not in lsucs:
lsucs.append(e2)
lpreds = self.reverse_catch_edges[e2]
if e1 not in lpreds:
lpreds.append(e1)
def remove_node(self, node):
preds = self.reverse_edges.get(node, [])
for pred in preds:
self.edges[pred].remove(node)
succs = self.edges.get(node, [])
for suc in succs:
self.reverse_edges[suc].remove(node)
exc_preds = self.reverse_catch_edges.pop(node, [])
for pred in exc_preds:
self.catch_edges[pred].remove(node)
exc_succs = self.catch_edges.pop(node, [])
for suc in exc_succs:
self.reverse_catch_edges[suc].remove(node)
self.nodes.remove(node)
if node in self.rpo:
self.rpo.remove(node)
del node
def number_ins(self):
self.loc_to_ins = {}
self.loc_to_node = {}
num = 0
for node in self.rpo:
start_node = num
num = node.number_ins(num)
end_node = num - 1
self.loc_to_ins.update(node.get_loc_with_ins())
self.loc_to_node[(start_node, end_node)] = node
def get_ins_from_loc(self, loc):
return self.loc_to_ins.get(loc)
def get_node_from_loc(self, loc):
for (start, end), node in self.loc_to_node.iteritems():
if start <= loc <= end:
return node
def remove_ins(self, loc):
ins = self.get_ins_from_loc(loc)
self.get_node_from_loc(loc).remove_ins(loc, ins)
self.loc_to_ins.pop(loc)
def split_if_nodes(self):
'''
        Split IfNodes into two nodes: the first node is the header node, the
second one is only composed of the jump condition.
'''
node_map = {n: n for n in self.nodes}
to_update = set()
for node in self.nodes[:]:
if node.type.is_cond:
if len(node.get_ins()) > 1:
pre_ins = node.get_ins()[:-1]
last_ins = node.get_ins()[-1]
pre_node = StatementBlock('%s-pre' % node.name, pre_ins)
cond_node = CondBlock('%s-cond' % node.name, [last_ins])
node_map[node] = pre_node
node_map[pre_node] = pre_node
node_map[cond_node] = cond_node
pre_node.copy_from(node)
cond_node.copy_from(node)
for var in node.var_to_declare:
pre_node.add_variable_declaration(var)
pre_node.type.is_stmt = True
cond_node.true = node.true
cond_node.false = node.false
for pred in self.all_preds(node):
pred_node = node_map[pred]
# Verify that the link is not an exception link
if node not in self.sucs(pred):
self.add_catch_edge(pred_node, pre_node)
continue
if pred is node:
pred_node = cond_node
if pred.type.is_cond: # and not (pred is node):
if pred.true is node:
pred_node.true = pre_node
if pred.false is node:
pred_node.false = pre_node
self.add_edge(pred_node, pre_node)
for suc in self.sucs(node):
self.add_edge(cond_node, node_map[suc])
# We link all the exceptions to the pre node instead of the
# condition node, which should not trigger any of them.
for suc in self.catch_edges.get(node, []):
self.add_catch_edge(pre_node, node_map[suc])
if node is self.entry:
self.entry = pre_node
self.add_node(pre_node)
self.add_node(cond_node)
self.add_edge(pre_node, cond_node)
pre_node.update_attribute_with(node_map)
cond_node.update_attribute_with(node_map)
self.remove_node(node)
else:
to_update.add(node)
for node in to_update:
node.update_attribute_with(node_map)
def simplify(self):
'''
Simplify the CFG by merging/deleting statement nodes when possible:
If statement B follows statement A and if B has no other predecessor
besides A, then we can merge A and B into a new statement node.
We also remove nodes which do nothing except redirecting the control
flow (nodes which only contains a goto).
'''
redo = True
while redo:
redo = False
node_map = {}
to_update = set()
for node in self.nodes[:]:
if node.type.is_stmt and node in self.nodes:
sucs = self.all_sucs(node)
if len(sucs) != 1:
continue
suc = sucs[0]
if len(node.get_ins()) == 0:
if any(pred.type.is_switch
for pred in self.all_preds(node)):
continue
if node is suc:
continue
node_map[node] = suc
for pred in self.all_preds(node):
pred.update_attribute_with(node_map)
if node not in self.sucs(pred):
self.add_catch_edge(pred, suc)
continue
self.add_edge(pred, suc)
redo = True
if node is self.entry:
self.entry = suc
self.remove_node(node)
elif (suc.type.is_stmt and
len(self.all_preds(suc)) == 1 and
not (suc in self.catch_edges) and
not ((node is suc) or (suc is self.entry))):
ins_to_merge = suc.get_ins()
node.add_ins(ins_to_merge)
for var in suc.var_to_declare:
node.add_variable_declaration(var)
new_suc = self.sucs(suc)[0]
if new_suc:
self.add_edge(node, new_suc)
for exception_suc in self.catch_edges.get(suc, []):
self.add_catch_edge(node, exception_suc)
redo = True
self.remove_node(suc)
else:
to_update.add(node)
for node in to_update:
node.update_attribute_with(node_map)
def compute_rpo(self):
'''
Number the nodes in reverse post order.
        An RPO traversal visits as many predecessors of a node as possible
before visiting the node itself.
'''
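        # e.g. with 4 nodes, nb = 5 and a node with post-order number po gets
        # num = 5 - po, so the entry (last in post-order) is numbered 1.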
nb = len(self.nodes) + 1
for node in self.post_order():
node.num = nb - node.po
self.rpo = sorted(self.nodes, key=lambda n: n.num)
def post_order(self):
'''
        Return the nodes of the graph in post-order, i.e. we visit all the
children of a node before visiting the node itself.
'''
def _visit(n, cnt):
visited.add(n)
for suc in self.all_sucs(n):
                if suc not in visited:
for cnt, s in _visit(suc, cnt):
yield cnt, s
n.po = cnt
yield cnt + 1, n
visited = set()
for _, node in _visit(self.entry, 1):
yield node
def draw(self, name, dname, draw_branches=True):
from pydot import Dot, Edge
g = Dot()
g.set_node_defaults(color='lightgray', style='filled', shape='box',
fontname='Courier', fontsize='10')
for node in sorted(self.nodes, key=lambda x: x.num):
if draw_branches and node.type.is_cond:
g.add_edge(Edge(str(node), str(node.true), color='green'))
g.add_edge(Edge(str(node), str(node.false), color='red'))
else:
for suc in self.sucs(node):
g.add_edge(Edge(str(node), str(suc), color='blue'))
for except_node in self.catch_edges.get(node, []):
g.add_edge(Edge(str(node), str(except_node),
color='black', style='dashed'))
g.write_png('%s/%s.png' % (dname, name))
def immediate_dominators(self):
return dom_lt(self)
def __len__(self):
return len(self.nodes)
def __repr__(self):
return str(self.nodes)
def __iter__(self):
for node in self.nodes:
yield node
def dom_lt(graph):
    '''Dominator algorithm from Lengauer-Tarjan'''
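    # Outline of the steps below (summary added for readability, paraphrasing
    # the Lengauer-Tarjan paper): Step 1 numbers nodes by DFS; Step 2 computes
    # semidominators using the _eval/_link forest; Step 3 resolves the
    # deferred nodes stored in each bucket; Step 4 converts semidominators
    # into immediate dominators.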
def _dfs(v, n):
semi[v] = n = n + 1
vertex[n] = label[v] = v
ancestor[v] = 0
for w in graph.all_sucs(v):
if not semi[w]:
parent[w] = v
n = _dfs(w, n)
pred[w].add(v)
return n
def _compress(v):
u = ancestor[v]
if ancestor[u]:
_compress(u)
if semi[label[u]] < semi[label[v]]:
label[v] = label[u]
ancestor[v] = ancestor[u]
def _eval(v):
if ancestor[v]:
_compress(v)
return label[v]
return v
def _link(v, w):
ancestor[w] = v
parent, ancestor, vertex = {}, {}, {}
label, dom = {}, {}
pred, bucket = defaultdict(set), defaultdict(set)
# Step 1:
semi = {v: 0 for v in graph.nodes}
n = _dfs(graph.entry, 0)
for i in xrange(n, 1, -1):
w = vertex[i]
# Step 2:
for v in pred[w]:
u = _eval(v)
y = semi[w] = min(semi[w], semi[u])
bucket[vertex[y]].add(w)
pw = parent[w]
_link(pw, w)
# Step 3:
bpw = bucket[pw]
while bpw:
v = bpw.pop()
u = _eval(v)
dom[v] = u if semi[u] < semi[v] else pw
# Step 4:
for i in range(2, n + 1):
w = vertex[i]
dw = dom[w]
if dw != vertex[semi[w]]:
dom[w] = dom[dw]
dom[graph.entry] = None
return dom
def bfs(start):
to_visit = [start]
visited = set([start])
while to_visit:
node = to_visit.pop(0)
yield node
if node.exception_analysis:
for _, _, exception in node.exception_analysis.exceptions:
if exception not in visited:
to_visit.append(exception)
visited.add(exception)
for _, _, child in node.childs:
if child not in visited:
to_visit.append(child)
visited.add(child)
class GenInvokeRetName(object):
def __init__(self):
self.num = 0
self.ret = None
def new(self):
self.num += 1
self.ret = Variable('tmp%d' % self.num)
return self.ret
def set_to(self, ret):
self.ret = ret
def last(self):
return self.ret
def make_node(graph, block, block_to_node, vmap, gen_ret):
node = block_to_node.get(block)
if node is None:
node = build_node_from_block(block, vmap, gen_ret)
block_to_node[block] = node
if block.exception_analysis:
for _type, _, exception_target in block.exception_analysis.exceptions:
exception_node = block_to_node.get(exception_target)
if exception_node is None:
exception_node = build_node_from_block(exception_target,
vmap, gen_ret, _type)
exception_node.in_catch = True
block_to_node[exception_target] = exception_node
graph.add_catch_edge(node, exception_node)
for _, _, child_block in block.childs:
child_node = block_to_node.get(child_block)
if child_node is None:
child_node = build_node_from_block(child_block, vmap, gen_ret)
block_to_node[child_block] = child_node
graph.add_edge(node, child_node)
if node.type.is_switch:
node.add_case(child_node)
if node.type.is_cond:
if_target = ((block.end / 2) - (block.last_length / 2) +
node.off_last_ins)
child_addr = child_block.start / 2
if if_target == child_addr:
node.true = child_node
else:
node.false = child_node
    # Check that both branches of the if point to something.
    # It may happen that both branches point to the same node; in this case
# the false branch will be None. So we set it to the right node.<|fim▁hole|> # a statement node
if node.type.is_cond and node.false is None:
node.false = node.true
return node
def construct(start_block, vmap, exceptions):
bfs_blocks = bfs(start_block)
graph = Graph()
gen_ret = GenInvokeRetName()
# Construction of a mapping of basic blocks into Nodes
block_to_node = {}
exceptions_start_block = []
for exception in exceptions:
for _, _, block in exception.exceptions:
exceptions_start_block.append(block)
for block in bfs_blocks:
node = make_node(graph, block, block_to_node, vmap, gen_ret)
graph.add_node(node)
graph.entry = block_to_node[start_block]
del block_to_node, bfs_blocks
graph.compute_rpo()
graph.number_ins()
for node in graph.rpo:
preds = [pred for pred in graph.all_preds(node)
if pred.num < node.num]
if preds and all(pred.in_catch for pred in preds):
node.in_catch = True
    # Create a list of the nodes which are 'return' nodes.
    # There should be one and only one node of this type.
    # If this is not the case, try to continue anyway by setting the exit
    # node to the one with the greatest RPO number (not necessarily correct).
lexit_nodes = [node for node in graph if node.type.is_return]
if len(lexit_nodes) > 1:
# Not sure that this case is possible...
        logger.error('Multiple exit nodes found!')
graph.exit = graph.rpo[-1]
elif len(lexit_nodes) < 1:
        # A method can have no return if it has throw statement(s) or if its
        # body is a while(1) without break/return.
        logger.debug('No exit node found!')
else:
graph.exit = lexit_nodes[0]
return graph<|fim▁end|> | # TODO: In this situation, we should transform the condition node into |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Mattis Marjak ([email protected])
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,<|fim▁hole|> * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#![deny(missing_debug_implementations, missing_copy_implementations,
trivial_casts, trivial_numeric_casts, unsafe_code,
unstable_features, unused_import_braces, unused_qualifications)]
#[macro_use]
extern crate log;
extern crate time;
mod timer;
mod storage;
#[macro_use]
mod macros;
#[cfg(test)]
mod test;
pub use timer::{Timer, TimerAction};
pub use storage::{TimerStorage, TimerEvent};<|fim▁end|> | |
<|file_name|>resolutionCache.ts<|end_file_name|><|fim▁begin|>namespace ts.tscWatch {
describe("unittests:: tsc-watch:: resolutionCache:: tsc-watch module resolution caching", () => {
const scenario = "resolutionCache";
it("works", () => {
const root = {
path: "/a/d/f0.ts",
content: `import {x} from "f1"`
};
const imported = {
path: "/a/f1.ts",
content: `foo()`
};
const files = [root, imported, libFile];
const host = createWatchedSystem(files);
const watch = createWatchOfFilesAndCompilerOptions([root.path], host, { module: ModuleKind.AMD });
const f1IsNotModule = getDiagnosticOfFileFromProgram(watch.getCurrentProgram().getProgram(), root.path, root.content.indexOf('"f1"'), '"f1"'.length, Diagnostics.File_0_is_not_a_module, imported.path);
const cannotFindFoo = getDiagnosticOfFileFromProgram(watch.getCurrentProgram().getProgram(), imported.path, imported.content.indexOf("foo"), "foo".length, Diagnostics.Cannot_find_name_0, "foo");
// ensure that imported file was found
checkOutputErrorsInitial(host, [f1IsNotModule, cannotFindFoo]);
const originalFileExists = host.fileExists;
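            // Test strategy (added note): stubbing fileExists with notImplemented
            // below proves the module resolution is served from the cache, since
            // any disk probe would throw.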
{
const newContent = `import {x} from "f1"
var x: string = 1;`;
root.content = newContent;
host.reloadFS(files);
// patch fileExists to make sure that disk is not touched
host.fileExists = notImplemented;
// trigger synchronization to make sure that import will be fetched from the cache
host.runQueuedTimeoutCallbacks();
// ensure file has correct number of errors after edit
checkOutputErrorsIncremental(host, [
f1IsNotModule,
getDiagnosticOfFileFromProgram(watch.getCurrentProgram().getProgram(), root.path, newContent.indexOf("var x") + "var ".length, "x".length, Diagnostics.Type_0_is_not_assignable_to_type_1, 1, "string"),
cannotFindFoo
]);
}
{
let fileExistsIsCalled = false;
host.fileExists = (fileName): boolean => {
if (fileName === "lib.d.ts") {
return false;
}
fileExistsIsCalled = true;
assert.isTrue(fileName.indexOf("/f2.") !== -1);
return originalFileExists.call(host, fileName);
};
root.content = `import {x} from "f2"`;
host.reloadFS(files);
// trigger synchronization to make sure that system will try to find 'f2' module on disk
host.runQueuedTimeoutCallbacks();
// ensure file has correct number of errors after edit
checkOutputErrorsIncremental(host, [
getDiagnosticModuleNotFoundOfFile(watch.getCurrentProgram().getProgram(), root, "f2")
]);
assert.isTrue(fileExistsIsCalled);
}
{
let fileExistsCalled = false;
host.fileExists = (fileName): boolean => {
if (fileName === "lib.d.ts") {
return false;
}
fileExistsCalled = true;
assert.isTrue(fileName.indexOf("/f1.") !== -1);
return originalFileExists.call(host, fileName);
};
const newContent = `import {x} from "f1"`;
root.content = newContent;
host.reloadFS(files);
host.runQueuedTimeoutCallbacks();
checkOutputErrorsIncremental(host, [f1IsNotModule, cannotFindFoo]);
assert.isTrue(fileExistsCalled);
}
});
it("loads missing files from disk", () => {
const root = {
path: `/a/foo.ts`,
content: `import {x} from "bar"`
};
const imported = {
path: `/a/bar.d.ts`,
content: `export const y = 1;`
};
const files = [root, libFile];
const host = createWatchedSystem(files);
const originalFileExists = host.fileExists;
let fileExistsCalledForBar = false;
host.fileExists = fileName => {
if (fileName === "lib.d.ts") {
return false;
}
if (!fileExistsCalledForBar) {
fileExistsCalledForBar = fileName.indexOf("/bar.") !== -1;
}
return originalFileExists.call(host, fileName);
};
const watch = createWatchOfFilesAndCompilerOptions([root.path], host, { module: ModuleKind.AMD });
assert.isTrue(fileExistsCalledForBar, "'fileExists' should be called");
checkOutputErrorsInitial(host, [
getDiagnosticModuleNotFoundOfFile(watch.getCurrentProgram().getProgram(), root, "bar")
]);
fileExistsCalledForBar = false;
root.content = `import {y} from "bar"`;
host.reloadFS(files.concat(imported));
host.runQueuedTimeoutCallbacks();
checkOutputErrorsIncremental(host, emptyArray);
assert.isTrue(fileExistsCalledForBar, "'fileExists' should be called.");
});
it("should compile correctly when resolved module goes missing and then comes back (module is not part of the root)", () => {
const root = {
path: `/a/foo.ts`,
content: `import {x} from "bar"`
};
const imported = {
path: `/a/bar.d.ts`,
content: `export const y = 1;export const x = 10;`
};
<|fim▁hole|> const files = [root, libFile];
const filesWithImported = files.concat(imported);
const host = createWatchedSystem(filesWithImported);
const originalFileExists = host.fileExists;
let fileExistsCalledForBar = false;
host.fileExists = fileName => {
if (fileName === "lib.d.ts") {
return false;
}
if (!fileExistsCalledForBar) {
fileExistsCalledForBar = fileName.indexOf("/bar.") !== -1;
}
return originalFileExists.call(host, fileName);
};
const watch = createWatchOfFilesAndCompilerOptions([root.path], host, { module: ModuleKind.AMD });
assert.isTrue(fileExistsCalledForBar, "'fileExists' should be called");
checkOutputErrorsInitial(host, emptyArray);
fileExistsCalledForBar = false;
host.reloadFS(files);
host.runQueuedTimeoutCallbacks();
assert.isTrue(fileExistsCalledForBar, "'fileExists' should be called.");
checkOutputErrorsIncremental(host, [
getDiagnosticModuleNotFoundOfFile(watch.getCurrentProgram().getProgram(), root, "bar")
]);
fileExistsCalledForBar = false;
host.reloadFS(filesWithImported);
host.checkTimeoutQueueLengthAndRun(1);
checkOutputErrorsIncremental(host, emptyArray);
assert.isTrue(fileExistsCalledForBar, "'fileExists' should be called.");
});
verifyTscWatch({
scenario,
subScenario: "works when module resolution changes to ambient module",
commandLineArgs: ["-w", "/a/b/foo.ts"],
sys: () => createWatchedSystem([{
path: "/a/b/foo.ts",
content: `import * as fs from "fs";`
}, libFile], { currentDirectory: "/a/b" }),
changes: [
sys => {
sys.ensureFileOrFolder({
path: "/a/b/node_modules/@types/node/package.json",
content: `
{
"main": ""
}
`
});
sys.ensureFileOrFolder({
path: "/a/b/node_modules/@types/node/index.d.ts",
content: `
declare module "fs" {
export interface Stats {
isFile(): boolean;
}
}`
});
sys.runQueuedTimeoutCallbacks();
return "npm install node types";
}
]
});
verifyTscWatch({
scenario,
subScenario: "works when included file with ambient module changes",
commandLineArgs: ["--w", "/a/b/foo.ts", "/a/b/bar.d.ts"],
sys: () => {
const root = {
path: "/a/b/foo.ts",
content: `
import * as fs from "fs";
import * as u from "url";
`
};
const file = {
path: "/a/b/bar.d.ts",
content: `
declare module "url" {
export interface Url {
href?: string;
}
}
`
};
return createWatchedSystem([root, file, libFile], { currentDirectory: "/a/b" });
},
changes: [
sys => {
sys.appendFile("/a/b/bar.d.ts", `
declare module "fs" {
export interface Stats {
isFile(): boolean;
}
}
`);
sys.runQueuedTimeoutCallbacks();
return "Add fs definition";
}
]
});
verifyTscWatch({
scenario,
subScenario: "works when reusing program with files from external library",
commandLineArgs: ["--w", "-p", "/a/b/projects/myProject/src"],
sys: () => {
const configDir = "/a/b/projects/myProject/src/";
const file1: File = {
path: configDir + "file1.ts",
content: 'import module1 = require("module1");\nmodule1("hello");'
};
const file2: File = {
path: configDir + "file2.ts",
content: 'import module11 = require("module1");\nmodule11("hello");'
};
const module1: File = {
path: "/a/b/projects/myProject/node_modules/module1/index.js",
content: "module.exports = options => { return options.toString(); }"
};
const configFile: File = {
path: configDir + "tsconfig.json",
content: JSON.stringify({
compilerOptions: {
allowJs: true,
rootDir: ".",
outDir: "../dist",
moduleResolution: "node",
maxNodeModuleJsDepth: 1
}
})
};
return createWatchedSystem([file1, file2, module1, libFile, configFile], { currentDirectory: "/a/b/projects/myProject/" });
},
changes: [
sys => {
sys.appendFile("/a/b/projects/myProject/src/file1.ts", "\n;");
sys.runQueuedTimeoutCallbacks();
return "Add new line to file1";
}
]
});
verifyTscWatch({
scenario,
subScenario: "works when renaming node_modules folder that already contains @types folder",
commandLineArgs: ["--w", `${projectRoot}/a.ts`],
sys: () => {
const file: File = {
path: `${projectRoot}/a.ts`,
content: `import * as q from "qqq";`
};
const module: File = {
path: `${projectRoot}/node_modules2/@types/qqq/index.d.ts`,
content: "export {}"
};
return createWatchedSystem([file, libFile, module], { currentDirectory: projectRoot });
},
changes: [
sys => {
sys.renameFolder(`${projectRoot}/node_modules2`, `${projectRoot}/node_modules`);
sys.runQueuedTimeoutCallbacks();
return "npm install";
}
]
});
describe("ignores files/folder changes in node_modules that start with '.'", () => {
function verifyIgnore(subScenario: string, commandLineArgs: readonly string[]) {
verifyTscWatch({
scenario,
subScenario: `ignores changes in node_modules that start with dot/${subScenario}`,
commandLineArgs,
sys: () => {
const file1: File = {
path: `${projectRoot}/test.ts`,
content: `import { x } from "somemodule";`
};
const file2: File = {
path: `${projectRoot}/node_modules/somemodule/index.d.ts`,
content: `export const x = 10;`
};
const config: File = {
path: `${projectRoot}/tsconfig.json`,
content: "{}"
};
return createWatchedSystem([libFile, file1, file2, config]);
},
changes: [
sys => {
const npmCacheFile: File = {
path: `${projectRoot}/node_modules/.cache/babel-loader/89c02171edab901b9926470ba6d5677e.ts`,
content: JSON.stringify({ something: 10 })
};
sys.ensureFileOrFolder(npmCacheFile);
sys.checkTimeoutQueueLength(0);
return "npm install file and folder that start with '.'";
}
]
});
}
verifyIgnore("watch without configFile", ["--w", `${projectRoot}/test.ts`]);
verifyIgnore("watch with configFile", ["--w", "-p", `${projectRoot}/tsconfig.json`]);
});
verifyTscWatch({
scenario,
subScenario: "when types in compiler option are global and installed at later point",
commandLineArgs: ["--w", "-p", `${projectRoot}/tsconfig.json`],
sys: () => {
const app: File = {
path: `${projectRoot}/lib/app.ts`,
content: `myapp.component("hello");`
};
const tsconfig: File = {
path: `${projectRoot}/tsconfig.json`,
content: JSON.stringify({
compilerOptions: {
module: "none",
types: ["@myapp/ts-types"]
}
})
};
return createWatchedSystem([app, tsconfig, libFile]);
},
changes: [
sys => {
sys.ensureFileOrFolder({
path: `${projectRoot}/node_modules/@myapp/ts-types/package.json`,
content: JSON.stringify({
version: "1.65.1",
types: "types/somefile.define.d.ts"
})
});
sys.ensureFileOrFolder({
path: `${projectRoot}/node_modules/@myapp/ts-types/types/somefile.define.d.ts`,
content: `
declare namespace myapp {
function component(str: string): number;
}`
});
sys.checkTimeoutQueueLengthAndRun(1);
return "npm install ts-types";
},
(sys, [[oldProgram, oldBuilderProgram]], watchorSolution) => {
sys.checkTimeoutQueueLength(0);
const newProgram = (watchorSolution as Watch).getProgram();
assert.strictEqual(newProgram, oldBuilderProgram, "No change so builder program should be same");
assert.strictEqual(newProgram.getProgram(), oldProgram, "No change so program should be same");
return "No change, just check program";
}
]
});
verifyTscWatch({
scenario,
subScenario: "with modules linked to sibling folder",
commandLineArgs: ["-w"],
sys: () => {
const mainPackageRoot = `${projectRoot}/main`;
const linkedPackageRoot = `${projectRoot}/linked-package`;
const mainFile: File = {
path: `${mainPackageRoot}/index.ts`,
content: "import { Foo } from '@scoped/linked-package'"
};
const config: File = {
path: `${mainPackageRoot}/tsconfig.json`,
content: JSON.stringify({
compilerOptions: { module: "commonjs", moduleResolution: "node", baseUrl: ".", rootDir: "." },
files: ["index.ts"]
})
};
const linkedPackageInMain: SymLink = {
path: `${mainPackageRoot}/node_modules/@scoped/linked-package`,
symLink: `${linkedPackageRoot}`
};
const linkedPackageJson: File = {
path: `${linkedPackageRoot}/package.json`,
content: JSON.stringify({ name: "@scoped/linked-package", version: "0.0.1", types: "dist/index.d.ts", main: "dist/index.js" })
};
const linkedPackageIndex: File = {
path: `${linkedPackageRoot}/dist/index.d.ts`,
content: "export * from './other';"
};
const linkedPackageOther: File = {
path: `${linkedPackageRoot}/dist/other.d.ts`,
content: 'export declare const Foo = "BAR";'
};
const files = [libFile, mainFile, config, linkedPackageInMain, linkedPackageJson, linkedPackageIndex, linkedPackageOther];
return createWatchedSystem(files, { currentDirectory: mainPackageRoot });
},
changes: emptyArray
});
});
}<|fim▁end|> | |
<|file_name|>google_services.py<|end_file_name|><|fim▁begin|>#!/usr/local/bin/python
"""
Handles Google Service Authentication
"""
# TODO(rrayborn): Better documentation
__author__ = "Rob Rayborn"
__copyright__ = "Copyright 2014, The Mozilla Foundation"
__license__ = "MPLv2"
__maintainer__ = "Rob Rayborn"
__email__ = "[email protected]"
__status__ = "Development"
from OpenSSL.crypto import load_pkcs12, dump_privatekey, FILETYPE_PEM
from datetime import date, datetime, timedelta
from os import environ
import json
import jwt
import requests
import time
_SECRETS_PATH = environ['SECRETS_PATH']
# Header and Grant Type are always the same for Google's API so making a
# variable instead of a file
_HEADER_JSON = {'alg':'RS256','typ':'jwt'}
_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:jwt-bearer'
# Default filenames
_CLAIMS_FILE = _SECRETS_PATH + 'claims.json'
_P12_FILE = _SECRETS_PATH + 'goog.p12'
_AUTH_FILE = _SECRETS_PATH + '.auth.tmp'
# Other defaults
_GOOG_PASSPHRASE = 'notasecret' # notasecret is the universal google passphrase
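# Flow implemented by the class below (summary, not original documentation):
# sign a JWT claim set with the P12 service-account key, POST it to Google's
# OAuth2 token endpoint using the jwt-bearer grant type, then cache the
# returned access token on disk until it expires.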
class google_service_connection(object):
def __init__(self, json_web_token=None, expiration=None, claims_file=_CLAIMS_FILE,
p12_file=_P12_FILE, auth_file=_AUTH_FILE):
self._json_web_token = None
self._expiration = None
self._auth_token = None
self._claims_file = claims_file
self._p12_file = p12_file
self._auth_file = auth_file
self.get_auth_token(json_web_token,expiration)
def get_expiration(self):
return self._expiration
def set_files(self, claims_file=None, p12_file=None,
auth_file=None):
self._claims_file = claims_file or self._claims_file
self._p12_file = p12_file or self._p12_file
self._auth_file = auth_file or self._auth_file
def _refresh_json_web_token(self, json_web_token=None, expiration=None,
force=False):
if not force and not _expired(self._expiration):
return
if json_web_token or expiration:
if json_web_token and expiration:
if not _expired(expiration):
self._json_web_token = json_web_token
self._expiration = expiration
return
#else continue
else:
raise Exception('_refresh_json_web_token: Must pass json_web_token'\
' and expiration together.')
with open(self._p12_file, 'r') as f:
pk = load_pkcs12(f.read(), _GOOG_PASSPHRASE).get_privatekey()
secret = dump_privatekey(FILETYPE_PEM, pk)
# Load claims json
with open(self._claims_file, 'r') as f:
claims_json = json.load(f)
# Modify claims data
current_time = int(time.time())
claims_json['iat'] = current_time<|fim▁hole|>
self._json_web_token = jwt.encode(
claims_json, secret, algorithm='RS256', headers=_HEADER_JSON
)
def _load_auth_token(self):
try:
with open(self._auth_file, 'r') as f:
auth_json = json.load(f)
if not _expired(auth_json['expiration']):
self._expiration = auth_json['expiration']
self._auth_token = auth_json['token']
return self._auth_token
else:
return None
except:
return None
def _save_auth_token(self):
with open(self._auth_file, 'w') as f:
data = {'token':self._auth_token, 'expiration':self._expiration}
json.dump(data, f)
def get_auth_token(self, json_web_token=None, expiration=None):
if self._load_auth_token():
return self._auth_token
self._refresh_json_web_token(json_web_token=json_web_token,
expiration=expiration)
parameters = {
'grant_type':_GRANT_TYPE,
'assertion':self._json_web_token
}
response = requests.post('https://accounts.google.com/o/oauth2/token',
data=parameters)
if response.status_code == 200:
self._auth_token = response.json()['access_token']
else:
raise Exception('Token Request results in a %s response code.' \
% response.status_code)
self._save_auth_token()
return self._auth_token
def _expired(exp):
return time.time() >= exp
def main():
gsc = google_service_connection()
if __name__ == '__main__':
main()<|fim▁end|> | claims_json['exp'] = current_time + 3600 - 1
# Remember expiration
self._expiration = current_time + 3600 |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use diem_config::network_id::NetworkContext;
use diem_crypto::{test_utils::TEST_SEED, x25519, Uniform as _};
use diem_logger::prelude::*;
use diem_types::network_address::NetworkAddress;
use futures::{
future::Future,
io::{AsyncRead, AsyncWrite},
sink::SinkExt,
stream::{Stream, StreamExt},
};
use memsocket::MemorySocket;
use netcore::transport::{
memory::MemoryTransport,
tcp::{TcpSocket, TcpTransport},
Transport, TransportExt,
};
use network::{
constants,
noise::{stream::NoiseStream, HandshakeAuthMode, NoiseUpgrader},
protocols::wire::messaging::v1::network_message_frame_codec,<|fim▁hole|>use tokio::runtime::Handle;
use tokio_util::{codec::Framed, compat::FuturesAsyncReadCompatExt};
#[derive(Debug)]
pub struct Args {
pub tcp_addr: Option<NetworkAddress>,
pub tcp_noise_addr: Option<NetworkAddress>,
pub msg_lens: Option<Vec<usize>>,
}
fn parse_addr(s: OsString) -> NetworkAddress {
s.to_str()
.expect("Error: Address should be valid Unicode")
.parse()
.expect("Error: Address should be a multiaddr")
}
fn parse_msg_lens(s: OsString) -> Vec<usize> {
let s = s
.into_string()
.expect("Error: $MSG_LENS should be valid Unicode");
// check for surrounding array brackets
if &s[..1] != "[" || &s[s.len() - 1..] != "]" {
panic!(
"Error: Malformed $MSG_LENS: \"{}\": Should be formatted like an array \"[123, 456]\"",
s
);
}
// parse Vec<usize> from comma-delimited string
s[1..s.len() - 1]
.split(',')
.map(|ss| {
ss.trim()
.parse::<usize>()
.expect("Error: Malformed $MSG_LENS: Failed to parse usize")
})
.collect()
}
impl Args {
pub fn from_env() -> Self {
Self {
tcp_addr: env::var_os("TCP_ADDR").map(parse_addr),
tcp_noise_addr: env::var_os("TCP_NOISE_ADDR").map(parse_addr),
msg_lens: env::var_os("MSG_LENS").map(parse_msg_lens),
}
}
}
/// Build a MemorySocket + Noise transport
pub fn build_memsocket_noise_transport() -> impl Transport<Output = NoiseStream<MemorySocket>> {
MemoryTransport::default().and_then(move |socket, addr, origin| async move {
let mut rng: StdRng = SeedableRng::from_seed(TEST_SEED);
let private = x25519::PrivateKey::generate(&mut rng);
let public = private.public_key();
let peer_id = diem_types::account_address::from_identity_public_key(public);
let noise_config = Arc::new(NoiseUpgrader::new(
NetworkContext::mock_with_peer_id(peer_id),
private,
HandshakeAuthMode::server_only(),
));
let remote_public_key = addr.find_noise_proto();
let (_remote_static_key, socket) = noise_config
.upgrade_with_noise(socket, origin, remote_public_key)
.await
.map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;
Ok(socket)
})
}
/// Build a Tcp + Noise transport
pub fn build_tcp_noise_transport() -> impl Transport<Output = NoiseStream<TcpSocket>> {
TcpTransport::default().and_then(move |socket, addr, origin| async move {
let mut rng: StdRng = SeedableRng::from_seed(TEST_SEED);
let private = x25519::PrivateKey::generate(&mut rng);
let public = private.public_key();
let peer_id = diem_types::account_address::from_identity_public_key(public);
let noise_config = Arc::new(NoiseUpgrader::new(
NetworkContext::mock_with_peer_id(peer_id),
private,
HandshakeAuthMode::server_only(),
));
let remote_public_key = addr.find_noise_proto();
let (_remote_static_key, socket) = noise_config
.upgrade_with_noise(socket, origin, remote_public_key)
.await
.map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;
Ok(socket)
})
}
/// Server side handler for send throughput benchmark when the messages are sent
/// over a simple stream (tcp or in-memory).
pub async fn server_stream_handler<L, I, S, E>(server_listener: L)
where
L: Stream<Item = Result<(I, NetworkAddress), E>> + Unpin,
I: Future<Output = Result<S, E>> + Send + 'static,
S: AsyncRead + AsyncWrite + Unpin + Send + 'static,
E: ::std::error::Error + Send,
{
// Wait for next inbound connection, this simulated the TransportHandler
// which is single-threaded asynchronous accepting new connections.
server_listener
.for_each_concurrent(None, |result| async {
match result {
Ok((f_stream, _)) => {
match f_stream.await {
Ok(stream) => {
let codec = network_message_frame_codec(constants::MAX_FRAME_SIZE);
let mut stream = Framed::new(stream.compat(), codec);
tokio::task::spawn(async move {
// Drain all messages from the client.
while stream.next().await.is_some() {}
stream.close().await.unwrap();
});
}
Err(e) => error!(
error = ?e,
"Connection upgrade failed {:?}", e),
};
}
Err(e) => error!(
error = ?e,
"Stream failed {:?}", e),
}
})
.await
}
pub fn start_stream_server<T, L, I, S, E>(
executor: &Handle,
transport: T,
listen_addr: NetworkAddress,
) -> NetworkAddress
where
T: Transport<Output = S, Error = E, Listener = L, Inbound = I>,
L: Stream<Item = Result<(I, NetworkAddress), E>> + Unpin + Send + 'static,
I: Future<Output = Result<S, E>> + Send + 'static,
S: AsyncRead + AsyncWrite + Unpin + Send + 'static,
E: ::std::error::Error + Send + Sync + 'static,
{
    let _guard = executor.enter();
let (listener, server_addr) = transport.listen_on(listen_addr).unwrap();
executor.spawn(server_stream_handler(listener));
server_addr
}<|fim▁end|> | };
use rand::prelude::*;
use std::{env, ffi::OsString, io, sync::Arc}; |
<|file_name|>train.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# based on ideas in https://github.com/lethienhoa/Very-Deep-Convolutional-Networks-for-Natural-Language-Processing/blob/master/train.py
import tensorflow as tf
from vdcnn import VDCNN
import numpy as np
import os
import time
import datetime
import cPickle as pkl
import tables
# Parameters
# ==================================================
# Model Hyperparameters
tf.flags.DEFINE_float("dropout_keep_prob", 0.5, "Dropout keep probability (default: 0.5)")
tf.flags.DEFINE_float("l2_reg_lambda", 0.0, "L2 regularizaion lambda (default: 0.0)")
# Training parameters
tf.flags.DEFINE_integer("batch_size", 400, "Batch Size (default: 128)")
tf.flags.DEFINE_integer("num_epochs", 20, "Number of training epochs (default: 200)")
tf.flags.DEFINE_integer("evaluate_every", 5000, "Evaluate model on dev set after this many steps (default: 100)")<|fim▁hole|>tf.flags.DEFINE_integer("checkpoint_every", 1000, "Save model after this many steps (default: 100)")
# Misc Parameters
tf.flags.DEFINE_boolean("allow_soft_placement", True, "Allow device soft device placement")
tf.flags.DEFINE_boolean("log_device_placement", False, "Log placement of ops on devices")
FLAGS = tf.flags.FLAGS
FLAGS._parse_flags()
print("\nParameters:")
for attr, value in sorted(FLAGS.__flags.items()):
print("{}={}".format(attr.upper(), value))
print("")
# ===================== Data preparation =============================
# Load data
print("Loading data...")
alphabet = "abcdefghijklmnopqrstuvwxyz0123456789-,;.!?:'\"/\\|_@#$%^&*~`+-=<>()[]{} "
sequence_max_length = 1024
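# Character-level setup for VDCNN: inputs are presumably quantized over the
# `alphabet` above and padded/truncated to `sequence_max_length` characters
# before being stored in the HDF5 file read below (inferred, not stated in
# the original script).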
# shuffling data for training
# Training
# ==================================================
# ----------------- Graph construction phase -------------------------------
# Input data.
with tf.Graph().as_default():
session_conf = tf.ConfigProto(
allow_soft_placement=FLAGS.allow_soft_placement,
log_device_placement=FLAGS.log_device_placement)
sess = tf.Session(config=session_conf)
with sess.as_default():
cnn = VDCNN()
# Ensures that we execute the update_ops before performing the train
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
global_step = tf.Variable(0, name="global_step", trainable=False)
optimizer = tf.train.AdamOptimizer(1e-3)
grads_and_vars = optimizer.compute_gradients(cnn.loss)
train_op = optimizer.apply_gradients(grads_and_vars, global_step=global_step)
# Initialize all variables
print("START %s" % datetime.datetime.now())
sess.run(tf.initialize_all_variables())
saver = tf.train.Saver()
print('Initialized')
batch_size = FLAGS.batch_size
epochs = FLAGS.num_epochs
hdf5_path = "my_extendable_compressed_data_train.hdf5"
for e in range(epochs):
extendable_hdf5_file = tables.open_file(hdf5_path, mode='r')
for ptr in range(0, 500000, batch_size):
#print(ptr)
feed_dict = {
cnn.input_x: extendable_hdf5_file.root.data[ptr: ptr+batch_size],
cnn.input_y:extendable_hdf5_file.root.clusters[ptr: ptr+batch_size] ,
cnn.is_training: True } # Update moving_mean, moving_var }
sess.run(train_op,feed_dict)
time_str = datetime.datetime.now().isoformat()
if e % 1 == 0:
step ,loss, accuracy = sess.run([global_step, cnn.loss, cnn.accuracy],feed_dict)
save_path = saver.save(sess, "model_vdcnn_full_dataset.ckpt")
print("model saved in file: %s" % save_path)
print("{}: epoch {}, loss {}, acc {}".format(time_str,e, loss, accuracy))
print("epoch %d:" % e)
extendable_hdf5_file.close()
print("END %s" % str(datetime.datetime.now()))<|fim▁end|> | |
<|file_name|>hashchangeevent.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::HashChangeEventBinding;
use dom::bindings::codegen::Bindings::HashChangeEventBinding::HashChangeEventMethods;
use dom::bindings::error::Fallible;
use dom::bindings::inheritance::Castable;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::bindings::str::{DOMString, USVString};
use dom::event::Event;
use dom::window::Window;
use dom_struct::dom_struct;
use servo_atoms::Atom;
// https://html.spec.whatwg.org/multipage/#hashchangeevent
#[dom_struct]
pub struct HashChangeEvent {
event: Event,
old_url: String,
new_url: String,
}
impl HashChangeEvent {
fn new_inherited(old_url: String, new_url: String) -> HashChangeEvent {
HashChangeEvent {
event: Event::new_inherited(),
old_url: old_url,
new_url: new_url,
}
}
pub fn new_uninitialized(window: &Window) -> DomRoot<HashChangeEvent> {
reflect_dom_object(
Box::new(HashChangeEvent::new_inherited(String::new(), String::new())),
window,
HashChangeEventBinding::Wrap,
)
}
<|fim▁hole|> cancelable: bool,
old_url: String,
new_url: String,
) -> DomRoot<HashChangeEvent> {
let ev = reflect_dom_object(
Box::new(HashChangeEvent::new_inherited(old_url, new_url)),
window,
HashChangeEventBinding::Wrap,
);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles, cancelable);
}
ev
}
pub fn Constructor(
window: &Window,
type_: DOMString,
init: &HashChangeEventBinding::HashChangeEventInit,
) -> Fallible<DomRoot<HashChangeEvent>> {
Ok(HashChangeEvent::new(
window,
Atom::from(type_),
init.parent.bubbles,
init.parent.cancelable,
init.oldURL.0.clone(),
init.newURL.0.clone(),
))
}
}
impl HashChangeEventMethods for HashChangeEvent {
// https://html.spec.whatwg.org/multipage/#dom-hashchangeevent-oldurl
fn OldURL(&self) -> USVString {
USVString(self.old_url.clone())
}
// https://html.spec.whatwg.org/multipage/#dom-hashchangeevent-newurl
fn NewURL(&self) -> USVString {
USVString(self.new_url.clone())
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}<|fim▁end|> | pub fn new(
window: &Window,
type_: Atom,
bubbles: bool, |
<|file_name|>jquery.backstretch.min.js<|end_file_name|><|fim▁begin|>/*! Backstretch - v2.0.4 - 2013-06-19
* http://srobbin.com/jquery-plugins/backstretch/
* Copyright (c) 2013 Scott Robbin; Licensed MIT */
(function (a, d, p) {
a.fn.backstretch = function (c, b) {
(c === p || 0 === c.length) && a.error("No images were supplied for Backstretch");
0 === a(d).scrollTop() && d.scrollTo(0, 0);
return this.each(function () {
var d = a(this), g = d.data("backstretch");
if (g) {
if ("string" == typeof c && "function" == typeof g[c]) {
g[c](b);
return
}
b = a.extend(g.options, b);
g.destroy(!0)
}
g = new q(this, c, b);
d.data("backstretch", g)
})
};
a.backstretch = function (c, b) {
return a("body").backstretch(c, b).data("backstretch")
};
a.expr[":"].backstretch = function (c) {
return a(c).data("backstretch") !== p
};
a.fn.backstretch.defaults = {centeredX: !0, centeredY: !0, duration: 5E3, fade: 0};
var r = {left: 0, top: 0, overflow: "hidden", margin: 0, padding: 0, height: "100%", width: "100%", zIndex: -999999}, s = {position: "absolute", display: "none", margin: 0, padding: 0, border: "none", width: "auto", height: "auto", maxHeight: "none", maxWidth: "none", zIndex: -999999}, q = function (c, b, e) {
this.options = a.extend({}, a.fn.backstretch.defaults, e || {});
this.images = a.isArray(b) ? b : [b];
a.each(this.images, function () {
a("<img />")[0].src = this
});
this.isBody = c === document.body;
this.$container = a(c);
this.$root = this.isBody ? l ? a(d) : a(document) : this.$container;
c = this.$container.children(".backstretch").first();
this.$wrap = c.length ? c : a('<div class="backstretch"></div>').css(r).appendTo(this.$container);
this.isBody || (c = this.$container.css("position"), b = this.$container.css("zIndex"), this.$container.css({position: "static" === c ? "relative" : c, zIndex: "auto" === b ? 0 : b, background: "none"}), this.$wrap.css({zIndex: -999998}));
this.$wrap.css({position: this.isBody && l ? "fixed" : "absolute"});
this.index = 0;
this.show(this.index);
a(d).on("resize.backstretch", a.proxy(this.resize, this)).on("orientationchange.backstretch", a.proxy(function () {
this.isBody && 0 === d.pageYOffset && (d.scrollTo(0, 1), this.resize())
}, this))
};
q.prototype = {resize: function () {
try {
var a = {left: 0, top: 0}, b = this.isBody ? this.$root.width() : this.$root.innerWidth(), e = b, g = this.isBody ? d.innerHeight ? d.innerHeight : this.$root.height() : this.$root.innerHeight(), j = e / this.$img.data("ratio"), f;
j >= g ? (f = (j - g) / 2, this.options.centeredY && (a.top = "-" + f + "px")) : (j = g, e = j * this.$img.data("ratio"), f = (e - b) / 2, this.options.centeredX && (a.left = "-" + f + "px"));<|fim▁hole|> }, show: function (c) {
if (!(Math.abs(c) > this.images.length - 1)) {
var b = this, e = b.$wrap.find("img").addClass("deleteable"), d = {relatedTarget: b.$container[0]};
b.$container.trigger(a.Event("backstretch.before", d), [b, c]);
this.index = c;
clearInterval(b.interval);
b.$img = a("<img />").css(s).bind("load",function (f) {
var h = this.width || a(f.target).width();
f = this.height || a(f.target).height();
a(this).data("ratio", h / f);
a(this).fadeIn(b.options.speed || b.options.fade, function () {
e.remove();
b.paused || b.cycle();
a(["after", "show"]).each(function () {
b.$container.trigger(a.Event("backstretch." + this, d), [b, c])
})
});
b.resize()
}).appendTo(b.$wrap);
b.$img.attr("src", b.images[c]);
return b
}
}, next: function () {
return this.show(this.index < this.images.length - 1 ? this.index + 1 : 0)
}, prev: function () {
return this.show(0 === this.index ? this.images.length - 1 : this.index - 1)
}, pause: function () {
this.paused = !0;
return this
}, resume: function () {
this.paused = !1;
this.next();
return this
}, cycle: function () {
1 < this.images.length && (clearInterval(this.interval), this.interval = setInterval(a.proxy(function () {
this.paused || this.next()
}, this), this.options.duration));
return this
}, destroy: function (c) {
a(d).off("resize.backstretch orientationchange.backstretch");
clearInterval(this.interval);
c || this.$wrap.remove();
this.$container.removeData("backstretch")
}};
var l, f = navigator.userAgent, m = navigator.platform, e = f.match(/AppleWebKit\/([0-9]+)/), e = !!e && e[1], h = f.match(/Fennec\/([0-9]+)/), h = !!h && h[1], n = f.match(/Opera Mobi\/([0-9]+)/), t = !!n && n[1], k = f.match(/MSIE ([0-9]+)/), k = !!k && k[1];
l = !((-1 < m.indexOf("iPhone") || -1 < m.indexOf("iPad") || -1 < m.indexOf("iPod")) && e && 534 > e || d.operamini && "[object OperaMini]" === {}.toString.call(d.operamini) || n && 7458 > t || -1 < f.indexOf("Android") && e && 533 > e || h && 6 > h || "palmGetResource"in d && e && 534 > e || -1 < f.indexOf("MeeGo") && -1 < f.indexOf("NokiaBrowser/8.5.0") || k && 6 >= k)
})(jQuery, window);<|fim▁end|> | this.$wrap.css({width: b, height: g}).find("img:not(.deleteable)").css({width: e, height: j}).css(a)
} catch (h) {
}
return this |
<|file_name|>interrupt_pause_script.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
from __future__ import print_function
import RPi.GPIO as GPIO
import time
import Queue # https://pymotw.com/2/Queue/
#GPIO pins
Taster1 = 24
Taster2 = 27
# Use BCM GPIO numbering as the pin reference
GPIO.setmode(GPIO.BCM)
# Declare the SoC GPIOs as inputs and activate the pull resistor
#PULL = GPIO.PUD_DOWN #GPIO -> GND
PULL = GPIO.PUD_UP #GPIO -> 3V3
GPIO.setup(Taster1, GPIO.IN, pull_up_down=PULL)
GPIO.setup(Taster2, GPIO.IN, pull_up_down=PULL)
# Define the dictionary. http://www.tutorialspoint.com/python/python_dictionary.htm
dictionary = {}
dictionary['pause'] = False
queue = Queue.Queue()
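# Pattern used below (summary comment): the GPIO callback only enqueues the
# pin or flips a flag; the main loop drains the queue, so the long-running
# work happens outside the interrupt context.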
# Pause/block/keep the script busy
def Pause():
    while dictionary['pause']:
time.sleep(1)
# ISR
def interrupt_event(pin):
if pin == Taster1:
queue.put(pin)
if pin == Taster2:
print("Führe Script weiter aus")
dictionary['pause'] = False
try:
    # Add interrupt events: react to the rising edge, register the ISR and debounce the pins
GPIO.add_event_detect(Taster1, GPIO.RISING, callback=interrupt_event, bouncetime=200)
GPIO.add_event_detect(Taster2, GPIO.RISING, callback=interrupt_event, bouncetime=200)
# keep script running
    while True:<|fim▁hole|>            print("Pausing script")
dictionary['pause'] = True
Pause()
print("...puh... Im super heavy busy...")
except (KeyboardInterrupt, SystemExit):
GPIO.cleanup()
print("\nQuit\n")<|fim▁end|> | time.sleep(0.5)
if not queue.empty():
job = queue.get()
if job == Taster1: |
<|file_name|>test_adapter.py<|end_file_name|><|fim▁begin|>##############################################################################
#
# Copyright (c) 2004 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""
$Id: test_adapter.py 67630 2006-04-27 00:54:03Z jim $
"""
import unittest
from zope.testing.doctestunit import DocTestSuite
def test_suite():
return unittest.TestSuite((<|fim▁hole|> ))
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')<|fim▁end|> | DocTestSuite('zope.security.adapter'), |
<|file_name|>terminal.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, OnDestroy, ElementRef, ViewChild } from '@angular/core';
import { fromEvent } from 'rxjs';
import { debounceTime, distinctUntilChanged, takeWhile } from 'rxjs/operators';
import { Terminal } from 'xterm';
import * as fit from 'xterm/lib/addons/fit/fit';
import * as search from 'xterm/lib/addons/search/search';
import * as webLinks from 'xterm/lib/addons/webLinks/webLinks';
import * as fullscreen from 'xterm/lib/addons/fullscreen/fullscreen';
Terminal.applyAddon(fit);
Terminal.applyAddon(search);
Terminal.applyAddon(webLinks);
Terminal.applyAddon(fullscreen);
@Component({
selector: 'dng-terminal',
templateUrl: './terminal.component.html',
styleUrls: ['./terminal.component.scss']
})
export class TerminalComponent implements OnInit, OnDestroy {
public terminal: Terminal;
title = new Date(Date.now()).toDateString();
protected focused = false;
private destroy = false;
@ViewChild('terminal', { static: true }) term: ElementRef;
@ViewChild('menu', { static: true }) private menu: ElementRef;
constructor() { }
getTerminalMenuClass() {
return (this.focused) ? 'terminalMenuFocus' : 'terminalMenuBlur';
}
ngOnInit() {
this.terminal = new Terminal({
windowsMode: true,
cursorBlink: true,
cols: 100,
rows: 25
});
this.terminal.open(this.term.nativeElement);
this.terminal['webLinksInit']();
this.terminal['fit']();
this.terminal.focus();
this.focused = true;
fromEvent(window, 'resize').pipe(
debounceTime(50),
distinctUntilChanged(),
takeWhile(() => this.destroy === false)
).subscribe(() => this.terminal['fit']());
fromEvent(this.menu.nativeElement, 'click').pipe(
debounceTime(50),
distinctUntilChanged(),
takeWhile(() => this.destroy === false)
).subscribe(() => this.terminal.focus());
this.terminal.on('blur', () => {
this.focused = false;
});
this.terminal.on('focus', () => {
this.focused = true;
});
this.terminal.on('key', (key, e) => {<|fim▁hole|> const printable = (!e.altKey && !e.ctrlKey && !e.metaKey);
/* tslint:disable:deprecation */
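      // keyCode values (deprecated API, hence the tslint pragma above):
      // 13 = Enter, 8 = Backspace, 9 = Tab; other printable keys echo as-is.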
if (e.keyCode === 13) {
this.terminal.writeln('');
} else if (e.keyCode === 8) {
this.terminal.write('\b \b');
} else if (e.keyCode === 9) {
this.terminal.write('\t');
} else if (printable) {
this.terminal.write(e.key);
}
/* tslint:enable:deprecation */
});
}
ngOnDestroy() {
this.destroy = true;
this.terminal.dispose();
}
}<|fim▁end|> | |
<|file_name|>stats.py<|end_file_name|><|fim▁begin|>class Stats:
"""
Contains the stats that a character or monster may have.
The stats tied to an agent are:
* Health: The amount of damage the agent can withstand. The agent dies when their health falls to zero. This is
the only stat that will persist after a battle ends.
* Stamina: An arbitrary measure of ability to use special actions such as magic or other techniques. This stat is
meant to be consumed through the battle.
* Strength: Influences the effectiveness of physical attacks.
* Magic: Influences the effectiveness of magical attacks.
* Endurance: Influences defence and resistance to attacks, as well as resistance to status ailments.
* Agility: Influences speed in battle and chance to evade attacks.
"""
def __init__(self, health, stamina, strength, magic, endurance, agility):
"""
Initializes stats with specific values.
"""
self.health = health
self.stamina = stamina
self.strength = strength
self.magic = magic
self.endurance = endurance
self.agility = agility
def __str__(self):
return str(self.__dict__)
class EquipmentStats:
<|fim▁hole|> * Attack: Heavily influences physical attacks.
* Magic attack: Heavily influences magic attacks.
* Armour: Heavily influences physical defence.
* Magic armour: Heavily influences magic defence.
"""
def __init__(self, attack, magic_attack, armour, magic_armour):
"""
Initializes equipment stats with specific values.
"""
self.attack = attack
self.magic_attack = magic_attack
self.armour = armour
self.magic_armour = magic_armour
def __str__(self):
return str(self.__dict__)<|fim▁end|> | """
Contains the stats that come from equipment. Only characters have equipment.
|
<|file_name|>roundrobin_router.go<|end_file_name|><|fim▁begin|>package router
import (
"sync/atomic"
"github.com/AsynkronIT/protoactor-go/actor"
)
type roundRobinGroupRouter struct {
GroupRouter
}
type roundRobinPoolRouter struct {
PoolRouter
}
type roundRobinState struct {
index int32
routees *actor.PIDSet
values []actor.PID
}
func (state *roundRobinState) SetRoutees(routees *actor.PIDSet) {
state.routees = routees
state.values = routees.Values()
}
func (state *roundRobinState) GetRoutees() *actor.PIDSet {
return state.routees
}
func (state *roundRobinState) RouteMessage(message interface{}, sender *actor.PID) {
pid := roundRobinRoutee(&state.index, state.values)
pid.Request(message, sender)<|fim▁hole|>func NewRoundRobinPool(size int) *actor.Props {
return actor.FromSpawnFunc(spawner(&roundRobinPoolRouter{PoolRouter{PoolSize: size}}))
}
func NewRoundRobinGroup(routees ...*actor.PID) *actor.Props {
return actor.FromSpawnFunc(spawner(&roundRobinGroupRouter{GroupRouter{Routees: actor.NewPIDSet(routees...)}}))
}
func (config *roundRobinPoolRouter) CreateRouterState() Interface {
return &roundRobinState{}
}
func (config *roundRobinGroupRouter) CreateRouterState() Interface {
return &roundRobinState{}
}
func roundRobinRoutee(index *int32, routees []actor.PID) actor.PID {
i := int(atomic.AddInt32(index, 1))
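	// Note (added commentary): AddInt32 wraps to negative after ~2^31
	// increments; the reset below keeps the modulo index valid. The reset is
	// not atomic, but an occasional duplicate pick is harmless for balancing.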
if i < 0 {
*index = 0
i = 0
}
mod := len(routees)
routee := routees[i%mod]
return routee
}<|fim▁end|> | }
|
<|file_name|>SSConsts.hpp<|end_file_name|><|fim▁begin|>#ifndef _SS_CONSTS_HPP_
#define _SS_CONSTS_HPP_
#include "StrideSearchConfig.h"
namespace StrideSearch {
/// Meters per second to Kilometers per hour conversion factor
static const Real MPS2KPH = 3.6;
/// Knots to meters per second conversion factor<|fim▁hole|> static constexpr Real PI = 3.1415926535897932384626433832795027975;
/// Radians to degrees conversion factor
static constexpr Real RAD2DEG = 180.0 / PI;
/// Degrees to radians conversion factor
static constexpr Real DEG2RAD = PI / 180.0;
/// Hours to days conversion factor
static constexpr Real HOURS2DAYS = 1.0/24.0;
/// Minutes to days conversion factor
static constexpr Real MINUTES2DAYS = 1.0/24.0/60.0;
/// Gravitational acceleration
static constexpr Real G = 9.80616;
/// Mean sea level radius of the Earth (meters)
static constexpr Real EARTH_RADIUS_KM = 6371.220;
static constexpr Real SQ_EARTH_RADIUS_KM = EARTH_RADIUS_KM*EARTH_RADIUS_KM;
/// One sidereal day, in units of seconds
static constexpr Real SIDEREAL_DAY_SEC = 24.0 * 3600.0;
/// Rotational rate of Earth about its z-axis
static constexpr Real EARTH_OMEGA_HZ = 2.0 * PI / SIDEREAL_DAY_SEC;
/// Floating point zero
static constexpr Real ZERO_TOL = 1.0e-11;
}
#endif<|fim▁end|> | static const Real KTS2MPS = 0.5144444;
/// Nautical miles to kilometers conversion factor
static const Real NM2KM = 1.852;
/// Pi |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2015 ERP|OPEN (www.erpopen.nl).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################<|fim▁hole|><|fim▁end|> | from . import models |
<|file_name|>long-while.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>#[allow(unused_variable)];
pub fn main() {
let mut i: int = 0;
while i < 1000000 {
i += 1;
let x = 3;
}
}<|fim▁end|> | // option. This file may not be copied, modified, or distributed
// except according to those terms.
|