| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
|---|---|
<|file_name|>environment.js<|end_file_name|><|fim▁begin|>module.exports = function(environment) {
var ENV = {
modulePrefix: 'dummy',
environment: environment,
baseURL: '/',
locationType: 'auto',
EmberENV: {
EXTEND_PROTOTYPES: false,
FEATURES: {<|fim▁hole|> }
},
APP: {
// Here you can pass flags/options to your application instance
// when it is created
}
};
if (environment === 'development') {
// ENV.APP.LOG_RESOLVER = true;
// ENV.APP.LOG_ACTIVE_GENERATION = true;
// ENV.APP.LOG_TRANSITIONS = true;
// ENV.APP.LOG_TRANSITIONS_INTERNAL = true;
// ENV.APP.LOG_VIEW_LOOKUPS = true;
}
if (environment === 'test') {
// Testem prefers this...
ENV.baseURL = '/';
ENV.locationType = 'none';
// keep test console output quieter
ENV.APP.LOG_ACTIVE_GENERATION = false;
ENV.APP.LOG_VIEW_LOOKUPS = false;
ENV.APP.rootElement = '#ember-testing';
ENV.APP.autoboot = false;
}
if (environment === 'production') {
ENV.baseURL = '/ember-collection';
ENV.locationType = 'hash';
}
return ENV;
};<|fim▁end|>
|
// Here you can enable experimental features on an ember canary build
// e.g. 'with-controller': true
|
<|file_name|>PollResultIndexingStrategyIndex.java<|end_file_name|><|fim▁begin|>/**************************************************************************************
* Copyright (C) 2008 EsperTech, Inc. All rights reserved. *
* http://esper.codehaus.org *
* http://www.espertech.com *
* ---------------------------------------------------------------------------------- *
* The software in this package is published under the terms of the GPL license *
* a copy of which has been included with this distribution in the license.txt file. *
**************************************************************************************/
package com.espertech.esper.epl.join.pollindex;
import com.espertech.esper.epl.join.table.EventTable;
import com.espertech.esper.epl.join.table.UnindexedEventTableList;
import com.espertech.esper.epl.join.table.PropertyIndexedEventTable;
import com.espertech.esper.client.EventBean;
import com.espertech.esper.client.EventType;
import java.util.Arrays;
import java.util.List;
/**
* Strategy for building an index out of poll-results knowing the properties to base the index on.
*/
public class PollResultIndexingStrategyIndex implements PollResultIndexingStrategy
{
private final int streamNum;
private final EventType eventType;
private final String[] propertyNames;
/**
* Ctor.
* @param streamNum is the stream number of the indexed stream
* @param eventType is the event type of the indexed stream
* @param propertyNames is the property names to be indexed
*/
public PollResultIndexingStrategyIndex(int streamNum, EventType eventType, String[] propertyNames)
{
this.streamNum = streamNum;
this.eventType = eventType;
this.propertyNames = propertyNames;
}
public EventTable index(List<EventBean> pollResult, boolean isActiveCache)
{
if (!isActiveCache)
{
return new UnindexedEventTableList(pollResult);
}
PropertyIndexedEventTable table = new PropertyIndexedEventTable(streamNum, eventType, propertyNames);
table.add(pollResult.toArray(new EventBean[pollResult.size()]));
return table;
}
<|fim▁hole|>}<|fim▁end|>
|
public String toQueryPlan() {
return this.getClass().getSimpleName() + " properties " + Arrays.toString(propertyNames);
}
|
<|file_name|>broker.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from . import SearchBackend
import importlib
import logging
class SearchBroker(SearchBackend):
def __init__(self, config_name=None):
super(SearchBroker, self).__init__(config_name)
self._servers = {}
if self._settings is None:
return
for server in self._settings:
if config_name is None or server in config_name:
try:
_module = '.'.join(self._settings[server]['ENGINE'].split('.')[:-1])
_search_class = self._settings[server]['ENGINE'].split('.')[-1]
except KeyError:
logging.warning("Search engine '%s' is missing the required "
"'ENGINE' setting" % server)<|fim▁hole|> break
try:
module = importlib.import_module(_module)
try:
self._servers[server] = getattr(module, _search_class)(server)
except AttributeError:
logging.warning("Search backend '%s'. No search class "
"'%s' defined." % (server, _search_class))
except ImportError:
logging.warning("Search backend '%s'. Cannot import '%s'" %
(server, _module))
def search(self, unit):
if not self._servers:
return []
results = []
counter = {}
for server in self._servers:
for result in self._servers[server].search(unit):
translation_pair = result['source'] + result['target']
if translation_pair not in counter:
counter[translation_pair] = result['count']
results.append(result)
else:
counter[translation_pair] += result['count']
for item in results:
item['count'] = counter[item['source']+item['target']]
return results
def update(self, language, obj):
for server in self._servers:
self._servers[server].update(language, obj)<|fim▁end|>
| |
<|file_name|>demoSensorController.js<|end_file_name|><|fim▁begin|>'use strict';
// Load the application's configuration
const config = require('../server/config');
const url = config.express_host + '/api';
// Required modules
const async = require('async');
const colors = require('colors');
const request = require('request');
// Counter for the Measurements
let counter = 1;
// Read the arguments from the command line or set them to the default values
const interval = process.argv[2] || 2000;
const thingName = process.argv[3] || 'Demo';
const thingLocLat = process.argv[4] || 51.964113;
const thingLocLng = process.argv[5] || 7.624862;
// REST API authentication token
let token;
console.log('\n////////////////////////////////////////////////////////////\n');
console.log(' STARTING DEMONSTRATION...'.cyan);
console.log('\n////////////////////////////////////////////////////////////\n');<|fim▁hole|> console.log(' Creating a new', 'User...\n'.cyan);
const userJson = {
email: 'demo#' + Math.random().toFixed() + '@example.com',
password: 'demoPass'
};
// Post the new User
request.post({
headers: {'content-type': 'application/json'},
url: url + '/users',
json: userJson
}, function(error, response, body) {
if (!error) {
console.log(' New User', 'created.'.green);
token = body.token;
} else {
console.log(' New User creation', 'failed'.red);
}
console.log('\n------------------------------------------------------------\n');
callback(error, body._id);
});
},
// Create a new Thing
function(userId, callback) {
console.log(' Creating a new', 'Thing...\n'.cyan);
const thingJson = {
name: thingName,
loc: {
coordinates: [ thingLocLat, thingLocLng ]
},
userId: userId,
waterbodyId: '5752d2d7e5d703480187e0d9',
token: token
};
// Post the new Thing
request.post({
headers: {'content-type': 'application/json'},
url: url + '/things',
json: thingJson
}, function(error, response, body) {
if (!error) {
console.log(' New Thing', 'created.'.green);
} else {
console.log(' New Thing creation', 'failed'.red);
}
console.log('\n------------------------------------------------------------\n');
callback(error, body._id);
});
},
// Create a new Feature
function(thingId, callback) {
console.log(' Creating a new', 'Feature...\n'.cyan);
const featureJson = {
name: 'demoFeature',
unit: 'foo',
token: token
};
// Post the new Feature
request.post({
headers: {'content-type': 'application/json'},
url: url + '/features',
json: featureJson
}, function(error, response, body) {
if (!error) {
console.log(' New Feature', 'created'.green);
} else {
console.log(' New Feature creation', 'failed'.red);
}
console.log('\n------------------------------------------------------------\n');
callback(error, thingId, body._id);
});
},
// Create a new Sensor
function(thingId, featureId, callback) {
console.log(' Creating a new', 'Sensor...\n'.cyan);
const sensorJson = {
name: 'demoSensor',
interval: interval,
refLevel: 2,
warnLevel: 6,
riskLevel: 8,
thingId: thingId,
featureId: featureId,
token: token
};
// Post the new Sensor
request.post({
headers: {'content-type': 'application/json'},
url: url + '/sensors',
json: sensorJson
}, function(error, response, body) {
if (!error) {
console.log(' New Sensor', 'created.'.green);
} else {
console.log(' New Sensor creation', 'failed'.red);
}
console.log('\n------------------------------------------------------------\n');
callback(error, body._id);
});
},
// Create new Measurements in an interval
function(sensorId, callback) {
console.log(' Finished demo setup. Measuring now...'.cyan);
console.log('\n------------------------------------------------------------\n');
let value = 4;
setInterval(function() {
console.log(' Creating a new', 'Measurement...\n'.cyan);
// Calculate the Measurement's value as a random number with respect to its previous value
if (value < 1 || Math.random() > 0.5) {
value += Math.random();
} else {
value -= Math.random();
}
value = parseFloat(value.toFixed(2));
let measurementJson = {
date: Date.now(),
value: value,
sensorId: sensorId,
token: token
};
// Post the new Measurement
request.post({
headers: {'content-type': 'application/json'},
url: url + '/measurements',
json: measurementJson
}, function(error, response, body) {
if (!error) {
console.log(' New Measurement', ('#' + counter).cyan, 'created.'.green, '\nValue:', body.value.cyan);
counter++;
} else {
console.log(' New Measurement creation', 'failed'.red);
callback(error);
}
console.log('\n------------------------------------------------------------\n');
});
}, interval);
}
], function(err, result) {
if (err) {
console.log(err);
}
});<|fim▁end|>
|
async.waterfall([
// Create a new User
function(callback) {
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
from bitfinex.client import Client, TradeClient
|
<|file_name|>multi_rpc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# Copyright (c) 2015 The Aureus Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test mulitple rpc user config option rpcauth
#
from test_framework.test_framework import AureusTestFramework
from test_framework.util import *
import base64
try:
import http.client as httplib
except ImportError:
import httplib
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
class HTTPBasicsTest (AureusTestFramework):
def setup_nodes(self):
return start_nodes(4, self.options.tmpdir)
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain(self.options.tmpdir)
#Append rpcauth to aureus.conf before initialization
rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
with open(os.path.join(self.options.tmpdir+"/node0", "aureus.conf"), 'a') as f:
f.write(rpcauth+"\n")
f.write(rpcauth2+"\n")
def run_test(self):
##################################################
# Check correctness of the rpcauth config option #
##################################################
url = urlparse.urlparse(self.nodes[0].url)
#Old authpair
authpair = url.username + ':' + url.password
#New authpair generated via share/rpcuser tool
rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
password = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM="
#Second authpair with different username
rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="
authpairnew = "rt:"+password
headers = {"Authorization": "Basic " + base64.b64encode(authpair)}
conn = httplib.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status==401, False)
conn.close()
#Use new authpair to confirm both work
headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}<|fim▁hole|> conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status==401, False)
conn.close()
#Wrong login name with rt's password
authpairnew = "rtwrong:"+password
headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
conn = httplib.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status==401, True)
conn.close()
#Wrong password for rt
authpairnew = "rt:"+password+"wrong"
headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
conn = httplib.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status==401, True)
conn.close()
#Correct for rt2
authpairnew = "rt2:"+password2
headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
conn = httplib.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status==401, False)
conn.close()
#Wrong password for rt2
authpairnew = "rt2:"+password2+"wrong"
headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
conn = httplib.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status==401, True)
conn.close()
if __name__ == '__main__':
HTTPBasicsTest ().main ()<|fim▁end|>
|
conn = httplib.HTTPConnection(url.hostname, url.port)
conn.connect()
|
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#!/bin/python2
import os, gzip, StringIO, time, csv, datetime
from flask import Flask, request, redirect, url_for, render_template
from werkzeug.utils import secure_filename
from wtforms import Form, DecimalField, validators
class UpdateForm(Form):
weight = DecimalField('Weight', [validators.DataRequired()])
fat = DecimalField('Body fat', [validators.DataRequired()])
water = DecimalField('Body water', [validators.DataRequired()])
muscle = DecimalField('Muscle', [validators.DataRequired()])
bonemass = DecimalField('Bone mass', [validators.DataRequired()])
calories = DecimalField('Calories', [validators.DataRequired()])
<|fim▁hole|>CSV_FILENAME = "smartscale.csv"
types = ["weight", "fat", "water", "muscle", "bonemass", "calories"]
@app.route('/stats')
def stats():
with open(CSV_FILENAME) as f:
csv = f.read().splitlines()
data = [(line.split(',')) for line in csv]
return render_template('chart.html', types=types, values=data)
def updateData(data):
values = []
for t in types:
values.append(str(data[t]))
filename = CSV_FILENAME
timestr = time.strftime("%Y%m%d-%H%M%S")
with open(filename, "a") as fh:
fh.write(','.join(values) + "," + timestr + "\n")
@app.route('/', methods=['GET', 'POST'])
def update():
form = UpdateForm(request.form)
if request.method == 'POST' and form.validate():
updateData(form.data)
return redirect('stats')
return render_template('update.html', form=form)
if __name__ == "__main__":
app.run(host='0.0.0.0')<|fim▁end|>
|
app = Flask(__name__)
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from chainer.iterators import multiprocess_iterator
from chainer.iterators import serial_iterator
<|fim▁hole|>SerialIterator = serial_iterator.SerialIterator<|fim▁end|>
|
MultiprocessIterator = multiprocess_iterator.MultiprocessIterator
|
<|file_name|>images.go<|end_file_name|><|fim▁begin|>package client
import (
"encoding/json"
"fmt"
"net/url"
"text/tabwriter"
"time"
"github.com/docker/docker/api/types"
Cli "github.com/docker/docker/cli"
"github.com/docker/docker/opts"
flag "github.com/docker/docker/pkg/mflag"
"github.com/docker/docker/pkg/parsers"
"github.com/docker/docker/pkg/parsers/filters"
"github.com/docker/docker/pkg/stringid"
"github.com/docker/docker/pkg/units"
"github.com/docker/docker/utils"
)
// CmdImages lists the images in a specified repository, or all top-level images if no repository is specified.
//
// Usage: docker images [OPTIONS] [REPOSITORY]
func (cli *DockerCli) CmdImages(args ...string) error {
cmd := Cli.Subcmd("images", []string{"[REPOSITORY[:TAG]]"}, Cli.DockerCommands["images"].Description, true)
quiet := cmd.Bool([]string{"q", "-quiet"}, false, "Only show numeric IDs")
all := cmd.Bool([]string{"a", "-all"}, false, "Show all images (default hides intermediate images)")
noTrunc := cmd.Bool([]string{"#notrunc", "-no-trunc"}, false, "Don't truncate output")
showDigests := cmd.Bool([]string{"-digests"}, false, "Show digests")
flFilter := opts.NewListOpts(nil)
cmd.Var(&flFilter, []string{"f", "-filter"}, "Filter output based on conditions provided")
cmd.Require(flag.Max, 1)
cmd.ParseFlags(args, true)
// Consolidate all filter flags, and sanity check them early.
// They'll get process in the daemon/server.
imageFilterArgs := filters.Args{}
for _, f := range flFilter.GetAll() {
var err error
imageFilterArgs, err = filters.ParseFlag(f, imageFilterArgs)
if err != nil {
return err
}
}
matchName := cmd.Arg(0)
v := url.Values{}
if len(imageFilterArgs) > 0 {
filterJSON, err := filters.ToParam(imageFilterArgs)
if err != nil {
return err
}
v.Set("filters", filterJSON)
}
if cmd.NArg() == 1 {
// FIXME rename this parameter, to not be confused with the filters flag
v.Set("filter", matchName)
}
if *all {
v.Set("all", "1")
}
serverResp, err := cli.call("GET", "/images/json?"+v.Encode(), nil, nil)
if err != nil {
return err
}
defer serverResp.body.Close()
images := []types.Image{}
if err := json.NewDecoder(serverResp.body).Decode(&images); err != nil {
return err
}
w := tabwriter.NewWriter(cli.out, 20, 1, 3, ' ', 0)
if !*quiet {
if *showDigests {
fmt.Fprintln(w, "REPOSITORY\tTAG\tDIGEST\tIMAGE ID\tCREATED\tVIRTUAL SIZE")
} else {
fmt.Fprintln(w, "REPOSITORY\tTAG\tIMAGE ID\tCREATED\tVIRTUAL SIZE")
}
}
for _, image := range images {
ID := image.ID
if !*noTrunc {<|fim▁hole|> }
repoTags := image.RepoTags
repoDigests := image.RepoDigests
if len(repoTags) == 1 && repoTags[0] == "<none>:<none>" && len(repoDigests) == 1 && repoDigests[0] == "<none>@<none>" {
// dangling image - clear out either repoTags or repoDigsts so we only show it once below
repoDigests = []string{}
}
// combine the tags and digests lists
tagsAndDigests := append(repoTags, repoDigests...)
for _, repoAndRef := range tagsAndDigests {
repo, ref := parsers.ParseRepositoryTag(repoAndRef)
// default tag and digest to none - if there's a value, it'll be set below
tag := "<none>"
digest := "<none>"
if utils.DigestReference(ref) {
digest = ref
} else {
tag = ref
}
if !*quiet {
if *showDigests {
fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s ago\t%s\n", repo, tag, digest, ID, units.HumanDuration(time.Now().UTC().Sub(time.Unix(int64(image.Created), 0))), units.HumanSize(float64(image.VirtualSize)))
} else {
fmt.Fprintf(w, "%s\t%s\t%s\t%s ago\t%s\n", repo, tag, ID, units.HumanDuration(time.Now().UTC().Sub(time.Unix(int64(image.Created), 0))), units.HumanSize(float64(image.VirtualSize)))
}
} else {
fmt.Fprintln(w, ID)
}
}
}
if !*quiet {
w.Flush()
}
return nil
}<|fim▁end|>
|
ID = stringid.TruncateID(ID)
|
<|file_name|>cnf.py<|end_file_name|><|fim▁begin|># Homework 2 solution, part 1: cnf.py
# Andrew Gordon
# Feb 18, 2015
# Revised June 19, 2015 for better input/output and implies->if
import sys
import fileinput
def biconditionalElimination(s):
if type(s) is str:
return s
elif type(s) is list and s[0] == "iff":
return(["and",
["if",
biconditionalElimination(s[1]),
biconditionalElimination(s[2])],
["if",
biconditionalElimination(s[2]),
biconditionalElimination(s[1])]])
else:
return([s[0]] + [biconditionalElimination(i) for i in s[1:]])
def implicationElimination(s):
if type(s) is str:
return s
elif type(s) is list and s[0] == "if":
return(["or",
["not",
implicationElimination(s[1])],
implicationElimination(s[2])])
else:
return([s[0]] + [implicationElimination(i) for i in s[1:]])
def doubleNegationElimination(s):
if type(s) is str:
return s
elif type(s) is list and s[0] == "not" and type(s[1]) is list and s[1][0] == "not":
return(doubleNegationElimination(s[1][1]))
else:
return([s[0]] + [doubleNegationElimination(i) for i in s[1:]])
def demorgan(s):
revision = demorgan1(s)
if revision == s:
return s
else:
return demorgan(revision)
def demorgan1(s):
if type(s) is str:
return s
elif type(s) is list and s[0] == "not" and type(s[1]) is list and s[1][0] == "and":
return(["or"] + [demorgan(["not", i]) for i in s[1][1:]])
elif type(s) is list and s[0] == "not" and type(s[1]) is list and s[1][0] == "or":
return(["and"] + [demorgan(["not", i]) for i in s[1][1:]])
else:
return ([s[0]] + [demorgan(i) for i in s[1:]])
def binaryize(s): # ensures all connectives are binary (and / or)
if type(s) is str:
return s
elif type(s) is list and s[0] == "and" and len(s) > 3: # too long
return(["and", s[1], binaryize(["and"] + s[2:])])
elif type(s) is list and s[0] == "or" and len(s) > 3: # too long
return(["or", s[1], binaryize(["or"] + s[2:])])
else:
return([s[0]] + [binaryize(i) for i in s[1:]])
def distributivity(s):
revision = distributivity1(s)
if revision == s:
return s
else:
return distributivity(revision)
def distributivity1(s): # only works on binary connectives
if type(s) is str:
return s
elif type(s) is list and s[0] == "or" and type(s[1]) is list and s[1][0] == "and":
# distribute s[2] over s[1]
return(["and"] + [distributivity(["or", i, s[2]]) for i in s[1][1:]])
elif type(s) is list and s[0] == "or" and type(s[2]) is list and s[2][0] == "and":
# distribute s[1] over s[2]
return(["and"] + [distributivity(["or", i, s[1]]) for i in s[2][1:]])
else:
return ([s[0]] + [distributivity(i) for i in s[1:]])
def andAssociativity(s):
revision = andAssociativity1(s)
if revision == s:
return s
else:
return andAssociativity(revision)
def andAssociativity1(s):
if type(s) is str:
return s
elif type(s) is list and s[0] == "and":
result = ["and"]
# iterate through conjuncts looking for "and" lists
for i in s[1:]:
if type(i) is list and i[0] == "and":
result = result + i[1:]
else:
result.append(i)
return result
else:
return([s[0]] + [andAssociativity1(i) for i in s[1:]])
def orAssociativity(s):
revision = orAssociativity1(s)
if revision == s:
return s
else:
return orAssociativity(revision)
def orAssociativity1(s):
if type(s) is str:
return s
elif type(s) is list and s[0] == "or":
result = ["or"]
# iterate through disjuncts looking for "or" lists
for i in s[1:]:
if type(i) is list and i[0] == "or":
result = result + i[1:]
else:
result.append(i)
return result
else:
return([s[0]] + [orAssociativity1(i) for i in s[1:]])
def removeDuplicateLiterals(s):
if type(s) is str:
return s
if s[0] == "not":
return s
if s[0] == "and":
return(["and"] + [removeDuplicateLiterals(i) for i in s[1:]])
if s[0] == "or":
remains = []
for l in s[1:]:
if l not in remains:
remains.append(l)
if len(remains) == 1:
return remains[0]
else:
return(["or"] + remains)
def removeDuplicateClauses(s):
if type(s) is str:
return s
if s[0] == "not":
return s
if s[0] == "or":
return s
if s[0] == "and": #conjunction of clauses
remains = []
for c in s[1:]:
if unique(c, remains):
remains.append(c)
if len(remains) == 1:
return remains[0]
else:
return(["and"] + remains)
def unique(c, remains):
for p in remains:
if type(c) is str or type(p) is str:
if c == p:
return False
elif len(c) == len(p):
if len([i for i in c[1:] if i not in p[1:]]) == 0:
return False
return True
def cnf(s):
s = biconditionalElimination(s)
s = implicationElimination(s)
s = demorgan(s)<|fim▁hole|> s = binaryize(s)
s = distributivity(s)
s = andAssociativity(s)
s = orAssociativity(s)
s = removeDuplicateLiterals(s)
s = removeDuplicateClauses(s)
return s
if __name__ == "__main__":
sentences = fileinput.input()
for l in sentences:
print repr(cnf(eval(l.strip())))<|fim▁end|>
|
s = doubleNegationElimination(s)
|
<|file_name|>test_gluon.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
from mxnet import gluon
from mxnet.gluon import nn
from mxnet.test_utils import assert_almost_equal
import numpy as np
from nose.tools import raises
from copy import deepcopy
import warnings
def test_parameter():
p = gluon.Parameter('weight', shape=(10, 10))
p.initialize(init='xavier', ctx=[mx.cpu(0), mx.cpu(1)])
assert len(p.list_data()) == 2
assert len(p.list_grad()) == 2
assert p.data(mx.cpu(1)).context == mx.cpu(1)
assert p.data(mx.cpu(0)).shape == (10, 10)
assert p.var().name == 'weight'
p.reset_ctx(ctx=[mx.cpu(1), mx.cpu(2)])
assert p.list_ctx() == [mx.cpu(1), mx.cpu(2)]
def test_paramdict():
params = gluon.ParameterDict('net_')
params.get('weight', shape=(10, 10))
assert list(params.keys()) == ['net_weight']
params.initialize(ctx=mx.cpu())
params.save('test.params')
params.load('test.params', mx.cpu())
def test_parameter_sharing():
class Net(gluon.Block):
def __init__(self, **kwargs):
super(Net, self).__init__(**kwargs)
with self.name_scope():
self.dense0 = nn.Dense(5, in_units=5)
self.dense1 = nn.Dense(5, in_units=5)
def forward(self, x):
return self.dense1(self.dense0(x))
net1 = Net(prefix='net1_')
net2 = Net(prefix='net2_', params=net1.collect_params())
net1.collect_params().initialize()
net2(mx.nd.zeros((3, 5)))
net1.save_params('net1.params')
net3 = Net(prefix='net3_')
net3.load_params('net1.params', mx.cpu())
def test_basic():
model = nn.Sequential()
model.add(nn.Dense(128, activation='tanh', in_units=10, flatten=False))
model.add(nn.Dropout(0.5))
model.add(nn.Dense(64, activation='tanh', in_units=256),
nn.Dense(32, in_units=64))
model.add(nn.Activation('relu'))
# symbol
x = mx.sym.var('data')
y = model(x)
assert len(y.list_arguments()) == 7
# ndarray
model.collect_params().initialize(mx.init.Xavier(magnitude=2.24))
x = model(mx.nd.zeros((32, 2, 10)))
assert x.shape == (32, 32)
x.wait_to_read()
model.collect_params().setattr('grad_req', 'null')
assert list(model.collect_params().values())[0]._grad is None
model.collect_params().setattr('grad_req', 'write')
assert list(model.collect_params().values())[0]._grad is not None
def test_dense():
model = nn.Dense(128, activation='tanh', in_units=10, flatten=False, prefix='test_')
inputs = mx.sym.Variable('data')
outputs = model(inputs)
assert set(model.collect_params().keys()) == set(['test_weight', 'test_bias'])
assert outputs.list_outputs() == ['test_tanh_fwd_output']
args, outs, auxs = outputs.infer_shape(data=(2, 3, 10))
assert outs == [(2, 3, 128)]
model = nn.Dense(128, activation='relu', in_units=30, flatten=True, prefix='test2_')
inputs = mx.sym.Variable('data')
outputs = model(inputs)
assert set(model.collect_params().keys()) == set(['test2_weight', 'test2_bias'])
assert outputs.list_outputs() == ['test2_relu_fwd_output']
args, outs, auxs = outputs.infer_shape(data=(17, 2, 5, 3))
assert outs == [(17, 128)]
def test_symbol_block():
model = nn.HybridSequential()
model.add(nn.Dense(128, activation='tanh'))
model.add(nn.Dropout(0.5))
model.add(nn.Dense(64, activation='tanh'),
nn.Dense(32, in_units=64))
model.add(nn.Activation('relu'))
model.initialize()
inputs = mx.sym.var('data')
outputs = model(inputs).get_internals()
smodel = gluon.SymbolBlock(outputs, inputs, params=model.collect_params())
assert len(smodel(mx.nd.zeros((16, 10)))) == 14
out = smodel(mx.sym.var('in'))
assert len(out) == len(outputs.list_outputs())
class Net(nn.HybridBlock):
def __init__(self, model):
super(Net, self).__init__()
self.model = model
def hybrid_forward(self, F, x):
out = self.model(x)
return F.add_n(*[i.sum() for i in out])
net = Net(smodel)
net.hybridize()
assert isinstance(net(mx.nd.zeros((16, 10))), mx.nd.NDArray)
inputs = mx.sym.var('data')
outputs = model(inputs)
smodel = gluon.SymbolBlock(outputs, inputs, params=model.collect_params())
net = Net(smodel)
net.hybridize()
assert isinstance(net(mx.nd.zeros((16, 10))), mx.nd.NDArray)
def check_layer_forward(layer, dshape):
layer.collect_params().initialize()
x = mx.nd.ones(shape=dshape)
x.attach_grad()
with mx.autograd.record():
out = layer(x)
out.backward()
np_out = out.asnumpy()
np_dx = x.grad.asnumpy()
layer.hybridize()
x = mx.nd.ones(shape=dshape)
x.attach_grad()
with mx.autograd.record():
out = layer(x)
out.backward()
mx.test_utils.assert_almost_equal(np_out, out.asnumpy(), rtol=1e-5, atol=1e-6)
mx.test_utils.assert_almost_equal(np_dx, x.grad.asnumpy(), rtol=1e-5, atol=1e-6)
def test_conv():
layers1d = [
nn.Conv1D(16, 3, in_channels=4),
nn.Conv1D(16, 3, groups=2, in_channels=4),
nn.Conv1D(16, 3, strides=3, groups=2, in_channels=4),
]
for layer in layers1d:
check_layer_forward(layer, (1, 4, 10))
layers2d = [
nn.Conv2D(16, (3, 4), in_channels=4),
nn.Conv2D(16, (5, 4), in_channels=4),
nn.Conv2D(16, (3, 4), groups=2, in_channels=4),
nn.Conv2D(16, (3, 4), strides=4, in_channels=4),
nn.Conv2D(16, (3, 4), dilation=4, in_channels=4),
nn.Conv2D(16, (3, 4), padding=4, in_channels=4),
]
for layer in layers2d:
check_layer_forward(layer, (1, 4, 20, 20))
layers3d = [
nn.Conv3D(16, (1, 8, 4), in_channels=4, activation='relu'),
nn.Conv3D(16, (5, 4, 3), in_channels=4),
nn.Conv3D(16, (3, 3, 3), groups=2, in_channels=4),
nn.Conv3D(16, 4, strides=4, in_channels=4),
nn.Conv3D(16, (3, 3, 3), padding=4, in_channels=4),
]
for layer in layers3d:
check_layer_forward(layer, (1, 4, 10, 10, 10))
layer = nn.Conv2D(16, (3, 3), layout='NHWC', in_channels=4)
# check_layer_forward(layer, (1, 10, 10, 4))
layer = nn.Conv3D(16, (3, 3, 3), layout='NDHWC', in_channels=4)
# check_layer_forward(layer, (1, 10, 10, 10, 4))
def test_deconv():
# layers1d = [
# nn.Conv1DTranspose(16, 3, in_channels=4),
# nn.Conv1DTranspose(16, 3, groups=2, in_channels=4),
# nn.Conv1DTranspose(16, 3, strides=3, groups=2, in_channels=4),
# ]
# for layer in layers1d:
# check_layer_forward(layer, (1, 4, 10))
layers2d = [
nn.Conv2DTranspose(16, (3, 4), in_channels=4),
nn.Conv2DTranspose(16, (5, 4), in_channels=4),
nn.Conv2DTranspose(16, (3, 4), groups=2, in_channels=4),
nn.Conv2DTranspose(16, (3, 4), strides=4, in_channels=4),
nn.Conv2DTranspose(16, (3, 4), dilation=4, in_channels=4),
nn.Conv2DTranspose(16, (3, 4), padding=4, in_channels=4),
nn.Conv2DTranspose(16, (3, 4), strides=4, output_padding=3, in_channels=4),<|fim▁hole|>
# layers3d = [
# nn.Conv3DTranspose(16, (1, 8, 4), in_channels=4),
# nn.Conv3DTranspose(16, (5, 4, 3), in_channels=4),
# nn.Conv3DTranspose(16, (3, 3, 3), groups=2, in_channels=4),
# nn.Conv3DTranspose(16, 4, strides=4, in_channels=4),
# nn.Conv3DTranspose(16, (3, 3, 3), padding=4, in_channels=4),
# ]
# for layer in layers3d:
# check_layer_forward(layer, (1, 4, 10, 10, 10))
#
#
# layer = nn.Conv2DTranspose(16, (3, 3), layout='NHWC', in_channels=4)
# # check_layer_forward(layer, (1, 10, 10, 4))
#
# layer = nn.Conv3DTranspose(16, (3, 3, 3), layout='NDHWC', in_channels=4)
# # check_layer_forward(layer, (1, 10, 10, 10, 4))
def test_pool():
layers1d = [
nn.MaxPool1D(),
nn.MaxPool1D(3),
nn.MaxPool1D(3, 2),
nn.AvgPool1D(),
nn.GlobalAvgPool1D(),
]
for layer in layers1d:
check_layer_forward(layer, (1, 2, 10))
layers2d = [
nn.MaxPool2D(),
nn.MaxPool2D((3, 3)),
nn.MaxPool2D(3, 2),
nn.AvgPool2D(),
nn.GlobalAvgPool2D(),
]
for layer in layers2d:
check_layer_forward(layer, (1, 2, 10, 10))
layers3d = [
nn.MaxPool3D(),
nn.MaxPool3D((3, 3, 3)),
nn.MaxPool3D(3, 2),
nn.AvgPool3D(),
nn.GlobalAvgPool3D(),
]
for layer in layers3d:
check_layer_forward(layer, (1, 2, 10, 10, 10))
# test ceil_mode
x = mx.nd.zeros((2, 2, 10, 10))
layer = nn.MaxPool2D(3, ceil_mode=False)
layer.collect_params().initialize()
assert (layer(x).shape==(2, 2, 3, 3))
layer = nn.MaxPool2D(3, ceil_mode=True)
layer.collect_params().initialize()
assert (layer(x).shape==(2, 2, 4, 4))
def test_batchnorm():
layer = nn.BatchNorm(in_channels=10)
check_layer_forward(layer, (2, 10, 10, 10))
def test_reshape():
x = mx.nd.ones((2, 4, 10, 10))
layer = nn.Conv2D(10, 2, in_channels=4)
layer.collect_params().initialize()
with mx.autograd.record():
x = layer(x)
x = x.reshape((-1,))
x = x + 10
x.backward()
def test_slice():
x = mx.nd.ones((5, 4, 10, 10))
layer = nn.Conv2D(10, 2, in_channels=4)
layer.collect_params().initialize()
with mx.autograd.record():
x = layer(x)
x = x[1:3]
x = x + 10
x.backward()
def test_at():
x = mx.nd.ones((5, 4, 10, 10))
layer = nn.Conv2D(10, 2, in_channels=4)
layer.collect_params().initialize()
with mx.autograd.record():
x = layer(x)
x = x[1]
x = x + 10
x.backward()
def test_deferred_init():
x = mx.nd.ones((5, 4, 10, 10))
layer = nn.Conv2D(10, 2)
layer.collect_params().initialize()
layer(x)
def check_split_data(x, num_slice, batch_axis, **kwargs):
res = gluon.utils.split_data(x, num_slice, batch_axis, **kwargs)
assert len(res) == num_slice
mx.test_utils.assert_almost_equal(mx.nd.concat(*res, dim=batch_axis).asnumpy(),
x.asnumpy())
def test_split_data():
x = mx.nd.random.uniform(shape=(128, 33, 64))
check_split_data(x, 8, 0)
check_split_data(x, 3, 1)
check_split_data(x, 4, 1, even_split=False)
check_split_data(x, 15, 1, even_split=False)
try:
check_split_data(x, 4, 1)
except ValueError:
return
assert False, "Should have failed"
def test_flatten():
flatten = nn.Flatten()
x = mx.nd.zeros((3,4,5,6))
assert flatten(x).shape == (3, 4*5*6)
x = mx.nd.zeros((3,6))
assert flatten(x).shape == (3, 6)
x = mx.nd.zeros((3,))
assert flatten(x).shape == (3, 1)
def test_trainer():
def dict_equ(a, b):
assert set(a) == set(b)
for k in a:
assert (a[k].asnumpy() == b[k].asnumpy()).all()
x = gluon.Parameter('x', shape=(10,))
x.initialize(ctx=[mx.cpu(0), mx.cpu(1)], init='zeros')
trainer = gluon.Trainer([x], 'sgd', {'learning_rate': 1.0, 'momentum': 0.5})
with mx.autograd.record():
for w in x.list_data():
y = w + 1
y.backward()
trainer.step(1)
assert (x.data(mx.cpu(1)).asnumpy() == -2).all()
x.lr_mult = 0.5
with mx.autograd.record():
for w in x.list_data():
y = w + 1
y.backward()
trainer.step(1)
assert (x.data(mx.cpu(1)).asnumpy() == -4).all()
trainer.save_states('test.states')
states = deepcopy(trainer._kvstore._updater.states) if trainer._update_on_kvstore \
else deepcopy(trainer._updaters[0].states)
trainer.load_states('test.states')
if trainer._update_on_kvstore:
dict_equ(trainer._kvstore._updater.states, states)
assert trainer._optimizer == trainer._kvstore._updater.optimizer
else:
for updater in trainer._updaters:
dict_equ(updater.states, states)
assert trainer._optimizer == trainer._updaters[0].optimizer
def test_block_attr_hidden():
b = gluon.Block()
# regular attributes can change types
b.a = None
b.a = 1
@raises(TypeError)
def test_block_attr_block():
b = gluon.Block()
# regular variables can't change types
b.b = gluon.Block()
b.b = (2,)
@raises(TypeError)
def test_block_attr_param():
b = gluon.Block()
# regular variables can't change types
b.b = gluon.Parameter()
b.b = (2,)
def test_block_attr_regular():
b = gluon.Block()
# set block attribute also sets _children
b.c = gluon.Block()
c2 = gluon.Block()
b.c = c2
assert b.c is c2 and b._children[0] is c2
def test_sequential_warning():
with warnings.catch_warnings(record=True) as w:
b = gluon.nn.Sequential()
b.add(gluon.nn.Dense(20))
b.hybridize()
assert len(w) == 1
def test_global_norm_clip():
x1 = mx.nd.ones((3,3))
x2 = mx.nd.ones((4,4))
norm = gluon.utils.clip_global_norm([x1, x2], 1.0)
assert norm == 5.0
assert_almost_equal(x1.asnumpy(), np.ones((3,3))/5)
assert_almost_equal(x2.asnumpy(), np.ones((4,4))/5)
x3 = mx.nd.array([1.0, 2.0, float('nan')])
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
gluon.utils.clip_global_norm([x1, x3], 2.0)
assert len(w) == 1
def test_embedding():
layer = gluon.nn.Embedding(10, 100)
layer.initialize()
x = mx.nd.array([3,4,2,0,1])
with mx.autograd.record():
y = layer(x)
y.backward()
assert (layer.weight.grad()[:5] == 1).asnumpy().all()
assert (layer.weight.grad()[5:] == 0).asnumpy().all()
def test_export():
ctx = mx.context.current_context()
model = gluon.model_zoo.vision.resnet18_v1(
prefix='resnet', ctx=ctx, pretrained=True)
model.hybridize()
data = mx.nd.random.normal(shape=(1, 3, 224, 224))
out = model(data)
model.export('gluon')
module = mx.mod.Module.load('gluon', 0, label_names=None, context=ctx)
module.bind(data_shapes=[('data', data.shape)])
module.forward(mx.io.DataBatch([data], None), is_train=False)
mod_out, = module.get_outputs()
assert_almost_equal(out.asnumpy(), mod_out.asnumpy())
model2 = gluon.model_zoo.vision.resnet18_v1(prefix='resnet', ctx=ctx)
model2.collect_params().load('gluon-0000.params', ctx)
out2 = model2(data)
assert_almost_equal(out.asnumpy(), out2.asnumpy())
def test_hybrid_stale_cache():
net = mx.gluon.nn.HybridSequential()
with net.name_scope():
net.add(mx.gluon.nn.Dense(10, weight_initializer='zeros', bias_initializer='ones', flatten=False))
net.hybridize()
net.initialize()
net(mx.nd.ones((2,3,5)))
net.add(mx.gluon.nn.Flatten())
assert net(mx.nd.ones((2,3,5))).shape == (2, 30)
net = mx.gluon.nn.HybridSequential()
with net.name_scope():
net.fc1 = mx.gluon.nn.Dense(10, weight_initializer='zeros',
bias_initializer='ones', flatten=False)
net.fc2 = mx.gluon.nn.Dense(10, weight_initializer='zeros',
bias_initializer='ones', flatten=False)
net.hybridize()
net.initialize()
net(mx.nd.ones((2,3,5)))
net.fc2 = mx.gluon.nn.Dense(10, weight_initializer='zeros',
bias_initializer='ones', flatten=True)
net.initialize()
assert net(mx.nd.ones((2,3,5))).shape == (2, 10)
if __name__ == '__main__':
import nose
nose.runmodule()<|fim▁end|>
|
]
for layer in layers2d:
check_layer_forward(layer, (1, 4, 20, 20))
|
<|file_name|>RegurgitatorConfiguration.java<|end_file_name|><|fim▁begin|>package no.api.regurgitator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.dropwizard.Configuration;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
@JsonIgnoreProperties(ignoreUnknown = true)
public class RegurgitatorConfiguration extends Configuration {
@Valid
@NotNull
@JsonProperty
private int proxyPort;
@Valid
@JsonProperty
private String archivedFolder;
@Valid
@NotNull
@JsonProperty
private Boolean recordOnStart;
@Valid
@NotNull
@JsonProperty
private String storageManager;
public int getProxyPort() {
return proxyPort;
}
public String getStorageManager() {
return storageManager;
}
public Boolean getRecordOnStart() {
return recordOnStart;
}
public String getArchivedFolder() {
return archivedFolder;<|fim▁hole|><|fim▁end|>
|
}
}
|
<|file_name|>LineType.hpp<|end_file_name|><|fim▁begin|>#pragma once
#include <string>
namespace pyconv {
namespace language {
namespace types{
namespace line {
using std::string;
class LineType {
public:
typedef int line_t;
static const line_t BLANK = 0;
static const line_t CLOSE_BRACE = 1;
static const line_t ELIF_STATEMENT = 2;
static const line_t ELSE_IF_STATEMENT = 3;
static const line_t ELSE_STATEMENT = 4;
static const line_t FOR_LOOP = 5;
static const line_t IF_STATEMENT = 6;
static const line_t PRINT_STATEMENT = 7;
static const line_t VARIABLE = 8;
static const line_t VARIABLE_ASSIGNMENT = 9;
static const line_t VARIABLE_DECLARATION = 10;
static const line_t UNKNOWN = -1;
static string lineTypeToString(line_t const & lineType) {
switch(lineType) {
case BLANK:
return "blank";
case CLOSE_BRACE:
return "}";
case ELIF_STATEMENT:
return "elif";
case ELSE_STATEMENT:
return "else";
case ELSE_IF_STATEMENT:
return "else if";
case FOR_LOOP:
return "for";
case IF_STATEMENT:
return "if";
case PRINT_STATEMENT:
return "print";
case VARIABLE:
return "variable";
case VARIABLE_ASSIGNMENT:
return "variable assignment";
case VARIABLE_DECLARATION:
return "variable declaration";
case UNKNOWN:
default:
return "unknown";
}
}
static line_t firstWordToLineType(string const & firstWord) {
if (firstWord == "") {
return LineType::BLANK;
} else if (firstWord == "}") {
return LineType::CLOSE_BRACE;
} else if (firstWord == "elif") {
return LineType::ELIF_STATEMENT;
} else if (firstWord == "else") {
return LineType::ELSE_STATEMENT;
} else if (firstWord == "for") {
return LineType::FOR_LOOP;
} else if (firstWord == "if") {
return LineType::IF_STATEMENT;
} else if (firstWord == "print") {
return LineType::PRINT_STATEMENT;
}
return LineType::VARIABLE;
}
private:
<|fim▁hole|>
};
}
}
}
}<|fim▁end|>
|
protected:
|
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>use std::ops::{Add, Sub, Div, Mul};
use scene::ScreenPoint;
use super::Pixel;
pub fn to_uniform(resolution: Pixel, point: ScreenPoint) -> ScreenPoint {
let resolution = ScreenPoint::from(resolution);
let result = (point * 2.0 - resolution + ScreenPoint::new(1.0, 1.0)) / resolution;
assert!(result.is_normalized());
result
}
pub fn from_uniform(resolution: Pixel, point: ScreenPoint) -> ScreenPoint {
let resolution = ScreenPoint::from(resolution);
let result = ((point + ScreenPoint::new(1.0, 1.0)) / 2.0) * resolution -
ScreenPoint::new(0.5, 0.5);
result
}
impl Sub<ScreenPoint> for ScreenPoint {
type Output = ScreenPoint;
fn sub(self, rhs: ScreenPoint) -> ScreenPoint {
ScreenPoint::new(self.x - rhs.x, self.y - rhs.y)
}
}
impl Add<ScreenPoint> for ScreenPoint {
type Output = ScreenPoint;
fn add(self, rhs: ScreenPoint) -> ScreenPoint {
ScreenPoint::new(self.x + rhs.x, self.y + rhs.y)
}
}
impl Div<f64> for ScreenPoint {
type Output = ScreenPoint;
fn div(self, rhs: f64) -> ScreenPoint {
ScreenPoint::new(self.x / rhs, self.y / rhs)
}
}<|fim▁hole|>
impl Mul<f64> for ScreenPoint {
type Output = ScreenPoint;
fn mul(self, rhs: f64) -> ScreenPoint {
ScreenPoint::new(self.x * rhs, self.y * rhs)
}
}
impl Div<ScreenPoint> for ScreenPoint {
type Output = ScreenPoint;
fn div(self, rhs: ScreenPoint) -> ScreenPoint {
ScreenPoint::new(self.x / rhs.x, self.y / rhs.y)
}
}
impl Mul<ScreenPoint> for ScreenPoint {
type Output = ScreenPoint;
fn mul(self, rhs: ScreenPoint) -> ScreenPoint {
ScreenPoint::new(self.x * rhs.x, self.y * rhs.y)
}
}
impl From<[u32; 2]> for ScreenPoint {
fn from(xy: [u32; 2]) -> ScreenPoint {
ScreenPoint::from([xy[0] as f64, xy[1] as f64])
}
}<|fim▁end|>
| |
<|file_name|>parsers.py<|end_file_name|><|fim▁begin|>"""
The MIT License (MIT)
Copyright (c) 2015-2021 Kim Blomqvist
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import sys
from .yasha import ENCODING
def parse_json(file):
import json
assert file.name.endswith('.json')
variables = json.loads(file.read().decode(ENCODING))
return variables if variables else dict()
def parse_yaml(file):
import yaml
assert file.name.endswith(('.yaml', '.yml'))
variables = yaml.safe_load(file)
return variables if variables else dict()
def parse_toml(file):
import pytoml as toml
assert file.name.endswith('.toml')
variables = toml.load(file)
return variables if variables else dict()
def parse_xml(file):
import xmltodict
assert file.name.endswith('.xml')
variables = xmltodict.parse(file.read().decode(ENCODING))
return variables if variables else dict()
def parse_svd(file):
# TODO: To be moved into its own repo
from .cmsis import SVDFile
svd = SVDFile(file)
svd.parse()
return {
"cpu": svd.cpu,
"device": svd.device,
"peripherals": svd.peripherals,
}
def parse_ini(file):
from configparser import ConfigParser
cfg = ConfigParser()
# yasha opens files in binary mode, configparser expects files in text mode
content = file.read().decode(ENCODING)
cfg.read_string(content)
result = dict(cfg)
for section, data in result.items():
result[section] = dict(data)
return result
def parse_csv(file):
from csv import reader, DictReader, Sniffer
from io import TextIOWrapper
from os.path import basename, splitext
assert file.name.endswith('.csv')
name = splitext(basename(file.name))[0] # get the filename without the extension
content = TextIOWrapper(file, encoding='utf-8', errors='replace')
sample = content.read(1024)
content.seek(0)
csv = list()
if Sniffer().has_header(sample):<|fim▁hole|> for row in DictReader(content):
csv.append(dict(row))
else:
for row in reader(content):
csv.append(row)
return {name: csv}
PARSERS = {
'.json': parse_json,
'.yaml': parse_yaml,
'.yml': parse_yaml,
'.toml': parse_toml,
'.xml': parse_xml,
'.svd': parse_svd,
'.ini': parse_ini,
'.csv': parse_csv
}<|fim▁end|>
| |
<|file_name|>openEyes.js<|end_file_name|><|fim▁begin|>/**
* Open the Eyes Instance
*/
import {getBrowserFor} from './utils'
module.exports = (
person, page, done
) => {
console.log("(openEyes) Opening the Eyes for: " + person)
getBrowserFor(person).EyesOpen(page);
global.eyesIsOpen = true<|fim▁hole|>};<|fim▁end|>
|
done()
|
<|file_name|>app_usart_stm32l1.rs<|end_file_name|><|fim▁begin|>#![feature(plugin, no_std, core)]
#![crate_type="staticlib"]
#![no_std]
#![plugin(macro_platformtree)]
extern crate core;
extern crate zinc;
#[no_mangle]
pub unsafe fn main() {
use zinc::drivers::chario::CharIO;
use zinc::hal;
use zinc::hal::pin::Gpio;
use zinc::hal::stm32l1::{init, pin, usart};
zinc::hal::mem_init::init_stack();
zinc::hal::mem_init::init_data();
let sys_clock = init::ClockConfig::new_default();
sys_clock.setup();
let _pin_tx = pin::Pin::new(pin::Port::PortA, 2,
pin::Mode::AltFunction(
pin::AltMode::AfUsart1_Usart2_Usart3,
pin::OutputType::OutPushPull,
pin::Speed::VeryLow),
pin::PullType::PullNone);
let led1 = pin::Pin::new(pin::Port::PortA, 5,
pin::Mode::GpioOut(pin::OutputType::OutPushPull, pin::Speed::VeryLow),
pin::PullType::PullNone);
led1.set_low();
let uart = usart::Usart::new(usart::UsartPeripheral::Usart2, 38400, usart::WordLen::WordLen8bits,
hal::uart::Parity::Disabled, usart::StopBit::StopBit1bit, &sys_clock);
uart.puts("Hello, world\n");
led1.set_high();<|fim▁hole|>}<|fim▁end|>
|
loop {}
|
<|file_name|>TimelineDataGrid.js<|end_file_name|><|fim▁begin|>var _slicedToArray = (function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; })();
/*
* Copyright (C) 2013 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
WebInspector.TimelineDataGrid = function (treeOutline, columns, delegate, editCallback, deleteCallback) {
WebInspector.DataGrid.call(this, columns, editCallback, deleteCallback);
this._treeOutlineDataGridSynchronizer = new WebInspector.TreeOutlineDataGridSynchronizer(treeOutline, this, delegate);
this.element.classList.add(WebInspector.TimelineDataGrid.StyleClassName);
this._filterableColumns = [];
// Check if any of the cells can be filtered.
var _iteratorNormalCompletion = true;
var _didIteratorError = false;
var _iteratorError = undefined;
try {
for (var _iterator = this.columns[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
var _step$value = _slicedToArray(_step.value, 2);
var identifier = _step$value[0];
var column = _step$value[1];
var scopeBar = column.scopeBar;
if (!scopeBar) continue;
this._filterableColumns.push(identifier);
scopeBar.columnIdentifier = identifier;
scopeBar.addEventListener(WebInspector.ScopeBar.Event.SelectionChanged, this._scopeBarSelectedItemsDidChange, this);
}
} catch (err) {
_didIteratorError = true;
_iteratorError = err;
} finally {
try {
if (!_iteratorNormalCompletion && _iterator["return"]) {
_iterator["return"]();
}
} finally {
if (_didIteratorError) {
throw _iteratorError;
}
}
}
if (this._filterableColumns.length > 1) {
console.error("Creating a TimelineDataGrid with more than one filterable column is not yet supported.");
return;
}
if (this._filterableColumns.length) {
var items = [new WebInspector.FlexibleSpaceNavigationItem(), this.columns.get(this._filterableColumns[0]).scopeBar, new WebInspector.FlexibleSpaceNavigationItem()];
this._navigationBar = new WebInspector.NavigationBar(null, items);
var container = this.element.appendChild(document.createElement("div"));
container.className = "navigation-bar-container";
container.appendChild(this._navigationBar.element);
this._updateScopeBarForcedVisibility();
}
this.addEventListener(WebInspector.DataGrid.Event.SelectedNodeChanged, this._dataGridSelectedNodeChanged, this);
this.addEventListener(WebInspector.DataGrid.Event.SortChanged, this._sort, this);
window.addEventListener("resize", this._windowResized.bind(this));
};
WebInspector.TimelineDataGrid.StyleClassName = "timeline";
WebInspector.TimelineDataGrid.HasNonDefaultFilterStyleClassName = "has-non-default-filter";
WebInspector.TimelineDataGrid.DelayedPopoverShowTimeout = 250;
WebInspector.TimelineDataGrid.DelayedPopoverHideContentClearTimeout = 500;
WebInspector.TimelineDataGrid.Event = {
FiltersDidChange: "timelinedatagrid-filters-did-change"
};
WebInspector.TimelineDataGrid.createColumnScopeBar = function (prefix, dictionary) {
prefix = prefix + "-timeline-data-grid-";
var keys = Object.keys(dictionary).filter(function (key) {
return typeof dictionary[key] === "string" || dictionary[key] instanceof String;
});
var scopeBarItems = keys.map(function (key) {
var value = dictionary[key];
var id = prefix + value;
var label = dictionary.displayName(value, true);
var item = new WebInspector.ScopeBarItem(id, label);
item.value = value;
return item;
});
scopeBarItems.unshift(new WebInspector.ScopeBarItem(prefix + "type-all", WebInspector.UIString("All"), true));
return new WebInspector.ScopeBar(prefix + "scope-bar", scopeBarItems, scopeBarItems[0]);
};
WebInspector.TimelineDataGrid.prototype = {
constructor: WebInspector.TimelineDataGrid,
__proto__: WebInspector.DataGrid.prototype,
// Public
reset: function reset() {
// May be overridden by subclasses. If so, they should call the superclass.
this._hidePopover();
},
shown: function shown() {
// May be overridden by subclasses. If so, they should call the superclass.
this._treeOutlineDataGridSynchronizer.synchronize();
},
hidden: function hidden() {
// May be overridden by subclasses. If so, they should call the superclass.
this._hidePopover();
},
treeElementForDataGridNode: function treeElementForDataGridNode(dataGridNode) {
return this._treeOutlineDataGridSynchronizer.treeElementForDataGridNode(dataGridNode);
},
dataGridNodeForTreeElement: function dataGridNodeForTreeElement(treeElement) {
return this._treeOutlineDataGridSynchronizer.dataGridNodeForTreeElement(treeElement);
},
callFramePopoverAnchorElement: function callFramePopoverAnchorElement() {
// Implemented by subclasses.
return null;
},
updateLayout: function updateLayout() {
WebInspector.DataGrid.prototype.updateLayout.call(this);
if (this._navigationBar) this._navigationBar.updateLayout();
},
treeElementMatchesActiveScopeFilters: function treeElementMatchesActiveScopeFilters(treeElement) {
var dataGridNode = this._treeOutlineDataGridSynchronizer.dataGridNodeForTreeElement(treeElement);
console.assert(dataGridNode);
var _iteratorNormalCompletion2 = true;
var _didIteratorError2 = false;
var _iteratorError2 = undefined;
try {
for (var _iterator2 = this._filterableColumns[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
var identifier = _step2.value;
var scopeBar = this.columns.get(identifier).scopeBar;
if (!scopeBar || scopeBar.defaultItem.selected) continue;
var value = dataGridNode.data[identifier];
var matchesFilter = scopeBar.selectedItems.some(function (scopeBarItem) {
return scopeBarItem.value === value;
});
if (!matchesFilter) return false;
}
} catch (err) {
_didIteratorError2 = true;<|fim▁hole|> if (!_iteratorNormalCompletion2 && _iterator2["return"]) {
_iterator2["return"]();
}
} finally {
if (_didIteratorError2) {
throw _iteratorError2;
}
}
}
return true;
},
addRowInSortOrder: function addRowInSortOrder(treeElement, dataGridNode, parentElement) {
this._treeOutlineDataGridSynchronizer.associate(treeElement, dataGridNode);
parentElement = parentElement || this._treeOutlineDataGridSynchronizer.treeOutline;
parentNode = parentElement.root ? this : this._treeOutlineDataGridSynchronizer.dataGridNodeForTreeElement(parentElement);
console.assert(parentNode);
if (this.sortColumnIdentifier) {
var insertionIndex = insertionIndexForObjectInListSortedByFunction(dataGridNode, parentNode.children, this._sortComparator.bind(this));
// Insert into the parent, which will cause the synchronizer to insert into the data grid.
parentElement.insertChild(treeElement, insertionIndex);
} else {
// Append to the parent, which will cause the synchronizer to append to the data grid.
parentElement.appendChild(treeElement);
}
},
shouldIgnoreSelectionEvent: function shouldIgnoreSelectionEvent() {
return this._ignoreSelectionEvent || false;
},
// Protected
dataGridNodeNeedsRefresh: function dataGridNodeNeedsRefresh(dataGridNode) {
if (!this._dirtyDataGridNodes) this._dirtyDataGridNodes = new Set();
this._dirtyDataGridNodes.add(dataGridNode);
if (this._scheduledDataGridNodeRefreshIdentifier) return;
this._scheduledDataGridNodeRefreshIdentifier = requestAnimationFrame(this._refreshDirtyDataGridNodes.bind(this));
},
// Private
_refreshDirtyDataGridNodes: function _refreshDirtyDataGridNodes() {
if (this._scheduledDataGridNodeRefreshIdentifier) {
cancelAnimationFrame(this._scheduledDataGridNodeRefreshIdentifier);
delete this._scheduledDataGridNodeRefreshIdentifier;
}
if (!this._dirtyDataGridNodes) return;
var selectedNode = this.selectedNode;
var sortComparator = this._sortComparator.bind(this);
var treeOutline = this._treeOutlineDataGridSynchronizer.treeOutline;
this._treeOutlineDataGridSynchronizer.enabled = false;
var _iteratorNormalCompletion3 = true;
var _didIteratorError3 = false;
var _iteratorError3 = undefined;
try {
for (var _iterator3 = this._dirtyDataGridNodes[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
var dataGridNode = _step3.value;
dataGridNode.refresh();
if (!this.sortColumnIdentifier) continue;
if (dataGridNode === selectedNode) this._ignoreSelectionEvent = true;
var treeElement = this._treeOutlineDataGridSynchronizer.treeElementForDataGridNode(dataGridNode);
console.assert(treeElement);
treeOutline.removeChild(treeElement);
this.removeChild(dataGridNode);
var insertionIndex = insertionIndexForObjectInListSortedByFunction(dataGridNode, this.children, sortComparator);
treeOutline.insertChild(treeElement, insertionIndex);
this.insertChild(dataGridNode, insertionIndex);
// Adding the tree element back to the tree outline subjects it to filters.
// Make sure we keep the hidden state in-sync while the synchronizer is disabled.
dataGridNode.element.classList.toggle("hidden", treeElement.hidden);
if (dataGridNode === selectedNode) {
selectedNode.revealAndSelect();
delete this._ignoreSelectionEvent;
}
}
} catch (err) {
_didIteratorError3 = true;
_iteratorError3 = err;
} finally {
try {
if (!_iteratorNormalCompletion3 && _iterator3["return"]) {
_iterator3["return"]();
}
} finally {
if (_didIteratorError3) {
throw _iteratorError3;
}
}
}
this._treeOutlineDataGridSynchronizer.enabled = true;
delete this._dirtyDataGridNodes;
},
_sort: function _sort() {
var sortColumnIdentifier = this.sortColumnIdentifier;
if (!sortColumnIdentifier) return;
var selectedNode = this.selectedNode;
this._ignoreSelectionEvent = true;
this._treeOutlineDataGridSynchronizer.enabled = false;
var treeOutline = this._treeOutlineDataGridSynchronizer.treeOutline;
if (treeOutline.selectedTreeElement) treeOutline.selectedTreeElement.deselect(true);
// Collect parent nodes that need their children sorted. Do this in two phases since
// traverseNextNode would get confused if we sort the tree while traversing it.
var parentDataGridNodes = [this];
var currentDataGridNode = this.children[0];
while (currentDataGridNode) {
if (currentDataGridNode.children.length) parentDataGridNodes.push(currentDataGridNode);
currentDataGridNode = currentDataGridNode.traverseNextNode(false, null, true);
}
// Sort the children of collected parent nodes.
var _iteratorNormalCompletion4 = true;
var _didIteratorError4 = false;
var _iteratorError4 = undefined;
try {
for (var _iterator4 = parentDataGridNodes[Symbol.iterator](), _step4; !(_iteratorNormalCompletion4 = (_step4 = _iterator4.next()).done); _iteratorNormalCompletion4 = true) {
var parentDataGridNode = _step4.value;
var parentTreeElement = parentDataGridNode === this ? treeOutline : this._treeOutlineDataGridSynchronizer.treeElementForDataGridNode(parentDataGridNode);
console.assert(parentTreeElement);
var childDataGridNodes = parentDataGridNode.children.slice();
parentDataGridNode.removeChildren();
parentTreeElement.removeChildren();
childDataGridNodes.sort(this._sortComparator.bind(this));
var _iteratorNormalCompletion5 = true;
var _didIteratorError5 = false;
var _iteratorError5 = undefined;
try {
for (var _iterator5 = childDataGridNodes[Symbol.iterator](), _step5; !(_iteratorNormalCompletion5 = (_step5 = _iterator5.next()).done); _iteratorNormalCompletion5 = true) {
var dataGridNode = _step5.value;
var treeElement = this._treeOutlineDataGridSynchronizer.treeElementForDataGridNode(dataGridNode);
console.assert(treeElement);
parentTreeElement.appendChild(treeElement);
parentDataGridNode.appendChild(dataGridNode);
// Adding the tree element back to the tree outline subjects it to filters.
// Make sure we keep the hidden state in-sync while the synchronizer is disabled.
dataGridNode.element.classList.toggle("hidden", treeElement.hidden);
}
} catch (err) {
_didIteratorError5 = true;
_iteratorError5 = err;
} finally {
try {
if (!_iteratorNormalCompletion5 && _iterator5["return"]) {
_iterator5["return"]();
}
} finally {
if (_didIteratorError5) {
throw _iteratorError5;
}
}
}
}
} catch (err) {
_didIteratorError4 = true;
_iteratorError4 = err;
} finally {
try {
if (!_iteratorNormalCompletion4 && _iterator4["return"]) {
_iterator4["return"]();
}
} finally {
if (_didIteratorError4) {
throw _iteratorError4;
}
}
}
this._treeOutlineDataGridSynchronizer.enabled = true;
if (selectedNode) selectedNode.revealAndSelect();
delete this._ignoreSelectionEvent;
},
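// Compare two data grid nodes on the current sort column; handles numbers, strings, CallFrame and SourceCode values.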
_sortComparator: function _sortComparator(node1, node2) {
var sortColumnIdentifier = this.sortColumnIdentifier;
if (!sortColumnIdentifier) return 0;
var sortDirection = this.sortOrder === WebInspector.DataGrid.SortOrder.Ascending ? 1 : -1;
var value1 = node1.data[sortColumnIdentifier];
var value2 = node2.data[sortColumnIdentifier];
if (typeof value1 === "number" && typeof value2 === "number") {
if (isNaN(value1) && isNaN(value2)) return 0;
if (isNaN(value1)) return -sortDirection;
if (isNaN(value2)) return sortDirection;
return (value1 - value2) * sortDirection;
}
if (typeof value1 === "string" && typeof value2 === "string") return value1.localeCompare(value2) * sortDirection;
if (value1 instanceof WebInspector.CallFrame || value2 instanceof WebInspector.CallFrame) {
// Sort by function name if available, then fall back to the source code object.
value1 = value1 && value1.functionName ? value1.functionName : value1 && value1.sourceCodeLocation ? value1.sourceCodeLocation.sourceCode : "";
value2 = value2 && value2.functionName ? value2.functionName : value2 && value2.sourceCodeLocation ? value2.sourceCodeLocation.sourceCode : "";
}
if (value1 instanceof WebInspector.SourceCode || value2 instanceof WebInspector.SourceCode) {
value1 = value1 ? value1.displayName || "" : "";
value2 = value2 ? value2.displayName || "" : "";
}
// For everything else (mostly booleans).
return (value1 < value2 ? -1 : value1 > value2 ? 1 : 0) * sortDirection;
},
_updateScopeBarForcedVisibility: function _updateScopeBarForcedVisibility() {
var _iteratorNormalCompletion6 = true;
var _didIteratorError6 = false;
var _iteratorError6 = undefined;
try {
for (var _iterator6 = this._filterableColumns[Symbol.iterator](), _step6; !(_iteratorNormalCompletion6 = (_step6 = _iterator6.next()).done); _iteratorNormalCompletion6 = true) {
var identifier = _step6.value;
var scopeBar = this.columns.get(identifier).scopeBar;
if (scopeBar) {
this.element.classList.toggle(WebInspector.TimelineDataGrid.HasNonDefaultFilterStyleClassName, scopeBar.hasNonDefaultItemSelected());
break;
}
}
} catch (err) {
_didIteratorError6 = true;
_iteratorError6 = err;
} finally {
try {
if (!_iteratorNormalCompletion6 && _iterator6["return"]) {
_iterator6["return"]();
}
} finally {
if (_didIteratorError6) {
throw _iteratorError6;
}
}
}
},
_scopeBarSelectedItemsDidChange: function _scopeBarSelectedItemsDidChange(event) {
this._updateScopeBarForcedVisibility();
var columnIdentifier = event.target.columnIdentifier;
this.dispatchEventToListeners(WebInspector.TimelineDataGrid.Event.FiltersDidChange, { columnIdentifier: columnIdentifier });
},
_dataGridSelectedNodeChanged: function _dataGridSelectedNodeChanged(event) {
if (!this.selectedNode) {
this._hidePopover();
return;
}
var record = this.selectedNode.record;
if (!record || !record.callFrames || !record.callFrames.length) {
this._hidePopover();
return;
}
this._showPopoverForSelectedNodeSoon();
},
_windowResized: function _windowResized(event) {
if (this._popover && this._popover.visible) this._updatePopoverForSelectedNode(false);
},
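// Defer creating and showing the call stack popover briefly so rapid selection changes don't flash it.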
_showPopoverForSelectedNodeSoon: function _showPopoverForSelectedNodeSoon() {
if (this._showPopoverTimeout) return;
function delayedWork() {
if (!this._popover) this._popover = new WebInspector.Popover();
this._updatePopoverForSelectedNode(true);
}
this._showPopoverTimeout = setTimeout(delayedWork.bind(this), WebInspector.TimelineDataGrid.DelayedPopoverShowTimeout);
},
_hidePopover: function _hidePopover() {
if (this._showPopoverTimeout) {
clearTimeout(this._showPopoverTimeout);
delete this._showPopoverTimeout;
}
if (this._popover) this._popover.dismiss();
function delayedWork() {
if (this._popoverCallStackTreeOutline) this._popoverCallStackTreeOutline.removeChildren();
}
if (this._hidePopoverContentClearTimeout) clearTimeout(this._hidePopoverContentClearTimeout);
this._hidePopoverContentClearTimeout = setTimeout(delayedWork.bind(this), WebInspector.TimelineDataGrid.DelayedPopoverHideContentClearTimeout);
},
_updatePopoverForSelectedNode: function _updatePopoverForSelectedNode(updateContent) {
if (!this._popover || !this.selectedNode) return;
var targetPopoverElement = this.callFramePopoverAnchorElement();
console.assert(targetPopoverElement, "TimelineDataGrid subclass should always return a valid element from callFramePopoverAnchorElement.");
if (!targetPopoverElement) return;
var targetFrame = WebInspector.Rect.rectFromClientRect(targetPopoverElement.getBoundingClientRect());
// The element might be hidden if it does not have a width and height.
if (!targetFrame.size.width && !targetFrame.size.height) return;
if (this._hidePopoverContentClearTimeout) {
clearTimeout(this._hidePopoverContentClearTimeout);
delete this._hidePopoverContentClearTimeout;
}
if (updateContent) this._popover.content = this._createPopoverContent();
this._popover.present(targetFrame.pad(2), [WebInspector.RectEdge.MAX_Y, WebInspector.RectEdge.MIN_Y, WebInspector.RectEdge.MAX_X]);
},
_createPopoverContent: function _createPopoverContent() {
if (!this._popoverCallStackTreeOutline) {
var contentElement = document.createElement("ol");
contentElement.classList.add("timeline-data-grid-tree-outline");
this._popoverCallStackTreeOutline = new TreeOutline(contentElement);
this._popoverCallStackTreeOutline.onselect = this._popoverCallStackTreeElementSelected.bind(this);
} else this._popoverCallStackTreeOutline.removeChildren();
var callFrames = this.selectedNode.record.callFrames;
for (var i = 0; i < callFrames.length; ++i) {
var callFrameTreeElement = new WebInspector.CallFrameTreeElement(callFrames[i]);
this._popoverCallStackTreeOutline.appendChild(callFrameTreeElement);
}
var content = document.createElement("div");
content.className = "timeline-data-grid-popover";
content.appendChild(this._popoverCallStackTreeOutline.element);
return content;
},
_popoverCallStackTreeElementSelected: function _popoverCallStackTreeElementSelected(treeElement, selectedByUser) {
this._popover.dismiss();
console.assert(treeElement instanceof WebInspector.CallFrameTreeElement, "TreeElements in TimelineDataGrid popover should always be CallFrameTreeElements");
var callFrame = treeElement.callFrame;
if (!callFrame.sourceCodeLocation) return;
WebInspector.resourceSidebarPanel.showSourceCodeLocation(callFrame.sourceCodeLocation);
}
};<|fim▁end|>
|
_iteratorError2 = err;
} finally {
try {
|
<|file_name|>base.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of the jabber.at homepage (https://github.com/jabber-at/hp).
#
# This project is free software: you can redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# This project is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along with this project. If not, see
# <http://www.gnu.org/licenses/>.
import os
import re
from contextlib import contextmanager
from unittest import mock
from celery import task
from pyvirtualdisplay import Display
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.firefox.webdriver import WebDriver
from selenium.webdriver.support.wait import WebDriverWait
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test import TestCase as DjangoTestCase
VIRTUAL_DISPLAY = os.environ.get('VIRTUAL_DISPLAY', 'y').lower().strip() == 'y'
class HomepageTestCaseMixin(object):
def assertIsTask(self, t, expected):
self.assertEqual(t, task(expected))
def assertTaskCount(self, mocked, count):
"""Assert that `count` Celery tasks have been called."""
self.assertEqual(mocked.call_count, count)
def assertNoTasks(self, mocked):
self.assertTaskCount(mocked, 0)
def assertTaskCall(self, mocked, task, *args, **kwargs):
self.assertTrue(mocked.called)
a, k = mocked.call_args
self.assertEqual(k, {}) # apply_async receives task args/kwargs as tuple/dict arg
instance, called_args, called_kwargs = a
self.assertIsTask(instance, task)
self.assertEqual(args, called_args)
self.assertEqual(kwargs, called_kwargs)
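# Patches Task.apply_async so queued tasks run synchronously via Task.run; yields the mock for call assertions.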
@contextmanager
def mock_celery(self):
def run(self, args, kwargs):
return self.run(*args, **kwargs)
with mock.patch('celery.app.task.Task.apply_async', side_effect=run, autospec=True) as mocked:
yield mocked
class SeleniumMixin(object):
@classmethod
def setUpClass(cls):
super().setUpClass()
if VIRTUAL_DISPLAY:
cls.vdisplay = Display(visible=0, size=(1024, 768))
cls.vdisplay.start()
cls.selenium = WebDriver(executable_path=settings.GECKODRIVER_PATH)
@classmethod
def tearDownClass(cls):
cls.selenium.quit()
if VIRTUAL_DISPLAY:
cls.vdisplay.stop()
super().tearDownClass()
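# Expected-condition helper for WebDriverWait: resolves with the element once the given CSS property reaches the expected value.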
class wait_for_css_property(object):
def __init__(self, elem, prop, value):
self.elem = elem
self.prop = prop
self.value = value
def __call__(self, driver):
if self.elem.value_of_css_property(self.prop) == self.value:
return self.elem
else:
return False
def wait_for_display(self, elem, wait=2):
WebDriverWait(self.selenium, wait).until(lambda d: elem.is_displayed())
def wait_for_page_load(self, wait=2):
WebDriverWait(self.selenium, wait).until(lambda driver: driver.find_element_by_tag_name('body'))
def wait_for_valid_form(self, form=None, wait=2):
"""Wait until a form becomes valid according to HTML5 form validation.
The registration form becomes valid only after a split second, for some reason.
"""
if form is None:
form = self.find('form')
WebDriverWait(self.selenium, wait).until(
lambda driver: self.selenium.execute_script('return arguments[0].checkValidity() === true', form))
def wait_for_focus(self, elem):
# when an element gets focus, it turns blue:
wait = WebDriverWait(self.selenium, 10)
wait.until(self.wait_for_css_property(elem, 'border-top-color', 'rgb(128, 189, 255)'))
def wait_for_invalid(self, elem):
wait = WebDriverWait(self.selenium, 10)
wait.until(self.wait_for_css_property(elem, 'border-top-color', 'rgb(220, 53, 69)'))
def wait_for_valid(self, elem):
wait = WebDriverWait(self.selenium, 10)
wait.until(self.wait_for_css_property(elem, 'border-top-color', 'rgb(40, 167, 69)'))
def find(self, selector):
"""Find an element by CSS selector."""<|fim▁hole|>
return self.selenium.find_element_by_css_selector(selector)
def get_classes(self, elem):
"""Get CSS classes from the passed Element."""
return re.split(r'\s+', elem.get_attribute('class').strip())
def get_validity(self, elem):
"""Get validity object from a HTML5 form field."""
return self.selenium.execute_script('return arguments[0].validity', elem)
def get_valid(self, elem):
val = self.get_validity(elem)
return val['valid']
def assertNoElementExists(self, selector, wait=0):
"""Assert that no element with the passed selector is present on the page."""
if wait:
with self.assertRaises(TimeoutException):
WebDriverWait(self.selenium, wait).until(lambda d: self.find(selector))
else:
with self.assertRaises(NoSuchElementException):
self.find(selector)
def assertDisplayed(self, elem):
if isinstance(elem, str):
elem = self.find(elem)
self.assertTrue(elem.is_displayed())
def assertNotDisplayed(self, elem):
if isinstance(elem, str):
elem = self.find(elem)
self.assertFalse(elem.is_displayed())
def assertClass(self, elem, cls):
"""Assert that an element has a CSS class."""
self.assertIn(cls, self.get_classes(elem))
def assertNotClass(self, elem, cls):
"""Assert that an element does **not** have a CSS class."""
self.assertNotIn(cls, self.get_classes(elem))
def assertCSSBorderColor(self, elem, color):
"""Assert that an element has a given border color."""
self.assertEqual(elem.value_of_css_property('border-right-color'), color)
self.assertEqual(elem.value_of_css_property('border-left-color'), color)
self.assertEqual(elem.value_of_css_property('border-top-color'), color)
self.assertEqual(elem.value_of_css_property('border-bottom-color'), color)
def assertNotValidated(self, fg, elem):
"""Assert that a Bootstrap input element is not validated."""
self.assertNotClass(fg, 'was-validated')
for feedback in fg.find_elements_by_css_selector('.invalid-feedback'):
self.assertFalse(feedback.is_displayed())
if self.selenium.switch_to.active_element != elem: # passed element is not currently active
self.assertCSSBorderColor(elem, 'rgb(206, 212, 218)')
else:
self.assertCSSBorderColor(elem, 'rgb(128, 189, 255)')
def assertInvalid(self, fg, elem, *errors):
"""Assert that a Bootstrap input element validates as invalid."""
self.assertClass(fg, 'was-validated')
errors = set(['invalid-%s' % e for e in errors])
for feedback in fg.find_elements_by_css_selector('.invalid-feedback'):
classes = set(self.get_classes(feedback))
if errors & classes:
self.assertTrue(feedback.is_displayed(), '.%s is not displayed' % ('.'.join(classes)))
else:
self.assertFalse(feedback.is_displayed(), '.%s is displayed' % ('.'.join(classes)))
self.wait_for_invalid(elem)
self.assertFalse(self.get_valid(elem))
def assertValid(self, fg, elem):
"""Assert that a Bootstrap input element validates as valid."""
self.assertClass(fg, 'was-validated')
for feedback in fg.find_elements_by_css_selector('.invalid-feedback'):
self.assertFalse(feedback.is_displayed())
self.wait_for_valid(elem)
self.assertTrue(self.get_valid(elem))
class TestCase(HomepageTestCaseMixin, DjangoTestCase):
pass
class SeleniumTestCase(SeleniumMixin, HomepageTestCaseMixin, StaticLiveServerTestCase):
pass<|fim▁end|>
| |
<|file_name|>feature_fee_estimation.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2009-2019 The Bitcoin Core developers
# Copyright (c) 2014-2019 The DigiByte Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test fee estimation code."""
from decimal import Decimal
import random
from test_framework.messages import CTransaction, CTxIn, CTxOut, COutPoint, ToHex, COIN
from test_framework.script import CScript, OP_1, OP_DROP, OP_2, OP_HASH160, OP_EQUAL, hash160, OP_TRUE
from test_framework.test_framework import DigiByteTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_greater_than_or_equal,
connect_nodes,
satoshi_round,
sync_blocks,
sync_mempools,
)
# Construct 2 trivial P2SH's and the ScriptSigs that spend them
# So we can create many transactions without needing to spend
# time signing.
REDEEM_SCRIPT_1 = CScript([OP_1, OP_DROP])
REDEEM_SCRIPT_2 = CScript([OP_2, OP_DROP])
P2SH_1 = CScript([OP_HASH160, hash160(REDEEM_SCRIPT_1), OP_EQUAL])
P2SH_2 = CScript([OP_HASH160, hash160(REDEEM_SCRIPT_2), OP_EQUAL])
# Associated ScriptSig's to spend satisfy P2SH_1 and P2SH_2
SCRIPT_SIG = [CScript([OP_TRUE, REDEEM_SCRIPT_1]), CScript([OP_TRUE, REDEEM_SCRIPT_2])]
def small_txpuzzle_randfee(from_node, conflist, unconflist, amount, min_fee, fee_increment):
"""Create and send a transaction with a random fee.
The transaction pays to a trivial P2SH script, and assumes that its inputs
are of the same form.
The function takes a list of confirmed outputs and unconfirmed outputs
and attempts to use the confirmed list first for its inputs.
It adds the newly created outputs to the unconfirmed list.
Returns (raw transaction, fee)."""
# It's best to exponentially distribute our random fees
# because the buckets are exponentially spaced.
# Exponentially distributed from 1-128 * fee_increment
rand_fee = float(fee_increment) * (1.1892 ** random.randint(0, 28))
# Total fee ranges from min_fee to min_fee + 127*fee_increment
fee = min_fee - fee_increment + satoshi_round(rand_fee)
tx = CTransaction()
total_in = Decimal("0.00000000")
while total_in <= (amount + fee) and len(conflist) > 0:
t = conflist.pop(0)
total_in += t["amount"]
tx.vin.append(CTxIn(COutPoint(int(t["txid"], 16), t["vout"]), b""))
if total_in <= amount + fee:
while total_in <= (amount + fee) and len(unconflist) > 0:
t = unconflist.pop(0)
total_in += t["amount"]
tx.vin.append(CTxIn(COutPoint(int(t["txid"], 16), t["vout"]), b""))
if total_in <= amount + fee:
raise RuntimeError("Insufficient funds: need %d, have %d" % (amount + fee, total_in))
tx.vout.append(CTxOut(int((total_in - amount - fee) * COIN), P2SH_1))
tx.vout.append(CTxOut(int(amount * COIN), P2SH_2))
# These transactions don't need to be signed, but we still have to insert
# the ScriptSig that will satisfy the ScriptPubKey.
for inp in tx.vin:
inp.scriptSig = SCRIPT_SIG[inp.prevout.n]
txid = from_node.sendrawtransaction(ToHex(tx), True)
unconflist.append({"txid": txid, "vout": 0, "amount": total_in - amount - fee})
unconflist.append({"txid": txid, "vout": 1, "amount": amount})
return (ToHex(tx), fee)
def split_inputs(from_node, txins, txouts, initial_split=False):
"""Generate a lot of inputs so we can generate a ton of transactions.
This function takes an input from txins, and creates and sends a transaction
which splits the value into 2 outputs which are appended to txouts.
Previously this was designed to use small inputs so they wouldn't have
a high coin age back when the notion of priority still existed."""
prevtxout = txins.pop()
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int(prevtxout["txid"], 16), prevtxout["vout"]), b""))
half_change = satoshi_round(prevtxout["amount"] / 2)
rem_change = prevtxout["amount"] - half_change - Decimal("0.00001000")
tx.vout.append(CTxOut(int(half_change * COIN), P2SH_1))
tx.vout.append(CTxOut(int(rem_change * COIN), P2SH_2))
# If this is the initial split we actually need to sign the transaction
# Otherwise we just need to insert the proper ScriptSig
if (initial_split):
completetx = from_node.signrawtransactionwithwallet(ToHex(tx))["hex"]
else:
tx.vin[0].scriptSig = SCRIPT_SIG[prevtxout["vout"]]
completetx = ToHex(tx)
txid = from_node.sendrawtransaction(completetx, True)
txouts.append({"txid": txid, "vout": 0, "amount": half_change})
txouts.append({"txid": txid, "vout": 1, "amount": rem_change})
def check_estimates(node, fees_seen):
"""Call estimatesmartfee and verify that the estimates meet certain invariants."""
delta = 1.0e-6 # account for rounding error
last_feerate = float(max(fees_seen))
all_smart_estimates = [node.estimatesmartfee(i) for i in range(1, 26)]
for i, e in enumerate(all_smart_estimates): # estimate is for i+1
feerate = float(e["feerate"])
assert_greater_than(feerate, 0)
if feerate + delta < min(fees_seen) or feerate - delta > max(fees_seen):
raise AssertionError("Estimated fee (%f) out of range (%f,%f)"
% (feerate, min(fees_seen), max(fees_seen)))
if feerate - delta > last_feerate:
raise AssertionError("Estimated fee (%f) larger than last fee (%f) for lower number of confirms"
% (feerate, last_feerate))
last_feerate = feerate
if i == 0:
assert_equal(e["blocks"], 2)
else:
assert_greater_than_or_equal(i + 1, e["blocks"])
class EstimateFeeTest(DigiByteTestFramework):
def set_test_params(self):
self.num_nodes = 3
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self):
"""
We'll setup the network to have 3 nodes that all mine with different parameters.
But first we need to use one node to create a lot of outputs
which we will use to generate our transactions.
"""
self.add_nodes(3, extra_args=[["-maxorphantx=1000", "-whitelist=127.0.0.1"],
["-blockmaxweight=68000", "-maxorphantx=1000"],
["-blockmaxweight=32000", "-maxorphantx=1000"]])
# Use node0 to mine blocks for input splitting
# Node1 mines small blocks but that are bigger than the expected transaction rate.
# NOTE: the CreateNewBlock code starts counting block weight at 4,000 weight,
# (68k weight is room enough for 120 or so transactions)
# Node2 is a stingy miner that
# produces blocks that are too small (room for only 55 or so transactions)
def transact_and_mine(self, numblocks, mining_node):
min_fee = Decimal("0.00001")
# We will now mine numblocks blocks generating on average 100 transactions between each block
# We shuffle our confirmed txout set before each set of transactions
# small_txpuzzle_randfee will use the transactions that have inputs already in the chain when possible
# resorting to tx's that depend on the mempool when those run out
for i in range(numblocks):
random.shuffle(self.confutxo)
for j in range(random.randrange(100 - 50, 100 + 50)):
from_index = random.randint(1, 2)
(txhex, fee) = small_txpuzzle_randfee(self.nodes[from_index], self.confutxo,
self.memutxo, Decimal("0.005"), min_fee, min_fee)
tx_kbytes = (len(txhex) // 2) / 1000.0
self.fees_per_kb.append(float(fee) / tx_kbytes)
sync_mempools(self.nodes[0:3], wait=.1)
mined = mining_node.getblock(mining_node.generate(1)[0], True)["tx"]
sync_blocks(self.nodes[0:3], wait=.1)
# update which txouts are confirmed
newmem = []
for utx in self.memutxo:
if utx["txid"] in mined:
self.confutxo.append(utx)
else:
newmem.append(utx)
self.memutxo = newmem
def import_deterministic_coinbase_privkeys(self):
self.start_nodes()
super().import_deterministic_coinbase_privkeys()
self.stop_nodes()
def run_test(self):
self.log.info("This test is time consuming, please be patient")
self.log.info("Splitting inputs so we can generate tx's")
# Start node0
self.start_node(0)
self.txouts = []
self.txouts2 = []
# Split a coinbase into two transaction puzzle outputs
split_inputs(self.nodes[0], self.nodes[0].listunspent(0), self.txouts, True)
# Mine
while (len(self.nodes[0].getrawmempool()) > 0):
self.nodes[0].generate(1)
# Repeatedly split those 2 outputs, doubling twice for each rep
# Use txouts to monitor the available utxo, since these won't be tracked in wallet
reps = 0
while (reps < 5):
# Double txouts to txouts2
while (len(self.txouts) > 0):
split_inputs(self.nodes[0], self.txouts, self.txouts2)
while (len(self.nodes[0].getrawmempool()) > 0):
self.nodes[0].generate(1)
# Double txouts2 to txouts
while (len(self.txouts2) > 0):
split_inputs(self.nodes[0], self.txouts2, self.txouts)
while (len(self.nodes[0].getrawmempool()) > 0):<|fim▁hole|> # Now we can connect the other nodes, didn't want to connect them earlier
# so the estimates would not be affected by the splitting transactions
self.start_node(1)
self.start_node(2)
connect_nodes(self.nodes[1], 0)
connect_nodes(self.nodes[0], 2)
connect_nodes(self.nodes[2], 1)
self.sync_all()
self.fees_per_kb = []
self.memutxo = []
self.confutxo = self.txouts # Start with the set of confirmed txouts after splitting
self.log.info("Will output estimates for 1/2/3/6/15/25 blocks")
for i in range(2):
self.log.info("Creating transactions and mining them with a block size that can't keep up")
# Create transactions and mine 10 small blocks with node 2, but create txs faster than we can mine
self.transact_and_mine(10, self.nodes[2])
check_estimates(self.nodes[1], self.fees_per_kb)
self.log.info("Creating transactions and mining them at a block size that is just big enough")
# Generate transactions while mining 10 more blocks, this time with node1
# which mines blocks with capacity just above the rate that transactions are being created
self.transact_and_mine(10, self.nodes[1])
check_estimates(self.nodes[1], self.fees_per_kb)
# Finish by mining a normal-sized block:
while len(self.nodes[1].getrawmempool()) > 0:
self.nodes[1].generate(1)
sync_blocks(self.nodes[0:3], wait=.1)
self.log.info("Final estimates after emptying mempools")
check_estimates(self.nodes[1], self.fees_per_kb)
if __name__ == '__main__':
EstimateFeeTest().main()<|fim▁end|>
|
self.nodes[0].generate(1)
reps += 1
self.log.info("Finished splitting")
|
<|file_name|>musashi.rs<|end_file_name|><|fim▁begin|>// Integration with Musashi
extern crate libc;
// Register enum copied from Musashi's m68k_register_t enum
#[repr(C)]
#[derive(Copy, Clone, Debug, PartialEq)]
#[allow(dead_code)]
pub enum Register {
/* Real registers */
D0, /* Data registers */
D1,
D2,
D3,
D4,
D5,
D6,
D7,
A0, /* Address registers */
A1,
A2,
A3,
A4,
A5,
A6,
A7,
PC, /* Program Counter */
SR, /* Status Register */
SP, /* The current Stack Pointer (located in A7) */
USP, /* User Stack Pointer */
ISP, /* Interrupt Stack Pointer */
MSP, /* Master Stack Pointer */
SFC, /* Source Function Code */
DFC, /* Destination Function Code */
VBR, /* Vector Base Register */
CACR, /* Cache Control Register */
CAAR, /* Cache Address Register */
/* Assumed registers */
/* These are cheat registers which emulate the 1-longword prefetch
* present in the 68000 and 68010.
*/
PrefAddr, /* Last prefetch address */
PrefData, /* Last prefetch data */
/* Convenience registers */
PPC, /* Previous value in the program counter */
IR, /* Instruction register */
CpuType /* Type of CPU being run */
}
#[repr(C)]
#[derive(Copy, Clone)]
#[allow(dead_code)]
enum CpuType
{
Invalid,
M68000,
M68010,
M68EC020,
M68020,
M68030, /* Supported by disassembler ONLY */
M68040 /* Supported by disassembler ONLY */
}
#[link(name = "musashi", kind = "static")]
extern {
fn m68k_init();
fn m68k_set_cpu_type(cputype: CpuType);
fn m68k_pulse_reset();
fn m68k_execute(num_cycles: i32) -> i32;
fn m68k_get_reg(context: *mut libc::c_void, regnum: Register) -> u32;
fn m68k_set_reg(regnum: Register, value: u32);
}
use ram::{Operation, AddressSpace, SUPERVISOR_PROGRAM, SUPERVISOR_DATA, USER_PROGRAM, USER_DATA, ADDRBUS_MASK};
static mut musashi_locations_used: usize = 0;
static mut musashi_memory_initializer: u32 = 0xaaaaaaaa;
static mut musashi_memory_location: [u32; 1024] = [0; 1024];
static mut musashi_memory_data: [u8; 1024] = [0; 1024];
// as statics are not allowed to have destructors, allocate a
// big enough array to hold the small number of operations
// expected from executing a very limited number of opcodes
static mut musashi_ops: [Operation; 512] = [Operation::None; 512];
static mut musashi_opcount: usize = 0;
static mut musashi_address_space: AddressSpace = SUPERVISOR_PROGRAM;
unsafe fn register_op(op: Operation) {
if musashi_opcount < musashi_ops.len() {
// println!("mem_op {:?}", op);
musashi_ops[musashi_opcount] = op;
musashi_opcount += 1;
}
}
// callbacks from Musashi
#[no_mangle]
pub extern fn m68k_read_memory_8(address: u32) -> u32 {
unsafe {
let address = address & ADDRBUS_MASK;
let value = read_musashi_byte(address);
let op = Operation::ReadByte(musashi_address_space, address, value);
register_op(op);
value as u32
}
}
#[no_mangle]
pub extern fn m68k_read_memory_16(address: u32) -> u32 {
unsafe {
let address = address & ADDRBUS_MASK;
let value = (read_musashi_byte(address+0) as u16) << 8
|(read_musashi_byte(address+1) as u16) << 0;
let op = Operation::ReadWord(musashi_address_space, address, value);
register_op(op);
value as u32
}
}
#[no_mangle]
pub extern fn m68k_read_memory_32(address: u32) -> u32 {
unsafe {
let value = ((read_musashi_byte(address+0) as u32) << 24
|(read_musashi_byte(address+1) as u32) << 16
|(read_musashi_byte(address+2) as u32) << 8
|(read_musashi_byte(address+3) as u32) << 0) as u32;
let op = Operation::ReadLong(musashi_address_space, address, value);
register_op(op);
value
}
}
#[no_mangle]
pub extern fn m68k_write_memory_8(address: u32, value: u32) {
unsafe {
let op = Operation::WriteByte(musashi_address_space, address, value);
register_op(op);
write_musashi_byte(address+0, (value & 0xff) as u8);
}
}
#[no_mangle]
pub extern fn m68k_write_memory_16(address: u32, value: u32) {
unsafe {
let op = Operation::WriteWord(musashi_address_space, address, value);
register_op(op);
write_musashi_byte(address+0, ((value & 0xff00) >> 8) as u8);
write_musashi_byte(address+1, ((value & 0x00ff) >> 0) as u8);
}
}
#[no_mangle]
pub extern fn m68k_write_memory_32(address: u32, value: u32) {
unsafe {
let op = Operation::WriteLong(musashi_address_space, address, value);
register_op(op);
write_musashi_byte(address+0, ((value & 0xff000000) >> 24) as u8);
write_musashi_byte(address+1, ((value & 0x00ff0000) >> 16) as u8);
write_musashi_byte(address+2, ((value & 0x0000ff00) >> 8) as u8);
write_musashi_byte(address+3, ((value & 0x000000ff) >> 0) as u8);
}
}
// read uninitialized bytes from initializer instead
unsafe fn read_initializer(address: u32) -> u8 {
let shift = match address % 4 {
0 => 24,
1 => 16,
2 => 8,
_ => 0,
};
((musashi_memory_initializer >> shift) & 0xFF) as u8
}
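// Linear scan of the sparse write log for an address that has been written before.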
unsafe fn find_musashi_location(address: u32) -> Option<usize> {
for i in 0..musashi_locations_used {
if musashi_memory_location[i as usize] == address {
return Some(i as usize)
}
};
None
}
unsafe fn read_musashi_byte(address: u32) -> u8 {
let address = address & ADDRBUS_MASK;
if let Some(index) = find_musashi_location(address) {
musashi_memory_data[index]
} else {
read_initializer(address)
}
}
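// Only bytes that differ from the initializer pattern are stored, keeping the sparse write log small.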
unsafe fn write_musashi_byte(address: u32, data: u8) {
let address = address & ADDRBUS_MASK;
let write_differs_from_initializer = data != read_initializer(address);
if write_differs_from_initializer {
if let Some(index) = find_musashi_location(address) {
musashi_memory_data[index] = data;
} else {
musashi_memory_location[musashi_locations_used] = address;
musashi_memory_data[musashi_locations_used] = data;
musashi_locations_used += 1;
}
}
}
#[no_mangle]
pub extern fn cpu_pulse_reset() {panic!("pr")}
#[no_mangle]
pub extern fn cpu_long_branch() {}
#[no_mangle]
pub extern fn m68k_set_fc(fc: u32) {
unsafe {
musashi_address_space = match fc {
1 => USER_DATA,
2 => USER_PROGRAM,
5 => SUPERVISOR_DATA,
6 => SUPERVISOR_PROGRAM,
_ => panic!("unknown fc: {}", fc),
};
// println!("set_fc {:?}", musashi_address_space);
}
}
#[allow(unused_variables)]
#[no_mangle]
pub extern fn cpu_irq_ack(level: i32) -> i32 {panic!("ia")}
#[no_mangle]
pub extern fn cpu_instr_callback() {}
use std::ptr;
#[allow(unused_variables)]
pub fn experimental_communication() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
unsafe {
m68k_init();
m68k_set_cpu_type(CpuType::M68000);
m68k_set_reg(Register::D0, 123);
println!("D0: {}", m68k_get_reg(ptr::null_mut(), Register::D0));
}
}
#[allow(unused_variables)]
pub fn roundtrip_register(reg: Register, value: u32) -> u32 {
let _mutex = MUSASHI_LOCK.lock().unwrap();
unsafe {
m68k_init();
m68k_set_cpu_type(CpuType::M68000);
m68k_set_reg(reg, value);
m68k_get_reg(ptr::null_mut(), reg)
}
}
use cpu::{Core, Cycles};
static REGS:[Register; 16] = [Register::D0, Register::D1, Register::D2, Register::D3, Register::D4, Register::D5, Register::D6, Register::D7, Register::A0, Register::A1, Register::A2, Register::A3, Register::A4, Register::A5, Register::A6, Register::A7];
fn get_ops() -> Vec<Operation> {
let mut res: Vec<Operation> = vec![];
unsafe {
for i in 0..musashi_opcount {
res.push(musashi_ops[i]);
}
}
res
}
pub fn initialize_musashi(core: &mut Core, memory_initializer: u32) {
// println!("initialize_musashi {:?}", thread::current());
unsafe {
initialize_musashi_memory(memory_initializer);
m68k_init();
m68k_set_cpu_type(CpuType::M68000);
m68k_write_memory_32(0, core.ssp());
m68k_write_memory_32(4, core.pc);
m68k_pulse_reset();
// Resetting opcount, because m68k_pulse_reset causes irrelevant
// reads from 0x00000000 to set PC/SP, a jump to PC and
// resetting of state. But we don't want to test those ops.
musashi_opcount = 0;
//m68k_set_reg(Register::PC, core.pc);
m68k_set_reg(Register::USP, core.usp());
// if SR clears S_FLAG then SSP <- A7, A7 <- USP
m68k_set_reg(Register::SR, core.status_register() as u32);
for (i, ®) in REGS.iter().enumerate() {
if i != 15 {
m68k_set_reg(reg, core.dar[i]);
}
}
// just copy diffs, as it takes too long to reset all 16MB
for (addr, byte) in core.mem.diffs() {
write_musashi_byte(addr, byte);
}
}
}
pub fn initialize_musashi_memory(initializer: u32) {
unsafe {
musashi_memory_initializer = initializer;
musashi_opcount = 0;
musashi_locations_used = 0;
m68k_set_fc(SUPERVISOR_PROGRAM.fc());
}
}
pub fn musashi_written_bytes() -> u16 {
unsafe {
musashi_locations_used as u16
}
}
const EXEC_CYCLES: i32 = 1; // configurable for testing purposes
pub fn execute1(core: &mut Core) -> Cycles {
// println!("execute1 mushashi {:?}", thread::current());
unsafe {
let cycle_count = m68k_execute(EXEC_CYCLES);
for (i, ®) in REGS.iter().enumerate() {
core.dar[i] = m68k_get_reg(ptr::null_mut(), reg);
}
core.pc = m68k_get_reg(ptr::null_mut(), Register::PC);
core.sr_to_flags(m68k_get_reg(ptr::null_mut(), Register::SR) as u16);
if core.s_flag > 0 {
core.inactive_usp = m68k_get_reg(ptr::null_mut(), Register::USP);
core.dar[15] = m68k_get_reg(ptr::null_mut(), Register::ISP);
} else {
core.dar[15] = m68k_get_reg(ptr::null_mut(), Register::USP);
core.inactive_ssp = m68k_get_reg(ptr::null_mut(), Register::ISP);
}
Cycles(cycle_count)
}
}
#[allow(unused_variables)]
pub fn reset_and_execute1(core: &mut Core, memory_initializer: u32) -> Cycles {
initialize_musashi(core, memory_initializer);
execute1(core)
}
// Talking to Musashi isn't thread-safe, and the tests run in multiple
// threads, which causes intermittent test failures unless access is
// serialized with something like a mutex. Musashi functions are called in
// global/static context, and statics are not allowed to have
// destructors.
use std::sync::{Arc, Mutex};
// using lazy_static! to work-around "statics are not allowed to have destructors [E0493]""
lazy_static! {
static ref MUSASHI_LOCK: Arc<Mutex<i32>> = Arc::new(Mutex::new(0));
static ref QUICKCHECK_LOCK: Arc<Mutex<i32>> = Arc::new(Mutex::new(0));
}
#[cfg(test)]
mod tests {
use super::*;
use ram::SUPERVISOR_PROGRAM;
use super::MUSASHI_LOCK;
use super::QUICKCHECK_LOCK;
use ram::{Operation, AddressBus};
use cpu::{Core, EXCEPTION_ZERO_DIVIDE, EXCEPTION_CHK};
use std::cmp;
extern crate quickcheck;
use self::quickcheck::*;
#[derive(Copy, Clone, Debug, PartialEq)]
struct Bitpattern(u32);
impl Arbitrary for Bitpattern {
fn arbitrary<G: Gen>(g: &mut G) -> Bitpattern {
// when size 256, could generate any 32 bit pattern
let nonuniform: u32 = g.gen_range(0, 256);
// increase likelihood of returning all zeros to 1:32
if nonuniform < 8 {return Bitpattern(0)}
// increase likelihood of returning all ones to 1:32
if nonuniform < 16 {return Bitpattern(0xffffffff)}
let i1: u32 = Arbitrary::arbitrary(g);
let i2: u32 = Arbitrary::arbitrary(g);
let i3: u32 = Arbitrary::arbitrary(g);
let i4: u32 = Arbitrary::arbitrary(g);
let sum: u32 = (i1 << 24) | (i2 << 16) | (i3 << 8) | i4;
Bitpattern(sum)
}
fn shrink(&self) -> Box<Iterator<Item=Self>> {
match *self {
Bitpattern(x) => {
let xs = x.shrink(); // should shrink Bitpattern by clearing bits, not setting new ones
let tagged = xs //.inspect(|x|println!("{}", x))
.map(Bitpattern);
Box::new(tagged)
}
}
}
}
impl Arbitrary for Register {
fn arbitrary<G: Gen>(g: &mut G) -> Register {
let regs = [Register::D0, Register::D1, Register::D2, Register::D3, Register::D4, Register::D5, Register::D6, Register::D7, Register::A0, Register::A1, Register::A2, Register::A3, Register::A4, Register::A5, Register::A6,
Register::SR, // Register::A7, Register::SP, Register::PC
];
//println!("{}",i);
if let Some(®) = g.choose(®s) {
reg
} else {
unreachable!();
}
}
}
extern crate rand;
use itertools::{Itertools, assert_equal};
use cpu::ops::handlers::*;
use super::get_ops;
// struct OpSeq {
// mask: u32,
// matching: u32,
// current_op: u32,
// }
// impl OpSeq {
// fn new(mask: u32, matching: u32) -> OpSeq {
// OpSeq { mask: mask, matching: matching, current_op: 0 }
// }
// }
// impl Iterator for OpSeq {
// type Item = u32;
// fn next(&mut self) -> Option<u32> {
// if self.current_op == 0x10000 {
// None
// } else {
// while (self.current_op & self.mask) != self.matching && self.current_op < 0x10000 {
// self.current_op += 1;
// }
// if self.current_op == 0x10000 {
// return None;
// }
// let res = Some(self.current_op);
// self.current_op += 1;
// res
// }
// }
// }
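// Enumerate all 16-bit opcodes whose bits match `matching` under `mask`.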
fn opcodes(mask: u32, matching: u32) -> Vec<u16> {
(matching..0x10000u32)
.filter(|opcode| (opcode & mask) == matching)
.map(|v|v as u16).collect::<Vec<u16>>()
}
macro_rules! opcodes {
($mask:expr , $matching:expr) => {($matching..0x10000).filter(|opcode| (opcode & $mask) == $matching)}
}
#[test]
fn opcodes_from_mask_and_matching(){
let mut opseq = Vec::new();
opseq.extend(opcodes!(MASK_OUT_X_Y, OP_ABCD_8_RR));
assert_eq!(64, opseq.len());
let ops = opseq.iter().unique();
assert_eq!(64, ops.count());
if let Some(&min) = opseq.iter().min() {
assert_eq!(0b1100000100000000, min);
}
if let Some(&max) = opseq.iter().max() {
assert_eq!(0b1100111100000111, max);
}
for code in opseq.iter() {
assert_eq!(OP_ABCD_8_RR, code & OP_ABCD_8_RR);
}
}
static mut opcode_under_test: u16 = 0;
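// QuickCheck drivers: seed both cores with the same random registers/memory, execute the opcode under test on Musashi and r68k, then compare the resulting state and cycle counts.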
fn hammer_cores_even_addresses(memory_pattern: Bitpattern, rs: Vec<(Register, Bitpattern)>) -> TestResult {
let mem_mask = (2<<24)-2; // keep even
hammer_cores_with(mem_mask, memory_pattern, rs, false)
}
fn hammer_cores(memory_pattern: Bitpattern, rs: Vec<(Register, Bitpattern)>) -> TestResult {
let mem_mask = (2<<24)-1; // allow odd
hammer_cores_with(mem_mask, memory_pattern, rs, false)
}
fn hammer_cores_allow_exception(memory_pattern: Bitpattern, rs: Vec<(Register, Bitpattern)>) -> TestResult {
let mem_mask = (2<<24)-2; // keep even
hammer_cores_with(mem_mask, memory_pattern, rs, true)
}
fn hammer_cores_with(mem_mask: u32, memory_pattern: Bitpattern, rs: Vec<(Register, Bitpattern)>, allow_exception: bool) -> TestResult {
let pc = 0x140;
let mem = unsafe {
[((opcode_under_test >> 8) & 0xff) as u8, (opcode_under_test & 0xff) as u8]
};
let Bitpattern(memory_initializer) = memory_pattern;
let mut musashi = Core::new_mem_init(pc, &mem, memory_initializer & mem_mask);
const STACK_MASK:u32 = (1024-16); // keep even
musashi.inactive_ssp = 0x128;
musashi.inactive_usp = 0x256;
for r in 0..8 {
musashi.dar[r] = 0;
musashi.dar[8+r] = 0x128;
}
// set up RESET vector in memory
let (ssp, pc) = (musashi.ssp(), musashi.pc);
musashi.write_program_long(0, ssp).unwrap();
musashi.write_program_long(4, pc).unwrap();
let generic_handler = 0xf00000;
for v in 2..48 {
musashi.write_data_long(v * 4, generic_handler);
}
// ensure the handler is a series of NOPs that will exhaust any
// remaining supply of cycles. In case of Address Error, Musashi
// in some cases got extra cycles via a negative deduction issue
// and continued execution for several more cycles (now fixed)
for i in 0..4 {
musashi.write_program_word(generic_handler + 2 * i, OP_NOP);
}
for r in rs {
match r {
(Register::D0, Bitpattern(bp)) => musashi.dar[0] = bp,
(Register::D1, Bitpattern(bp)) => musashi.dar[1] = bp,
(Register::D2, Bitpattern(bp)) => musashi.dar[2] = bp,
(Register::D3, Bitpattern(bp)) => musashi.dar[3] = bp,
(Register::D4, Bitpattern(bp)) => musashi.dar[4] = bp,
(Register::D5, Bitpattern(bp)) => musashi.dar[5] = bp,
(Register::D6, Bitpattern(bp)) => musashi.dar[6] = bp,
(Register::D7, Bitpattern(bp)) => musashi.dar[7] = bp,
// must ensure Addresses are within musashi memory space!
(Register::A0, Bitpattern(bp)) => musashi.dar[0+8] = bp & mem_mask,
(Register::A1, Bitpattern(bp)) => musashi.dar[1+8] = bp & mem_mask,
(Register::A2, Bitpattern(bp)) => musashi.dar[2+8] = bp & mem_mask,
(Register::A3, Bitpattern(bp)) => musashi.dar[3+8] = bp & mem_mask,
(Register::A4, Bitpattern(bp)) => musashi.dar[4+8] = bp & mem_mask,
(Register::A5, Bitpattern(bp)) => musashi.dar[5+8] = bp & mem_mask,
(Register::A6, Bitpattern(bp)) => musashi.dar[6+8] = bp & mem_mask,
(Register::A7, Bitpattern(bp)) => musashi.dar[7+8] = bp & STACK_MASK + 8,
(Register::USP, Bitpattern(bp)) => musashi.inactive_usp = bp & STACK_MASK + 8,
(Register::SR, Bitpattern(bp)) => musashi.sr_to_flags(bp as u16),
_ => {
panic!("No idea how to set {:?}", r.0)
},
}
}
let mut r68k = musashi.clone(); // so very self-aware!
let _mutex = MUSASHI_LOCK.lock().unwrap();
let musashi_cycles = reset_and_execute1(&mut musashi, memory_initializer & mem_mask);
let r68k_cycles = r68k.execute(super::EXEC_CYCLES);
// panics if differences are found. Returns false if an
// exception occurred, and then we cannot compare state further
// unless PC is the same (as then the cores have progressed to
// the same spot) and we allow exceptions (or we would discard
// all results for those instructions that always result in
// exceptions such as illegal/unimplemented or traps)
let can_compare_cycles = if let Some(vector) = memory_accesses_equal_unless_exception(&r68k) {
if musashi.pc != r68k.pc || !allow_exception {
return TestResult::discard();
} else {
// cannot compare cycles due to differences with
// Musashis handling of CHK and DIV exceptions
vector != EXCEPTION_ZERO_DIVIDE && vector != EXCEPTION_CHK
}
} else {true};
if cores_equal(&musashi, &r68k) {
if can_compare_cycles && musashi_cycles != r68k_cycles {
println!("Musashi {:?} but r68k {:?}", musashi_cycles, r68k_cycles);
}
TestResult::from_bool(!can_compare_cycles || musashi_cycles == r68k_cycles)
} else {
TestResult::failed()
}
}
macro_rules! qc8 {
($opmask:ident, $opcode:ident, $fn_name:ident) => (qc!($opmask, $opcode, $fn_name, hammer_cores););
}
macro_rules! qc_allow_exception {
($opmask:ident, $opcode:ident, $fn_name:ident) => (qc!($opmask, $opcode, $fn_name, hammer_cores_allow_exception););
}
macro_rules! qc {
($opmask:ident, $opcode:ident, $fn_name:ident) => (qc!($opmask, $opcode, $fn_name, hammer_cores_even_addresses););
($opmask:ident, $opcode:ident, $fn_name:ident, $hammer:ident) => (
#[test]
#[ignore]
fn $fn_name() {
// Musashi isn't thread safe, and the construct with opcode_under_test
// isn't either. :(
let _mutex = QUICKCHECK_LOCK.lock().unwrap();
// check for mask/opcode inconsistency
assert!($opmask & $opcode == $opcode);
let qc_rounds = cmp::max(1, 384 >> ($opmask as u16).count_zeros());
for opcode in opcodes($opmask, $opcode)
{
println!("Will hammer {:016b} {} times", opcode, qc_rounds);
unsafe {
// this is because I don't know how to make
// hammer_cores take the opcode as a parameter and
// we cannot simply use a closure either; see
// https://github.com/BurntSushi/quickcheck/issues/56
opcode_under_test = opcode;
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 256))
.tests(qc_rounds)
.quickcheck($hammer as fn(_, _) -> _);
}
})
}
const MASK_LO3NIB_QUICKER: u32 = MASK_LO3NIB + 0x0555;
qc_allow_exception!(MASK_LO3NIB_QUICKER, OP_UNIMPLEMENTED_1010, qc_unimplemented_1010);
qc_allow_exception!(MASK_LO3NIB_QUICKER, OP_UNIMPLEMENTED_1111, qc_unimplemented_1111);
qc8!(MASK_OUT_X_Y, OP_ABCD_8_RR, qc_abcd_rr);
qc8!(MASK_OUT_X_Y, OP_ABCD_8_MM, qc_abcd_mm);
qc8!(MASK_OUT_X_Y, OP_ADD_8_ER_DN, qc_add_8_er_dn);
qc8!(MASK_OUT_X_Y, OP_ADD_8_ER_PI, qc_add_8_er_pi);
qc8!(MASK_OUT_X_Y, OP_ADD_8_ER_PD, qc_add_8_er_pd);
qc8!(MASK_OUT_X_Y, OP_ADD_8_ER_AI, qc_add_8_er_ai);
qc8!(MASK_OUT_X_Y, OP_ADD_8_ER_DI, qc_add_8_er_di);
qc8!(MASK_OUT_X_Y, OP_ADD_8_ER_IX, qc_add_8_er_ix);
qc8!(MASK_OUT_X, OP_ADD_8_ER_AW, qc_add_8_er_aw);
qc8!(MASK_OUT_X, OP_ADD_8_ER_AL, qc_add_8_er_al);
qc8!(MASK_OUT_X, OP_ADD_8_ER_PCDI, qc_add_8_er_pcdi);
qc8!(MASK_OUT_X, OP_ADD_8_ER_PCIX, qc_add_8_er_pcix);
qc8!(MASK_OUT_X, OP_ADD_8_ER_IMM, qc_add_8_er_imm);
qc8!(MASK_OUT_X_Y, OP_ADD_8_RE_PI, qc_add_8_re_pi);
qc8!(MASK_OUT_X_Y, OP_ADD_8_RE_PD, qc_add_8_re_pd);
qc8!(MASK_OUT_X_Y, OP_ADD_8_RE_AI, qc_add_8_re_ai);
qc8!(MASK_OUT_X_Y, OP_ADD_8_RE_DI, qc_add_8_re_di);
qc8!(MASK_OUT_X_Y, OP_ADD_8_RE_IX, qc_add_8_re_ix);
qc8!(MASK_OUT_X, OP_ADD_8_RE_AW, qc_add_8_re_aw);
qc8!(MASK_OUT_X, OP_ADD_8_RE_AL, qc_add_8_re_al);
qc!(MASK_OUT_X_Y, OP_ADD_16_ER_DN, qc_add_16_er_dn);
qc!(MASK_OUT_X_Y, OP_ADD_16_ER_AN, qc_add_16_er_an);
qc!(MASK_OUT_X_Y, OP_ADD_16_ER_PI, qc_add_16_er_pi);
qc!(MASK_OUT_X_Y, OP_ADD_16_ER_PD, qc_add_16_er_pd);
qc!(MASK_OUT_X_Y, OP_ADD_16_ER_AI, qc_add_16_er_ai);
qc!(MASK_OUT_X_Y, OP_ADD_16_ER_DI, qc_add_16_er_di);
qc!(MASK_OUT_X_Y, OP_ADD_16_ER_IX, qc_add_16_er_ix);
qc!(MASK_OUT_X, OP_ADD_16_ER_AW, qc_add_16_er_aw);
qc!(MASK_OUT_X, OP_ADD_16_ER_AL, qc_add_16_er_al);
qc!(MASK_OUT_X, OP_ADD_16_ER_PCDI, qc_add_16_er_pcdi);
qc!(MASK_OUT_X, OP_ADD_16_ER_PCIX, qc_add_16_er_pcix);
qc!(MASK_OUT_X, OP_ADD_16_ER_IMM, qc_add_16_er_imm);
qc!(MASK_OUT_X_Y, OP_ADD_16_RE_PI, qc_add_16_re_pi);
qc!(MASK_OUT_X_Y, OP_ADD_16_RE_PD, qc_add_16_re_pd);
qc!(MASK_OUT_X_Y, OP_ADD_16_RE_AI, qc_add_16_re_ai);
qc!(MASK_OUT_X_Y, OP_ADD_16_RE_DI, qc_add_16_re_di);
qc!(MASK_OUT_X_Y, OP_ADD_16_RE_IX, qc_add_16_re_ix);
qc!(MASK_OUT_X, OP_ADD_16_RE_AW, qc_add_16_re_aw);
qc!(MASK_OUT_X, OP_ADD_16_RE_AL, qc_add_16_re_al);
qc!(MASK_OUT_X_Y, OP_ADD_32_ER_DN, qc_add_32_er_dn);
qc!(MASK_OUT_X_Y, OP_ADD_32_ER_AN, qc_add_32_er_an);
qc!(MASK_OUT_X_Y, OP_ADD_32_ER_PI, qc_add_32_er_pi);
qc!(MASK_OUT_X_Y, OP_ADD_32_ER_PD, qc_add_32_er_pd);
qc!(MASK_OUT_X_Y, OP_ADD_32_ER_AI, qc_add_32_er_ai);
qc!(MASK_OUT_X_Y, OP_ADD_32_ER_DI, qc_add_32_er_di);
qc!(MASK_OUT_X_Y, OP_ADD_32_ER_IX, qc_add_32_er_ix);
qc!(MASK_OUT_X, OP_ADD_32_ER_AW, qc_add_32_er_aw);
qc!(MASK_OUT_X, OP_ADD_32_ER_AL, qc_add_32_er_al);
qc!(MASK_OUT_X, OP_ADD_32_ER_PCDI, qc_add_32_er_pcdi);
qc!(MASK_OUT_X, OP_ADD_32_ER_PCIX, qc_add_32_er_pcix);
qc!(MASK_OUT_X, OP_ADD_32_ER_IMM, qc_add_32_er_imm);
qc!(MASK_OUT_X_Y, OP_ADD_32_RE_PI, qc_add_32_re_pi);
qc!(MASK_OUT_X_Y, OP_ADD_32_RE_PD, qc_add_32_re_pd);
qc!(MASK_OUT_X_Y, OP_ADD_32_RE_AI, qc_add_32_re_ai);
qc!(MASK_OUT_X_Y, OP_ADD_32_RE_DI, qc_add_32_re_di);
qc!(MASK_OUT_X_Y, OP_ADD_32_RE_IX, qc_add_32_re_ix);
qc!(MASK_OUT_X, OP_ADD_32_RE_AW, qc_add_32_re_aw);
qc!(MASK_OUT_X, OP_ADD_32_RE_AL, qc_add_32_re_al);
qc!(MASK_OUT_X_Y, OP_ADDA_16_DN, qc_adda_16_dn);
qc!(MASK_OUT_X_Y, OP_ADDA_16_AN, qc_adda_16_an);
qc!(MASK_OUT_X_Y, OP_ADDA_16_PI, qc_adda_16_pi);
qc!(MASK_OUT_X_Y, OP_ADDA_16_PD, qc_adda_16_pd);
qc!(MASK_OUT_X_Y, OP_ADDA_16_AI, qc_adda_16_ai);
qc!(MASK_OUT_X_Y, OP_ADDA_16_DI, qc_adda_16_di);
qc!(MASK_OUT_X_Y, OP_ADDA_16_IX, qc_adda_16_ix);
qc!(MASK_OUT_X, OP_ADDA_16_AW, qc_adda_16_aw);
qc!(MASK_OUT_X, OP_ADDA_16_AL, qc_adda_16_al);
qc!(MASK_OUT_X, OP_ADDA_16_PCDI, qc_adda_16_pcdi);
qc!(MASK_OUT_X, OP_ADDA_16_PCIX, qc_adda_16_pcix);
qc!(MASK_OUT_X, OP_ADDA_16_IMM, qc_adda_16_imm);
qc!(MASK_OUT_X_Y, OP_ADDA_32_DN, qc_adda_32_dn);
qc!(MASK_OUT_X_Y, OP_ADDA_32_AN, qc_adda_32_an);
qc!(MASK_OUT_X_Y, OP_ADDA_32_PI, qc_adda_32_pi);
qc!(MASK_OUT_X_Y, OP_ADDA_32_PD, qc_adda_32_pd);
qc!(MASK_OUT_X_Y, OP_ADDA_32_AI, qc_adda_32_ai);
qc!(MASK_OUT_X_Y, OP_ADDA_32_DI, qc_adda_32_di);
qc!(MASK_OUT_X_Y, OP_ADDA_32_IX, qc_adda_32_ix);
qc!(MASK_OUT_X, OP_ADDA_32_AW, qc_adda_32_aw);
qc!(MASK_OUT_X, OP_ADDA_32_AL, qc_adda_32_al);
qc!(MASK_OUT_X, OP_ADDA_32_PCDI, qc_adda_32_pcdi);
qc!(MASK_OUT_X, OP_ADDA_32_PCIX, qc_adda_32_pcix);
qc!(MASK_OUT_X, OP_ADDA_32_IMM, qc_adda_32_imm);
qc8!(MASK_OUT_Y, OP_ADDI_8_DN, qc_addi_8_dn);
qc8!(MASK_OUT_Y, OP_ADDI_8_PI, qc_addi_8_pi);
qc8!(MASK_OUT_Y, OP_ADDI_8_PD, qc_addi_8_pd);
qc8!(MASK_OUT_Y, OP_ADDI_8_AI, qc_addi_8_ai);
qc8!(MASK_OUT_Y, OP_ADDI_8_DI, qc_addi_8_di);
qc8!(MASK_OUT_Y, OP_ADDI_8_IX, qc_addi_8_ix);
qc8!(MASK_EXACT, OP_ADDI_8_AW, qc_addi_8_aw);
qc8!(MASK_EXACT, OP_ADDI_8_AL, qc_addi_8_al);
qc!(MASK_OUT_Y, OP_ADDI_16_DN, qc_addi_16_dn);
qc!(MASK_OUT_Y, OP_ADDI_16_PI, qc_addi_16_pi);
qc!(MASK_OUT_Y, OP_ADDI_16_PD, qc_addi_16_pd);
qc!(MASK_OUT_Y, OP_ADDI_16_AI, qc_addi_16_ai);
qc!(MASK_OUT_Y, OP_ADDI_16_DI, qc_addi_16_di);
qc!(MASK_OUT_Y, OP_ADDI_16_IX, qc_addi_16_ix);
qc!(MASK_EXACT, OP_ADDI_16_AW, qc_addi_16_aw);
qc!(MASK_EXACT, OP_ADDI_16_AL, qc_addi_16_al);
qc!(MASK_OUT_Y, OP_ADDI_32_DN, qc_addi_32_dn);
qc!(MASK_OUT_Y, OP_ADDI_32_PI, qc_addi_32_pi);
qc!(MASK_OUT_Y, OP_ADDI_32_PD, qc_addi_32_pd);
qc!(MASK_OUT_Y, OP_ADDI_32_AI, qc_addi_32_ai);
qc!(MASK_OUT_Y, OP_ADDI_32_DI, qc_addi_32_di);
qc!(MASK_OUT_Y, OP_ADDI_32_IX, qc_addi_32_ix);
qc!(MASK_EXACT, OP_ADDI_32_AW, qc_addi_32_aw);
qc!(MASK_EXACT, OP_ADDI_32_AL, qc_addi_32_al);
qc8!(MASK_OUT_X_Y, OP_ADDQ_8_DN, qc_addq_8_dn);
qc8!(MASK_OUT_X_Y, OP_ADDQ_8_PI, qc_addq_8_pi);
qc8!(MASK_OUT_X_Y, OP_ADDQ_8_PD, qc_addq_8_pd);
qc8!(MASK_OUT_X_Y, OP_ADDQ_8_AI, qc_addq_8_ai);
qc8!(MASK_OUT_X_Y, OP_ADDQ_8_DI, qc_addq_8_di);
qc8!(MASK_OUT_X_Y, OP_ADDQ_8_IX, qc_addq_8_ix);
qc8!(MASK_OUT_X, OP_ADDQ_8_AW, qc_addq_8_aw);
qc8!(MASK_OUT_X, OP_ADDQ_8_AL, qc_addq_8_al);
qc!(MASK_OUT_X_Y, OP_ADDQ_16_DN, qc_addq_16_dn);
qc!(MASK_OUT_X_Y, OP_ADDQ_16_AN, qc_addq_16_an);
qc!(MASK_OUT_X_Y, OP_ADDQ_16_PI, qc_addq_16_pi);
qc!(MASK_OUT_X_Y, OP_ADDQ_16_PD, qc_addq_16_pd);
qc!(MASK_OUT_X_Y, OP_ADDQ_16_AI, qc_addq_16_ai);
qc!(MASK_OUT_X_Y, OP_ADDQ_16_DI, qc_addq_16_di);
qc!(MASK_OUT_X_Y, OP_ADDQ_16_IX, qc_addq_16_ix);
qc!(MASK_OUT_X, OP_ADDQ_16_AW, qc_addq_16_aw);
qc!(MASK_OUT_X, OP_ADDQ_16_AL, qc_addq_16_al);
qc!(MASK_OUT_X_Y, OP_ADDQ_32_DN, qc_addq_32_dn);
qc!(MASK_OUT_X_Y, OP_ADDQ_32_AN, qc_addq_32_an);
qc!(MASK_OUT_X_Y, OP_ADDQ_32_PI, qc_addq_32_pi);
qc!(MASK_OUT_X_Y, OP_ADDQ_32_PD, qc_addq_32_pd);
qc!(MASK_OUT_X_Y, OP_ADDQ_32_AI, qc_addq_32_ai);
qc!(MASK_OUT_X_Y, OP_ADDQ_32_DI, qc_addq_32_di);
qc!(MASK_OUT_X_Y, OP_ADDQ_32_IX, qc_addq_32_ix);
qc!(MASK_OUT_X, OP_ADDQ_32_AW, qc_addq_32_aw);
qc!(MASK_OUT_X, OP_ADDQ_32_AL, qc_addq_32_al);
qc8!(MASK_OUT_X_Y, OP_ADDX_8_RR, qc_addx_8_rr);
qc8!(MASK_OUT_X_Y, OP_ADDX_8_MM, qc_addx_8_mm);
qc!(MASK_OUT_X_Y, OP_ADDX_16_RR, qc_addx_16_rr);
qc!(MASK_OUT_X_Y, OP_ADDX_16_MM, qc_addx_16_mm);
qc!(MASK_OUT_X_Y, OP_ADDX_32_RR, qc_addx_32_rr);
qc!(MASK_OUT_X_Y, OP_ADDX_32_MM, qc_addx_32_mm);
qc8!(MASK_OUT_X_Y, OP_AND_8_ER_DN, qc_and_8_er_dn);
qc8!(MASK_OUT_X_Y, OP_AND_8_ER_PI, qc_and_8_er_pi);
qc8!(MASK_OUT_X_Y, OP_AND_8_ER_PD, qc_and_8_er_pd);
qc8!(MASK_OUT_X_Y, OP_AND_8_ER_AI, qc_and_8_er_ai);
qc8!(MASK_OUT_X_Y, OP_AND_8_ER_DI, qc_and_8_er_di);
qc8!(MASK_OUT_X_Y, OP_AND_8_ER_IX, qc_and_8_er_ix);
qc8!(MASK_OUT_X, OP_AND_8_ER_AW, qc_and_8_er_aw);
qc8!(MASK_OUT_X, OP_AND_8_ER_AL, qc_and_8_er_al);
qc8!(MASK_OUT_X, OP_AND_8_ER_PCDI, qc_and_8_er_pcdi);
qc8!(MASK_OUT_X, OP_AND_8_ER_PCIX, qc_and_8_er_pcix);
qc8!(MASK_OUT_X, OP_AND_8_ER_IMM, qc_and_8_er_imm);
qc8!(MASK_OUT_X_Y, OP_AND_8_RE_PI, qc_and_8_re_pi);
qc8!(MASK_OUT_X_Y, OP_AND_8_RE_PD, qc_and_8_re_pd);
qc8!(MASK_OUT_X_Y, OP_AND_8_RE_AI, qc_and_8_re_ai);
qc8!(MASK_OUT_X_Y, OP_AND_8_RE_DI, qc_and_8_re_di);
qc8!(MASK_OUT_X_Y, OP_AND_8_RE_IX, qc_and_8_re_ix);
qc8!(MASK_OUT_X, OP_AND_8_RE_AW, qc_and_8_re_aw);
qc8!(MASK_OUT_X, OP_AND_8_RE_AL, qc_and_8_re_al);
qc!(MASK_OUT_X_Y, OP_AND_16_ER_DN, qc_and_16_er_dn);
qc!(MASK_OUT_X_Y, OP_AND_16_ER_PI, qc_and_16_er_pi);
qc!(MASK_OUT_X_Y, OP_AND_16_ER_PD, qc_and_16_er_pd);
qc!(MASK_OUT_X_Y, OP_AND_16_ER_AI, qc_and_16_er_ai);
qc!(MASK_OUT_X_Y, OP_AND_16_ER_DI, qc_and_16_er_di);
qc!(MASK_OUT_X_Y, OP_AND_16_ER_IX, qc_and_16_er_ix);
qc!(MASK_OUT_X, OP_AND_16_ER_AW, qc_and_16_er_aw);
qc!(MASK_OUT_X, OP_AND_16_ER_AL, qc_and_16_er_al);
qc!(MASK_OUT_X, OP_AND_16_ER_PCDI, qc_and_16_er_pcdi);
qc!(MASK_OUT_X, OP_AND_16_ER_PCIX, qc_and_16_er_pcix);
qc!(MASK_OUT_X, OP_AND_16_ER_IMM, qc_and_16_er_imm);
qc!(MASK_OUT_X_Y, OP_AND_16_RE_PI, qc_and_16_re_pi);
qc!(MASK_OUT_X_Y, OP_AND_16_RE_PD, qc_and_16_re_pd);
qc!(MASK_OUT_X_Y, OP_AND_16_RE_AI, qc_and_16_re_ai);
qc!(MASK_OUT_X_Y, OP_AND_16_RE_DI, qc_and_16_re_di);
qc!(MASK_OUT_X_Y, OP_AND_16_RE_IX, qc_and_16_re_ix);
qc!(MASK_OUT_X, OP_AND_16_RE_AW, qc_and_16_re_aw);
qc!(MASK_OUT_X, OP_AND_16_RE_AL, qc_and_16_re_al);
qc!(MASK_OUT_X_Y, OP_AND_32_ER_DN, qc_and_32_er_dn);
qc!(MASK_OUT_X_Y, OP_AND_32_ER_PI, qc_and_32_er_pi);
qc!(MASK_OUT_X_Y, OP_AND_32_ER_PD, qc_and_32_er_pd);
qc!(MASK_OUT_X_Y, OP_AND_32_ER_AI, qc_and_32_er_ai);
qc!(MASK_OUT_X_Y, OP_AND_32_ER_DI, qc_and_32_er_di);
qc!(MASK_OUT_X_Y, OP_AND_32_ER_IX, qc_and_32_er_ix);
qc!(MASK_OUT_X, OP_AND_32_ER_AW, qc_and_32_er_aw);
qc!(MASK_OUT_X, OP_AND_32_ER_AL, qc_and_32_er_al);
qc!(MASK_OUT_X, OP_AND_32_ER_PCDI, qc_and_32_er_pcdi);
qc!(MASK_OUT_X, OP_AND_32_ER_PCIX, qc_and_32_er_pcix);
qc!(MASK_OUT_X, OP_AND_32_ER_IMM, qc_and_32_er_imm);
qc!(MASK_OUT_X_Y, OP_AND_32_RE_PI, qc_and_32_re_pi);
qc!(MASK_OUT_X_Y, OP_AND_32_RE_PD, qc_and_32_re_pd);
qc!(MASK_OUT_X_Y, OP_AND_32_RE_AI, qc_and_32_re_ai);
qc!(MASK_OUT_X_Y, OP_AND_32_RE_DI, qc_and_32_re_di);
qc!(MASK_OUT_X_Y, OP_AND_32_RE_IX, qc_and_32_re_ix);
qc!(MASK_OUT_X, OP_AND_32_RE_AW, qc_and_32_re_aw);
qc!(MASK_OUT_X, OP_AND_32_RE_AL, qc_and_32_re_al);
qc8!(MASK_OUT_Y, OP_ANDI_8_DN, qc_andi_8_dn);
qc8!(MASK_OUT_Y, OP_ANDI_8_PI, qc_andi_8_pi);
qc8!(MASK_OUT_Y, OP_ANDI_8_PD, qc_andi_8_pd);
qc8!(MASK_OUT_Y, OP_ANDI_8_AI, qc_andi_8_ai);
qc8!(MASK_OUT_Y, OP_ANDI_8_DI, qc_andi_8_di);
qc8!(MASK_OUT_Y, OP_ANDI_8_IX, qc_andi_8_ix);
qc8!(MASK_EXACT, OP_ANDI_8_AW, qc_andi_8_aw);
qc8!(MASK_EXACT, OP_ANDI_8_AL, qc_andi_8_al);
qc!(MASK_OUT_Y, OP_ANDI_16_DN, qc_andi_16_dn);
qc!(MASK_OUT_Y, OP_ANDI_16_PI, qc_andi_16_pi);
qc!(MASK_OUT_Y, OP_ANDI_16_PD, qc_andi_16_pd);
qc!(MASK_OUT_Y, OP_ANDI_16_AI, qc_andi_16_ai);
qc!(MASK_OUT_Y, OP_ANDI_16_DI, qc_andi_16_di);
qc!(MASK_OUT_Y, OP_ANDI_16_IX, qc_andi_16_ix);
qc!(MASK_EXACT, OP_ANDI_16_AW, qc_andi_16_aw);
qc!(MASK_EXACT, OP_ANDI_16_AL, qc_andi_16_al);
qc!(MASK_OUT_Y, OP_ANDI_32_DN, qc_andi_32_dn);
qc!(MASK_OUT_Y, OP_ANDI_32_PI, qc_andi_32_pi);
qc!(MASK_OUT_Y, OP_ANDI_32_PD, qc_andi_32_pd);
qc!(MASK_OUT_Y, OP_ANDI_32_AI, qc_andi_32_ai);
qc!(MASK_OUT_Y, OP_ANDI_32_DI, qc_andi_32_di);
qc!(MASK_OUT_Y, OP_ANDI_32_IX, qc_andi_32_ix);
qc!(MASK_EXACT, OP_ANDI_32_AW, qc_andi_32_aw);
qc!(MASK_EXACT, OP_ANDI_32_AL, qc_andi_32_al);
qc!(MASK_EXACT, OP_ANDI_16_TOC, qc_andi_16_toc);
qc!(MASK_EXACT, OP_ANDI_16_TOS, qc_andi_16_tos);
qc8!(MASK_OUT_X_Y, OP_ASR_8_S, qc_asr_8_s);
qc!(MASK_OUT_X_Y, OP_ASR_16_S, qc_asr_16_s);
qc!(MASK_OUT_X_Y, OP_ASR_32_S, qc_asr_32_s);
qc8!(MASK_OUT_X_Y, OP_ASR_8_R, qc_asr_8_r);
qc!(MASK_OUT_X_Y, OP_ASR_16_R, qc_asr_16_r);
qc!(MASK_OUT_X_Y, OP_ASR_32_R, qc_asr_32_r);
qc8!(MASK_OUT_X_Y, OP_ASL_8_S, qc_asl_8_s);
qc!(MASK_OUT_X_Y, OP_ASL_16_S, qc_asl_16_s);
qc!(MASK_OUT_X_Y, OP_ASL_32_S, qc_asl_32_s);
qc8!(MASK_OUT_X_Y, OP_ASL_8_R, qc_asl_8_r);
qc!(MASK_OUT_X_Y, OP_ASL_16_R, qc_asl_16_r);
qc!(MASK_OUT_X_Y, OP_ASL_32_R, qc_asl_32_r);
qc!(MASK_OUT_Y, OP_ASL_16_AI, qc_asl_16_ai);
qc!(MASK_OUT_Y, OP_ASL_16_PI, qc_asl_16_pi);
qc!(MASK_OUT_Y, OP_ASL_16_PD, qc_asl_16_pd);
qc!(MASK_OUT_Y, OP_ASL_16_DI, qc_asl_16_di);
qc!(MASK_OUT_Y, OP_ASL_16_IX, qc_asl_16_ix);
qc!(MASK_EXACT, OP_ASL_16_AW, qc_asl_16_aw);
qc!(MASK_EXACT, OP_ASL_16_AL, qc_asl_16_al);
qc!(MASK_OUT_Y, OP_ASR_16_AI, qc_asr_16_ai);
qc!(MASK_OUT_Y, OP_ASR_16_PI, qc_asr_16_pi);
qc!(MASK_OUT_Y, OP_ASR_16_PD, qc_asr_16_pd);
qc!(MASK_OUT_Y, OP_ASR_16_DI, qc_asr_16_di);
qc!(MASK_OUT_Y, OP_ASR_16_IX, qc_asr_16_ix);
qc!(MASK_EXACT, OP_ASR_16_AW, qc_asr_16_aw);
qc!(MASK_EXACT, OP_ASR_16_AL, qc_asr_16_al);
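// Note: the "+ 0xe0" below widens the mask over the branch displacement byte, so fewer
// opcode variants are generated per Bcc test; presumably this just keeps the quickcheck
// run shorter (hence "QUICKER") — inferred from the constant's name, not documented.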
const MASK_LOBYTE_QUICKER: u32 = MASK_LOBYTE + 0xe0;
qc8!(MASK_LOBYTE_QUICKER, OP_BHI_8, qc_bhi_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BLS_8, qc_bls_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BCC_8, qc_bcc_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BCS_8, qc_bcs_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BNE_8, qc_bne_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BEQ_8, qc_beq_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BVC_8, qc_bvc_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BVS_8, qc_bvs_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BPL_8, qc_bpl_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BMI_8, qc_bmi_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BGE_8, qc_bge_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BLT_8, qc_blt_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BGT_8, qc_bgt_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BLE_8, qc_ble_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BRA_8, qc_bra_8);
qc8!(MASK_LOBYTE_QUICKER, OP_BSR_8, qc_bsr_8);
qc!(MASK_EXACT, OP_BHI_16, qc_bhi_16);
qc!(MASK_EXACT, OP_BLS_16, qc_bls_16);
qc!(MASK_EXACT, OP_BCC_16, qc_bcc_16);
qc!(MASK_EXACT, OP_BCS_16, qc_bcs_16);
qc!(MASK_EXACT, OP_BNE_16, qc_bne_16);
qc!(MASK_EXACT, OP_BEQ_16, qc_beq_16);
qc!(MASK_EXACT, OP_BVC_16, qc_bvc_16);
qc!(MASK_EXACT, OP_BVS_16, qc_bvs_16);
qc!(MASK_EXACT, OP_BPL_16, qc_bpl_16);
qc!(MASK_EXACT, OP_BMI_16, qc_bmi_16);
qc!(MASK_EXACT, OP_BGE_16, qc_bge_16);
qc!(MASK_EXACT, OP_BLT_16, qc_blt_16);
qc!(MASK_EXACT, OP_BGT_16, qc_bgt_16);
qc!(MASK_EXACT, OP_BLE_16, qc_ble_16);
qc!(MASK_EXACT, OP_BRA_16, qc_bra_16);
qc!(MASK_EXACT, OP_BSR_16, qc_bsr_16);
qc!(MASK_OUT_X_Y, OP_BCHG_32_R_DN, qc_bchg_32_r_dn);
qc!(MASK_OUT_Y, OP_BCHG_32_S_DN, qc_bchg_32_s_dn);
qc8!(MASK_OUT_X_Y, OP_BCHG_8_R_AI, qc_bchg_8_r_ai);
qc8!(MASK_OUT_X_Y, OP_BCHG_8_R_PI, qc_bchg_8_r_pi);
qc8!(MASK_OUT_X_Y, OP_BCHG_8_R_PD, qc_bchg_8_r_pd);
qc8!(MASK_OUT_X_Y, OP_BCHG_8_R_DI, qc_bchg_8_r_di);
qc8!(MASK_OUT_X_Y, OP_BCHG_8_R_IX, qc_bchg_8_r_ix);
qc8!(MASK_OUT_X, OP_BCHG_8_R_AW, qc_bchg_8_r_aw);
qc8!(MASK_OUT_X, OP_BCHG_8_R_AL, qc_bchg_8_r_al);
qc8!(MASK_OUT_Y, OP_BCHG_8_S_AI, qc_bchg_8_s_ai);
qc8!(MASK_OUT_Y, OP_BCHG_8_S_PI, qc_bchg_8_s_pi);
qc8!(MASK_OUT_Y, OP_BCHG_8_S_PD, qc_bchg_8_s_pd);
qc8!(MASK_OUT_Y, OP_BCHG_8_S_DI, qc_bchg_8_s_di);
qc8!(MASK_OUT_Y, OP_BCHG_8_S_IX, qc_bchg_8_s_ix);
qc8!(MASK_EXACT, OP_BCHG_8_S_AW, qc_bchg_8_s_aw);
qc8!(MASK_EXACT, OP_BCHG_8_S_AL, qc_bchg_8_s_al);
qc!(MASK_OUT_X_Y, OP_BCLR_32_R_DN, qc_bclr_32_r_dn);
qc!(MASK_OUT_Y, OP_BCLR_32_S_DN, qc_bclr_32_s_dn);
qc8!(MASK_OUT_X_Y, OP_BCLR_8_R_AI, qc_bclr_8_r_ai);
qc8!(MASK_OUT_X_Y, OP_BCLR_8_R_PI, qc_bclr_8_r_pi);
qc8!(MASK_OUT_X_Y, OP_BCLR_8_R_PD, qc_bclr_8_r_pd);
qc8!(MASK_OUT_X_Y, OP_BCLR_8_R_DI, qc_bclr_8_r_di);
qc8!(MASK_OUT_X_Y, OP_BCLR_8_R_IX, qc_bclr_8_r_ix);
qc8!(MASK_OUT_X, OP_BCLR_8_R_AW, qc_bclr_8_r_aw);
qc8!(MASK_OUT_X, OP_BCLR_8_R_AL, qc_bclr_8_r_al);
qc8!(MASK_OUT_Y, OP_BCLR_8_S_AI, qc_bclr_8_s_ai);
qc8!(MASK_OUT_Y, OP_BCLR_8_S_PI, qc_bclr_8_s_pi);
qc8!(MASK_OUT_Y, OP_BCLR_8_S_PD, qc_bclr_8_s_pd);
qc8!(MASK_OUT_Y, OP_BCLR_8_S_DI, qc_bclr_8_s_di);
qc8!(MASK_OUT_Y, OP_BCLR_8_S_IX, qc_bclr_8_s_ix);
qc8!(MASK_EXACT, OP_BCLR_8_S_AW, qc_bclr_8_s_aw);
qc8!(MASK_EXACT, OP_BCLR_8_S_AL, qc_bclr_8_s_al);
qc!(MASK_OUT_X_Y, OP_BSET_32_R_DN, qc_bset_32_r_dn);
qc!(MASK_OUT_Y, OP_BSET_32_S_DN, qc_bset_32_s_dn);
qc8!(MASK_OUT_X_Y, OP_BSET_8_R_AI, qc_bset_8_r_ai);
qc8!(MASK_OUT_X_Y, OP_BSET_8_R_PI, qc_bset_8_r_pi);
qc8!(MASK_OUT_X_Y, OP_BSET_8_R_PD, qc_bset_8_r_pd);
qc8!(MASK_OUT_X_Y, OP_BSET_8_R_DI, qc_bset_8_r_di);
qc8!(MASK_OUT_X_Y, OP_BSET_8_R_IX, qc_bset_8_r_ix);
qc8!(MASK_OUT_X, OP_BSET_8_R_AW, qc_bset_8_r_aw);
qc8!(MASK_OUT_X, OP_BSET_8_R_AL, qc_bset_8_r_al);
qc8!(MASK_OUT_Y, OP_BSET_8_S_AI, qc_bset_8_s_ai);
qc8!(MASK_OUT_Y, OP_BSET_8_S_PI, qc_bset_8_s_pi);
qc8!(MASK_OUT_Y, OP_BSET_8_S_PD, qc_bset_8_s_pd);
qc8!(MASK_OUT_Y, OP_BSET_8_S_DI, qc_bset_8_s_di);
qc8!(MASK_OUT_Y, OP_BSET_8_S_IX, qc_bset_8_s_ix);
qc8!(MASK_EXACT, OP_BSET_8_S_AW, qc_bset_8_s_aw);
qc8!(MASK_EXACT, OP_BSET_8_S_AL, qc_bset_8_s_al);
qc!(MASK_OUT_X_Y, OP_BTST_32_R_DN, qc_btst_32_r_dn);
qc!(MASK_OUT_Y, OP_BTST_32_S_DN, qc_btst_32_s_dn);
qc8!(MASK_OUT_X_Y, OP_BTST_8_R_AI, qc_btst_8_r_ai);
qc8!(MASK_OUT_X_Y, OP_BTST_8_R_PI, qc_btst_8_r_pi);
qc8!(MASK_OUT_X_Y, OP_BTST_8_R_PD, qc_btst_8_r_pd);
qc8!(MASK_OUT_X_Y, OP_BTST_8_R_DI, qc_btst_8_r_di);
qc8!(MASK_OUT_X_Y, OP_BTST_8_R_IX, qc_btst_8_r_ix);
qc8!(MASK_OUT_X, OP_BTST_8_R_AW, qc_btst_8_r_aw);
qc8!(MASK_OUT_X, OP_BTST_8_R_AL, qc_btst_8_r_al);
qc8!(MASK_OUT_X, OP_BTST_8_R_PCDI, qc_btst_8_r_pcdi);
qc8!(MASK_OUT_X, OP_BTST_8_R_PCIX, qc_btst_8_r_pcix);
qc8!(MASK_OUT_X, OP_BTST_8_R_IMM, qc_btst_8_r_imm);
qc8!(MASK_OUT_Y, OP_BTST_8_S_AI, qc_btst_8_s_ai);
qc8!(MASK_OUT_Y, OP_BTST_8_S_PI, qc_btst_8_s_pi);
qc8!(MASK_OUT_Y, OP_BTST_8_S_PD, qc_btst_8_s_pd);
qc8!(MASK_OUT_Y, OP_BTST_8_S_DI, qc_btst_8_s_di);
qc8!(MASK_OUT_Y, OP_BTST_8_S_IX, qc_btst_8_s_ix);
qc8!(MASK_EXACT, OP_BTST_8_S_AW, qc_btst_8_s_aw);
qc8!(MASK_EXACT, OP_BTST_8_S_AL, qc_btst_8_s_al);
qc8!(MASK_EXACT, OP_BTST_8_S_PCDI, qc_btst_8_s_pcdi);
qc8!(MASK_EXACT, OP_BTST_8_S_PCIX, qc_btst_8_s_pcix);
qc!(MASK_OUT_X_Y, OP_CHK_16_AI, qc_chk_16_ai);
qc!(MASK_OUT_X, OP_CHK_16_AL, qc_chk_16_al);
qc!(MASK_OUT_X, OP_CHK_16_AW, qc_chk_16_aw);
qc!(MASK_OUT_X_Y, OP_CHK_16_DN, qc_chk_16_dn);
qc!(MASK_OUT_X_Y, OP_CHK_16_DI, qc_chk_16_di);
qc!(MASK_OUT_X, OP_CHK_16_IMM, qc_chk_16_imm);
qc!(MASK_OUT_X_Y, OP_CHK_16_IX, qc_chk_16_ix);
qc!(MASK_OUT_X, OP_CHK_16_PCDI, qc_chk_16_pcdi);
qc!(MASK_OUT_X, OP_CHK_16_PCIX, qc_chk_16_pcix);
qc!(MASK_OUT_X_Y, OP_CHK_16_PD, qc_chk_16_pd);
qc!(MASK_OUT_X_Y, OP_CHK_16_PI, qc_chk_16_pi);
qc8!(MASK_OUT_Y, OP_CLR_8_DN, qc_clr_8_dn);
qc8!(MASK_OUT_Y, OP_CLR_8_AI, qc_clr_8_ai);
qc8!(MASK_OUT_Y, OP_CLR_8_PI, qc_clr_8_pi);
qc8!(MASK_OUT_Y, OP_CLR_8_PD, qc_clr_8_pd);
qc8!(MASK_OUT_Y, OP_CLR_8_DI, qc_clr_8_di);
qc8!(MASK_OUT_Y, OP_CLR_8_IX, qc_clr_8_ix);
qc8!(MASK_EXACT, OP_CLR_8_AW, qc_clr_8_aw);
qc8!(MASK_EXACT, OP_CLR_8_AL, qc_clr_8_al);
qc!(MASK_OUT_Y, OP_CLR_16_DN, qc_clr_16_dn);
qc!(MASK_OUT_Y, OP_CLR_16_AI, qc_clr_16_ai);
qc!(MASK_OUT_Y, OP_CLR_16_PI, qc_clr_16_pi);
qc!(MASK_OUT_Y, OP_CLR_16_PD, qc_clr_16_pd);
qc!(MASK_OUT_Y, OP_CLR_16_DI, qc_clr_16_di);
qc!(MASK_OUT_Y, OP_CLR_16_IX, qc_clr_16_ix);
qc!(MASK_EXACT, OP_CLR_16_AW, qc_clr_16_aw);
qc!(MASK_EXACT, OP_CLR_16_AL, qc_clr_16_al);
qc!(MASK_OUT_Y, OP_CLR_32_DN, qc_clr_32_dn);
qc!(MASK_OUT_Y, OP_CLR_32_AI, qc_clr_32_ai);
qc!(MASK_OUT_Y, OP_CLR_32_PI, qc_clr_32_pi);
qc!(MASK_OUT_Y, OP_CLR_32_PD, qc_clr_32_pd);
qc!(MASK_OUT_Y, OP_CLR_32_DI, qc_clr_32_di);
qc!(MASK_OUT_Y, OP_CLR_32_IX, qc_clr_32_ix);
qc!(MASK_EXACT, OP_CLR_32_AW, qc_clr_32_aw);
qc!(MASK_EXACT, OP_CLR_32_AL, qc_clr_32_al);
qc8!(MASK_OUT_X_Y, OP_CMP_8_DN, qc_cmp_8_dn);
qc8!(MASK_OUT_X_Y, OP_CMP_8_AI, qc_cmp_8_ai);
qc8!(MASK_OUT_X_Y, OP_CMP_8_PI, qc_cmp_8_pi);
qc8!(MASK_OUT_X_Y, OP_CMP_8_PD, qc_cmp_8_pd);
qc8!(MASK_OUT_X_Y, OP_CMP_8_DI, qc_cmp_8_di);
qc8!(MASK_OUT_X_Y, OP_CMP_8_IX, qc_cmp_8_ix);
qc8!(MASK_OUT_X, OP_CMP_8_AW, qc_cmp_8_aw);
qc8!(MASK_OUT_X, OP_CMP_8_AL, qc_cmp_8_al);
qc8!(MASK_OUT_X, OP_CMP_8_PCDI, qc_cmp_8_pcdi);
qc8!(MASK_OUT_X, OP_CMP_8_PCIX, qc_cmp_8_pcix);
qc8!(MASK_OUT_X, OP_CMP_8_IMM, qc_cmp_8_imm);
qc!(MASK_OUT_X_Y, OP_CMP_16_DN, qc_cmp_16_dn);
qc!(MASK_OUT_X_Y, OP_CMP_16_AN, qc_cmp_16_an);
qc!(MASK_OUT_X_Y, OP_CMP_16_AI, qc_cmp_16_ai);
qc!(MASK_OUT_X_Y, OP_CMP_16_PI, qc_cmp_16_pi);
qc!(MASK_OUT_X_Y, OP_CMP_16_PD, qc_cmp_16_pd);
qc!(MASK_OUT_X_Y, OP_CMP_16_DI, qc_cmp_16_di);
qc!(MASK_OUT_X_Y, OP_CMP_16_IX, qc_cmp_16_ix);
qc!(MASK_OUT_X, OP_CMP_16_AW, qc_cmp_16_aw);
qc!(MASK_OUT_X, OP_CMP_16_AL, qc_cmp_16_al);
qc!(MASK_OUT_X, OP_CMP_16_PCDI, qc_cmp_16_pcdi);
qc!(MASK_OUT_X, OP_CMP_16_PCIX, qc_cmp_16_pcix);
qc!(MASK_OUT_X, OP_CMP_16_IMM, qc_cmp_16_imm);
qc!(MASK_OUT_X_Y, OP_CMP_32_DN, qc_cmp_32_dn);
qc!(MASK_OUT_X_Y, OP_CMP_32_AN, qc_cmp_32_an);
qc!(MASK_OUT_X_Y, OP_CMP_32_AI, qc_cmp_32_ai);
qc!(MASK_OUT_X_Y, OP_CMP_32_PI, qc_cmp_32_pi);
qc!(MASK_OUT_X_Y, OP_CMP_32_PD, qc_cmp_32_pd);
qc!(MASK_OUT_X_Y, OP_CMP_32_DI, qc_cmp_32_di);
qc!(MASK_OUT_X_Y, OP_CMP_32_IX, qc_cmp_32_ix);
qc!(MASK_OUT_X, OP_CMP_32_AW, qc_cmp_32_aw);
qc!(MASK_OUT_X, OP_CMP_32_AL, qc_cmp_32_al);
qc!(MASK_OUT_X, OP_CMP_32_PCDI, qc_cmp_32_pcdi);
qc!(MASK_OUT_X, OP_CMP_32_PCIX, qc_cmp_32_pcix);
qc!(MASK_OUT_X, OP_CMP_32_IMM, qc_cmp_32_imm);
qc!(MASK_OUT_X_Y, OP_CMPA_16_DN, qc_cmpa_16_dn);
qc!(MASK_OUT_X_Y, OP_CMPA_16_AN, qc_cmpa_16_an);
qc!(MASK_OUT_X_Y, OP_CMPA_16_PI, qc_cmpa_16_pi);
qc!(MASK_OUT_X_Y, OP_CMPA_16_PD, qc_cmpa_16_pd);
qc!(MASK_OUT_X_Y, OP_CMPA_16_AI, qc_cmpa_16_ai);
qc!(MASK_OUT_X_Y, OP_CMPA_16_DI, qc_cmpa_16_di);
qc!(MASK_OUT_X_Y, OP_CMPA_16_IX, qc_cmpa_16_ix);
qc!(MASK_OUT_X, OP_CMPA_16_AW, qc_cmpa_16_aw);
qc!(MASK_OUT_X, OP_CMPA_16_AL, qc_cmpa_16_al);
qc!(MASK_OUT_X, OP_CMPA_16_PCDI, qc_cmpa_16_pcdi);
qc!(MASK_OUT_X, OP_CMPA_16_PCIX, qc_cmpa_16_pcix);
qc!(MASK_OUT_X, OP_CMPA_16_IMM, qc_cmpa_16_imm);
qc!(MASK_OUT_X_Y, OP_CMPA_32_DN, qc_cmpa_32_dn);
qc!(MASK_OUT_X_Y, OP_CMPA_32_AN, qc_cmpa_32_an);
qc!(MASK_OUT_X_Y, OP_CMPA_32_PI, qc_cmpa_32_pi);
qc!(MASK_OUT_X_Y, OP_CMPA_32_PD, qc_cmpa_32_pd);
qc!(MASK_OUT_X_Y, OP_CMPA_32_AI, qc_cmpa_32_ai);
qc!(MASK_OUT_X_Y, OP_CMPA_32_DI, qc_cmpa_32_di);
qc!(MASK_OUT_X_Y, OP_CMPA_32_IX, qc_cmpa_32_ix);
qc!(MASK_OUT_X, OP_CMPA_32_AW, qc_cmpa_32_aw);
qc!(MASK_OUT_X, OP_CMPA_32_AL, qc_cmpa_32_al);
qc!(MASK_OUT_X, OP_CMPA_32_PCDI, qc_cmpa_32_pcdi);
qc!(MASK_OUT_X, OP_CMPA_32_PCIX, qc_cmpa_32_pcix);
qc!(MASK_OUT_X, OP_CMPA_32_IMM, qc_cmpa_32_imm);
qc8!(MASK_OUT_Y, OP_CMPI_8_DN, qc_cmpi_8_dn);
qc8!(MASK_OUT_Y, OP_CMPI_8_AI, qc_cmpi_8_ai);
qc8!(MASK_OUT_Y, OP_CMPI_8_PI, qc_cmpi_8_pi);
qc8!(MASK_OUT_Y, OP_CMPI_8_PD, qc_cmpi_8_pd);
qc8!(MASK_OUT_Y, OP_CMPI_8_DI, qc_cmpi_8_di);
qc8!(MASK_OUT_Y, OP_CMPI_8_IX, qc_cmpi_8_ix);
qc8!(MASK_EXACT, OP_CMPI_8_AW, qc_cmpi_8_aw);
qc8!(MASK_EXACT, OP_CMPI_8_AL, qc_cmpi_8_al);
qc!(MASK_OUT_Y, OP_CMPI_16_DN, qc_cmpi_16_dn);
qc!(MASK_OUT_Y, OP_CMPI_16_AI, qc_cmpi_16_ai);
qc!(MASK_OUT_Y, OP_CMPI_16_PI, qc_cmpi_16_pi);
qc!(MASK_OUT_Y, OP_CMPI_16_PD, qc_cmpi_16_pd);
qc!(MASK_OUT_Y, OP_CMPI_16_DI, qc_cmpi_16_di);
qc!(MASK_OUT_Y, OP_CMPI_16_IX, qc_cmpi_16_ix);
qc!(MASK_EXACT, OP_CMPI_16_AW, qc_cmpi_16_aw);
qc!(MASK_EXACT, OP_CMPI_16_AL, qc_cmpi_16_al);
qc!(MASK_OUT_Y, OP_CMPI_32_DN, qc_cmpi_32_dn);
qc!(MASK_OUT_Y, OP_CMPI_32_AI, qc_cmpi_32_ai);
qc!(MASK_OUT_Y, OP_CMPI_32_PI, qc_cmpi_32_pi);
qc!(MASK_OUT_Y, OP_CMPI_32_PD, qc_cmpi_32_pd);
qc!(MASK_OUT_Y, OP_CMPI_32_DI, qc_cmpi_32_di);
qc!(MASK_OUT_Y, OP_CMPI_32_IX, qc_cmpi_32_ix);
qc!(MASK_EXACT, OP_CMPI_32_AW, qc_cmpi_32_aw);
qc!(MASK_EXACT, OP_CMPI_32_AL, qc_cmpi_32_al);
qc8!(MASK_OUT_X_Y, OP_CMPM_8, qc_cmpm_8);
qc!(MASK_OUT_X_Y, OP_CMPM_16, qc_cmpm_16);
qc!(MASK_OUT_X_Y, OP_CMPM_32, qc_cmpm_32);
// Put qc for DBcc here
qc!(MASK_OUT_Y, OP_DBT_16, qc_dbt_16);
qc!(MASK_OUT_Y, OP_DBF_16, qc_dbf_16);
qc!(MASK_OUT_Y, OP_DBHI_16, qc_dbhi_16);
qc!(MASK_OUT_Y, OP_DBLS_16, qc_dbls_16);
qc!(MASK_OUT_Y, OP_DBCC_16, qc_dbcc_16);
qc!(MASK_OUT_Y, OP_DBCS_16, qc_dbcs_16);
qc!(MASK_OUT_Y, OP_DBNE_16, qc_dbne_16);
qc!(MASK_OUT_Y, OP_DBEQ_16, qc_dbeq_16);
qc!(MASK_OUT_Y, OP_DBVC_16, qc_dbvc_16);
qc!(MASK_OUT_Y, OP_DBVS_16, qc_dbvs_16);
qc!(MASK_OUT_Y, OP_DBPL_16, qc_dbpl_16);
qc!(MASK_OUT_Y, OP_DBMI_16, qc_dbmi_16);
qc!(MASK_OUT_Y, OP_DBGE_16, qc_dbge_16);
qc!(MASK_OUT_Y, OP_DBLT_16, qc_dblt_16);
qc!(MASK_OUT_Y, OP_DBGT_16, qc_dbgt_16);
qc!(MASK_OUT_Y, OP_DBLE_16, qc_dble_16);
// Put qc for DIVS here
qc!(MASK_OUT_X_Y, OP_DIVS_16_AI, qc_divs_16_ai);
qc!(MASK_OUT_X, OP_DIVS_16_AL, qc_divs_16_al);
qc!(MASK_OUT_X, OP_DIVS_16_AW, qc_divs_16_aw);
qc!(MASK_OUT_X_Y, OP_DIVS_16_DN, qc_divs_16_dn);
qc!(MASK_OUT_X_Y, OP_DIVS_16_DI, qc_divs_16_di);
qc!(MASK_OUT_X, OP_DIVS_16_IMM, qc_divs_16_imm);
qc!(MASK_OUT_X_Y, OP_DIVS_16_IX, qc_divs_16_ix);
qc!(MASK_OUT_X, OP_DIVS_16_PCDI, qc_divs_16_pcdi);
qc!(MASK_OUT_X, OP_DIVS_16_PCIX, qc_divs_16_pcix);
qc!(MASK_OUT_X_Y, OP_DIVS_16_PD, qc_divs_16_pd);
qc!(MASK_OUT_X_Y, OP_DIVS_16_PI, qc_divs_16_pi);
// Put qc for DIVU here
qc!(MASK_OUT_X_Y, OP_DIVU_16_AI, qc_divu_16_ai);
qc!(MASK_OUT_X, OP_DIVU_16_AL, qc_divu_16_al);
qc!(MASK_OUT_X, OP_DIVU_16_AW, qc_divu_16_aw);
qc!(MASK_OUT_X_Y, OP_DIVU_16_DN, qc_divu_16_dn);
qc!(MASK_OUT_X_Y, OP_DIVU_16_DI, qc_divu_16_di);
qc!(MASK_OUT_X, OP_DIVU_16_IMM, qc_divu_16_imm);
qc!(MASK_OUT_X_Y, OP_DIVU_16_IX, qc_divu_16_ix);
qc!(MASK_OUT_X, OP_DIVU_16_PCDI, qc_divu_16_pcdi);
qc!(MASK_OUT_X, OP_DIVU_16_PCIX, qc_divu_16_pcix);
qc!(MASK_OUT_X_Y, OP_DIVU_16_PD, qc_divu_16_pd);
qc!(MASK_OUT_X_Y, OP_DIVU_16_PI, qc_divu_16_pi);
// Put qc for EOR, EORI, EORI to CCR and EORI to SR here
qc8!(MASK_OUT_X_Y, OP_EOR_8_DN, qc_eor_8_dn);
qc8!(MASK_OUT_X_Y, OP_EOR_8_AI, qc_eor_8_ai);
qc8!(MASK_OUT_X_Y, OP_EOR_8_PI, qc_eor_8_pi);
qc8!(MASK_OUT_X_Y, OP_EOR_8_PD, qc_eor_8_pd);
qc8!(MASK_OUT_X_Y, OP_EOR_8_DI, qc_eor_8_di);
qc8!(MASK_OUT_X_Y, OP_EOR_8_IX, qc_eor_8_ix);
qc8!(MASK_OUT_X, OP_EOR_8_AW, qc_eor_8_aw);
qc8!(MASK_OUT_X, OP_EOR_8_AL, qc_eor_8_al);
qc!(MASK_OUT_X_Y, OP_EOR_16_DN, qc_eor_16_dn);
qc!(MASK_OUT_X_Y, OP_EOR_16_AI, qc_eor_16_ai);
qc!(MASK_OUT_X_Y, OP_EOR_16_PI, qc_eor_16_pi);
qc!(MASK_OUT_X_Y, OP_EOR_16_PD, qc_eor_16_pd);
qc!(MASK_OUT_X_Y, OP_EOR_16_DI, qc_eor_16_di);
qc!(MASK_OUT_X_Y, OP_EOR_16_IX, qc_eor_16_ix);
qc!(MASK_OUT_X, OP_EOR_16_AW, qc_eor_16_aw);
qc!(MASK_OUT_X, OP_EOR_16_AL, qc_eor_16_al);
qc!(MASK_OUT_X_Y, OP_EOR_32_DN, qc_eor_32_dn);
qc!(MASK_OUT_X_Y, OP_EOR_32_AI, qc_eor_32_ai);
qc!(MASK_OUT_X_Y, OP_EOR_32_PI, qc_eor_32_pi);
qc!(MASK_OUT_X_Y, OP_EOR_32_PD, qc_eor_32_pd);
qc!(MASK_OUT_X_Y, OP_EOR_32_DI, qc_eor_32_di);
qc!(MASK_OUT_X_Y, OP_EOR_32_IX, qc_eor_32_ix);
qc!(MASK_OUT_X, OP_EOR_32_AW, qc_eor_32_aw);
qc!(MASK_OUT_X, OP_EOR_32_AL, qc_eor_32_al);
qc8!(MASK_OUT_Y, OP_EORI_8_DN, qc_eori_8_dn);
qc8!(MASK_OUT_Y, OP_EORI_8_AI, qc_eori_8_ai);
qc8!(MASK_OUT_Y, OP_EORI_8_PI, qc_eori_8_pi);
qc8!(MASK_OUT_Y, OP_EORI_8_PD, qc_eori_8_pd);
qc8!(MASK_OUT_Y, OP_EORI_8_DI, qc_eori_8_di);
qc8!(MASK_OUT_Y, OP_EORI_8_IX, qc_eori_8_ix);
qc8!(MASK_EXACT, OP_EORI_8_AW, qc_eori_8_aw);
qc8!(MASK_EXACT, OP_EORI_8_AL, qc_eori_8_al);
qc!(MASK_OUT_Y, OP_EORI_16_DN, qc_eori_16_dn);
qc!(MASK_OUT_Y, OP_EORI_16_AI, qc_eori_16_ai);
qc!(MASK_OUT_Y, OP_EORI_16_PI, qc_eori_16_pi);
qc!(MASK_OUT_Y, OP_EORI_16_PD, qc_eori_16_pd);
qc!(MASK_OUT_Y, OP_EORI_16_DI, qc_eori_16_di);
qc!(MASK_OUT_Y, OP_EORI_16_IX, qc_eori_16_ix);
qc!(MASK_EXACT, OP_EORI_16_AW, qc_eori_16_aw);
qc!(MASK_EXACT, OP_EORI_16_AL, qc_eori_16_al);
qc!(MASK_OUT_Y, OP_EORI_32_DN, qc_eori_32_dn);
qc!(MASK_OUT_Y, OP_EORI_32_AI, qc_eori_32_ai);
qc!(MASK_OUT_Y, OP_EORI_32_PI, qc_eori_32_pi);
qc!(MASK_OUT_Y, OP_EORI_32_PD, qc_eori_32_pd);
qc!(MASK_OUT_Y, OP_EORI_32_DI, qc_eori_32_di);
qc!(MASK_OUT_Y, OP_EORI_32_IX, qc_eori_32_ix);
qc!(MASK_EXACT, OP_EORI_32_AW, qc_eori_32_aw);
qc!(MASK_EXACT, OP_EORI_32_AL, qc_eori_32_al);
qc!(MASK_EXACT, OP_EORI_16_TOC, qc_eori_16_toc);
qc!(MASK_EXACT, OP_EORI_16_TOS, qc_eori_16_tos);
// Put qc for EXG here
qc!(MASK_OUT_X_Y, OP_EXG_32_DD, qc_exg_32_dd);
qc!(MASK_OUT_X_Y, OP_EXG_32_AA, qc_exg_32_aa);
qc!(MASK_OUT_X_Y, OP_EXG_32_DA, qc_exg_32_da);
// Put qc for EXT here
qc!(MASK_OUT_Y, OP_EXT_BW, qc_ext_bw);
qc!(MASK_OUT_Y, OP_EXT_WL, qc_ext_wl);
// Put qc for ILLEGAL here
qc_allow_exception!(MASK_EXACT, OP_ILLEGAL, qc_illegal);
// Put qc for JMP here
qc!(MASK_OUT_Y, OP_JMP_32_AI, qc_jmp_32_ai);
qc!(MASK_EXACT, OP_JMP_32_AL, qc_jmp_32_al);
qc!(MASK_EXACT, OP_JMP_32_AW, qc_jmp_32_aw);
qc!(MASK_OUT_Y, OP_JMP_32_DI, qc_jmp_32_di);
qc!(MASK_OUT_Y, OP_JMP_32_IX, qc_jmp_32_ix);
qc!(MASK_EXACT, OP_JMP_32_PCDI, qc_jmp_32_pcdi);
qc!(MASK_EXACT, OP_JMP_32_PCIX, qc_jmp_32_pcix);
// Put qc for JSR here
qc!(MASK_OUT_Y, OP_JSR_32_AI, qc_jsr_32_ai);
qc!(MASK_EXACT, OP_JSR_32_AL, qc_jsr_32_al);
qc!(MASK_EXACT, OP_JSR_32_AW, qc_jsr_32_aw);
qc!(MASK_OUT_Y, OP_JSR_32_DI, qc_jsr_32_di);
qc!(MASK_OUT_Y, OP_JSR_32_IX, qc_jsr_32_ix);
qc!(MASK_EXACT, OP_JSR_32_PCDI, qc_jsr_32_pcdi);
qc!(MASK_EXACT, OP_JSR_32_PCIX, qc_jsr_32_pcix);
// Put qc for LEA here
qc!(MASK_OUT_X_Y, OP_LEA_32_AI, qc_lea_32_ai);
qc!(MASK_OUT_X, OP_LEA_32_AL, qc_lea_32_al);
qc!(MASK_OUT_X, OP_LEA_32_AW, qc_lea_32_aw);
qc!(MASK_OUT_X_Y, OP_LEA_32_DI, qc_lea_32_di);
qc!(MASK_OUT_X_Y, OP_LEA_32_IX, qc_lea_32_ix);
qc!(MASK_OUT_X, OP_LEA_32_PCDI, qc_lea_32_pcdi);
qc!(MASK_OUT_X, OP_LEA_32_PCIX, qc_lea_32_pcix);
// Put qc for LINK here
qc!(MASK_OUT_Y, OP_LINK_16, qc_link_16);
// Put qc for LSL, LSR here
qc8!(MASK_OUT_X_Y, OP_LSR_8_S, qc_lsr_8_s);
qc!(MASK_OUT_X_Y, OP_LSR_16_S, qc_lsr_16_s);
qc!(MASK_OUT_X_Y, OP_LSR_32_S, qc_lsr_32_s);
qc8!(MASK_OUT_X_Y, OP_LSR_8_R, qc_lsr_8_r);
qc!(MASK_OUT_X_Y, OP_LSR_16_R, qc_lsr_16_r);
qc!(MASK_OUT_X_Y, OP_LSR_32_R, qc_lsr_32_r);
qc8!(MASK_OUT_X_Y, OP_LSL_8_S, qc_lsl_8_s);
qc!(MASK_OUT_X_Y, OP_LSL_16_S, qc_lsl_16_s);
qc!(MASK_OUT_X_Y, OP_LSL_32_S, qc_lsl_32_s);
qc8!(MASK_OUT_X_Y, OP_LSL_8_R, qc_lsl_8_r);
qc!(MASK_OUT_X_Y, OP_LSL_16_R, qc_lsl_16_r);
qc!(MASK_OUT_X_Y, OP_LSL_32_R, qc_lsl_32_r);
qc!(MASK_OUT_Y, OP_LSL_16_AI, qc_lsl_16_ai);
qc!(MASK_OUT_Y, OP_LSL_16_PI, qc_lsl_16_pi);
qc!(MASK_OUT_Y, OP_LSL_16_PD, qc_lsl_16_pd);
qc!(MASK_OUT_Y, OP_LSL_16_DI, qc_lsl_16_di);
qc!(MASK_OUT_Y, OP_LSL_16_IX, qc_lsl_16_ix);
qc!(MASK_EXACT, OP_LSL_16_AW, qc_lsl_16_aw);
qc!(MASK_EXACT, OP_LSL_16_AL, qc_lsl_16_al);
qc!(MASK_OUT_Y, OP_LSR_16_AI, qc_lsr_16_ai);
qc!(MASK_OUT_Y, OP_LSR_16_PI, qc_lsr_16_pi);
qc!(MASK_OUT_Y, OP_LSR_16_PD, qc_lsr_16_pd);
qc!(MASK_OUT_Y, OP_LSR_16_DI, qc_lsr_16_di);
qc!(MASK_OUT_Y, OP_LSR_16_IX, qc_lsr_16_ix);
qc!(MASK_EXACT, OP_LSR_16_AW, qc_lsr_16_aw);
qc!(MASK_EXACT, OP_LSR_16_AL, qc_lsr_16_al);
// Put qc for MOVE here
qc8!(MASK_OUT_X_Y, OP_MOVE_8_DN_DN, qc_move_8_dn_dn);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_AI_DN, qc_move_8_ai_dn);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_PI_DN, qc_move_8_pi_dn);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_PD_DN, qc_move_8_pd_dn);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_DI_DN, qc_move_8_di_dn);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_IX_DN, qc_move_8_ix_dn);
qc8!(MASK_OUT_Y, OP_MOVE_8_AW_DN, qc_move_8_aw_dn);
qc8!(MASK_OUT_Y, OP_MOVE_8_AL_DN, qc_move_8_al_dn);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_DN_AI, qc_move_8_dn_ai);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_AI_AI, qc_move_8_ai_ai);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_PI_AI, qc_move_8_pi_ai);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_PD_AI, qc_move_8_pd_ai);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_DI_AI, qc_move_8_di_ai);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_IX_AI, qc_move_8_ix_ai);
qc8!(MASK_OUT_Y, OP_MOVE_8_AW_AI, qc_move_8_aw_ai);
qc8!(MASK_OUT_Y, OP_MOVE_8_AL_AI, qc_move_8_al_ai);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_DN_PI, qc_move_8_dn_pi);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_AI_PI, qc_move_8_ai_pi);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_PI_PI, qc_move_8_pi_pi);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_PD_PI, qc_move_8_pd_pi);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_DI_PI, qc_move_8_di_pi);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_IX_PI, qc_move_8_ix_pi);
qc8!(MASK_OUT_Y, OP_MOVE_8_AW_PI, qc_move_8_aw_pi);
qc8!(MASK_OUT_Y, OP_MOVE_8_AL_PI, qc_move_8_al_pi);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_DN_PD, qc_move_8_dn_pd);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_AI_PD, qc_move_8_ai_pd);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_PI_PD, qc_move_8_pi_pd);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_PD_PD, qc_move_8_pd_pd);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_DI_PD, qc_move_8_di_pd);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_IX_PD, qc_move_8_ix_pd);
qc8!(MASK_OUT_Y, OP_MOVE_8_AW_PD, qc_move_8_aw_pd);
qc8!(MASK_OUT_Y, OP_MOVE_8_AL_PD, qc_move_8_al_pd);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_DN_DI, qc_move_8_dn_di);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_AI_DI, qc_move_8_ai_di);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_PI_DI, qc_move_8_pi_di);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_PD_DI, qc_move_8_pd_di);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_DI_DI, qc_move_8_di_di);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_IX_DI, qc_move_8_ix_di);
qc8!(MASK_OUT_Y, OP_MOVE_8_AW_DI, qc_move_8_aw_di);
qc8!(MASK_OUT_Y, OP_MOVE_8_AL_DI, qc_move_8_al_di);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_DN_IX, qc_move_8_dn_ix);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_AI_IX, qc_move_8_ai_ix);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_PI_IX, qc_move_8_pi_ix);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_PD_IX, qc_move_8_pd_ix);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_DI_IX, qc_move_8_di_ix);
qc8!(MASK_OUT_X_Y, OP_MOVE_8_IX_IX, qc_move_8_ix_ix);
qc8!(MASK_OUT_Y, OP_MOVE_8_AW_IX, qc_move_8_aw_ix);
qc8!(MASK_OUT_Y, OP_MOVE_8_AL_IX, qc_move_8_al_ix);
qc8!(MASK_OUT_X, OP_MOVE_8_DN_AW, qc_move_8_dn_aw);
qc8!(MASK_OUT_X, OP_MOVE_8_AI_AW, qc_move_8_ai_aw);
qc8!(MASK_OUT_X, OP_MOVE_8_PI_AW, qc_move_8_pi_aw);
qc8!(MASK_OUT_X, OP_MOVE_8_PD_AW, qc_move_8_pd_aw);
qc8!(MASK_OUT_X, OP_MOVE_8_DI_AW, qc_move_8_di_aw);
qc8!(MASK_OUT_X, OP_MOVE_8_IX_AW, qc_move_8_ix_aw);
qc8!(MASK_EXACT, OP_MOVE_8_AW_AW, qc_move_8_aw_aw);
qc8!(MASK_EXACT, OP_MOVE_8_AL_AW, qc_move_8_al_aw);
qc8!(MASK_OUT_X, OP_MOVE_8_DN_AL, qc_move_8_dn_al);
qc8!(MASK_OUT_X, OP_MOVE_8_AI_AL, qc_move_8_ai_al);
qc8!(MASK_OUT_X, OP_MOVE_8_PI_AL, qc_move_8_pi_al);
qc8!(MASK_OUT_X, OP_MOVE_8_PD_AL, qc_move_8_pd_al);
qc8!(MASK_OUT_X, OP_MOVE_8_DI_AL, qc_move_8_di_al);
qc8!(MASK_OUT_X, OP_MOVE_8_IX_AL, qc_move_8_ix_al);
qc8!(MASK_EXACT, OP_MOVE_8_AW_AL, qc_move_8_aw_al);
qc8!(MASK_EXACT, OP_MOVE_8_AL_AL, qc_move_8_al_al);
qc8!(MASK_OUT_X, OP_MOVE_8_DN_PCDI, qc_move_8_dn_pcdi);
qc8!(MASK_OUT_X, OP_MOVE_8_AI_PCDI, qc_move_8_ai_pcdi);
qc8!(MASK_OUT_X, OP_MOVE_8_PI_PCDI, qc_move_8_pi_pcdi);
qc8!(MASK_OUT_X, OP_MOVE_8_PD_PCDI, qc_move_8_pd_pcdi);
qc8!(MASK_OUT_X, OP_MOVE_8_DI_PCDI, qc_move_8_di_pcdi);
qc8!(MASK_OUT_X, OP_MOVE_8_IX_PCDI, qc_move_8_ix_pcdi);
qc8!(MASK_EXACT, OP_MOVE_8_AW_PCDI, qc_move_8_aw_pcdi);
qc8!(MASK_EXACT, OP_MOVE_8_AL_PCDI, qc_move_8_al_pcdi);
qc8!(MASK_OUT_X, OP_MOVE_8_DN_PCIX, qc_move_8_dn_pcix);
qc8!(MASK_OUT_X, OP_MOVE_8_AI_PCIX, qc_move_8_ai_pcix);
qc8!(MASK_OUT_X, OP_MOVE_8_PI_PCIX, qc_move_8_pi_pcix);
qc8!(MASK_OUT_X, OP_MOVE_8_PD_PCIX, qc_move_8_pd_pcix);
qc8!(MASK_OUT_X, OP_MOVE_8_DI_PCIX, qc_move_8_di_pcix);
qc8!(MASK_OUT_X, OP_MOVE_8_IX_PCIX, qc_move_8_ix_pcix);
qc8!(MASK_EXACT, OP_MOVE_8_AW_PCIX, qc_move_8_aw_pcix);
qc8!(MASK_EXACT, OP_MOVE_8_AL_PCIX, qc_move_8_al_pcix);
qc8!(MASK_OUT_X, OP_MOVE_8_DN_IMM, qc_move_8_dn_imm);
qc8!(MASK_OUT_X, OP_MOVE_8_AI_IMM, qc_move_8_ai_imm);
qc8!(MASK_OUT_X, OP_MOVE_8_PI_IMM, qc_move_8_pi_imm);
qc8!(MASK_OUT_X, OP_MOVE_8_PD_IMM, qc_move_8_pd_imm);
qc8!(MASK_OUT_X, OP_MOVE_8_DI_IMM, qc_move_8_di_imm);
qc8!(MASK_OUT_X, OP_MOVE_8_IX_IMM, qc_move_8_ix_imm);
qc8!(MASK_EXACT, OP_MOVE_8_AW_IMM, qc_move_8_aw_imm);
qc8!(MASK_EXACT, OP_MOVE_8_AL_IMM, qc_move_8_al_imm);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DN_DN, qc_move_16_dn_dn);
qc!(MASK_OUT_X_Y, OP_MOVE_16_AI_DN, qc_move_16_ai_dn);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PI_DN, qc_move_16_pi_dn);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PD_DN, qc_move_16_pd_dn);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DI_DN, qc_move_16_di_dn);
qc!(MASK_OUT_X_Y, OP_MOVE_16_IX_DN, qc_move_16_ix_dn);
qc!(MASK_OUT_Y, OP_MOVE_16_AW_DN, qc_move_16_aw_dn);
qc!(MASK_OUT_Y, OP_MOVE_16_AL_DN, qc_move_16_al_dn);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DN_AN, qc_move_16_dn_an);
qc!(MASK_OUT_X_Y, OP_MOVE_16_AI_AN, qc_move_16_ai_an);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PI_AN, qc_move_16_pi_an);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PD_AN, qc_move_16_pd_an);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DI_AN, qc_move_16_di_an);
qc!(MASK_OUT_X_Y, OP_MOVE_16_IX_AN, qc_move_16_ix_an);
qc!(MASK_OUT_Y, OP_MOVE_16_AW_AN, qc_move_16_aw_an);
qc!(MASK_OUT_Y, OP_MOVE_16_AL_AN, qc_move_16_al_an);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DN_AI, qc_move_16_dn_ai);
qc!(MASK_OUT_X_Y, OP_MOVE_16_AI_AI, qc_move_16_ai_ai);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PI_AI, qc_move_16_pi_ai);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PD_AI, qc_move_16_pd_ai);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DI_AI, qc_move_16_di_ai);
qc!(MASK_OUT_X_Y, OP_MOVE_16_IX_AI, qc_move_16_ix_ai);
qc!(MASK_OUT_Y, OP_MOVE_16_AW_AI, qc_move_16_aw_ai);
qc!(MASK_OUT_Y, OP_MOVE_16_AL_AI, qc_move_16_al_ai);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DN_PI, qc_move_16_dn_pi);
qc!(MASK_OUT_X_Y, OP_MOVE_16_AI_PI, qc_move_16_ai_pi);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PI_PI, qc_move_16_pi_pi);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PD_PI, qc_move_16_pd_pi);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DI_PI, qc_move_16_di_pi);
qc!(MASK_OUT_X_Y, OP_MOVE_16_IX_PI, qc_move_16_ix_pi);
qc!(MASK_OUT_Y, OP_MOVE_16_AW_PI, qc_move_16_aw_pi);
qc!(MASK_OUT_Y, OP_MOVE_16_AL_PI, qc_move_16_al_pi);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DN_PD, qc_move_16_dn_pd);
qc!(MASK_OUT_X_Y, OP_MOVE_16_AI_PD, qc_move_16_ai_pd);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PI_PD, qc_move_16_pi_pd);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PD_PD, qc_move_16_pd_pd);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DI_PD, qc_move_16_di_pd);
qc!(MASK_OUT_X_Y, OP_MOVE_16_IX_PD, qc_move_16_ix_pd);
qc!(MASK_OUT_Y, OP_MOVE_16_AW_PD, qc_move_16_aw_pd);
qc!(MASK_OUT_Y, OP_MOVE_16_AL_PD, qc_move_16_al_pd);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DN_DI, qc_move_16_dn_di);
qc!(MASK_OUT_X_Y, OP_MOVE_16_AI_DI, qc_move_16_ai_di);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PI_DI, qc_move_16_pi_di);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PD_DI, qc_move_16_pd_di);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DI_DI, qc_move_16_di_di);
qc!(MASK_OUT_X_Y, OP_MOVE_16_IX_DI, qc_move_16_ix_di);
qc!(MASK_OUT_Y, OP_MOVE_16_AW_DI, qc_move_16_aw_di);
qc!(MASK_OUT_Y, OP_MOVE_16_AL_DI, qc_move_16_al_di);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DN_IX, qc_move_16_dn_ix);
qc!(MASK_OUT_X_Y, OP_MOVE_16_AI_IX, qc_move_16_ai_ix);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PI_IX, qc_move_16_pi_ix);
qc!(MASK_OUT_X_Y, OP_MOVE_16_PD_IX, qc_move_16_pd_ix);
qc!(MASK_OUT_X_Y, OP_MOVE_16_DI_IX, qc_move_16_di_ix);
qc!(MASK_OUT_X_Y, OP_MOVE_16_IX_IX, qc_move_16_ix_ix);
qc!(MASK_OUT_Y, OP_MOVE_16_AW_IX, qc_move_16_aw_ix);
qc!(MASK_OUT_Y, OP_MOVE_16_AL_IX, qc_move_16_al_ix);
qc!(MASK_OUT_X, OP_MOVE_16_DN_AW, qc_move_16_dn_aw);
qc!(MASK_OUT_X, OP_MOVE_16_AI_AW, qc_move_16_ai_aw);
qc!(MASK_OUT_X, OP_MOVE_16_PI_AW, qc_move_16_pi_aw);
qc!(MASK_OUT_X, OP_MOVE_16_PD_AW, qc_move_16_pd_aw);
qc!(MASK_OUT_X, OP_MOVE_16_DI_AW, qc_move_16_di_aw);
qc!(MASK_OUT_X, OP_MOVE_16_IX_AW, qc_move_16_ix_aw);
qc!(MASK_EXACT, OP_MOVE_16_AW_AW, qc_move_16_aw_aw);
qc!(MASK_EXACT, OP_MOVE_16_AL_AW, qc_move_16_al_aw);
qc!(MASK_OUT_X, OP_MOVE_16_DN_AL, qc_move_16_dn_al);
qc!(MASK_OUT_X, OP_MOVE_16_AI_AL, qc_move_16_ai_al);
qc!(MASK_OUT_X, OP_MOVE_16_PI_AL, qc_move_16_pi_al);
qc!(MASK_OUT_X, OP_MOVE_16_PD_AL, qc_move_16_pd_al);
qc!(MASK_OUT_X, OP_MOVE_16_DI_AL, qc_move_16_di_al);
qc!(MASK_OUT_X, OP_MOVE_16_IX_AL, qc_move_16_ix_al);
qc!(MASK_EXACT, OP_MOVE_16_AW_AL, qc_move_16_aw_al);
qc!(MASK_EXACT, OP_MOVE_16_AL_AL, qc_move_16_al_al);
qc!(MASK_OUT_X, OP_MOVE_16_DN_PCDI, qc_move_16_dn_pcdi);
qc!(MASK_OUT_X, OP_MOVE_16_AI_PCDI, qc_move_16_ai_pcdi);
qc!(MASK_OUT_X, OP_MOVE_16_PI_PCDI, qc_move_16_pi_pcdi);
qc!(MASK_OUT_X, OP_MOVE_16_PD_PCDI, qc_move_16_pd_pcdi);
qc!(MASK_OUT_X, OP_MOVE_16_DI_PCDI, qc_move_16_di_pcdi);
qc!(MASK_OUT_X, OP_MOVE_16_IX_PCDI, qc_move_16_ix_pcdi);
qc!(MASK_EXACT, OP_MOVE_16_AW_PCDI, qc_move_16_aw_pcdi);
qc!(MASK_EXACT, OP_MOVE_16_AL_PCDI, qc_move_16_al_pcdi);
qc!(MASK_OUT_X, OP_MOVE_16_DN_PCIX, qc_move_16_dn_pcix);
qc!(MASK_OUT_X, OP_MOVE_16_AI_PCIX, qc_move_16_ai_pcix);
qc!(MASK_OUT_X, OP_MOVE_16_PI_PCIX, qc_move_16_pi_pcix);
qc!(MASK_OUT_X, OP_MOVE_16_PD_PCIX, qc_move_16_pd_pcix);
qc!(MASK_OUT_X, OP_MOVE_16_DI_PCIX, qc_move_16_di_pcix);
qc!(MASK_OUT_X, OP_MOVE_16_IX_PCIX, qc_move_16_ix_pcix);
qc!(MASK_EXACT, OP_MOVE_16_AW_PCIX, qc_move_16_aw_pcix);
qc!(MASK_EXACT, OP_MOVE_16_AL_PCIX, qc_move_16_al_pcix);
qc!(MASK_OUT_X, OP_MOVE_16_DN_IMM, qc_move_16_dn_imm);
qc!(MASK_OUT_X, OP_MOVE_16_AI_IMM, qc_move_16_ai_imm);
qc!(MASK_OUT_X, OP_MOVE_16_PI_IMM, qc_move_16_pi_imm);
qc!(MASK_OUT_X, OP_MOVE_16_PD_IMM, qc_move_16_pd_imm);
qc!(MASK_OUT_X, OP_MOVE_16_DI_IMM, qc_move_16_di_imm);
qc!(MASK_OUT_X, OP_MOVE_16_IX_IMM, qc_move_16_ix_imm);
qc!(MASK_EXACT, OP_MOVE_16_AW_IMM, qc_move_16_aw_imm);
qc!(MASK_EXACT, OP_MOVE_16_AL_IMM, qc_move_16_al_imm);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DN_DN, qc_move_32_dn_dn);
qc!(MASK_OUT_X_Y, OP_MOVE_32_AI_DN, qc_move_32_ai_dn);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PI_DN, qc_move_32_pi_dn);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PD_DN, qc_move_32_pd_dn);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DI_DN, qc_move_32_di_dn);
qc!(MASK_OUT_X_Y, OP_MOVE_32_IX_DN, qc_move_32_ix_dn);
qc!(MASK_OUT_Y, OP_MOVE_32_AW_DN, qc_move_32_aw_dn);
qc!(MASK_OUT_Y, OP_MOVE_32_AL_DN, qc_move_32_al_dn);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DN_AN, qc_move_32_dn_an);
qc!(MASK_OUT_X_Y, OP_MOVE_32_AI_AN, qc_move_32_ai_an);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PI_AN, qc_move_32_pi_an);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PD_AN, qc_move_32_pd_an);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DI_AN, qc_move_32_di_an);
qc!(MASK_OUT_X_Y, OP_MOVE_32_IX_AN, qc_move_32_ix_an);
qc!(MASK_OUT_Y, OP_MOVE_32_AW_AN, qc_move_32_aw_an);
qc!(MASK_OUT_Y, OP_MOVE_32_AL_AN, qc_move_32_al_an);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DN_AI, qc_move_32_dn_ai);
qc!(MASK_OUT_X_Y, OP_MOVE_32_AI_AI, qc_move_32_ai_ai);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PI_AI, qc_move_32_pi_ai);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PD_AI, qc_move_32_pd_ai);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DI_AI, qc_move_32_di_ai);
qc!(MASK_OUT_X_Y, OP_MOVE_32_IX_AI, qc_move_32_ix_ai);
qc!(MASK_OUT_Y, OP_MOVE_32_AW_AI, qc_move_32_aw_ai);
qc!(MASK_OUT_Y, OP_MOVE_32_AL_AI, qc_move_32_al_ai);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DN_PI, qc_move_32_dn_pi);
qc!(MASK_OUT_X_Y, OP_MOVE_32_AI_PI, qc_move_32_ai_pi);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PI_PI, qc_move_32_pi_pi);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PD_PI, qc_move_32_pd_pi);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DI_PI, qc_move_32_di_pi);
qc!(MASK_OUT_X_Y, OP_MOVE_32_IX_PI, qc_move_32_ix_pi);
qc!(MASK_OUT_Y, OP_MOVE_32_AW_PI, qc_move_32_aw_pi);
qc!(MASK_OUT_Y, OP_MOVE_32_AL_PI, qc_move_32_al_pi);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DN_PD, qc_move_32_dn_pd);
qc!(MASK_OUT_X_Y, OP_MOVE_32_AI_PD, qc_move_32_ai_pd);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PI_PD, qc_move_32_pi_pd);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PD_PD, qc_move_32_pd_pd);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DI_PD, qc_move_32_di_pd);
qc!(MASK_OUT_X_Y, OP_MOVE_32_IX_PD, qc_move_32_ix_pd);
qc!(MASK_OUT_Y, OP_MOVE_32_AW_PD, qc_move_32_aw_pd);
qc!(MASK_OUT_Y, OP_MOVE_32_AL_PD, qc_move_32_al_pd);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DN_DI, qc_move_32_dn_di);
qc!(MASK_OUT_X_Y, OP_MOVE_32_AI_DI, qc_move_32_ai_di);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PI_DI, qc_move_32_pi_di);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PD_DI, qc_move_32_pd_di);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DI_DI, qc_move_32_di_di);
qc!(MASK_OUT_X_Y, OP_MOVE_32_IX_DI, qc_move_32_ix_di);
qc!(MASK_OUT_Y, OP_MOVE_32_AW_DI, qc_move_32_aw_di);
qc!(MASK_OUT_Y, OP_MOVE_32_AL_DI, qc_move_32_al_di);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DN_IX, qc_move_32_dn_ix);
qc!(MASK_OUT_X_Y, OP_MOVE_32_AI_IX, qc_move_32_ai_ix);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PI_IX, qc_move_32_pi_ix);
qc!(MASK_OUT_X_Y, OP_MOVE_32_PD_IX, qc_move_32_pd_ix);
qc!(MASK_OUT_X_Y, OP_MOVE_32_DI_IX, qc_move_32_di_ix);
qc!(MASK_OUT_X_Y, OP_MOVE_32_IX_IX, qc_move_32_ix_ix);
qc!(MASK_OUT_Y, OP_MOVE_32_AW_IX, qc_move_32_aw_ix);
qc!(MASK_OUT_Y, OP_MOVE_32_AL_IX, qc_move_32_al_ix);
qc!(MASK_OUT_X, OP_MOVE_32_DN_AW, qc_move_32_dn_aw);
qc!(MASK_OUT_X, OP_MOVE_32_AI_AW, qc_move_32_ai_aw);
qc!(MASK_OUT_X, OP_MOVE_32_PI_AW, qc_move_32_pi_aw);
qc!(MASK_OUT_X, OP_MOVE_32_PD_AW, qc_move_32_pd_aw);
qc!(MASK_OUT_X, OP_MOVE_32_DI_AW, qc_move_32_di_aw);
qc!(MASK_OUT_X, OP_MOVE_32_IX_AW, qc_move_32_ix_aw);
qc!(MASK_EXACT, OP_MOVE_32_AW_AW, qc_move_32_aw_aw);
qc!(MASK_EXACT, OP_MOVE_32_AL_AW, qc_move_32_al_aw);
qc!(MASK_OUT_X, OP_MOVE_32_DN_AL, qc_move_32_dn_al);
qc!(MASK_OUT_X, OP_MOVE_32_AI_AL, qc_move_32_ai_al);
qc!(MASK_OUT_X, OP_MOVE_32_PI_AL, qc_move_32_pi_al);
qc!(MASK_OUT_X, OP_MOVE_32_PD_AL, qc_move_32_pd_al);
qc!(MASK_OUT_X, OP_MOVE_32_DI_AL, qc_move_32_di_al);
qc!(MASK_OUT_X, OP_MOVE_32_IX_AL, qc_move_32_ix_al);
qc!(MASK_EXACT, OP_MOVE_32_AW_AL, qc_move_32_aw_al);
qc!(MASK_EXACT, OP_MOVE_32_AL_AL, qc_move_32_al_al);
qc!(MASK_OUT_X, OP_MOVE_32_DN_PCDI, qc_move_32_dn_pcdi);
qc!(MASK_OUT_X, OP_MOVE_32_AI_PCDI, qc_move_32_ai_pcdi);
qc!(MASK_OUT_X, OP_MOVE_32_PI_PCDI, qc_move_32_pi_pcdi);
qc!(MASK_OUT_X, OP_MOVE_32_PD_PCDI, qc_move_32_pd_pcdi);
qc!(MASK_OUT_X, OP_MOVE_32_DI_PCDI, qc_move_32_di_pcdi);
qc!(MASK_OUT_X, OP_MOVE_32_IX_PCDI, qc_move_32_ix_pcdi);
qc!(MASK_EXACT, OP_MOVE_32_AW_PCDI, qc_move_32_aw_pcdi);
qc!(MASK_EXACT, OP_MOVE_32_AL_PCDI, qc_move_32_al_pcdi);
qc!(MASK_OUT_X, OP_MOVE_32_DN_PCIX, qc_move_32_dn_pcix);
qc!(MASK_OUT_X, OP_MOVE_32_AI_PCIX, qc_move_32_ai_pcix);
qc!(MASK_OUT_X, OP_MOVE_32_PI_PCIX, qc_move_32_pi_pcix);
qc!(MASK_OUT_X, OP_MOVE_32_PD_PCIX, qc_move_32_pd_pcix);
qc!(MASK_OUT_X, OP_MOVE_32_DI_PCIX, qc_move_32_di_pcix);
qc!(MASK_OUT_X, OP_MOVE_32_IX_PCIX, qc_move_32_ix_pcix);
qc!(MASK_EXACT, OP_MOVE_32_AW_PCIX, qc_move_32_aw_pcix);
qc!(MASK_EXACT, OP_MOVE_32_AL_PCIX, qc_move_32_al_pcix);
qc!(MASK_OUT_X, OP_MOVE_32_DN_IMM, qc_move_32_dn_imm);
qc!(MASK_OUT_X, OP_MOVE_32_AI_IMM, qc_move_32_ai_imm);
qc!(MASK_OUT_X, OP_MOVE_32_PI_IMM, qc_move_32_pi_imm);
qc!(MASK_OUT_X, OP_MOVE_32_PD_IMM, qc_move_32_pd_imm);
qc!(MASK_OUT_X, OP_MOVE_32_DI_IMM, qc_move_32_di_imm);
qc!(MASK_OUT_X, OP_MOVE_32_IX_IMM, qc_move_32_ix_imm);
qc!(MASK_EXACT, OP_MOVE_32_AW_IMM, qc_move_32_aw_imm);
qc!(MASK_EXACT, OP_MOVE_32_AL_IMM, qc_move_32_al_imm);
// Put qc for MOVEA here
qc!(MASK_OUT_X_Y, OP_MOVEA_16_DN, qc_movea_16_dn);
qc!(MASK_OUT_X_Y, OP_MOVEA_16_AN, qc_movea_16_an);
qc!(MASK_OUT_X_Y, OP_MOVEA_16_AI, qc_movea_16_ai);
qc!(MASK_OUT_X_Y, OP_MOVEA_16_PI, qc_movea_16_pi);
qc!(MASK_OUT_X_Y, OP_MOVEA_16_PD, qc_movea_16_pd);
qc!(MASK_OUT_X_Y, OP_MOVEA_16_DI, qc_movea_16_di);
qc!(MASK_OUT_X_Y, OP_MOVEA_16_IX, qc_movea_16_ix);
qc!(MASK_OUT_X, OP_MOVEA_16_AW, qc_movea_16_aw);
qc!(MASK_OUT_X, OP_MOVEA_16_AL, qc_movea_16_al);
qc!(MASK_OUT_X, OP_MOVEA_16_PCDI, qc_movea_16_pcdi);
qc!(MASK_OUT_X, OP_MOVEA_16_PCIX, qc_movea_16_pcix);
qc!(MASK_OUT_X, OP_MOVEA_16_IMM, qc_movea_16_imm);
qc!(MASK_OUT_X_Y, OP_MOVEA_32_DN, qc_movea_32_dn);
qc!(MASK_OUT_X_Y, OP_MOVEA_32_AN, qc_movea_32_an);
qc!(MASK_OUT_X_Y, OP_MOVEA_32_AI, qc_movea_32_ai);
qc!(MASK_OUT_X_Y, OP_MOVEA_32_PI, qc_movea_32_pi);
qc!(MASK_OUT_X_Y, OP_MOVEA_32_PD, qc_movea_32_pd);
qc!(MASK_OUT_X_Y, OP_MOVEA_32_DI, qc_movea_32_di);
qc!(MASK_OUT_X_Y, OP_MOVEA_32_IX, qc_movea_32_ix);
qc!(MASK_OUT_X, OP_MOVEA_32_AW, qc_movea_32_aw);
qc!(MASK_OUT_X, OP_MOVEA_32_AL, qc_movea_32_al);
qc!(MASK_OUT_X, OP_MOVEA_32_PCDI, qc_movea_32_pcdi);
qc!(MASK_OUT_X, OP_MOVEA_32_PCIX, qc_movea_32_pcix);
qc!(MASK_OUT_X, OP_MOVEA_32_IMM, qc_movea_32_imm);
// Put qc for MOVE to CCR here
qc!(MASK_OUT_Y, OP_MOVE_16_TOC_DN, qc_move_16_toc_dn);
qc!(MASK_OUT_Y, OP_MOVE_16_TOC_AI, qc_move_16_toc_ai);
qc!(MASK_OUT_Y, OP_MOVE_16_TOC_PI, qc_move_16_toc_pi);
qc!(MASK_OUT_Y, OP_MOVE_16_TOC_PD, qc_move_16_toc_pd);
qc!(MASK_OUT_Y, OP_MOVE_16_TOC_DI, qc_move_16_toc_di);
qc!(MASK_OUT_Y, OP_MOVE_16_TOC_IX, qc_move_16_toc_ix);
qc!(MASK_EXACT, OP_MOVE_16_TOC_AW, qc_move_16_toc_aw);
qc!(MASK_EXACT, OP_MOVE_16_TOC_AL, qc_move_16_toc_al);
qc!(MASK_EXACT, OP_MOVE_16_TOC_PCDI, qc_move_16_toc_pcdi);
qc!(MASK_EXACT, OP_MOVE_16_TOC_PCIX, qc_move_16_toc_pcix);
qc!(MASK_EXACT, OP_MOVE_16_TOC_IMM, qc_move_16_toc_imm);
// Put qc for MOVE from SR here
qc!(MASK_OUT_Y, OP_MOVE_16_FRS_DN, qc_move_16_frs_dn);
qc!(MASK_OUT_Y, OP_MOVE_16_FRS_AI, qc_move_16_frs_ai);
qc!(MASK_OUT_Y, OP_MOVE_16_FRS_PI, qc_move_16_frs_pi);
qc!(MASK_OUT_Y, OP_MOVE_16_FRS_PD, qc_move_16_frs_pd);
qc!(MASK_OUT_Y, OP_MOVE_16_FRS_DI, qc_move_16_frs_di);
qc!(MASK_OUT_Y, OP_MOVE_16_FRS_IX, qc_move_16_frs_ix);
qc!(MASK_EXACT, OP_MOVE_16_FRS_AW, qc_move_16_frs_aw);
qc!(MASK_EXACT, OP_MOVE_16_FRS_AL, qc_move_16_frs_al);
// Put qc for MOVE to SR here
qc!(MASK_OUT_Y, OP_MOVE_16_TOS_DN, qc_move_16_tos_dn);
qc!(MASK_OUT_Y, OP_MOVE_16_TOS_AI, qc_move_16_tos_ai);
qc!(MASK_OUT_Y, OP_MOVE_16_TOS_PI, qc_move_16_tos_pi);
qc!(MASK_OUT_Y, OP_MOVE_16_TOS_PD, qc_move_16_tos_pd);
qc!(MASK_OUT_Y, OP_MOVE_16_TOS_DI, qc_move_16_tos_di);
qc!(MASK_OUT_Y, OP_MOVE_16_TOS_IX, qc_move_16_tos_ix);
qc!(MASK_EXACT, OP_MOVE_16_TOS_AW, qc_move_16_tos_aw);
qc!(MASK_EXACT, OP_MOVE_16_TOS_AL, qc_move_16_tos_al);
qc!(MASK_EXACT, OP_MOVE_16_TOS_PCDI, qc_move_16_tos_pcdi);
qc!(MASK_EXACT, OP_MOVE_16_TOS_PCIX, qc_move_16_tos_pcix);
qc!(MASK_EXACT, OP_MOVE_16_TOS_IMM, qc_move_16_tos_imm);
// Put qc for MOVE USP here
qc!(MASK_OUT_Y, OP_MOVE_32_TOU, qc_move_32_tou);
qc!(MASK_OUT_Y, OP_MOVE_32_FRU, qc_move_32_fru);
// Put qc for MOVEM here
qc!(MASK_OUT_Y, OP_MOVEM_16_RE_AI, qc_movem_16_re_ai);
qc!(MASK_OUT_Y, OP_MOVEM_16_RE_PD, qc_movem_16_re_pd);
qc!(MASK_OUT_Y, OP_MOVEM_16_RE_DI, qc_movem_16_re_di);
qc!(MASK_OUT_Y, OP_MOVEM_16_RE_IX, qc_movem_16_re_ix);
qc!(MASK_EXACT, OP_MOVEM_16_RE_AW, qc_movem_16_re_aw);
qc!(MASK_EXACT, OP_MOVEM_16_RE_AL, qc_movem_16_re_al);
qc!(MASK_OUT_Y, OP_MOVEM_16_ER_AI, qc_movem_16_er_ai);
qc!(MASK_OUT_Y, OP_MOVEM_16_ER_PI, qc_movem_16_er_pi);
qc!(MASK_OUT_Y, OP_MOVEM_16_ER_DI, qc_movem_16_er_di);
qc!(MASK_OUT_Y, OP_MOVEM_16_ER_IX, qc_movem_16_er_ix);
qc!(MASK_EXACT, OP_MOVEM_16_ER_AW, qc_movem_16_er_aw);
qc!(MASK_EXACT, OP_MOVEM_16_ER_AL, qc_movem_16_er_al);
qc!(MASK_EXACT, OP_MOVEM_16_ER_PCDI, qc_movem_16_er_pcdi);
qc!(MASK_EXACT, OP_MOVEM_16_ER_PCIX, qc_movem_16_er_pcix);
qc!(MASK_OUT_Y, OP_MOVEM_32_RE_AI, qc_movem_32_re_ai);
qc!(MASK_OUT_Y, OP_MOVEM_32_RE_PD, qc_movem_32_re_pd);
qc!(MASK_OUT_Y, OP_MOVEM_32_RE_DI, qc_movem_32_re_di);
qc!(MASK_OUT_Y, OP_MOVEM_32_RE_IX, qc_movem_32_re_ix);
qc!(MASK_EXACT, OP_MOVEM_32_RE_AW, qc_movem_32_re_aw);
qc!(MASK_EXACT, OP_MOVEM_32_RE_AL, qc_movem_32_re_al);
qc!(MASK_OUT_Y, OP_MOVEM_32_ER_AI, qc_movem_32_er_ai);
qc!(MASK_OUT_Y, OP_MOVEM_32_ER_PI, qc_movem_32_er_pi);
qc!(MASK_OUT_Y, OP_MOVEM_32_ER_DI, qc_movem_32_er_di);
qc!(MASK_OUT_Y, OP_MOVEM_32_ER_IX, qc_movem_32_er_ix);
qc!(MASK_EXACT, OP_MOVEM_32_ER_AW, qc_movem_32_er_aw);
qc!(MASK_EXACT, OP_MOVEM_32_ER_AL, qc_movem_32_er_al);
qc!(MASK_EXACT, OP_MOVEM_32_ER_PCDI, qc_movem_32_er_pcdi);
qc!(MASK_EXACT, OP_MOVEM_32_ER_PCIX, qc_movem_32_er_pcix);
// Put qc for MOVEP here
qc!(MASK_OUT_X_Y, OP_MOVEP_16_ER, qc_movep_16_er);
qc!(MASK_OUT_X_Y, OP_MOVEP_16_RE, qc_movep_16_re);
qc!(MASK_OUT_X_Y, OP_MOVEP_32_ER, qc_movep_32_er);
qc!(MASK_OUT_X_Y, OP_MOVEP_32_RE, qc_movep_32_re);
// Put qc for MOVEQ here
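// As with MASK_LOBYTE_QUICKER above, the "+ 0x55" presumably fixes part of the immediate
// data byte so fewer MOVEQ opcode variants are exercised per run.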
const MASK_LOBYTX_QUICKER: u32 = MASK_LOBYTX + 0x55;
qc!(MASK_LOBYTX_QUICKER, OP_MOVEQ_32, qc_moveq_32);
// Put qc for MULS here
qc!(MASK_OUT_X_Y, OP_MULS_16_DN, qc_muls_16_dn);
qc!(MASK_OUT_X_Y, OP_MULS_16_AI, qc_muls_16_ai);
qc!(MASK_OUT_X_Y, OP_MULS_16_PI, qc_muls_16_pi);
qc!(MASK_OUT_X_Y, OP_MULS_16_PD, qc_muls_16_pd);
qc!(MASK_OUT_X_Y, OP_MULS_16_DI, qc_muls_16_di);
qc!(MASK_OUT_X_Y, OP_MULS_16_IX, qc_muls_16_ix);
qc!(MASK_OUT_X, OP_MULS_16_AW, qc_muls_16_aw);
qc!(MASK_OUT_X, OP_MULS_16_AL, qc_muls_16_al);
qc!(MASK_OUT_X, OP_MULS_16_PCDI, qc_muls_16_pcdi);
qc!(MASK_OUT_X, OP_MULS_16_PCIX, qc_muls_16_pcix);
qc!(MASK_OUT_X, OP_MULS_16_IMM, qc_muls_16_imm);
// Put qc for MULU here
qc!(MASK_OUT_X_Y, OP_MULU_16_DN, qc_mulu_16_dn);
qc!(MASK_OUT_X_Y, OP_MULU_16_AI, qc_mulu_16_ai);
qc!(MASK_OUT_X_Y, OP_MULU_16_PI, qc_mulu_16_pi);
qc!(MASK_OUT_X_Y, OP_MULU_16_PD, qc_mulu_16_pd);
qc!(MASK_OUT_X_Y, OP_MULU_16_DI, qc_mulu_16_di);
qc!(MASK_OUT_X_Y, OP_MULU_16_IX, qc_mulu_16_ix);
qc!(MASK_OUT_X, OP_MULU_16_AW, qc_mulu_16_aw);
qc!(MASK_OUT_X, OP_MULU_16_AL, qc_mulu_16_al);
qc!(MASK_OUT_X, OP_MULU_16_PCDI, qc_mulu_16_pcdi);
qc!(MASK_OUT_X, OP_MULU_16_PCIX, qc_mulu_16_pcix);
qc!(MASK_OUT_X, OP_MULU_16_IMM, qc_mulu_16_imm);
// Put qc for NBCD here
qc!(MASK_OUT_Y, OP_NBCD_8_DN, qc_nbcd_8_dn);
qc!(MASK_OUT_Y, OP_NBCD_8_AI, qc_nbcd_8_ai);
qc!(MASK_OUT_Y, OP_NBCD_8_PI, qc_nbcd_8_pi);
qc!(MASK_OUT_Y, OP_NBCD_8_PD, qc_nbcd_8_pd);
qc!(MASK_OUT_Y, OP_NBCD_8_DI, qc_nbcd_8_di);
qc!(MASK_OUT_Y, OP_NBCD_8_IX, qc_nbcd_8_ix);
qc!(MASK_EXACT, OP_NBCD_8_AW, qc_nbcd_8_aw);
qc!(MASK_EXACT, OP_NBCD_8_AL, qc_nbcd_8_al);
// Put qc for NEG here
qc!(MASK_OUT_Y, OP_NEG_8_DN, qc_neg_8_dn);
qc!(MASK_OUT_Y, OP_NEG_8_AI, qc_neg_8_ai);
qc!(MASK_OUT_Y, OP_NEG_8_PI, qc_neg_8_pi);
qc!(MASK_OUT_Y, OP_NEG_8_PD, qc_neg_8_pd);
qc!(MASK_OUT_Y, OP_NEG_8_DI, qc_neg_8_di);
qc!(MASK_OUT_Y, OP_NEG_8_IX, qc_neg_8_ix);
qc!(MASK_EXACT, OP_NEG_8_AW, qc_neg_8_aw);
qc!(MASK_EXACT, OP_NEG_8_AL, qc_neg_8_al);
qc!(MASK_OUT_Y, OP_NEG_16_DN, qc_neg_16_dn);
qc!(MASK_OUT_Y, OP_NEG_16_AI, qc_neg_16_ai);
qc!(MASK_OUT_Y, OP_NEG_16_PI, qc_neg_16_pi);
qc!(MASK_OUT_Y, OP_NEG_16_PD, qc_neg_16_pd);
qc!(MASK_OUT_Y, OP_NEG_16_DI, qc_neg_16_di);
qc!(MASK_OUT_Y, OP_NEG_16_IX, qc_neg_16_ix);
qc!(MASK_EXACT, OP_NEG_16_AW, qc_neg_16_aw);
qc!(MASK_EXACT, OP_NEG_16_AL, qc_neg_16_al);
qc!(MASK_OUT_Y, OP_NEG_32_DN, qc_neg_32_dn);
qc!(MASK_OUT_Y, OP_NEG_32_AI, qc_neg_32_ai);
qc!(MASK_OUT_Y, OP_NEG_32_PI, qc_neg_32_pi);
qc!(MASK_OUT_Y, OP_NEG_32_PD, qc_neg_32_pd);
qc!(MASK_OUT_Y, OP_NEG_32_DI, qc_neg_32_di);
qc!(MASK_OUT_Y, OP_NEG_32_IX, qc_neg_32_ix);
qc!(MASK_EXACT, OP_NEG_32_AW, qc_neg_32_aw);
qc!(MASK_EXACT, OP_NEG_32_AL, qc_neg_32_al);
// Put qc for NEGX here
qc!(MASK_OUT_Y, OP_NEGX_8_DN, qc_negx_8_dn);
qc!(MASK_OUT_Y, OP_NEGX_8_AI, qc_negx_8_ai);
qc!(MASK_OUT_Y, OP_NEGX_8_PI, qc_negx_8_pi);
qc!(MASK_OUT_Y, OP_NEGX_8_PD, qc_negx_8_pd);
qc!(MASK_OUT_Y, OP_NEGX_8_DI, qc_negx_8_di);
qc!(MASK_OUT_Y, OP_NEGX_8_IX, qc_negx_8_ix);
qc!(MASK_EXACT, OP_NEGX_8_AW, qc_negx_8_aw);
qc!(MASK_EXACT, OP_NEGX_8_AL, qc_negx_8_al);
qc!(MASK_OUT_Y, OP_NEGX_16_DN, qc_negx_16_dn);
qc!(MASK_OUT_Y, OP_NEGX_16_AI, qc_negx_16_ai);
qc!(MASK_OUT_Y, OP_NEGX_16_PI, qc_negx_16_pi);
qc!(MASK_OUT_Y, OP_NEGX_16_PD, qc_negx_16_pd);
qc!(MASK_OUT_Y, OP_NEGX_16_DI, qc_negx_16_di);
qc!(MASK_OUT_Y, OP_NEGX_16_IX, qc_negx_16_ix);
qc!(MASK_EXACT, OP_NEGX_16_AW, qc_negx_16_aw);
qc!(MASK_EXACT, OP_NEGX_16_AL, qc_negx_16_al);
qc!(MASK_OUT_Y, OP_NEGX_32_DN, qc_negx_32_dn);
qc!(MASK_OUT_Y, OP_NEGX_32_AI, qc_negx_32_ai);
qc!(MASK_OUT_Y, OP_NEGX_32_PI, qc_negx_32_pi);
qc!(MASK_OUT_Y, OP_NEGX_32_PD, qc_negx_32_pd);
qc!(MASK_OUT_Y, OP_NEGX_32_DI, qc_negx_32_di);
qc!(MASK_OUT_Y, OP_NEGX_32_IX, qc_negx_32_ix);
qc!(MASK_EXACT, OP_NEGX_32_AW, qc_negx_32_aw);
qc!(MASK_EXACT, OP_NEGX_32_AL, qc_negx_32_al);
// Put qc for NOP here
qc8!(MASK_EXACT, OP_NOP, qc_nop);
// Put qc for NOT here
qc8!(MASK_OUT_Y, OP_NOT_8_DN, qc_not_8_dn);
qc8!(MASK_OUT_Y, OP_NOT_8_AI, qc_not_8_ai);
qc8!(MASK_OUT_Y, OP_NOT_8_PI, qc_not_8_pi);
qc8!(MASK_OUT_Y, OP_NOT_8_PD, qc_not_8_pd);
qc8!(MASK_OUT_Y, OP_NOT_8_DI, qc_not_8_di);
qc8!(MASK_OUT_Y, OP_NOT_8_IX, qc_not_8_ix);
qc8!(MASK_EXACT, OP_NOT_8_AW, qc_not_8_aw);
qc8!(MASK_EXACT, OP_NOT_8_AL, qc_not_8_al);
qc!(MASK_OUT_Y, OP_NOT_16_DN, qc_not_16_dn);
qc!(MASK_OUT_Y, OP_NOT_16_AI, qc_not_16_ai);
qc!(MASK_OUT_Y, OP_NOT_16_PI, qc_not_16_pi);
qc!(MASK_OUT_Y, OP_NOT_16_PD, qc_not_16_pd);
qc!(MASK_OUT_Y, OP_NOT_16_DI, qc_not_16_di);
qc!(MASK_OUT_Y, OP_NOT_16_IX, qc_not_16_ix);
qc!(MASK_EXACT, OP_NOT_16_AW, qc_not_16_aw);
qc!(MASK_EXACT, OP_NOT_16_AL, qc_not_16_al);
qc!(MASK_OUT_Y, OP_NOT_32_DN, qc_not_32_dn);
qc!(MASK_OUT_Y, OP_NOT_32_AI, qc_not_32_ai);
qc!(MASK_OUT_Y, OP_NOT_32_PI, qc_not_32_pi);
qc!(MASK_OUT_Y, OP_NOT_32_PD, qc_not_32_pd);
qc!(MASK_OUT_Y, OP_NOT_32_DI, qc_not_32_di);
qc!(MASK_OUT_Y, OP_NOT_32_IX, qc_not_32_ix);
qc!(MASK_EXACT, OP_NOT_32_AW, qc_not_32_aw);
qc!(MASK_EXACT, OP_NOT_32_AL, qc_not_32_al);
// Put qc for OR here
qc8!(MASK_OUT_X_Y, OP_OR_8_ER_DN, qc_or_8_er_dn);
qc8!(MASK_OUT_X_Y, OP_OR_8_ER_AI, qc_or_8_er_ai);
qc8!(MASK_OUT_X_Y, OP_OR_8_ER_PI, qc_or_8_er_pi);
qc8!(MASK_OUT_X_Y, OP_OR_8_ER_PD, qc_or_8_er_pd);
qc8!(MASK_OUT_X_Y, OP_OR_8_ER_DI, qc_or_8_er_di);
qc8!(MASK_OUT_X_Y, OP_OR_8_ER_IX, qc_or_8_er_ix);
qc8!(MASK_OUT_X, OP_OR_8_ER_AW, qc_or_8_er_aw);
qc8!(MASK_OUT_X, OP_OR_8_ER_AL, qc_or_8_er_al);
qc8!(MASK_OUT_X, OP_OR_8_ER_PCDI, qc_or_8_er_pcdi);
qc8!(MASK_OUT_X, OP_OR_8_ER_PCIX, qc_or_8_er_pcix);
qc8!(MASK_OUT_X, OP_OR_8_ER_IMM, qc_or_8_er_imm);
qc8!(MASK_OUT_X_Y, OP_OR_8_RE_AI, qc_or_8_re_ai);
qc8!(MASK_OUT_X_Y, OP_OR_8_RE_PI, qc_or_8_re_pi);
qc8!(MASK_OUT_X_Y, OP_OR_8_RE_PD, qc_or_8_re_pd);
qc8!(MASK_OUT_X_Y, OP_OR_8_RE_DI, qc_or_8_re_di);
qc8!(MASK_OUT_X_Y, OP_OR_8_RE_IX, qc_or_8_re_ix);
qc8!(MASK_OUT_X, OP_OR_8_RE_AW, qc_or_8_re_aw);
qc8!(MASK_OUT_X, OP_OR_8_RE_AL, qc_or_8_re_al);
qc!(MASK_OUT_X_Y, OP_OR_16_ER_DN, qc_or_16_er_dn);
qc!(MASK_OUT_X_Y, OP_OR_16_ER_AI, qc_or_16_er_ai);
qc!(MASK_OUT_X_Y, OP_OR_16_ER_PI, qc_or_16_er_pi);
qc!(MASK_OUT_X_Y, OP_OR_16_ER_PD, qc_or_16_er_pd);
qc!(MASK_OUT_X_Y, OP_OR_16_ER_DI, qc_or_16_er_di);
qc!(MASK_OUT_X_Y, OP_OR_16_ER_IX, qc_or_16_er_ix);
qc!(MASK_OUT_X, OP_OR_16_ER_AW, qc_or_16_er_aw);
qc!(MASK_OUT_X, OP_OR_16_ER_AL, qc_or_16_er_al);
qc!(MASK_OUT_X, OP_OR_16_ER_PCDI, qc_or_16_er_pcdi);
qc!(MASK_OUT_X, OP_OR_16_ER_PCIX, qc_or_16_er_pcix);
qc!(MASK_OUT_X, OP_OR_16_ER_IMM, qc_or_16_er_imm);
qc!(MASK_OUT_X_Y, OP_OR_16_RE_AI, qc_or_16_re_ai);
qc!(MASK_OUT_X_Y, OP_OR_16_RE_PI, qc_or_16_re_pi);
qc!(MASK_OUT_X_Y, OP_OR_16_RE_PD, qc_or_16_re_pd);
qc!(MASK_OUT_X_Y, OP_OR_16_RE_DI, qc_or_16_re_di);
qc!(MASK_OUT_X_Y, OP_OR_16_RE_IX, qc_or_16_re_ix);
qc!(MASK_OUT_X, OP_OR_16_RE_AW, qc_or_16_re_aw);
qc!(MASK_OUT_X, OP_OR_16_RE_AL, qc_or_16_re_al);
qc!(MASK_OUT_X_Y, OP_OR_32_ER_DN, qc_or_32_er_dn);
qc!(MASK_OUT_X_Y, OP_OR_32_ER_AI, qc_or_32_er_ai);
qc!(MASK_OUT_X_Y, OP_OR_32_ER_PI, qc_or_32_er_pi);
qc!(MASK_OUT_X_Y, OP_OR_32_ER_PD, qc_or_32_er_pd);
qc!(MASK_OUT_X_Y, OP_OR_32_ER_DI, qc_or_32_er_di);
qc!(MASK_OUT_X_Y, OP_OR_32_ER_IX, qc_or_32_er_ix);
qc!(MASK_OUT_X, OP_OR_32_ER_AW, qc_or_32_er_aw);
qc!(MASK_OUT_X, OP_OR_32_ER_AL, qc_or_32_er_al);
qc!(MASK_OUT_X, OP_OR_32_ER_PCDI, qc_or_32_er_pcdi);
qc!(MASK_OUT_X, OP_OR_32_ER_PCIX, qc_or_32_er_pcix);
qc!(MASK_OUT_X, OP_OR_32_ER_IMM, qc_or_32_er_imm);
qc!(MASK_OUT_X_Y, OP_OR_32_RE_AI, qc_or_32_re_ai);
qc!(MASK_OUT_X_Y, OP_OR_32_RE_PI, qc_or_32_re_pi);
qc!(MASK_OUT_X_Y, OP_OR_32_RE_PD, qc_or_32_re_pd);
qc!(MASK_OUT_X_Y, OP_OR_32_RE_DI, qc_or_32_re_di);
qc!(MASK_OUT_X_Y, OP_OR_32_RE_IX, qc_or_32_re_ix);
qc!(MASK_OUT_X, OP_OR_32_RE_AW, qc_or_32_re_aw);
qc!(MASK_OUT_X, OP_OR_32_RE_AL, qc_or_32_re_al);
// Put qc for ORI here
qc8!(MASK_OUT_Y, OP_ORI_8_DN, qc_ori_8_dn);
qc8!(MASK_OUT_Y, OP_ORI_8_AI, qc_ori_8_ai);
qc8!(MASK_OUT_Y, OP_ORI_8_PI, qc_ori_8_pi);
qc8!(MASK_OUT_Y, OP_ORI_8_PD, qc_ori_8_pd);
qc8!(MASK_OUT_Y, OP_ORI_8_DI, qc_ori_8_di);
qc8!(MASK_OUT_Y, OP_ORI_8_IX, qc_ori_8_ix);
qc8!(MASK_EXACT, OP_ORI_8_AW, qc_ori_8_aw);
qc8!(MASK_EXACT, OP_ORI_8_AL, qc_ori_8_al);
qc!(MASK_OUT_Y, OP_ORI_16_DN, qc_ori_16_dn);
qc!(MASK_OUT_Y, OP_ORI_16_AI, qc_ori_16_ai);
qc!(MASK_OUT_Y, OP_ORI_16_PI, qc_ori_16_pi);
qc!(MASK_OUT_Y, OP_ORI_16_PD, qc_ori_16_pd);
qc!(MASK_OUT_Y, OP_ORI_16_DI, qc_ori_16_di);
qc!(MASK_OUT_Y, OP_ORI_16_IX, qc_ori_16_ix);
qc!(MASK_EXACT, OP_ORI_16_AW, qc_ori_16_aw);
qc!(MASK_EXACT, OP_ORI_16_AL, qc_ori_16_al);
qc!(MASK_OUT_Y, OP_ORI_32_DN, qc_ori_32_dn);
qc!(MASK_OUT_Y, OP_ORI_32_AI, qc_ori_32_ai);
qc!(MASK_OUT_Y, OP_ORI_32_PI, qc_ori_32_pi);
qc!(MASK_OUT_Y, OP_ORI_32_PD, qc_ori_32_pd);
qc!(MASK_OUT_Y, OP_ORI_32_DI, qc_ori_32_di);
qc!(MASK_OUT_Y, OP_ORI_32_IX, qc_ori_32_ix);
qc!(MASK_EXACT, OP_ORI_32_AW, qc_ori_32_aw);
qc!(MASK_EXACT, OP_ORI_32_AL, qc_ori_32_al);
// Put qc for ORI to CCR here
qc!(MASK_EXACT, OP_ORI_16_TOC, qc_ori_16_toc);
// Put qc for ORI to SR here
qc!(MASK_EXACT, OP_ORI_16_TOS, qc_ori_16_tos);
// Put qc for PEA here
qc!(MASK_OUT_Y, OP_PEA_32_AI, qc_pea_32_ai);
qc!(MASK_OUT_Y, OP_PEA_32_DI, qc_pea_32_di);
qc!(MASK_OUT_Y, OP_PEA_32_IX, qc_pea_32_ix);
qc!(MASK_EXACT, OP_PEA_32_AW, qc_pea_32_aw);
qc!(MASK_EXACT, OP_PEA_32_AL, qc_pea_32_al);
qc!(MASK_EXACT, OP_PEA_32_PCDI, qc_pea_32_pcdi);
qc!(MASK_EXACT, OP_PEA_32_PCIX, qc_pea_32_pcix);
// Put qc for RESET here
qc8!(MASK_EXACT, OP_RESET, qc_reset);
// Put qc for ROL, ROR here
qc8!(MASK_OUT_X_Y, OP_ROR_8_S, qc_ror_8_s);
qc!(MASK_OUT_X_Y, OP_ROR_16_S, qc_ror_16_s);
qc!(MASK_OUT_X_Y, OP_ROR_32_S, qc_ror_32_s);
qc8!(MASK_OUT_X_Y, OP_ROR_8_R, qc_ror_8_r);
qc!(MASK_OUT_X_Y, OP_ROR_16_R, qc_ror_16_r);
qc!(MASK_OUT_X_Y, OP_ROR_32_R, qc_ror_32_r);
qc8!(MASK_OUT_X_Y, OP_ROL_8_S, qc_rol_8_s);
qc!(MASK_OUT_X_Y, OP_ROL_16_S, qc_rol_16_s);
qc!(MASK_OUT_X_Y, OP_ROL_32_S, qc_rol_32_s);
qc8!(MASK_OUT_X_Y, OP_ROL_8_R, qc_rol_8_r);
qc!(MASK_OUT_X_Y, OP_ROL_16_R, qc_rol_16_r);
qc!(MASK_OUT_X_Y, OP_ROL_32_R, qc_rol_32_r);
qc!(MASK_OUT_Y, OP_ROL_16_AI, qc_rol_16_ai);
qc!(MASK_OUT_Y, OP_ROL_16_PI, qc_rol_16_pi);
qc!(MASK_OUT_Y, OP_ROL_16_PD, qc_rol_16_pd);
qc!(MASK_OUT_Y, OP_ROL_16_DI, qc_rol_16_di);
qc!(MASK_OUT_Y, OP_ROL_16_IX, qc_rol_16_ix);
qc!(MASK_EXACT, OP_ROL_16_AW, qc_rol_16_aw);
qc!(MASK_EXACT, OP_ROL_16_AL, qc_rol_16_al);
qc!(MASK_OUT_Y, OP_ROR_16_AI, qc_ror_16_ai);
qc!(MASK_OUT_Y, OP_ROR_16_PI, qc_ror_16_pi);
qc!(MASK_OUT_Y, OP_ROR_16_PD, qc_ror_16_pd);
qc!(MASK_OUT_Y, OP_ROR_16_DI, qc_ror_16_di);
qc!(MASK_OUT_Y, OP_ROR_16_IX, qc_ror_16_ix);
qc!(MASK_EXACT, OP_ROR_16_AW, qc_ror_16_aw);
qc!(MASK_EXACT, OP_ROR_16_AL, qc_ror_16_al);
// Put qc for ROXL, ROXR here
qc8!(MASK_OUT_X_Y, OP_ROXR_8_S, qc_roxr_8_s);
qc!(MASK_OUT_X_Y, OP_ROXR_16_S, qc_roxr_16_s);
qc!(MASK_OUT_X_Y, OP_ROXR_32_S, qc_roxr_32_s);
qc8!(MASK_OUT_X_Y, OP_ROXR_8_R, qc_roxr_8_r);
qc!(MASK_OUT_X_Y, OP_ROXR_16_R, qc_roxr_16_r);
qc!(MASK_OUT_X_Y, OP_ROXR_32_R, qc_roxr_32_r);
qc8!(MASK_OUT_X_Y, OP_ROXL_8_S, qc_roxl_8_s);
qc!(MASK_OUT_X_Y, OP_ROXL_16_S, qc_roxl_16_s);
qc!(MASK_OUT_X_Y, OP_ROXL_32_S, qc_roxl_32_s);
qc8!(MASK_OUT_X_Y, OP_ROXL_8_R, qc_roxl_8_r);
qc!(MASK_OUT_X_Y, OP_ROXL_16_R, qc_roxl_16_r);
qc!(MASK_OUT_X_Y, OP_ROXL_32_R, qc_roxl_32_r);
qc!(MASK_OUT_Y, OP_ROXL_16_AI, qc_roxl_16_ai);
qc!(MASK_OUT_Y, OP_ROXL_16_PI, qc_roxl_16_pi);
qc!(MASK_OUT_Y, OP_ROXL_16_PD, qc_roxl_16_pd);
qc!(MASK_OUT_Y, OP_ROXL_16_DI, qc_roxl_16_di);
qc!(MASK_OUT_Y, OP_ROXL_16_IX, qc_roxl_16_ix);
qc!(MASK_EXACT, OP_ROXL_16_AW, qc_roxl_16_aw);
qc!(MASK_EXACT, OP_ROXL_16_AL, qc_roxl_16_al);
qc!(MASK_OUT_Y, OP_ROXR_16_AI, qc_roxr_16_ai);
qc!(MASK_OUT_Y, OP_ROXR_16_PI, qc_roxr_16_pi);
qc!(MASK_OUT_Y, OP_ROXR_16_PD, qc_roxr_16_pd);
qc!(MASK_OUT_Y, OP_ROXR_16_DI, qc_roxr_16_di);
qc!(MASK_OUT_Y, OP_ROXR_16_IX, qc_roxr_16_ix);
qc!(MASK_EXACT, OP_ROXR_16_AW, qc_roxr_16_aw);
qc!(MASK_EXACT, OP_ROXR_16_AL, qc_roxr_16_al);
// Put qc for RTE here
qc8!(MASK_EXACT, OP_RTE_32, qc_rte_32);
// Put qc for RTR here
qc8!(MASK_EXACT, OP_RTR_32, qc_rtr_32);
// Put qc for RTS here
qc8!(MASK_EXACT, OP_RTS_32, qc_rts_32);
qc8!(MASK_OUT_X_Y, OP_SBCD_8_RR, qc_sbcd_rr);
qc8!(MASK_OUT_X_Y, OP_SBCD_8_MM, qc_sbcd_mm);
qc!(MASK_OUT_Y, OP_SCC_8_AI, qc_scc_8_ai);
qc!(MASK_EXACT, OP_SCC_8_AL, qc_scc_8_al);
qc!(MASK_EXACT, OP_SCC_8_AW, qc_scc_8_aw);
qc!(MASK_OUT_Y, OP_SCC_8_DN, qc_scc_8_dn);
qc!(MASK_OUT_Y, OP_SCC_8_DI, qc_scc_8_di);
qc!(MASK_OUT_Y, OP_SCC_8_IX, qc_scc_8_ix);
qc!(MASK_OUT_Y, OP_SCC_8_PD, qc_scc_8_pd);
qc!(MASK_OUT_Y, OP_SCC_8_PI, qc_scc_8_pi);
qc!(MASK_OUT_Y, OP_SCS_8_AI, qc_scs_8_ai);
qc!(MASK_EXACT, OP_SCS_8_AL, qc_scs_8_al);
qc!(MASK_EXACT, OP_SCS_8_AW, qc_scs_8_aw);
qc!(MASK_OUT_Y, OP_SCS_8_DN, qc_scs_8_dn);
qc!(MASK_OUT_Y, OP_SCS_8_DI, qc_scs_8_di);
qc!(MASK_OUT_Y, OP_SCS_8_IX, qc_scs_8_ix);
qc!(MASK_OUT_Y, OP_SCS_8_PD, qc_scs_8_pd);
qc!(MASK_OUT_Y, OP_SCS_8_PI, qc_scs_8_pi);
qc!(MASK_OUT_Y, OP_SEQ_8_AI, qc_seq_8_ai);
qc!(MASK_EXACT, OP_SEQ_8_AL, qc_seq_8_al);
qc!(MASK_EXACT, OP_SEQ_8_AW, qc_seq_8_aw);
qc!(MASK_OUT_Y, OP_SEQ_8_DN, qc_seq_8_dn);
qc!(MASK_OUT_Y, OP_SEQ_8_DI, qc_seq_8_di);
qc!(MASK_OUT_Y, OP_SEQ_8_IX, qc_seq_8_ix);
qc!(MASK_OUT_Y, OP_SEQ_8_PD, qc_seq_8_pd);
qc!(MASK_OUT_Y, OP_SEQ_8_PI, qc_seq_8_pi);
qc!(MASK_OUT_Y, OP_SF_8_AI, qc_sf_8_ai);
qc!(MASK_EXACT, OP_SF_8_AL, qc_sf_8_al);
qc!(MASK_EXACT, OP_SF_8_AW, qc_sf_8_aw);
qc!(MASK_OUT_Y, OP_SF_8_DN, qc_sf_8_dn);
qc!(MASK_OUT_Y, OP_SF_8_DI, qc_sf_8_di);
qc!(MASK_OUT_Y, OP_SF_8_IX, qc_sf_8_ix);
qc!(MASK_OUT_Y, OP_SF_8_PD, qc_sf_8_pd);
qc!(MASK_OUT_Y, OP_SF_8_PI, qc_sf_8_pi);
qc!(MASK_OUT_Y, OP_SGE_8_AI, qc_sge_8_ai);
qc!(MASK_EXACT, OP_SGE_8_AL, qc_sge_8_al);
qc!(MASK_EXACT, OP_SGE_8_AW, qc_sge_8_aw);
qc!(MASK_OUT_Y, OP_SGE_8_DN, qc_sge_8_dn);
qc!(MASK_OUT_Y, OP_SGE_8_DI, qc_sge_8_di);
qc!(MASK_OUT_Y, OP_SGE_8_IX, qc_sge_8_ix);
qc!(MASK_OUT_Y, OP_SGE_8_PD, qc_sge_8_pd);
qc!(MASK_OUT_Y, OP_SGE_8_PI, qc_sge_8_pi);
qc!(MASK_OUT_Y, OP_SGT_8_AI, qc_sgt_8_ai);
qc!(MASK_EXACT, OP_SGT_8_AL, qc_sgt_8_al);
qc!(MASK_EXACT, OP_SGT_8_AW, qc_sgt_8_aw);
qc!(MASK_OUT_Y, OP_SGT_8_DN, qc_sgt_8_dn);
qc!(MASK_OUT_Y, OP_SGT_8_DI, qc_sgt_8_di);
qc!(MASK_OUT_Y, OP_SGT_8_IX, qc_sgt_8_ix);
qc!(MASK_OUT_Y, OP_SGT_8_PD, qc_sgt_8_pd);
qc!(MASK_OUT_Y, OP_SGT_8_PI, qc_sgt_8_pi);
qc!(MASK_OUT_Y, OP_SHI_8_AI, qc_shi_8_ai);
qc!(MASK_EXACT, OP_SHI_8_AL, qc_shi_8_al);
qc!(MASK_EXACT, OP_SHI_8_AW, qc_shi_8_aw);
qc!(MASK_OUT_Y, OP_SHI_8_DN, qc_shi_8_dn);
qc!(MASK_OUT_Y, OP_SHI_8_DI, qc_shi_8_di);
qc!(MASK_OUT_Y, OP_SHI_8_IX, qc_shi_8_ix);
qc!(MASK_OUT_Y, OP_SHI_8_PD, qc_shi_8_pd);
qc!(MASK_OUT_Y, OP_SHI_8_PI, qc_shi_8_pi);
qc!(MASK_OUT_Y, OP_SLE_8_AI, qc_sle_8_ai);
qc!(MASK_EXACT, OP_SLE_8_AL, qc_sle_8_al);
qc!(MASK_EXACT, OP_SLE_8_AW, qc_sle_8_aw);
qc!(MASK_OUT_Y, OP_SLE_8_DN, qc_sle_8_dn);
qc!(MASK_OUT_Y, OP_SLE_8_DI, qc_sle_8_di);
qc!(MASK_OUT_Y, OP_SLE_8_IX, qc_sle_8_ix);
qc!(MASK_OUT_Y, OP_SLE_8_PD, qc_sle_8_pd);
qc!(MASK_OUT_Y, OP_SLE_8_PI, qc_sle_8_pi);
qc!(MASK_OUT_Y, OP_SLS_8_AI, qc_sls_8_ai);
qc!(MASK_EXACT, OP_SLS_8_AL, qc_sls_8_al);
qc!(MASK_EXACT, OP_SLS_8_AW, qc_sls_8_aw);
qc!(MASK_OUT_Y, OP_SLS_8_DN, qc_sls_8_dn);
qc!(MASK_OUT_Y, OP_SLS_8_DI, qc_sls_8_di);
qc!(MASK_OUT_Y, OP_SLS_8_IX, qc_sls_8_ix);
qc!(MASK_OUT_Y, OP_SLS_8_PD, qc_sls_8_pd);
qc!(MASK_OUT_Y, OP_SLS_8_PI, qc_sls_8_pi);
qc!(MASK_OUT_Y, OP_SLT_8_AI, qc_slt_8_ai);
qc!(MASK_EXACT, OP_SLT_8_AL, qc_slt_8_al);
qc!(MASK_EXACT, OP_SLT_8_AW, qc_slt_8_aw);
qc!(MASK_OUT_Y, OP_SLT_8_DN, qc_slt_8_dn);
qc!(MASK_OUT_Y, OP_SLT_8_DI, qc_slt_8_di);
qc!(MASK_OUT_Y, OP_SLT_8_IX, qc_slt_8_ix);
qc!(MASK_OUT_Y, OP_SLT_8_PD, qc_slt_8_pd);
qc!(MASK_OUT_Y, OP_SLT_8_PI, qc_slt_8_pi);
qc!(MASK_OUT_Y, OP_SMI_8_AI, qc_smi_8_ai);
qc!(MASK_EXACT, OP_SMI_8_AL, qc_smi_8_al);
qc!(MASK_EXACT, OP_SMI_8_AW, qc_smi_8_aw);
qc!(MASK_OUT_Y, OP_SMI_8_DN, qc_smi_8_dn);
qc!(MASK_OUT_Y, OP_SMI_8_DI, qc_smi_8_di);
qc!(MASK_OUT_Y, OP_SMI_8_IX, qc_smi_8_ix);
qc!(MASK_OUT_Y, OP_SMI_8_PD, qc_smi_8_pd);
qc!(MASK_OUT_Y, OP_SMI_8_PI, qc_smi_8_pi);
qc!(MASK_OUT_Y, OP_SNE_8_AI, qc_sne_8_ai);
qc!(MASK_EXACT, OP_SNE_8_AL, qc_sne_8_al);
qc!(MASK_EXACT, OP_SNE_8_AW, qc_sne_8_aw);
qc!(MASK_OUT_Y, OP_SNE_8_DN, qc_sne_8_dn);
qc!(MASK_OUT_Y, OP_SNE_8_DI, qc_sne_8_di);
qc!(MASK_OUT_Y, OP_SNE_8_IX, qc_sne_8_ix);
qc!(MASK_OUT_Y, OP_SNE_8_PD, qc_sne_8_pd);
qc!(MASK_OUT_Y, OP_SNE_8_PI, qc_sne_8_pi);
qc!(MASK_OUT_Y, OP_SPL_8_AI, qc_spl_8_ai);
qc!(MASK_EXACT, OP_SPL_8_AL, qc_spl_8_al);
qc!(MASK_EXACT, OP_SPL_8_AW, qc_spl_8_aw);
qc!(MASK_OUT_Y, OP_SPL_8_DN, qc_spl_8_dn);
qc!(MASK_OUT_Y, OP_SPL_8_DI, qc_spl_8_di);
qc!(MASK_OUT_Y, OP_SPL_8_IX, qc_spl_8_ix);
qc!(MASK_OUT_Y, OP_SPL_8_PD, qc_spl_8_pd);
qc!(MASK_OUT_Y, OP_SPL_8_PI, qc_spl_8_pi);
qc!(MASK_OUT_Y, OP_ST_8_AI, qc_st_8_ai);
qc!(MASK_EXACT, OP_ST_8_AL, qc_st_8_al);
qc!(MASK_EXACT, OP_ST_8_AW, qc_st_8_aw);
qc!(MASK_OUT_Y, OP_ST_8_DN, qc_st_8_dn);
qc!(MASK_OUT_Y, OP_ST_8_DI, qc_st_8_di);
qc!(MASK_OUT_Y, OP_ST_8_IX, qc_st_8_ix);
qc!(MASK_OUT_Y, OP_ST_8_PD, qc_st_8_pd);
qc!(MASK_OUT_Y, OP_ST_8_PI, qc_st_8_pi);
qc!(MASK_OUT_Y, OP_SVC_8_AI, qc_svc_8_ai);
qc!(MASK_EXACT, OP_SVC_8_AL, qc_svc_8_al);
qc!(MASK_EXACT, OP_SVC_8_AW, qc_svc_8_aw);
qc!(MASK_OUT_Y, OP_SVC_8_DN, qc_svc_8_dn);
qc!(MASK_OUT_Y, OP_SVC_8_DI, qc_svc_8_di);
qc!(MASK_OUT_Y, OP_SVC_8_IX, qc_svc_8_ix);
qc!(MASK_OUT_Y, OP_SVC_8_PD, qc_svc_8_pd);
qc!(MASK_OUT_Y, OP_SVC_8_PI, qc_svc_8_pi);
qc!(MASK_OUT_Y, OP_SVS_8_AI, qc_svs_8_ai);
qc!(MASK_EXACT, OP_SVS_8_AL, qc_svs_8_al);
qc!(MASK_EXACT, OP_SVS_8_AW, qc_svs_8_aw);
qc!(MASK_OUT_Y, OP_SVS_8_DN, qc_svs_8_dn);
qc!(MASK_OUT_Y, OP_SVS_8_DI, qc_svs_8_di);
qc!(MASK_OUT_Y, OP_SVS_8_IX, qc_svs_8_ix);
qc!(MASK_OUT_Y, OP_SVS_8_PD, qc_svs_8_pd);
qc!(MASK_OUT_Y, OP_SVS_8_PI, qc_svs_8_pi);
// Put qc for STOP here
qc8!(MASK_EXACT, OP_STOP, qc_stop);
// Put qc for SUB here
qc8!(MASK_OUT_X_Y, OP_SUB_8_ER_DN, qc_sub_8_er_dn);
qc8!(MASK_OUT_X_Y, OP_SUB_8_ER_PI, qc_sub_8_er_pi);
qc8!(MASK_OUT_X_Y, OP_SUB_8_ER_PD, qc_sub_8_er_pd);
qc8!(MASK_OUT_X_Y, OP_SUB_8_ER_AI, qc_sub_8_er_ai);
qc8!(MASK_OUT_X_Y, OP_SUB_8_ER_DI, qc_sub_8_er_di);
qc8!(MASK_OUT_X_Y, OP_SUB_8_ER_IX, qc_sub_8_er_ix);
qc8!(MASK_OUT_X, OP_SUB_8_ER_AW, qc_sub_8_er_aw);
qc8!(MASK_OUT_X, OP_SUB_8_ER_AL, qc_sub_8_er_al);
qc8!(MASK_OUT_X, OP_SUB_8_ER_PCDI, qc_sub_8_er_pcdi);
qc8!(MASK_OUT_X, OP_SUB_8_ER_PCIX, qc_sub_8_er_pcix);
qc8!(MASK_OUT_X, OP_SUB_8_ER_IMM, qc_sub_8_er_imm);
qc8!(MASK_OUT_X_Y, OP_SUB_8_RE_PI, qc_sub_8_re_pi);
qc8!(MASK_OUT_X_Y, OP_SUB_8_RE_PD, qc_sub_8_re_pd);
qc8!(MASK_OUT_X_Y, OP_SUB_8_RE_AI, qc_sub_8_re_ai);
qc8!(MASK_OUT_X_Y, OP_SUB_8_RE_DI, qc_sub_8_re_di);
qc8!(MASK_OUT_X_Y, OP_SUB_8_RE_IX, qc_sub_8_re_ix);
qc8!(MASK_OUT_X, OP_SUB_8_RE_AW, qc_sub_8_re_aw);
qc8!(MASK_OUT_X, OP_SUB_8_RE_AL, qc_sub_8_re_al);
qc!(MASK_OUT_X_Y, OP_SUB_16_ER_DN, qc_sub_16_er_dn);
qc!(MASK_OUT_X_Y, OP_SUB_16_ER_AN, qc_sub_16_er_an);
qc!(MASK_OUT_X_Y, OP_SUB_16_ER_PI, qc_sub_16_er_pi);
qc!(MASK_OUT_X_Y, OP_SUB_16_ER_PD, qc_sub_16_er_pd);
qc!(MASK_OUT_X_Y, OP_SUB_16_ER_AI, qc_sub_16_er_ai);
qc!(MASK_OUT_X_Y, OP_SUB_16_ER_DI, qc_sub_16_er_di);
qc!(MASK_OUT_X_Y, OP_SUB_16_ER_IX, qc_sub_16_er_ix);
qc!(MASK_OUT_X, OP_SUB_16_ER_AW, qc_sub_16_er_aw);
qc!(MASK_OUT_X, OP_SUB_16_ER_AL, qc_sub_16_er_al);
qc!(MASK_OUT_X, OP_SUB_16_ER_PCDI, qc_sub_16_er_pcdi);
qc!(MASK_OUT_X, OP_SUB_16_ER_PCIX, qc_sub_16_er_pcix);
qc!(MASK_OUT_X, OP_SUB_16_ER_IMM, qc_sub_16_er_imm);
qc!(MASK_OUT_X_Y, OP_SUB_16_RE_PI, qc_sub_16_re_pi);
qc!(MASK_OUT_X_Y, OP_SUB_16_RE_PD, qc_sub_16_re_pd);
qc!(MASK_OUT_X_Y, OP_SUB_16_RE_AI, qc_sub_16_re_ai);
qc!(MASK_OUT_X_Y, OP_SUB_16_RE_DI, qc_sub_16_re_di);
qc!(MASK_OUT_X_Y, OP_SUB_16_RE_IX, qc_sub_16_re_ix);
qc!(MASK_OUT_X, OP_SUB_16_RE_AW, qc_sub_16_re_aw);
qc!(MASK_OUT_X, OP_SUB_16_RE_AL, qc_sub_16_re_al);
qc!(MASK_OUT_X_Y, OP_SUB_32_ER_DN, qc_sub_32_er_dn);
qc!(MASK_OUT_X_Y, OP_SUB_32_ER_AN, qc_sub_32_er_an);
qc!(MASK_OUT_X_Y, OP_SUB_32_ER_PI, qc_sub_32_er_pi);
qc!(MASK_OUT_X_Y, OP_SUB_32_ER_PD, qc_sub_32_er_pd);
qc!(MASK_OUT_X_Y, OP_SUB_32_ER_AI, qc_sub_32_er_ai);
qc!(MASK_OUT_X_Y, OP_SUB_32_ER_DI, qc_sub_32_er_di);
qc!(MASK_OUT_X_Y, OP_SUB_32_ER_IX, qc_sub_32_er_ix);
qc!(MASK_OUT_X, OP_SUB_32_ER_AW, qc_sub_32_er_aw);
qc!(MASK_OUT_X, OP_SUB_32_ER_AL, qc_sub_32_er_al);
qc!(MASK_OUT_X, OP_SUB_32_ER_PCDI, qc_sub_32_er_pcdi);
qc!(MASK_OUT_X, OP_SUB_32_ER_PCIX, qc_sub_32_er_pcix);
qc!(MASK_OUT_X, OP_SUB_32_ER_IMM, qc_sub_32_er_imm);
qc!(MASK_OUT_X_Y, OP_SUB_32_RE_PI, qc_sub_32_re_pi);
qc!(MASK_OUT_X_Y, OP_SUB_32_RE_PD, qc_sub_32_re_pd);
qc!(MASK_OUT_X_Y, OP_SUB_32_RE_AI, qc_sub_32_re_ai);
qc!(MASK_OUT_X_Y, OP_SUB_32_RE_DI, qc_sub_32_re_di);
qc!(MASK_OUT_X_Y, OP_SUB_32_RE_IX, qc_sub_32_re_ix);
qc!(MASK_OUT_X, OP_SUB_32_RE_AW, qc_sub_32_re_aw);
qc!(MASK_OUT_X, OP_SUB_32_RE_AL, qc_sub_32_re_al);
qc!(MASK_OUT_X_Y, OP_SUBA_16_DN, qc_suba_16_dn);
qc!(MASK_OUT_X_Y, OP_SUBA_16_AN, qc_suba_16_an);
qc!(MASK_OUT_X_Y, OP_SUBA_16_PI, qc_suba_16_pi);
qc!(MASK_OUT_X_Y, OP_SUBA_16_PD, qc_suba_16_pd);
qc!(MASK_OUT_X_Y, OP_SUBA_16_AI, qc_suba_16_ai);
qc!(MASK_OUT_X_Y, OP_SUBA_16_DI, qc_suba_16_di);
qc!(MASK_OUT_X_Y, OP_SUBA_16_IX, qc_suba_16_ix);
qc!(MASK_OUT_X, OP_SUBA_16_AW, qc_suba_16_aw);
qc!(MASK_OUT_X, OP_SUBA_16_AL, qc_suba_16_al);
qc!(MASK_OUT_X, OP_SUBA_16_PCDI, qc_suba_16_pcdi);
qc!(MASK_OUT_X, OP_SUBA_16_PCIX, qc_suba_16_pcix);
qc!(MASK_OUT_X, OP_SUBA_16_IMM, qc_suba_16_imm);
qc!(MASK_OUT_X_Y, OP_SUBA_32_DN, qc_suba_32_dn);
qc!(MASK_OUT_X_Y, OP_SUBA_32_AN, qc_suba_32_an);
qc!(MASK_OUT_X_Y, OP_SUBA_32_PI, qc_suba_32_pi);
qc!(MASK_OUT_X_Y, OP_SUBA_32_PD, qc_suba_32_pd);
qc!(MASK_OUT_X_Y, OP_SUBA_32_AI, qc_suba_32_ai);
qc!(MASK_OUT_X_Y, OP_SUBA_32_DI, qc_suba_32_di);
qc!(MASK_OUT_X_Y, OP_SUBA_32_IX, qc_suba_32_ix);
qc!(MASK_OUT_X, OP_SUBA_32_AW, qc_suba_32_aw);
qc!(MASK_OUT_X, OP_SUBA_32_AL, qc_suba_32_al);
qc!(MASK_OUT_X, OP_SUBA_32_PCDI, qc_suba_32_pcdi);
qc!(MASK_OUT_X, OP_SUBA_32_PCIX, qc_suba_32_pcix);
qc!(MASK_OUT_X, OP_SUBA_32_IMM, qc_suba_32_imm);
qc8!(MASK_OUT_Y, OP_SUBI_8_DN, qc_subi_8_dn);
qc8!(MASK_OUT_Y, OP_SUBI_8_PI, qc_subi_8_pi);
qc8!(MASK_OUT_Y, OP_SUBI_8_PD, qc_subi_8_pd);
qc8!(MASK_OUT_Y, OP_SUBI_8_AI, qc_subi_8_ai);
qc8!(MASK_OUT_Y, OP_SUBI_8_DI, qc_subi_8_di);
qc8!(MASK_OUT_Y, OP_SUBI_8_IX, qc_subi_8_ix);
qc8!(MASK_EXACT, OP_SUBI_8_AW, qc_subi_8_aw);
qc8!(MASK_EXACT, OP_SUBI_8_AL, qc_subi_8_al);
qc!(MASK_OUT_Y, OP_SUBI_16_DN, qc_subi_16_dn);
qc!(MASK_OUT_Y, OP_SUBI_16_PI, qc_subi_16_pi);
qc!(MASK_OUT_Y, OP_SUBI_16_PD, qc_subi_16_pd);
qc!(MASK_OUT_Y, OP_SUBI_16_AI, qc_subi_16_ai);
qc!(MASK_OUT_Y, OP_SUBI_16_DI, qc_subi_16_di);
qc!(MASK_OUT_Y, OP_SUBI_16_IX, qc_subi_16_ix);
qc!(MASK_EXACT, OP_SUBI_16_AW, qc_subi_16_aw);
qc!(MASK_EXACT, OP_SUBI_16_AL, qc_subi_16_al);
qc!(MASK_OUT_Y, OP_SUBI_32_DN, qc_subi_32_dn);
qc!(MASK_OUT_Y, OP_SUBI_32_PI, qc_subi_32_pi);
qc!(MASK_OUT_Y, OP_SUBI_32_PD, qc_subi_32_pd);
qc!(MASK_OUT_Y, OP_SUBI_32_AI, qc_subi_32_ai);
qc!(MASK_OUT_Y, OP_SUBI_32_DI, qc_subi_32_di);
qc!(MASK_OUT_Y, OP_SUBI_32_IX, qc_subi_32_ix);
qc!(MASK_EXACT, OP_SUBI_32_AW, qc_subi_32_aw);
qc!(MASK_EXACT, OP_SUBI_32_AL, qc_subi_32_al);
qc8!(MASK_OUT_X_Y, OP_SUBQ_8_DN, qc_subq_8_dn);
qc8!(MASK_OUT_X_Y, OP_SUBQ_8_PI, qc_subq_8_pi);
qc8!(MASK_OUT_X_Y, OP_SUBQ_8_PD, qc_subq_8_pd);
qc8!(MASK_OUT_X_Y, OP_SUBQ_8_AI, qc_subq_8_ai);
qc8!(MASK_OUT_X_Y, OP_SUBQ_8_DI, qc_subq_8_di);
qc8!(MASK_OUT_X_Y, OP_SUBQ_8_IX, qc_subq_8_ix);
qc8!(MASK_OUT_X, OP_SUBQ_8_AW, qc_subq_8_aw);
qc8!(MASK_OUT_X, OP_SUBQ_8_AL, qc_subq_8_al);
qc!(MASK_OUT_X_Y, OP_SUBQ_16_DN, qc_subq_16_dn);
qc!(MASK_OUT_X_Y, OP_SUBQ_16_AN, qc_subq_16_an);
qc!(MASK_OUT_X_Y, OP_SUBQ_16_PI, qc_subq_16_pi);
qc!(MASK_OUT_X_Y, OP_SUBQ_16_PD, qc_subq_16_pd);
qc!(MASK_OUT_X_Y, OP_SUBQ_16_AI, qc_subq_16_ai);
qc!(MASK_OUT_X_Y, OP_SUBQ_16_DI, qc_subq_16_di);
qc!(MASK_OUT_X_Y, OP_SUBQ_16_IX, qc_subq_16_ix);
qc!(MASK_OUT_X, OP_SUBQ_16_AW, qc_subq_16_aw);
qc!(MASK_OUT_X, OP_SUBQ_16_AL, qc_subq_16_al);
qc!(MASK_OUT_X_Y, OP_SUBQ_32_DN, qc_subq_32_dn);
qc!(MASK_OUT_X_Y, OP_SUBQ_32_AN, qc_subq_32_an);
qc!(MASK_OUT_X_Y, OP_SUBQ_32_PI, qc_subq_32_pi);
qc!(MASK_OUT_X_Y, OP_SUBQ_32_PD, qc_subq_32_pd);
qc!(MASK_OUT_X_Y, OP_SUBQ_32_AI, qc_subq_32_ai);
qc!(MASK_OUT_X_Y, OP_SUBQ_32_DI, qc_subq_32_di);
qc!(MASK_OUT_X_Y, OP_SUBQ_32_IX, qc_subq_32_ix);
qc!(MASK_OUT_X, OP_SUBQ_32_AW, qc_subq_32_aw);
qc!(MASK_OUT_X, OP_SUBQ_32_AL, qc_subq_32_al);
qc8!(MASK_OUT_X_Y, OP_SUBX_8_RR, qc_subx_8_rr);
qc8!(MASK_OUT_X_Y, OP_SUBX_8_MM, qc_subx_8_mm);
qc!(MASK_OUT_X_Y, OP_SUBX_16_RR, qc_subx_16_rr);
qc!(MASK_OUT_X_Y, OP_SUBX_16_MM, qc_subx_16_mm);
qc!(MASK_OUT_X_Y, OP_SUBX_32_RR, qc_subx_32_rr);
qc!(MASK_OUT_X_Y, OP_SUBX_32_MM, qc_subx_32_mm);
// Put qc for SWAP here
qc!(MASK_OUT_Y, OP_SWAP_32_DN, qc_swap_32_dn);
// Put qc for TAS here
qc8!(MASK_OUT_Y, OP_TAS_8_DN, qc_tas_8_dn);
qc8!(MASK_OUT_Y, OP_TAS_8_AI, qc_tas_8_ai);
qc8!(MASK_OUT_Y, OP_TAS_8_PI, qc_tas_8_pi);
qc8!(MASK_OUT_Y, OP_TAS_8_PD, qc_tas_8_pd);
qc8!(MASK_OUT_Y, OP_TAS_8_DI, qc_tas_8_di);
qc8!(MASK_OUT_Y, OP_TAS_8_IX, qc_tas_8_ix);
qc8!(MASK_EXACT, OP_TAS_8_AW, qc_tas_8_aw);
qc8!(MASK_EXACT, OP_TAS_8_AL, qc_tas_8_al);
// Put qc for TRAP here
qc_allow_exception!(MASK_LONIB, OP_TRAP, qc_trap);
// Put qc for TRAPV here
qc_allow_exception!(MASK_EXACT, OP_TRAPV, qc_trapv);
// Put qc for TST here
qc!(MASK_OUT_Y, OP_TST_8_DN, qc_tst_8_dn);
qc!(MASK_OUT_Y, OP_TST_8_AI, qc_tst_8_ai);
qc!(MASK_OUT_Y, OP_TST_8_PI, qc_tst_8_pi);
qc!(MASK_OUT_Y, OP_TST_8_PD, qc_tst_8_pd);
qc!(MASK_OUT_Y, OP_TST_8_DI, qc_tst_8_di);
qc!(MASK_OUT_Y, OP_TST_8_IX, qc_tst_8_ix);
qc!(MASK_EXACT, OP_TST_8_AW, qc_tst_8_aw);
qc!(MASK_EXACT, OP_TST_8_AL, qc_tst_8_al);
qc!(MASK_OUT_Y, OP_TST_16_DN, qc_tst_16_dn);
qc!(MASK_OUT_Y, OP_TST_16_AI, qc_tst_16_ai);
qc!(MASK_OUT_Y, OP_TST_16_PI, qc_tst_16_pi);
qc!(MASK_OUT_Y, OP_TST_16_PD, qc_tst_16_pd);
qc!(MASK_OUT_Y, OP_TST_16_DI, qc_tst_16_di);
qc!(MASK_OUT_Y, OP_TST_16_IX, qc_tst_16_ix);
qc!(MASK_EXACT, OP_TST_16_AW, qc_tst_16_aw);
qc!(MASK_EXACT, OP_TST_16_AL, qc_tst_16_al);
qc!(MASK_OUT_Y, OP_TST_32_DN, qc_tst_32_dn);
qc!(MASK_OUT_Y, OP_TST_32_AI, qc_tst_32_ai);
qc!(MASK_OUT_Y, OP_TST_32_PI, qc_tst_32_pi);
qc!(MASK_OUT_Y, OP_TST_32_PD, qc_tst_32_pd);
qc!(MASK_OUT_Y, OP_TST_32_DI, qc_tst_32_di);
qc!(MASK_OUT_Y, OP_TST_32_IX, qc_tst_32_ix);
qc!(MASK_EXACT, OP_TST_32_AW, qc_tst_32_aw);
qc!(MASK_EXACT, OP_TST_32_AL, qc_tst_32_al);
// Put qc for UNLK here
qc!(MASK_OUT_Y, OP_UNLK_32, qc_unlk_32);
// OP completeness test, run once through every opcode
const BLOCK_MASK : u32 = 0b1111_1100_0000_0000;
const BLOCK_SIZE : u32 = 0b0000_0100_0000_0000;
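    // Note: BLOCK_SIZE (0b0000_0100_0000_0000 = 0x0400) is 1024 opcodes per
    // block and BLOCK_MASK (0xFC00) keeps the top six bits of the opcode
    // word, so the 64 blocks below partition the full 65536-entry opcode
    // space for the completeness sweep.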
const BLOCK_0K : u32 = 0 * BLOCK_SIZE;
const BLOCK_1K : u32 = 1 * BLOCK_SIZE;
const BLOCK_2K : u32 = 2 * BLOCK_SIZE;
const BLOCK_3K : u32 = 3 * BLOCK_SIZE;
const BLOCK_4K : u32 = 4 * BLOCK_SIZE;
const BLOCK_5K : u32 = 5 * BLOCK_SIZE;
const BLOCK_6K : u32 = 6 * BLOCK_SIZE;
const BLOCK_7K : u32 = 7 * BLOCK_SIZE;
const BLOCK_8K : u32 = 8 * BLOCK_SIZE;
const BLOCK_9K : u32 = 9 * BLOCK_SIZE;
const BLOCK_10K : u32 = 10 * BLOCK_SIZE;
const BLOCK_11K : u32 = 11 * BLOCK_SIZE;
const BLOCK_12K : u32 = 12 * BLOCK_SIZE;
const BLOCK_13K : u32 = 13 * BLOCK_SIZE;
const BLOCK_14K : u32 = 14 * BLOCK_SIZE;
const BLOCK_15K : u32 = 15 * BLOCK_SIZE;
const BLOCK_16K : u32 = 16 * BLOCK_SIZE;
const BLOCK_17K : u32 = 17 * BLOCK_SIZE;
const BLOCK_18K : u32 = 18 * BLOCK_SIZE;
const BLOCK_19K : u32 = 19 * BLOCK_SIZE;
const BLOCK_20K : u32 = 20 * BLOCK_SIZE;
const BLOCK_21K : u32 = 21 * BLOCK_SIZE;
const BLOCK_22K : u32 = 22 * BLOCK_SIZE;
const BLOCK_23K : u32 = 23 * BLOCK_SIZE;
const BLOCK_24K : u32 = 24 * BLOCK_SIZE;
const BLOCK_25K : u32 = 25 * BLOCK_SIZE;
const BLOCK_26K : u32 = 26 * BLOCK_SIZE;
const BLOCK_27K : u32 = 27 * BLOCK_SIZE;
const BLOCK_28K : u32 = 28 * BLOCK_SIZE;
const BLOCK_29K : u32 = 29 * BLOCK_SIZE;
const BLOCK_30K : u32 = 30 * BLOCK_SIZE;
const BLOCK_31K : u32 = 31 * BLOCK_SIZE;
const BLOCK_32K : u32 = 32 * BLOCK_SIZE;
const BLOCK_33K : u32 = 33 * BLOCK_SIZE;
const BLOCK_34K : u32 = 34 * BLOCK_SIZE;
const BLOCK_35K : u32 = 35 * BLOCK_SIZE;
const BLOCK_36K : u32 = 36 * BLOCK_SIZE;
const BLOCK_37K : u32 = 37 * BLOCK_SIZE;
const BLOCK_38K : u32 = 38 * BLOCK_SIZE;
const BLOCK_39K : u32 = 39 * BLOCK_SIZE;
const BLOCK_40K : u32 = 40 * BLOCK_SIZE;
const BLOCK_41K : u32 = 41 * BLOCK_SIZE;
const BLOCK_42K : u32 = 42 * BLOCK_SIZE;
const BLOCK_43K : u32 = 43 * BLOCK_SIZE;
const BLOCK_44K : u32 = 44 * BLOCK_SIZE;
const BLOCK_45K : u32 = 45 * BLOCK_SIZE;
const BLOCK_46K : u32 = 46 * BLOCK_SIZE;
const BLOCK_47K : u32 = 47 * BLOCK_SIZE;
const BLOCK_48K : u32 = 48 * BLOCK_SIZE;
const BLOCK_49K : u32 = 49 * BLOCK_SIZE;
const BLOCK_50K : u32 = 50 * BLOCK_SIZE;
const BLOCK_51K : u32 = 51 * BLOCK_SIZE;
const BLOCK_52K : u32 = 52 * BLOCK_SIZE;
const BLOCK_53K : u32 = 53 * BLOCK_SIZE;
const BLOCK_54K : u32 = 54 * BLOCK_SIZE;
const BLOCK_55K : u32 = 55 * BLOCK_SIZE;
const BLOCK_56K : u32 = 56 * BLOCK_SIZE;
const BLOCK_57K : u32 = 57 * BLOCK_SIZE;
const BLOCK_58K : u32 = 58 * BLOCK_SIZE;
const BLOCK_59K : u32 = 59 * BLOCK_SIZE;
const BLOCK_60K : u32 = 60 * BLOCK_SIZE;
const BLOCK_61K : u32 = 61 * BLOCK_SIZE;
const BLOCK_62K : u32 = 62 * BLOCK_SIZE;
const BLOCK_63K : u32 = 63 * BLOCK_SIZE;
qc_allow_exception!(BLOCK_MASK, BLOCK_0K, qc_block0k);
qc_allow_exception!(BLOCK_MASK, BLOCK_1K, qc_block1k);
qc_allow_exception!(BLOCK_MASK, BLOCK_2K, qc_block2k);
qc_allow_exception!(BLOCK_MASK, BLOCK_3K, qc_block3k);
qc_allow_exception!(BLOCK_MASK, BLOCK_4K, qc_block4k);
qc_allow_exception!(BLOCK_MASK, BLOCK_5K, qc_block5k);
qc_allow_exception!(BLOCK_MASK, BLOCK_6K, qc_block6k);
qc_allow_exception!(BLOCK_MASK, BLOCK_7K, qc_block7k);
qc_allow_exception!(BLOCK_MASK, BLOCK_8K, qc_block8k);
qc_allow_exception!(BLOCK_MASK, BLOCK_9K, qc_block9k);
qc_allow_exception!(BLOCK_MASK, BLOCK_10K, qc_block10k);
qc_allow_exception!(BLOCK_MASK, BLOCK_11K, qc_block11k);
qc_allow_exception!(BLOCK_MASK, BLOCK_12K, qc_block12k);
qc_allow_exception!(BLOCK_MASK, BLOCK_13K, qc_block13k);
qc_allow_exception!(BLOCK_MASK, BLOCK_14K, qc_block14k);
qc_allow_exception!(BLOCK_MASK, BLOCK_15K, qc_block15k);
qc_allow_exception!(BLOCK_MASK, BLOCK_16K, qc_block16k);
qc_allow_exception!(BLOCK_MASK, BLOCK_17K, qc_block17k);
qc_allow_exception!(BLOCK_MASK, BLOCK_18K, qc_block18k);
qc_allow_exception!(BLOCK_MASK, BLOCK_19K, qc_block19k);
qc_allow_exception!(BLOCK_MASK, BLOCK_20K, qc_block20k);
qc_allow_exception!(BLOCK_MASK, BLOCK_21K, qc_block21k);
qc_allow_exception!(BLOCK_MASK, BLOCK_22K, qc_block22k);
qc_allow_exception!(BLOCK_MASK, BLOCK_23K, qc_block23k);
qc_allow_exception!(BLOCK_MASK, BLOCK_24K, qc_block24k);
qc_allow_exception!(BLOCK_MASK, BLOCK_25K, qc_block25k);
qc_allow_exception!(BLOCK_MASK, BLOCK_26K, qc_block26k);
qc_allow_exception!(BLOCK_MASK, BLOCK_27K, qc_block27k);
qc_allow_exception!(BLOCK_MASK, BLOCK_28K, qc_block28k);
qc_allow_exception!(BLOCK_MASK, BLOCK_29K, qc_block29k);
qc_allow_exception!(BLOCK_MASK, BLOCK_30K, qc_block30k);
qc_allow_exception!(BLOCK_MASK, BLOCK_31K, qc_block31k);
qc_allow_exception!(BLOCK_MASK, BLOCK_32K, qc_block32k);
qc_allow_exception!(BLOCK_MASK, BLOCK_33K, qc_block33k);
qc_allow_exception!(BLOCK_MASK, BLOCK_34K, qc_block34k);
qc_allow_exception!(BLOCK_MASK, BLOCK_35K, qc_block35k);
qc_allow_exception!(BLOCK_MASK, BLOCK_36K, qc_block36k);
qc_allow_exception!(BLOCK_MASK, BLOCK_37K, qc_block37k);
qc_allow_exception!(BLOCK_MASK, BLOCK_38K, qc_block38k);
qc_allow_exception!(BLOCK_MASK, BLOCK_39K, qc_block39k);
qc_allow_exception!(BLOCK_MASK, BLOCK_40K, qc_block40k);
qc_allow_exception!(BLOCK_MASK, BLOCK_41K, qc_block41k);
qc_allow_exception!(BLOCK_MASK, BLOCK_42K, qc_block42k);
qc_allow_exception!(BLOCK_MASK, BLOCK_43K, qc_block43k);
qc_allow_exception!(BLOCK_MASK, BLOCK_44K, qc_block44k);
qc_allow_exception!(BLOCK_MASK, BLOCK_45K, qc_block45k);
qc_allow_exception!(BLOCK_MASK, BLOCK_46K, qc_block46k);
qc_allow_exception!(BLOCK_MASK, BLOCK_47K, qc_block47k);
qc_allow_exception!(BLOCK_MASK, BLOCK_48K, qc_block48k);
qc_allow_exception!(BLOCK_MASK, BLOCK_49K, qc_block49k);
qc_allow_exception!(BLOCK_MASK, BLOCK_50K, qc_block50k);
qc_allow_exception!(BLOCK_MASK, BLOCK_51K, qc_block51k);
qc_allow_exception!(BLOCK_MASK, BLOCK_52K, qc_block52k);
qc_allow_exception!(BLOCK_MASK, BLOCK_53K, qc_block53k);
qc_allow_exception!(BLOCK_MASK, BLOCK_54K, qc_block54k);
qc_allow_exception!(BLOCK_MASK, BLOCK_55K, qc_block55k);
qc_allow_exception!(BLOCK_MASK, BLOCK_56K, qc_block56k);
qc_allow_exception!(BLOCK_MASK, BLOCK_57K, qc_block57k);
qc_allow_exception!(BLOCK_MASK, BLOCK_58K, qc_block58k);
qc_allow_exception!(BLOCK_MASK, BLOCK_59K, qc_block59k);
qc_allow_exception!(BLOCK_MASK, BLOCK_60K, qc_block60k);
qc_allow_exception!(BLOCK_MASK, BLOCK_61K, qc_block61k);
qc_allow_exception!(BLOCK_MASK, BLOCK_62K, qc_block62k);
qc_allow_exception!(BLOCK_MASK, BLOCK_63K, qc_block63k);
macro_rules! core_eq {
($left:ident , $right:ident . $field:ident [ $index:expr ]) => ({
match (&($left.$field[$index]), &($right.$field[$index])) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
println!("core incoherence: `{}[{}]` differs \
({}: `0x{:x}`, {}: `0x{:x}`)", stringify!($field), $index, stringify!($left), left_val, stringify!($right), right_val);
return false;
}
}
}
});
($left:ident , $right:ident . $field:ident () ?) => ({
match (&($left.$field()), &($right.$field())) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
println!("core incoherence: `{}()` differs \
({}: `{:?}`, {}: `{:?}`)", stringify!($field), stringify!($left), left_val, stringify!($right), right_val);
return false;
}
}
}
});
($left:ident , $right:ident . $field:ident ()) => ({
match (&($left.$field()), &($right.$field())) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
println!("core incoherence: `{}()` differs \
({}: `0x{:x}`, {}: `0x{:x}`)", stringify!($field), stringify!($left), left_val, stringify!($right), right_val);
return false;
}
}
}
});
($left:ident , $right:ident . $field:ident) => ({
match (&($left.$field), &($right.$field)) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
println!("core incoherence: `{}` differs \
({}: `0x{:x}`, {}: `0x{:x}`)", stringify!($field), stringify!($left), left_val, stringify!($right), right_val);
return false;
}
}
}
})
}
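    // core_eq! compares one piece of core state between the two emulators: an
    // indexed field, a method result (formatted with {:?} for the `?` arm or
    // as hex otherwise), or a plain field. On a mismatch it prints both
    // values and makes the enclosing function return false.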
fn assert_all_memory_accesses_equal(r68k: &Core) {
assert_equal(get_ops(), r68k.mem.logger.ops());
}
fn memory_accesses_equal_unless_exception(r68k: &Core) -> Option<u8> {
let is_reading_vector = |&op| match op {
Operation::ReadLong(SUPERVISOR_DATA, addr, _) =>
addr % 4 == 0 && addr >= 0x08 && addr < 0x30,
_ =>
false
};
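    // Long reads of supervisor data in 0x08..0x30 are fetches from the
    // exception vector table, vectors 2 through 11 (bus error, address
    // error, illegal instruction, zero divide, CHK, TRAPV, privilege
    // violation, trace, line-A, line-F); the vector number is addr / 4.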
// Check that memory accesses match up.
    // If an exception occurred, do not compare beyond the read of the vector
    // that was taken, because Musashi, during address errors, in some cases
    // also executed some instructions from the handler (now fixed)
if let Some(vector_read_index) = r68k.mem.logger.ops().iter().position(is_reading_vector) {
assert_equal(get_ops().iter().take(vector_read_index+1), r68k.mem.logger.ops().iter().take(vector_read_index+1));
// If we got this far, the memory accesses up to, and
// including the vector read match up, but we cannot
// compare further
let vector = match r68k.mem.logger.ops()[vector_read_index] {
Operation::ReadLong(SUPERVISOR_DATA, addr, _) => addr / 4,
x => panic!("Unexpectedly got {:?}", x)
};
Some(vector as u8)
} else {
assert_all_memory_accesses_equal(r68k);
None
}
}
fn cores_equal(musashi: &Core, r68k: &Core) -> bool {
core_eq!(musashi, r68k.pc);
core_eq!(musashi, r68k.flags() ?);
core_eq!(musashi, r68k.status_register());
core_eq!(musashi, r68k.ssp());
core_eq!(musashi, r68k.usp());
for i in (0..16).rev() {
core_eq!(musashi, r68k.dar[i]);
}
true
}
fn assert_cores_equal(musashi: &Core, r68k: &Core) {
assert_all_memory_accesses_equal(r68k);
assert!(cores_equal(musashi, r68k));
}
#[test]
fn roundtrip_d0() {
assert_eq!(256, roundtrip_register(Register::D0, 256));
}
#[test]
fn roundtrip_abcd_rr() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
let pc = 0x40;
// 0xc101: ABCD D0, D1
let mut cpu = Core::new_mem(pc, &[0xc1, 0x01, 0x00, 0x00]);
cpu.dar[0] = 0x17;
cpu.dar[1] = 0x27;
cpu.dar[5] = 0x55555;
reset_and_execute1(&mut cpu, 0xaaaaaaaa);
    // BCD addition: 0x17 + 0x27 = 0x44
assert_eq!(0x44, cpu.dar[0]);
assert_eq!(0x27, cpu.dar[1]);
assert_eq!(0x55555, cpu.dar[5]);
let ops = get_ops();
assert_eq!(1, ops.len());
assert_eq!(Operation::ReadLong(SUPERVISOR_PROGRAM, pc, 0xc1010000), ops[0]);
}
#[test]
fn compare_abcd_rr() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
let pc = 0x40;
// 0xc300: ABCD D1, D0
let mut musashi = Core::new_mem(pc, &[0xc3, 0x00]);
musashi.dar[0] = 0x16;
musashi.dar[1] = 0x26;
let mut r68k = musashi.clone(); // so very self-aware!
reset_and_execute1(&mut musashi, 0xaaaaaaaa);
r68k.execute1();
assert_eq!(0x42, r68k.dar[1]);
assert_cores_equal(&musashi, &r68k);
}
#[test]
fn run_abcd_rr_twice() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
let pc = 0x40;
// 0xc300: ABCD D1, D0
// 0xc302: ABCD D1, D2
let mut musashi = Core::new_mem(pc, &[0xc3, 0x00, 0xc3, 0x02]);
musashi.dar[0] = 0x16;
musashi.dar[1] = 0x26;
musashi.dar[2] = 0x31;
let mut r68k = musashi.clone(); // so very self-aware!
initialize_musashi(&mut musashi, 0xaaaaaaaa);
// execute ABCD D1, D0
execute1(&mut musashi);
r68k.execute1();
assert_eq!(0x42, musashi.dar[1]);
assert_eq!(0x42, r68k.dar[1]);
// then execute a second instruction (ABCD D1, D2) on the core
execute1(&mut musashi);
r68k.execute1();
assert_eq!(0x73, musashi.dar[1]);
assert_eq!(0x73, r68k.dar[1]);
assert_cores_equal(&musashi, &r68k);
}
#[test]
fn compare_address_error_actions() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
// using an odd absolute address should force an address error
    // opcode words d278,0107 encode ADD.W $0107, D1
let mut musashi = Core::new_mem(0x40, &[0xd2, 0x78, 0x01, 0x07]);
let vec3handler = 0x1F0000;
musashi.mem.write_long(SUPERVISOR_PROGRAM, 3*4, vec3handler);
musashi.mem.write_word(SUPERVISOR_PROGRAM, vec3handler, OP_NOP);
musashi.dar[15] = 0x100;
let mut r68k = musashi.clone(); // so very self-aware!
initialize_musashi(&mut musashi, 0xaaaaaaaa);
execute1(&mut musashi);
r68k.execute1();
assert_cores_equal(&musashi, &r68k);
}
#[test]
fn compare_illegal_instruction_actions() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
// d208 is ADD.B A0,D0, which is illegal
    let mut musashi = Core::new_mem(0x4000, &[0xd2, 0x08]);
let vec4handler = 0x2F0000;
musashi.mem.write_long(SUPERVISOR_PROGRAM, 4*4, vec4handler);
musashi.mem.write_long(SUPERVISOR_PROGRAM, vec4handler, 0xd2780108);
musashi.dar[15] = 0x100;
let mut r68k = musashi.clone(); // so very self-aware!
initialize_musashi(&mut musashi, 0xaaaaaaaa);
execute1(&mut musashi);
//execute1(&mut musashi);
r68k.execute1();
//r68k.execute1();
assert_cores_equal(&musashi, &r68k);
}
use std::ptr;
use super::m68k_get_reg;
#[test]
fn stackpointers_are_correct_when_starting_in_supervisor_mode() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
let pc = 0x40;
// 0xc300: ABCD D1, D0
// 0xc302: ABCD D1, D2
let mut musashi = Core::new_mem(pc, &[0xc3, 0x00, 0xc3, 0x02]);
musashi.sr_to_flags((1<<13));
musashi.inactive_usp = 0x200; // User SP
musashi.dar[15] = 0x100; // Supa SP
initialize_musashi(&mut musashi, 0xaaaaaaaa);
unsafe {
assert!((1<<13) & m68k_get_reg(ptr::null_mut(), Register::SR) > 0);
assert_eq!(0x100, m68k_get_reg(ptr::null_mut(), Register::ISP));
assert_eq!(0x200, m68k_get_reg(ptr::null_mut(), Register::USP));
}
}
#[test]
fn stackpointers_are_correct_when_starting_in_user_mode() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
let pc = 0x40;
// 0xc300: ABCD D1, D0
// 0xc302: ABCD D1, D2
let mut musashi = Core::new_mem(pc, &[0xc3, 0x00, 0xc3, 0x02]);
musashi.sr_to_flags(0);
musashi.dar[15] = 0x200; // User SP
musashi.inactive_ssp = 0x100; // Supa SP
initialize_musashi(&mut musashi, 0xaaaaaaaa);
unsafe {
assert!((1<<13) & m68k_get_reg(ptr::null_mut(), Register::SR) == 0);
assert_eq!(0x100, m68k_get_reg(ptr::null_mut(), Register::ISP));
assert_eq!(0x200, m68k_get_reg(ptr::null_mut(), Register::USP));
}
}
use ram::{SUPERVISOR_DATA, USER_PROGRAM, USER_DATA, ADDRBUS_MASK};
#[test]
fn read_initialized_memory() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
for v in 0..256 {
assert_eq!(0x01, m68k_read_memory_8(4*v+0));
assert_eq!(0x02, m68k_read_memory_8(4*v+1));
assert_eq!(0x03, m68k_read_memory_8(4*v+2));
assert_eq!(0x04, m68k_read_memory_8(4*v+3));
}
for v in 0..256 {
assert_eq!(0x0102, m68k_read_memory_16(4*v+0));
assert_eq!(0x0203, m68k_read_memory_16(4*v+1));
assert_eq!(0x0304, m68k_read_memory_16(4*v+2));
if 4*v+3 < 1023 {
assert_eq!(0x0401, m68k_read_memory_16(4*v+3));
}
}
for v in 0..255 {
assert_eq!(0x01020304, m68k_read_memory_32(4*v+0));
assert_eq!(0x02030401, m68k_read_memory_32(4*v+1));
assert_eq!(0x03040102, m68k_read_memory_32(4*v+2));
assert_eq!(0x04010203, m68k_read_memory_32(4*v+3));
}
assert_eq!(0x01020304, m68k_read_memory_32(4*255));
}
#[test]
fn read_your_u32_writes() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
let pattern = 0xAAAA7777;
let address = 128;
assert!(pattern != m68k_read_memory_32(address));
m68k_write_memory_32(address, pattern);
assert_eq!(pattern, m68k_read_memory_32(address));
}
#[test]
fn read_your_u16_writes() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
let pattern = 0xAAAA7777;
let address = 128;
assert!(pattern != m68k_read_memory_16(address));
m68k_write_memory_16(address, pattern);
assert_eq!(pattern & 0xFFFF, m68k_read_memory_16(address));
}
#[test]
fn read_your_u8_writes() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
let pattern = 0xAAAA7777;
let address = 128;
assert!(pattern != m68k_read_memory_8(address));
m68k_write_memory_8(address, pattern);
assert_eq!(pattern & 0xFF, m68k_read_memory_8(address));
}
#[test]
fn shared_address_space() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
let pattern = 0xAAAA7777;
let address = 128;
m68k_set_fc(USER_DATA.fc());
assert!(pattern != m68k_read_memory_32(address));
m68k_set_fc(USER_PROGRAM.fc());
assert!(pattern != m68k_read_memory_32(address));
m68k_set_fc(SUPERVISOR_DATA.fc());
assert!(pattern != m68k_read_memory_32(address));
m68k_set_fc(SUPERVISOR_PROGRAM.fc());
assert!(pattern != m68k_read_memory_32(address));
m68k_set_fc(USER_DATA.fc());
m68k_write_memory_32(address, pattern);
assert_eq!(pattern, m68k_read_memory_32(address));
m68k_set_fc(USER_PROGRAM.fc());
assert_eq!(pattern, m68k_read_memory_32(address));
m68k_set_fc(SUPERVISOR_DATA.fc());
assert_eq!(pattern, m68k_read_memory_32(address));
m68k_set_fc(SUPERVISOR_PROGRAM.fc());
assert_eq!(pattern, m68k_read_memory_32(address));
}
#[test]
fn do_read_byte_is_logged() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
let address = 0x80;
m68k_set_fc(SUPERVISOR_DATA.fc());
m68k_read_memory_8(address);
let ops = get_ops();
assert!(ops.len() > 0);
assert_eq!(Operation::ReadByte(SUPERVISOR_DATA, address & ADDRBUS_MASK, 0x01), ops[0]);
}
#[test]
fn do_read_word_is_logged() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
let address = 0x80;
m68k_set_fc(SUPERVISOR_PROGRAM.fc());
m68k_read_memory_16(address);
let ops = get_ops();
assert!(ops.len() > 0);
assert_eq!(Operation::ReadWord(SUPERVISOR_PROGRAM, address & ADDRBUS_MASK, 0x0102), ops[0]);
}
#[test]
fn do_read_long_is_logged() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
let address = 0x80;
m68k_set_fc(USER_DATA.fc());
m68k_read_memory_32(address);
let ops = get_ops();
assert!(ops.len() > 0);
assert_eq!(Operation::ReadLong(USER_DATA, address & ADDRBUS_MASK, 0x01020304), ops[0]);
}
#[test]
fn do_write_byte_is_logged() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
let address = 0x80;
let pattern = 0xAAAA7777;
m68k_set_fc(USER_PROGRAM.fc());
m68k_write_memory_8(address, pattern);
let ops = get_ops();
assert!(ops.len() > 0);
assert_eq!(Operation::WriteByte(USER_PROGRAM, address & ADDRBUS_MASK, pattern), ops[0]);
}
#[test]
fn do_write_word_is_logged() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
let address = 0x80;
let pattern = 0xAAAA7777;
m68k_set_fc(SUPERVISOR_PROGRAM.fc());
m68k_write_memory_16(address, pattern);
let ops = get_ops();
assert!(ops.len() > 0);
assert_eq!(Operation::WriteWord(SUPERVISOR_PROGRAM, address & ADDRBUS_MASK, pattern), ops[0]);
}
#[test]
fn do_write_long_is_logged() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
let address = 0x80;
let pattern = 0xAAAA7777;
m68k_set_fc(USER_DATA.fc());
m68k_write_memory_32(address, pattern);
let ops = get_ops();
assert!(ops.len() > 0);
assert_eq!(Operation::WriteLong(USER_DATA, address & ADDRBUS_MASK, pattern), ops[0]);
}
#[test]
fn page_allocation_on_write_unless_matching_initializer()
{
let _mutex = MUSASHI_LOCK.lock().unwrap();
let data = 0x01020304;
initialize_musashi_memory(data);
for offset in 0..256 {
m68k_write_memory_32(4*offset, data);
}
m68k_write_memory_8(0, 0x1);
m68k_write_memory_8(1, 0x2);
m68k_write_memory_8(2, 0x3);
m68k_write_memory_8(3, 0x4);
m68k_write_memory_16(3, 0x0401);
// no pages allocated
assert_eq!(0, musashi_written_bytes());
// but as soon as we write something different
m68k_write_memory_8(2, 0x2);
// a page is allocated
assert_eq!(1, musashi_written_bytes());
// we don't need to allocate a second page if we overwrite existing data
m68k_write_memory_8(2, 0x99);
assert_eq!(1, musashi_written_bytes());
let ops = get_ops();
assert_eq!(263, ops.len());
}
#[test]
fn cross_boundary_byte_access() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
m68k_write_memory_8(ADDRBUS_MASK, 0x91);
assert_eq!(0x91, m68k_read_memory_8(ADDRBUS_MASK));
m68k_write_memory_8(ADDRBUS_MASK+1, 0x92);
assert_eq!(0x92, m68k_read_memory_8(0));
}
#[test]
fn cross_boundary_word_access() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
m68k_write_memory_16(ADDRBUS_MASK+1, 0x9192);
assert_eq!(0x9192, m68k_read_memory_16(0));
}
#[test]
fn cross_boundary_long_access() {
let _mutex = MUSASHI_LOCK.lock().unwrap();
initialize_musashi_memory(0x01020304);
m68k_write_memory_32(ADDRBUS_MASK-1, 0x91929394);
assert_eq!(0x91929394, m68k_read_memory_32(ADDRBUS_MASK-1));
}
}<|fim▁end|>
| |
<|file_name|>test_sklearn_wrapper.py<|end_file_name|><|fim▁begin|>import numpy as np
import pandas as pd
import pytest
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.impute import SimpleImputer
from sklearn.model_selection import GridSearchCV, cross_val_score
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import FunctionTransformer, StandardScaler
from featuretools.demo.mock_customer import load_mock_customer
from featuretools.wrappers import DFSTransformer
def select_numeric(df):
return df.select_dtypes(exclude=['object'])
@pytest.fixture(scope='module')<|fim▁hole|> n_sessions=75,
n_transactions=1000,
random_seed=0,
return_entityset=True)
return es
@pytest.fixture(scope='module')
def df(es):
df = es['customers'].df
df['target'] = np.random.randint(1, 3, df.shape[0]) # 1 or 2 values
return df
@pytest.fixture(scope='module')
def pipeline(es):
pipeline = Pipeline(steps=[
('ft', DFSTransformer(entityset=es,
target_entity="customers",
max_features=20)),
("numeric", FunctionTransformer(select_numeric, validate=False)),
('imp', SimpleImputer()),
('et', ExtraTreesClassifier(n_estimators=10))
])
return pipeline
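# The pipeline fixture above chains DFS feature synthesis (DFSTransformer on
# the "customers" target entity) with a numeric-column selector, SimpleImputer
# and an ExtraTreesClassifier, and is reused by the estimator tests below.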
def test_sklearn_transformer(es, df):
# Using with transformers
pipeline = Pipeline(steps=[
('ft', DFSTransformer(entityset=es,
target_entity="customers")),
("numeric", FunctionTransformer(select_numeric, validate=False)),
('sc', StandardScaler()),
])
X_train = pipeline.fit(df['customer_id']).transform(df['customer_id'])
assert X_train.shape[0] == 15
def test_sklearn_estimator(df, pipeline):
# Using with estimator
pipeline.fit(df['customer_id'].values, y=df.target.values) \
.predict(df['customer_id'].values)
result = pipeline.score(df['customer_id'].values, df.target.values)
assert isinstance(result, (float))
# Pickling / Unpickling Pipeline
# TODO fix this
# s = pickle.dumps(pipeline)
# pipe_pickled = pickle.loads(s)
# result = pipe_pickled.score(df['customer_id'].values, df.target.values)
# assert isinstance(result, (float))
def test_sklearn_cross_val_score(df, pipeline):
# Using with cross_val_score
results = cross_val_score(pipeline,
X=df['customer_id'].values,
y=df.target.values,
cv=2,
scoring="accuracy")
assert isinstance(results[0], (float))
assert isinstance(results[1], (float))
def test_sklearn_gridsearchcv(df, pipeline):
# Using with GridSearchCV
params = {
'et__max_depth': [5, 10]
}
grid = GridSearchCV(estimator=pipeline,
param_grid=params,
cv=3)
grid.fit(df['customer_id'].values, df.target.values)
assert len(grid.predict(df['customer_id'].values)) == 15
def test_sklearn_cuttoff(pipeline):
    # Using cutoff_time to filter data
ct = pd.DataFrame()
ct['customer_id'] = [1, 2, 3]
ct['time'] = pd.to_datetime(['2014-1-1 04:00',
'2014-1-1 04:00',
'2014-1-1 04:00'])
ct['label'] = [True, True, False]
results = pipeline.fit(ct, y=ct.label).predict(ct)
assert len(results) == 3<|fim▁end|>
|
def es():
es = load_mock_customer(n_customers=15,
n_products=15,
|
<|file_name|>test_library_content.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Basic unit tests for LibraryContentBlock
Higher-level tests are in `cms/djangoapps/contentstore/tests/test_libraries.py`.
"""
import six
from bson.objectid import ObjectId
from fs.memoryfs import MemoryFS
from lxml import etree
from mock import Mock, patch
from search.search_engine_base import SearchEngine
from six.moves import range
from web_fragments.fragment import Fragment
from xblock.runtime import Runtime as VanillaRuntime
from xmodule.library_content_module import ANY_CAPA_TYPE_VALUE, LibraryContentBlock
from xmodule.library_tools import LibraryToolsService
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.factories import CourseFactory, LibraryFactory
from xmodule.modulestore.tests.utils import MixedSplitTestCase
from xmodule.tests import get_test_system
from xmodule.validation import StudioValidationMessage
from xmodule.x_module import AUTHOR_VIEW
from .test_course_module import DummySystem as TestImportSystem
dummy_render = lambda block, _: Fragment(block.data) # pylint: disable=invalid-name
class LibraryContentTest(MixedSplitTestCase):
"""
Base class for tests of LibraryContentBlock (library_content_block.py)
"""
def setUp(self):
super(LibraryContentTest, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
self.tools = LibraryToolsService(self.store, self.user_id)
self.library = LibraryFactory.create(modulestore=self.store)
self.lib_blocks = [
self.make_block("html", self.library, data="Hello world from block {}".format(i))
for i in range(1, 5)
]
self.course = CourseFactory.create(modulestore=self.store)
self.chapter = self.make_block("chapter", self.course)
self.sequential = self.make_block("sequential", self.chapter)
self.vertical = self.make_block("vertical", self.sequential)
self.lc_block = self.make_block(
"library_content",
self.vertical,
max_count=1,
source_library_id=six.text_type(self.library.location.library_key)
)
def _bind_course_module(self, module):
"""
Bind a module (part of self.course) so we can access student-specific data.
"""
module_system = get_test_system(course_id=module.location.course_key)
module_system.descriptor_runtime = module.runtime._descriptor_system # pylint: disable=protected-access
module_system._services['library_tools'] = self.tools # pylint: disable=protected-access
def get_module(descriptor):
"""Mocks module_system get_module function"""
sub_module_system = get_test_system(course_id=module.location.course_key)
sub_module_system.get_module = get_module
sub_module_system.descriptor_runtime = descriptor._runtime # pylint: disable=protected-access
descriptor.bind_for_student(sub_module_system, self.user_id)
return descriptor
module_system.get_module = get_module
module.xmodule_runtime = module_system
class TestLibraryContentExportImport(LibraryContentTest):
"""
Export and import tests for LibraryContentBlock
"""
maxDiff = None
def test_xml_export_import_cycle(self):
"""
Test the export-import cycle.
"""
# Children will only set after calling this.
self.lc_block.refresh_children()
lc_block = self.store.get_item(self.lc_block.location)
expected_olx = (
'<library_content display_name="{block.display_name}" max_count="{block.max_count}"'
' source_library_id="{block.source_library_id}" source_library_version="{block.source_library_version}">\n'
' <html url_name="{block.children[0].block_id}"/>\n'
' <html url_name="{block.children[1].block_id}"/>\n'
' <html url_name="{block.children[2].block_id}"/>\n'
' <html url_name="{block.children[3].block_id}"/>\n'
'</library_content>\n'
).format(
block=lc_block,
)
export_fs = MemoryFS()
# Set the virtual FS to export the olx to.
lc_block.runtime._descriptor_system.export_fs = export_fs # pylint: disable=protected-access
# Export the olx.
node = etree.Element("unknown_root")
lc_block.add_xml_to_node(node)
# Read it back
with export_fs.open('{dir}/{file_name}.xml'.format(
dir=lc_block.scope_ids.usage_id.block_type,
file_name=lc_block.scope_ids.usage_id.block_id
)) as f:
exported_olx = f.read()
# And compare.
assert exported_olx == expected_olx
runtime = TestImportSystem(load_error_modules=True, course_id=lc_block.location.course_key)
runtime.resources_fs = export_fs
# Now import it.
olx_element = etree.fromstring(exported_olx)
id_generator = Mock()
imported_lc_block = LibraryContentBlock.parse_xml(olx_element, runtime, None, id_generator)
# Check the new XBlock has the same properties as the old one.
assert imported_lc_block.display_name == lc_block.display_name
assert imported_lc_block.source_library_id == lc_block.source_library_id
assert imported_lc_block.source_library_version == lc_block.source_library_version
assert imported_lc_block.mode == lc_block.mode
assert imported_lc_block.max_count == lc_block.max_count
assert imported_lc_block.capa_type == lc_block.capa_type
assert len(imported_lc_block.children) == 4
assert imported_lc_block.children == lc_block.children
class LibraryContentBlockTestMixin(object):
"""
Basic unit tests for LibraryContentBlock
"""
problem_types = [
["multiplechoiceresponse"], ["optionresponse"], ["optionresponse", "coderesponse"],
["coderesponse", "optionresponse"]
]
problem_type_lookup = {}
def _get_capa_problem_type_xml(self, *args):
""" Helper function to create empty CAPA problem definition """
problem = "<problem>"
for problem_type in args:
problem += "<{problem_type}></{problem_type}>".format(problem_type=problem_type)
problem += "</problem>"
return problem
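        # e.g. _get_capa_problem_type_xml("optionresponse", "coderesponse") ->
        # "<problem><optionresponse></optionresponse><coderesponse></coderesponse></problem>"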
def _create_capa_problems(self):
"""
Helper function to create a set of capa problems to test against.
Creates four blocks total.
"""
self.problem_type_lookup = {}
for problem_type in self.problem_types:
block = self.make_block("problem", self.library, data=self._get_capa_problem_type_xml(*problem_type))
self.problem_type_lookup[block.location] = problem_type
def test_lib_content_block(self):
"""
Test that blocks from a library are copied and added as children
"""
# Check that the LibraryContent block has no children initially
# Normally the children get added when the "source_libraries" setting
# is updated, but the way we do it through a factory doesn't do that.
assert len(self.lc_block.children) == 0
# Update the LibraryContent module:
self.lc_block.refresh_children()
self.lc_block = self.store.get_item(self.lc_block.location)
# Check that all blocks from the library are now children of the block:
assert len(self.lc_block.children) == len(self.lib_blocks)
def test_children_seen_by_a_user(self):
"""
Test that each student sees only one block as a child of the LibraryContent block.
"""
self.lc_block.refresh_children()
self.lc_block = self.store.get_item(self.lc_block.location)
self._bind_course_module(self.lc_block)
# Make sure the runtime knows that the block's children vary per-user:
assert self.lc_block.has_dynamic_children()
assert len(self.lc_block.children) == len(self.lib_blocks)
# Check how many children each user will see:
assert len(self.lc_block.get_child_descriptors()) == 1
# Check that get_content_titles() doesn't return titles for hidden/unused children
assert len(self.lc_block.get_content_titles()) == 1
def test_validation_of_course_libraries(self):
"""
Test that the validation method of LibraryContent blocks can validate
the source_library setting.
"""
# When source_library_id is blank, the validation summary should say this block needs to be configured:
self.lc_block.source_library_id = ""
result = self.lc_block.validate()
assert not result
# Validation fails due to at least one warning/message
assert result.summary
assert StudioValidationMessage.NOT_CONFIGURED == result.summary.type
# When source_library_id references a non-existent library, we should get an error:
self.lc_block.source_library_id = "library-v1:BAD+WOLF"
result = self.lc_block.validate()
assert not result
# Validation fails due to at least one warning/message
assert result.summary
assert StudioValidationMessage.ERROR == result.summary.type
assert 'invalid' in result.summary.text
# When source_library_id is set but the block needs to be updated, the summary should say so:
self.lc_block.source_library_id = six.text_type(self.library.location.library_key)
result = self.lc_block.validate()
assert not result
# Validation fails due to at least one warning/message
assert result.summary
assert StudioValidationMessage.WARNING == result.summary.type
assert 'out of date' in result.summary.text
# Now if we update the block, all validation should pass:
self.lc_block.refresh_children()
assert self.lc_block.validate()
def test_validation_of_matching_blocks(self):
"""
Test that the validation method of LibraryContent blocks can warn
the user about problems with other settings (max_count and capa_type).
"""
# Set max_count to higher value than exists in library
self.lc_block.max_count = 50
# In the normal studio editing process, editor_saved() calls refresh_children at this point
self.lc_block.refresh_children()
result = self.lc_block.validate()
assert not result
# Validation fails due to at least one warning/message
assert result.summary
assert StudioValidationMessage.WARNING == result.summary.type
assert 'only 4 matching problems' in result.summary.text
# Add some capa problems so we can check problem type validation messages
self.lc_block.max_count = 1
self._create_capa_problems()
self.lc_block.refresh_children()
assert self.lc_block.validate()
# Existing problem type should pass validation
self.lc_block.max_count = 1
self.lc_block.capa_type = 'multiplechoiceresponse'
self.lc_block.refresh_children()
assert self.lc_block.validate()
# ... unless requested more blocks than exists in library
self.lc_block.max_count = 10
self.lc_block.capa_type = 'multiplechoiceresponse'
self.lc_block.refresh_children()
result = self.lc_block.validate()
assert not result
# Validation fails due to at least one warning/message
assert result.summary
assert StudioValidationMessage.WARNING == result.summary.type
assert 'only 1 matching problem' in result.summary.text
# Missing problem type should always fail validation
self.lc_block.max_count = 1
self.lc_block.capa_type = 'customresponse'
self.lc_block.refresh_children()
result = self.lc_block.validate()
assert not result
# Validation fails due to at least one warning/message
assert result.summary
assert StudioValidationMessage.WARNING == result.summary.type
assert 'no matching problem types' in result.summary.text
def test_capa_type_filtering(self):
"""
Test that the capa type filter is actually filtering children
"""
self._create_capa_problems()
assert len(self.lc_block.children) == 0
# precondition check
self.lc_block.capa_type = "multiplechoiceresponse"
self.lc_block.refresh_children()
assert len(self.lc_block.children) == 1
self.lc_block.capa_type = "optionresponse"
self.lc_block.refresh_children()
assert len(self.lc_block.children) == 3
self.lc_block.capa_type = "coderesponse"
self.lc_block.refresh_children()
assert len(self.lc_block.children) == 2
self.lc_block.capa_type = "customresponse"
self.lc_block.refresh_children()
assert len(self.lc_block.children) == 0
self.lc_block.capa_type = ANY_CAPA_TYPE_VALUE
self.lc_block.refresh_children()
assert len(self.lc_block.children) == (len(self.lib_blocks) + 4)
def test_non_editable_settings(self):
"""
Test the settings that are marked as "non-editable".
"""
non_editable_metadata_fields = self.lc_block.non_editable_metadata_fields
assert LibraryContentBlock.mode in non_editable_metadata_fields
assert LibraryContentBlock.display_name not in non_editable_metadata_fields
def test_overlimit_blocks_chosen_randomly(self):
"""
Tests that blocks to remove from selected children are chosen
randomly when len(selected) > max_count.
"""
blocks_seen = set()
total_tries, max_tries = 0, 100
self.lc_block.refresh_children()
self.lc_block = self.store.get_item(self.lc_block.location)
self._bind_course_module(self.lc_block)
# Eventually, we should see every child block selected
while len(blocks_seen) != len(self.lib_blocks):
self._change_count_and_refresh_children(len(self.lib_blocks))
# Now set the number of selections to 1
selected = self._change_count_and_refresh_children(1)
blocks_seen.update(selected)
total_tries += 1
if total_tries >= max_tries:
assert False, "Max tries exceeded before seeing all blocks."
break
def _change_count_and_refresh_children(self, count):
"""
Helper method that changes the max_count of self.lc_block, refreshes
children, and asserts that the number of selected children equals the count provided.
"""
self.lc_block.max_count = count
selected = self.lc_block.get_child_descriptors()
assert len(selected) == count
return selected
@patch('xmodule.library_tools.SearchEngine.get_search_engine', Mock(return_value=None, autospec=True))
class TestLibraryContentBlockNoSearchIndex(LibraryContentBlockTestMixin, LibraryContentTest):
"""
Tests for library container when no search index is available.
Tests fallback low-level CAPA problem introspection
"""
pass # pylint:disable=unnecessary-pass
search_index_mock = Mock(spec=SearchEngine) # pylint: disable=invalid-name
@patch('xmodule.library_tools.SearchEngine.get_search_engine', Mock(return_value=search_index_mock, autospec=True))
class TestLibraryContentBlockWithSearchIndex(LibraryContentBlockTestMixin, LibraryContentTest):
"""
Tests for library container with mocked search engine response.
"""
def _get_search_response(self, field_dictionary=None):
""" Mocks search response as returned by search engine """
target_type = field_dictionary.get('problem_types')
matched_block_locations = [
key for key, problem_types in
self.problem_type_lookup.items() if target_type in problem_types
]
return {
'results': [
{'data': {'id': str(location)}} for location in matched_block_locations
]
}
def setUp(self):
""" Sets up search engine mock """
super(TestLibraryContentBlockWithSearchIndex, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
search_index_mock.search = Mock(side_effect=self._get_search_response)
@patch(
'xmodule.modulestore.split_mongo.caching_descriptor_system.CachingDescriptorSystem.render', VanillaRuntime.render
)
@patch('xmodule.html_module.HtmlBlock.author_view', dummy_render, create=True)
@patch('xmodule.x_module.DescriptorSystem.applicable_aside_types', lambda self, block: [])
class TestLibraryContentRender(LibraryContentTest):
"""
Rendering unit tests for LibraryContentBlock
"""
def test_preview_view(self):
""" Test preview view rendering """
self.lc_block.refresh_children()
self.lc_block = self.store.get_item(self.lc_block.location)
assert len(self.lc_block.children) == len(self.lib_blocks)
self._bind_course_module(self.lc_block)
rendered = self.lc_block.render(AUTHOR_VIEW, {'root_xblock': self.lc_block})
assert 'Hello world from block 1' in rendered.content
def test_author_view(self):
""" Test author view rendering """
self.lc_block.refresh_children()
self.lc_block = self.store.get_item(self.lc_block.location)
assert len(self.lc_block.children) == len(self.lib_blocks)
self._bind_course_module(self.lc_block)
rendered = self.lc_block.render(AUTHOR_VIEW, {})
assert '' == rendered.content
# content should be empty
assert 'LibraryContentAuthorView' == rendered.js_init_fn<|fim▁hole|>class TestLibraryContentAnalytics(LibraryContentTest):
"""
Test analytics features of LibraryContentBlock
"""
def setUp(self):
super(TestLibraryContentAnalytics, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
self.publisher = Mock()
self.lc_block.refresh_children()
self.lc_block = self.store.get_item(self.lc_block.location)
self._bind_course_module(self.lc_block)
self.lc_block.xmodule_runtime.publish = self.publisher
def _assert_event_was_published(self, event_type):
"""
Check that a LibraryContentBlock analytics event was published by self.lc_block.
"""
assert self.publisher.called
assert len(self.publisher.call_args[0]) == 3 # pylint:disable=unsubscriptable-object
_, event_name, event_data = self.publisher.call_args[0] # pylint:disable=unsubscriptable-object
assert event_name == 'edx.librarycontentblock.content.{}'.format(event_type)
assert event_data['location'] == six.text_type(self.lc_block.location)
return event_data
def test_assigned_event(self):
"""
Test the "assigned" event emitted when a student is assigned specific blocks.
"""
# In the beginning was the lc_block and it assigned one child to the student:
child = self.lc_block.get_child_descriptors()[0]
child_lib_location, child_lib_version = self.store.get_block_original_usage(child.location)
assert isinstance(child_lib_version, ObjectId)
event_data = self._assert_event_was_published("assigned")
block_info = {
"usage_key": six.text_type(child.location),
"original_usage_key": six.text_type(child_lib_location),
"original_usage_version": six.text_type(child_lib_version),
"descendants": [],
}
assert event_data ==\
{'location': six.text_type(self.lc_block.location),
'added': [block_info],
'result': [block_info],
'previous_count': 0, 'max_count': 1}
self.publisher.reset_mock()
# Now increase max_count so that one more child will be added:
self.lc_block.max_count = 2
children = self.lc_block.get_child_descriptors()
assert len(children) == 2
child, new_child = children if children[0].location == child.location else reversed(children)
event_data = self._assert_event_was_published("assigned")
assert event_data['added'][0]['usage_key'] == six.text_type(new_child.location)
assert len(event_data['result']) == 2
assert event_data['previous_count'] == 1
assert event_data['max_count'] == 2
def test_assigned_event_published(self):
"""
Same as test_assigned_event but uses the published branch
"""
self.store.publish(self.course.location, self.user_id)
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
self.lc_block = self.store.get_item(self.lc_block.location)
self._bind_course_module(self.lc_block)
self.lc_block.xmodule_runtime.publish = self.publisher
self.test_assigned_event()
def test_assigned_descendants(self):
"""
Test the "assigned" event emitted includes descendant block information.
"""
# Replace the blocks in the library with a block that has descendants:
with self.store.bulk_operations(self.library.location.library_key):
self.library.children = []
main_vertical = self.make_block("vertical", self.library)
inner_vertical = self.make_block("vertical", main_vertical)
html_block = self.make_block("html", inner_vertical)
problem_block = self.make_block("problem", inner_vertical)
self.lc_block.refresh_children()
# Reload lc_block and set it up for a student:
self.lc_block = self.store.get_item(self.lc_block.location)
self._bind_course_module(self.lc_block)
self.lc_block.xmodule_runtime.publish = self.publisher
# Get the keys of each of our blocks, as they appear in the course:
course_usage_main_vertical = self.lc_block.children[0]
course_usage_inner_vertical = self.store.get_item(course_usage_main_vertical).children[0]
inner_vertical_in_course = self.store.get_item(course_usage_inner_vertical)
course_usage_html = inner_vertical_in_course.children[0]
course_usage_problem = inner_vertical_in_course.children[1]
# Trigger a publish event:
self.lc_block.get_child_descriptors()
event_data = self._assert_event_was_published("assigned")
for block_list in (event_data["added"], event_data["result"]):
assert len(block_list) == 1
# main_vertical is the only root block added, and is the only result.
assert block_list[0]['usage_key'] == six.text_type(course_usage_main_vertical)
# Check that "descendants" is a flat, unordered list of all of main_vertical's descendants:
descendants_expected = (
(inner_vertical.location, course_usage_inner_vertical),
(html_block.location, course_usage_html),
(problem_block.location, course_usage_problem),
)
descendant_data_expected = {}
for lib_key, course_usage_key in descendants_expected:
descendant_data_expected[six.text_type(course_usage_key)] = {
"usage_key": six.text_type(course_usage_key),
"original_usage_key": six.text_type(lib_key),
"original_usage_version": six.text_type(self.store.get_block_original_usage(course_usage_key)[1]),
}
assert len(block_list[0]['descendants']) == len(descendant_data_expected)
for descendant in block_list[0]["descendants"]:
assert descendant == descendant_data_expected.get(descendant['usage_key'])
def test_removed_overlimit(self):
"""
Test the "removed" event emitted when we un-assign blocks previously assigned to a student.
We go from one blocks assigned to none because max_count has been decreased.
"""
# Decrease max_count to 1, causing the block to be overlimit:
self.lc_block.get_child_descriptors() # This line is needed in the test environment or the change has no effect
self.publisher.reset_mock() # Clear the "assigned" event that was just published.
self.lc_block.max_count = 0
# Check that the event says that one block was removed, leaving no blocks left:
children = self.lc_block.get_child_descriptors()
assert len(children) == 0
event_data = self._assert_event_was_published("removed")
assert len(event_data['removed']) == 1
assert event_data['result'] == []
assert event_data['reason'] == 'overlimit'
def test_removed_invalid(self):
"""
Test the "removed" event emitted when we un-assign blocks previously assigned to a student.
We go from two blocks assigned, to one because the others have been deleted from the library.
"""
# Start by assigning two blocks to the student:
self.lc_block.get_child_descriptors() # This line is needed in the test environment or the change has no effect
self.lc_block.max_count = 2
initial_blocks_assigned = self.lc_block.get_child_descriptors()
assert len(initial_blocks_assigned) == 2
self.publisher.reset_mock() # Clear the "assigned" event that was just published.
# Now make sure that one of the assigned blocks will have to be un-assigned.
# To cause an "invalid" event, we delete all blocks from the content library
# except for one of the two already assigned to the student:
keep_block_key = initial_blocks_assigned[0].location
keep_block_lib_usage_key, keep_block_lib_version = self.store.get_block_original_usage(keep_block_key)
assert keep_block_lib_usage_key is not None
deleted_block_key = initial_blocks_assigned[1].location
self.library.children = [keep_block_lib_usage_key]
self.store.update_item(self.library, self.user_id)
self.lc_block.refresh_children()
# Check that the event says that one block was removed, leaving one block left:
children = self.lc_block.get_child_descriptors()
assert len(children) == 1
event_data = self._assert_event_was_published("removed")
assert event_data['removed'] ==\
[{'usage_key': six.text_type(deleted_block_key),
'original_usage_key': None,
'original_usage_version': None,
'descendants': []}]
assert event_data['result'] ==\
[{'usage_key': six.text_type(keep_block_key),
'original_usage_key': six.text_type(keep_block_lib_usage_key),
'original_usage_version': six.text_type(keep_block_lib_version), 'descendants': []}]
assert event_data['reason'] == 'invalid'<|fim▁end|>
|
# but some js initialization should happen
|
<|file_name|>problem016.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
#!/usr/bin/env python3
print(sum(map(int, str(2**1000))))
|
<|file_name|>template.py<|end_file_name|><|fim▁begin|># (c) 2015, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import base64
import datetime
import os
import pwd
import time
from ansible import constants as C
from ansible.plugins.action import ActionBase
from ansible.utils.hashing import checksum_s
from ansible.utils.unicode import to_bytes
class ActionModule(ActionBase):
TRANSFERS_FILES = True
def get_checksum(self, tmp, dest, try_directory=False, source=None):
remote_checksum = self._remote_checksum(tmp, dest)
if remote_checksum in ('0', '2', '3', '4'):
# Note: 1 means the file is not present which is fine; template
# will create it. 3 means directory was specified instead of file
if try_directory and remote_checksum == '3' and source:
base = os.path.basename(source)
dest = os.path.join(dest, base)
remote_checksum = self.get_checksum(tmp, dest, try_directory=False)
if remote_checksum not in ('0', '2', '3', '4'):
return remote_checksum
result = dict(failed=True, msg="failed to checksum remote file."
" Checksum error code: %s" % remote_checksum)
return result
return remote_checksum
def run(self, tmp=None, task_vars=dict()):
''' handler for template operations '''
source = self._task.args.get('src', None)
dest = self._task.args.get('dest', None)
faf = self._task.first_available_file
if (source is None and faf is not None) or dest is None:
return dict(failed=True, msg="src and dest are required")
if tmp is None:
tmp = self._make_tmp_path()
if faf:
#FIXME: issue deprecation warning for first_available_file, use with_first_found or lookup('first_found',...) instead
found = False
for fn in faf:
fn_orig = fn
fnt = self._templar.template(fn)
fnd = self._loader.path_dwim(self._task._role._role_path, 'templates', fnt)
if not os.path.exists(fnd):
of = task_vars.get('_original_file', None)
if of is not None:
fnd = self._loader.path_dwim(self._task._role._role_path, 'templates', of)
if os.path.exists(fnd):
source = fnd
found = True
break
if not found:
return dict(failed=True, msg="could not find src in first_available_file list")
else:
if self._task._role is not None:
source = self._loader.path_dwim_relative(self._task._role._role_path, 'templates', source)
else:
source = self._loader.path_dwim(source)<|fim▁hole|>
directory_prepended = False
if dest.endswith(os.sep):
directory_prepended = True
base = os.path.basename(source)
dest = os.path.join(dest, base)
# template the source data locally & get ready to transfer
try:
with open(source, 'r') as f:
template_data = f.read()
try:
template_uid = pwd.getpwuid(os.stat(source).st_uid).pw_name
except:
template_uid = os.stat(source).st_uid
vars = task_vars.copy()
vars['template_host'] = os.uname()[1]
vars['template_path'] = source
vars['template_mtime'] = datetime.datetime.fromtimestamp(os.path.getmtime(source))
vars['template_uid'] = template_uid
vars['template_fullpath'] = os.path.abspath(source)
vars['template_run_date'] = datetime.datetime.now()
managed_default = C.DEFAULT_MANAGED_STR
managed_str = managed_default.format(
host = vars['template_host'],
uid = vars['template_uid'],
file = to_bytes(vars['template_path'])
)
vars['ansible_managed'] = time.strftime(
managed_str,
time.localtime(os.path.getmtime(source))
)
old_vars = self._templar._available_variables
self._templar.set_available_variables(vars)
resultant = self._templar.template(template_data, preserve_trailing_newlines=True)
self._templar.set_available_variables(old_vars)
except Exception as e:
return dict(failed=True, msg=type(e).__name__ + ": " + str(e))
local_checksum = checksum_s(resultant)
remote_checksum = self.get_checksum(tmp, dest, not directory_prepended, source=source)
if isinstance(remote_checksum, dict):
# Error from remote_checksum is a dict. Valid return is a str
return remote_checksum
if local_checksum != remote_checksum:
# if showing diffs, we need to get the remote value
dest_contents = ''
# FIXME: still need to implement diff mechanism
#if self.runner.diff:
# # using persist_files to keep the temp directory around to avoid needing to grab another
# dest_result = self.runner._execute_module(conn, tmp, 'slurp', "path=%s" % dest, task_vars=task_vars, persist_files=True)
# if 'content' in dest_result.result:
# dest_contents = dest_result.result['content']
# if dest_result.result['encoding'] == 'base64':
# dest_contents = base64.b64decode(dest_contents)
# else:
# raise Exception("unknown encoding, failed: %s" % dest_result.result)
xfered = self._transfer_data(self._connection._shell.join_path(tmp, 'source'), resultant)
# fix file permissions when the copy is done as a different user
if self._connection_info.become and self._connection_info.become_user != 'root':
self._remote_chmod('a+r', xfered, tmp)
# run the copy module
new_module_args = self._task.args.copy()
new_module_args.update(
dict(
src=xfered,
dest=dest,
original_basename=os.path.basename(source),
follow=True,
),
)
result = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars)
if result.get('changed', False):
result['diff'] = dict(before=dest_contents, after=resultant)
return result
else:
# when running the file module based on the template data, we do
# not want the source filename (the name of the template) to be used,
# since this would mess up links, so we clear the src param and tell
# the module to follow links. When doing that, we have to set
# original_basename to the template just in case the dest is
# a directory.
new_module_args = self._task.args.copy()
new_module_args.update(
dict(
src=None,
original_basename=os.path.basename(source),
follow=True,
),
)
return self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars)<|fim▁end|>
|
# Expand any user home dir specification
dest = self._remote_expand_user(dest, tmp)
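
# A standalone sketch (not Ansible's actual templar) of how the template_*
# context assembled above could be rendered; it assumes the jinja2 package is
# available and uses a hypothetical inline template for illustration.
import datetime
import os

from jinja2 import Template

source = 'motd.j2'  # hypothetical template path
template_vars = {
    'template_host': os.uname()[1],
    'template_path': source,
    'template_run_date': datetime.datetime.now(),
}
rendered = Template(
    'Rendered {{ template_path }} on {{ template_host }} at {{ template_run_date }}'
).render(**template_vars)
print(rendered)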
|
<|file_name|>Character.java<|end_file_name|><|fim▁begin|><|fim▁hole|>import fr.lelouet.rpg.model.character.CharStats;
public class Character extends CharStats {
public Character(RPGGame system) {
super(system);
}
/**
*
* @return true if this character is an avatar
*/
public boolean isPlayer() {
return true;
}
public int lvl;
}<|fim▁end|>
|
package fr.lelouet.rpg.model;
|
<|file_name|>functions.py<|end_file_name|><|fim▁begin|>"""
Some useful utility functions missing from numpy/scipy.
Copyright 2016 Deepak Subburam
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
"""
import numpy as np
def dir_clip(data, clips):
"""
'Directional' clip. Dimension of data and clips must be the same. Values in data
are clipped according to corresponding values in clips and returned as a new array.
new_value = portion of value between 0 and clip.
If clip is nan, new_value = value.
"""
if isinstance(data, np.ndarray): results = data.copy()
else: results = np.array(data)
mask = (np.sign(data) != np.sign(clips)) \
& ~np.isnan(data) & ~np.isnan(clips)
results[mask] = 0.0
mask = ~mask & (abs(data) > abs(clips))
results[mask] = clips[mask]
return results
def toward_zero(data, value):
"""
Subtract value from positive values of data, and add value to negative values
of data. Do not cross zero.
"""
results = data.copy()
results[data > 0] -= value
results[data < 0] += value
results[(data > 0) & (results < 0)] = 0.0
results[(data < 0) & (results > 0)] = 0.0
return results
def per_clip(data, caps):
"""
Return values in data clipped between the %ile values of (caps[0], caps[1]).
If caps is a scalar, only large values are capped.
"""
if np.isscalar(caps):
return np.fmin(data, np.percentile(data, caps))
low, high = np.percentile(data, caps)
return np.clip(data, low, high)
def scale2unit(data, eps=.1, dtype=None, soft_clip=99.99):<|fim▁hole|> Scale values to between -1.0 and +1.0 strictly, and less strictly between
-1.0 + <eps> or 1.0 - <eps>.
More precisely, amplitude is scaled such that <large_value> is set to
-1.0 + <eps> or 1.0 - <eps>, where <large_value> is
if soft_clip is None:
the max value of abs(data)
else:
soft_clip %ile value of abs(data)
Result is returned as type <dtype>, which defaults to
if data.dtype is an integer type: float32
else: data.dtype
"""
if dtype is None:
dtype = data.dtype
if 'int' in str(dtype): dtype = np.float32
data = data / (np.percentile(abs(data), soft_clip) if soft_clip
else np.max(abs(data)))
if eps: data *= 1. - eps
if soft_clip: data = np.clip(data, -1.0, 1.0)
return data.astype(dtype, copy=False)
def softmax(data, axis=None):
"""Scale exp(data) to sum to unit along axis."""
edata = np.exp(data)
return edata / np.sum(edata, axis=axis)[:, None].swapaxes(-1, axis)
def sigmoid(data):
"""Sigmoid activation function."""
return 1 / (1 + np.exp(-data))
def logit(data, eps=1e-8):
"""Inverse of the sigmoid function."""
return -np.log(1 / (data + eps) - 1 + eps)
def elu(data, alpha=1.0, copy=True):
"""Exponential LU activation function."""
if copy: result = data.copy()
else: result = data
mask = data < 0
result[mask] = alpha * (np.exp(data[mask]) - 1.0)
return result
def celu(data, alpha, copy=True):
"""Continuously differentiable exponential LU activation function."""
if copy: result = data.copy()
else: result = data
mask = data < 0
result[mask] = alpha * (np.exp(data[mask] / alpha) - 1.0)
return result
def ielu(data, copy=True, eps=1e-20):
"""Inverse exponential LU activation function."""
if copy: result = data.copy()
else: result = data
mask = data < 0
result[mask] = np.log(data[mask] + 1.0 + eps)
return result
def llu(data, copy=True):
"""
Linear-log activation function; linear inside of +/-1.0,
log outside of it.
"""
if copy: result = data.copy()
else: result = data
mask = data > 1.0
result[mask] = np.log(data[mask]) + 1.0
mask = data < -1.0
result[mask] = -np.log(-data[mask]) - 1.0
return result
def illu(data, copy=True):
"""Inverse of llu."""
if copy: result = data.copy()
else: result = data
mask = data > 1.0
result[mask] = np.exp(data[mask] - 1.0)
mask = data < -1.0
result[mask] = -np.exp(-data[mask] - 1.0)
return result
def sroot(data, power=0.5):
"""
'Signed' square-root (default power = 0.5):
raise abs(data) to the given power, then multiply by sign(data).
"""
result = np.abs(data)**power
return np.sign(data) * result<|fim▁end|>
|
"""
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//
// SOS: the Stupid Operating System
// by Eliza Weisman ([email protected])
//
// Copyright (c) 2015-2017 Eliza Weisman
// Released under the terms of the MIT license. See `LICENSE` in the root
// directory of this repository for more information.
//
//! Architecture-specific implementation.
//!
//! This module consists of a number of modules containing
//! architecture-specific code for each targeted architecture. The `arch`
//! module uses conditional compilation to re-export the implementation for
//! which the kernel is currently being compiled.<|fim▁hole|>//! In order for the rest of the kernel to work properly, an
//! architecture-specific implementation module should define a number of
//! specific items. If these are not defined, the platform-independant kernel
//! implementation cannot function properly.
//!
//! Please note that currently only the architecture-specific implementation
//! for `x86_64` (long mode) is implemented. The `armv7` and `x86` (protected
//! mode) modules are currently much less complete.
// 64-bit x86_64 (long mode)
#[cfg(target_arch="x86_64")] mod x86_64;
#[cfg(target_arch="x86_64")] pub use self::x86_64::*;
// 32-bit x86 (protected mode)
// TODO: NYI
#[cfg(target_arch = "x86")] mod x86;
#[cfg(target_arch = "x86")] pub use self::x86::*;
// ARM v7
// TODO: NYI
#[cfg(target_arch = "armv7")] mod armv7;
#[cfg(target_arch = "armv7")] pub use self::x86::*;<|fim▁end|>
|
//!
|
<|file_name|>impact_function.py<|end_file_name|><|fim▁begin|># coding=utf-8
"""InaSAFE Disaster risk tool by Australian Aid - Flood Vector Impact on
Buildings using QGIS.
Contact : [email protected]
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
from collections import OrderedDict
from qgis.core import (
QgsField,
QgsSpatialIndex,
QgsVectorLayer,
QgsFeature,
QgsRectangle,
QgsFeatureRequest,
QgsCoordinateTransform,
QgsCoordinateReferenceSystem,
QgsGeometry)
from PyQt4.QtCore import QVariant
from safe.impact_functions.bases.classified_vh_classified_ve import \
ClassifiedVHClassifiedVE
from safe.impact_functions.inundation.flood_vector_building_impact.\
metadata_definitions import FloodPolygonBuildingFunctionMetadata
from safe.utilities.i18n import tr
from safe.storage.vector import Vector
from safe.common.exceptions import GetDataError
from safe.impact_reports.building_exposure_report_mixin import (
BuildingExposureReportMixin)
class FloodPolygonBuildingFunction(
ClassifiedVHClassifiedVE,
BuildingExposureReportMixin):
# noinspection PyUnresolvedReferences
"""Impact function for inundation (polygon-polygon)."""
_metadata = FloodPolygonBuildingFunctionMetadata()
def __init__(self):
super(FloodPolygonBuildingFunction, self).__init__()
# The 'wet' variable
self.wet = 'wet'
def notes(self):
"""Return the notes section of the report.
:return: The notes that should be attached to this impact report.
:rtype: list
"""
return [
{
'content': tr('Notes'),
'header': True
},
{
'content': tr(
'Buildings are said to be inundated when in a region with '
'field "%s" in "%s" .') % (
self.hazard_class_attribute,
', '.join([
unicode(hazard_class) for
hazard_class in self.hazard_class_mapping[self.wet]
]))
}
]
def run(self):
"""Experimental impact function."""
self.validate()
self.prepare()
# Get parameters from layer's keywords
self.hazard_class_attribute = self.hazard.keyword('field')
self.hazard_class_mapping = self.hazard.keyword('value_map')
self.exposure_class_attribute = self.exposure.keyword(
'structure_class_field')
# Prepare Hazard Layer
hazard_provider = self.hazard.layer.dataProvider()
# Check affected field exists in the hazard layer
affected_field_index = hazard_provider.fieldNameIndex(
self.hazard_class_attribute)
if affected_field_index == -1:
message = tr(
'Field "%s" is not present in the attribute table of the '
'hazard layer. Please change the Affected Field parameter in '
'the IF Option.') % self.hazard_class_attribute
raise GetDataError(message)
srs = self.exposure.layer.crs().toWkt()
exposure_provider = self.exposure.layer.dataProvider()
exposure_fields = exposure_provider.fields()
# Check self.exposure_class_attribute exists in exposure layer
building_type_field_index = exposure_provider.fieldNameIndex(
self.exposure_class_attribute)
if building_type_field_index == -1:
message = tr(
'Field "%s" is not present in the attribute table of '
'the exposure layer. Please change the Building Type '
'Field parameter in the IF Option.'
) % self.exposure_class_attribute
raise GetDataError(message)
# If target_field does not exist, add it:
if exposure_fields.indexFromName(self.target_field) == -1:
exposure_provider.addAttributes(
[QgsField(self.target_field, QVariant.Int)])
target_field_index = exposure_provider.fieldNameIndex(
self.target_field)
exposure_fields = exposure_provider.fields()
# Create layer to store the lines from E and extent
building_layer = QgsVectorLayer(
'Polygon?crs=' + srs, 'impact_buildings', 'memory')
building_provider = building_layer.dataProvider()
# Set attributes
building_provider.addAttributes(exposure_fields.toList())
building_layer.startEditing()
building_layer.commitChanges()
# Filter geometry and data using the requested extent
requested_extent = QgsRectangle(*self.requested_extent)
# This is a hack - we should be setting the extent CRS
# in the IF base class via safe/engine/core.py:calculate_impact
# for now we assume the extent is in 4326 because it
# is set to that from geo_extent
# See issue #1857
transform = QgsCoordinateTransform(
QgsCoordinateReferenceSystem(
'EPSG:%i' % self._requested_extent_crs),
self.hazard.layer.crs()
)
projected_extent = transform.transformBoundingBox(requested_extent)
request = QgsFeatureRequest()
request.setFilterRect(projected_extent)
# Split building_layer by H and save as result:
# 1) Filter from H inundated features
# 2) Mark buildings as inundated (1) or not inundated (0)
# make spatial index of affected polygons
hazard_index = QgsSpatialIndex()
hazard_geometries = {} # key = feature id, value = geometry
has_hazard_objects = False
for feature in self.hazard.layer.getFeatures(request):
value = feature[affected_field_index]
if value not in self.hazard_class_mapping[self.wet]:
continue
hazard_index.insertFeature(feature)
hazard_geometries[feature.id()] = QgsGeometry(feature.geometry())
has_hazard_objects = True
if not has_hazard_objects:
message = tr(
'There are no objects in the hazard layer with %s '
'value in %s. Please check your data or use another '
'attribute.') % (
self.hazard_class_attribute,
', '.join(self.hazard_class_mapping[self.wet]))<|fim▁hole|> features = []
for feature in self.exposure.layer.getFeatures(request):
building_geom = feature.geometry()
affected = False
# get tentative list of intersecting hazard features
# only based on intersection of bounding boxes
ids = hazard_index.intersects(building_geom.boundingBox())
for fid in ids:
# run (slow) exact intersection test
if hazard_geometries[fid].intersects(building_geom):
affected = True
break
f = QgsFeature()
f.setGeometry(building_geom)
f.setAttributes(feature.attributes())
f[target_field_index] = 1 if affected else 0
features.append(f)
# every once in a while commit the created features
# to the output layer
if len(features) == 1000:
(_, __) = building_provider.addFeatures(features)
features = []
(_, __) = building_provider.addFeatures(features)
building_layer.updateExtents()
# Generate simple impact report
self.buildings = {}
self.affected_buildings = OrderedDict([
(tr('Flooded'), {})
])
buildings_data = building_layer.getFeatures()
building_type_field_index = building_layer.fieldNameIndex(
self.exposure_class_attribute)
for building in buildings_data:
record = building.attributes()
building_type = record[building_type_field_index]
if building_type in [None, 'NULL', 'null', 'Null']:
building_type = 'Unknown type'
if building_type not in self.buildings:
self.buildings[building_type] = 0
for category in self.affected_buildings.keys():
self.affected_buildings[category][
building_type] = OrderedDict([
(tr('Buildings Affected'), 0)])
self.buildings[building_type] += 1
if record[target_field_index] == 1:
self.affected_buildings[tr('Flooded')][building_type][
tr('Buildings Affected')] += 1
# Lump small entries and 'unknown' into 'other' category
self._consolidate_to_other()
impact_summary = self.generate_html_report()
map_title = tr('Buildings inundated')
style_classes = [
dict(label=tr('Not Inundated'), value=0, colour='#1EFC7C',
transparency=0, size=0.5),
dict(label=tr('Inundated'), value=1, colour='#F31A1C',
transparency=0, size=0.5)]
style_info = dict(
target_field=self.target_field,
style_classes=style_classes,
style_type='categorizedSymbol')
# Convert QgsVectorLayer to inasafe layer and return it.
building_layer = Vector(
data=building_layer,
name=tr('Flooded buildings'),
keywords={
'impact_summary': impact_summary,
'map_title': map_title,
'target_field': self.target_field,
'buildings_total': self.total_buildings,
'buildings_affected': self.total_affected_buildings},
style_info=style_info)
self._impact = building_layer
return building_layer<|fim▁end|>
|
raise GetDataError(message)
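
# A plain-Python illustration (no QGIS required) of the two-stage test used in
# run() above: a cheap bounding-box prefilter over an index, followed by an
# exact geometry intersection check on the surviving candidates.
def bbox_overlaps(a, b):
    ax1, ay1, ax2, ay2 = a
    bx1, by1, bx2, by2 = b
    return ax1 <= bx2 and bx1 <= ax2 and ay1 <= by2 and by1 <= ay2

hazard_bboxes = {1: (0, 0, 5, 5), 2: (10, 10, 12, 12)}   # feature id -> bounding box
building_bbox = (4, 4, 6, 6)

candidates = [fid for fid, bbox in hazard_bboxes.items()
              if bbox_overlaps(bbox, building_bbox)]
# In the real function each candidate is then confirmed with an exact
# hazard_geometries[fid].intersects(building_geom) call before the building
# is flagged in the target field.
print(candidates)   # -> [1]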
|
<|file_name|>mobile.09c351.js<|end_file_name|><|fim▁begin|>/******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "./";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ function(module, exports, __webpack_require__) {
'use strict';
var _addClass = __webpack_require__(25);
var _addClass2 = _interopRequireDefault(_addClass);
var _removeClass = __webpack_require__(26);
var _removeClass2 = _interopRequireDefault(_removeClass);
var _after = __webpack_require__(96);
var _after2 = _interopRequireDefault(_after);
var _browser = __webpack_require__(97);
var _browser2 = _interopRequireDefault(_browser);
var _fix = __webpack_require__(98);
var _fix2 = _interopRequireDefault(_fix);
var _util = __webpack_require__(27);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// fix configuration options that hexo does not support
function isPathMatch(path, href) {
var reg = /\/|index.html/g;
return path.replace(reg, '') === href.replace(reg, '');
}
// browser detection
function tabActive() {
var $tabs = document.querySelectorAll('.js-header-menu li a');
var path = window.location.pathname;
for (var i = 0, len = $tabs.length; i < len; i++) {
var $tab = $tabs[i];
if (isPathMatch(path, $tab.getAttribute('href'))) {
(0, _addClass2.default)($tab, 'active');
}
}
}
function getElementLeft(element) {
var actualLeft = element.offsetLeft;
var current = element.offsetParent;
while (current !== null) {
actualLeft += current.offsetLeft;
current = current.offsetParent;
}
return actualLeft;
}
function getElementTop(element) {
var actualTop = element.offsetTop;
var current = element.offsetParent;
while (current !== null) {
actualTop += current.offsetTop;
current = current.offsetParent;
}
return actualTop;
}
function scrollStop($dom, top, limit, zIndex, diff) {
var nowLeft = getElementLeft($dom);
var nowTop = getElementTop($dom) - top;
if (nowTop - limit <= diff) {
var $newDom = $dom.$newDom;
if (!$newDom) {
$newDom = $dom.cloneNode(true);
(0, _after2.default)($dom, $newDom);
$dom.$newDom = $newDom;
$newDom.style.position = 'fixed';
$newDom.style.top = (limit || nowTop) + 'px';
$newDom.style.left = nowLeft + 'px';
$newDom.style.zIndex = zIndex || 2;
$newDom.style.width = '100%';
$newDom.style.color = '#fff';
}
$newDom.style.visibility = 'visible';
$dom.style.visibility = 'hidden';
} else {
$dom.style.visibility = 'visible';
var _$newDom = $dom.$newDom;
if (_$newDom) {
_$newDom.style.visibility = 'hidden';
}
}
}
function handleScroll() {
var $overlay = document.querySelector('.js-overlay');
var $menu = document.querySelector('.js-header-menu');
scrollStop($overlay, document.body.scrollTop, -63, 2, 0);
scrollStop($menu, document.body.scrollTop, 1, 3, 0);
}
function bindScroll() {
document.querySelector('#container').addEventListener('scroll', function (e) {
handleScroll();
});
window.addEventListener('scroll', function (e) {
handleScroll();
});
handleScroll();
}
function init() {
if (_browser2.default.versions.mobile && window.screen.width < 800) {
tabActive();
bindScroll();
}
}
init();
(0, _util.addLoadEvent)(function () {
_fix2.default.init();
});
module.exports = {};
/***/ },
/* 1 */,
/* 2 */,
/* 3 */,
/* 4 */,
/* 5 */,
/* 6 */,
/* 7 */,
/* 8 */,
/* 9 */,
/* 10 */,
/* 11 */,
/* 12 */,
/* 13 */,
/* 14 */,
/* 15 */,
/* 16 */,
/* 17 */,
/* 18 */,
/* 19 */,
/* 20 */,
/* 21 */,
/* 22 */,
/* 23 */,
/* 24 */,
/* 25 */
/***/ function(module, exports) {
/**
* addClass : addClass(el, className)
* Adds a class name to an element. Compare with `$.fn.addClass`.
*
* var addClass = require('dom101/add-class');
*
* addClass(el, 'active');
*/
function addClass (el, className) {
if (el.classList) {
el.classList.add(className);
} else {
el.className += ' ' + className;
}
}
module.exports = addClass;
/***/ },
/* 26 */
/***/ function(module, exports) {
/**
* removeClass : removeClass(el, className)
* Removes a classname.
*
* var removeClass = require('dom101/remove-class');
*
* el.className = 'selected active';
* removeClass(el, 'active');
*
* el.className
* => "selected"
*/
function removeClass (el, className) {
if (el.classList) {
el.classList.remove(className);
} else {
var expr =
new RegExp('(^|\\b)' + className.split(' ').join('|') + '(\\b|$)', 'gi');
el.className = el.className.replace(expr, ' ');
}
}
module.exports = removeClass;
/***/ },
/* 27 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
var _typeof2 = __webpack_require__(28);
var _typeof3 = _interopRequireDefault(_typeof2);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var e = function () {
function r(e, r, n) {
return r || n ? String.fromCharCode(r || n) : u[e] || e;
}
function n(e) {
return p[e];
}
var t = /"|<|>|&| |'|&#(\d+);|&#(\d+)/g,
o = /['<> "&]/g,
u = {
""": '"',
"<": "<",
">": ">",
"&": "&",
" ": " "
},
c = /\u00a0/g,
a = /<br\s*\/?>/gi,
i = /\r?\n/g,
f = /\s/g,
p = {};
for (var s in u) {
p[u[s]] = s;
}return u["'"] = "'", p["'"] = "'", {
encode: function encode(e) {
return e ? ("" + e).replace(o, n).replace(i, "<br/>").replace(f, " ") : "";
},
decode: function decode(e) {
return e ? ("" + e).replace(a, "\n").replace(t, r).replace(c, " ") : "";
},
encodeBase16: function encodeBase16(e) {
if (!e) return e;
e += "";
for (var r = [], n = 0, t = e.length; t > n; n++) {
r.push(e.charCodeAt(n).toString(16).toUpperCase());
}return r.join("");
},
encodeBase16forJSON: function encodeBase16forJSON(e) {
if (!e) return e;
e = e.replace(/[\u4E00-\u9FBF]/gi, function (e) {
return escape(e).replace("%u", "\\u");
});
for (var r = [], n = 0, t = e.length; t > n; n++) {
r.push(e.charCodeAt(n).toString(16).toUpperCase());
}return r.join("");
},
decodeBase16: function decodeBase16(e) {
if (!e) return e;
e += "";
for (var r = [], n = 0, t = e.length; t > n; n += 2) {
r.push(String.fromCharCode("0x" + e.slice(n, n + 2)));
}return r.join("");
},
encodeObject: function encodeObject(r) {
if (r instanceof Array) for (var n = 0, t = r.length; t > n; n++) {
r[n] = e.encodeObject(r[n]);
} else if ("object" == (typeof r === "undefined" ? "undefined" : (0, _typeof3.default)(r))) for (var o in r) {
r[o] = e.encodeObject(r[o]);
} else if ("string" == typeof r) return e.encode(r);
return r;
},
loadScript: function loadScript(path) {
var $script = document.createElement('script');
document.getElementsByTagName('body')[0].appendChild($script);
$script.setAttribute('src', path);
},
addLoadEvent: function addLoadEvent(func) {
var oldonload = window.onload;
if (typeof window.onload != "function") {
window.onload = func;
} else {
window.onload = function () {
oldonload();
func();
};
}
}
};
}();
module.exports = e;
/***/ },
/* 28 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
exports.__esModule = true;
var _iterator = __webpack_require__(29);
var _iterator2 = _interopRequireDefault(_iterator);
var _symbol = __webpack_require__(80);
var _symbol2 = _interopRequireDefault(_symbol);
var _typeof = typeof _symbol2.default === "function" && typeof _iterator2.default === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof _symbol2.default === "function" && obj.constructor === _symbol2.default && obj !== _symbol2.default.prototype ? "symbol" : typeof obj; };
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.default = typeof _symbol2.default === "function" && _typeof(_iterator2.default) === "symbol" ? function (obj) {
return typeof obj === "undefined" ? "undefined" : _typeof(obj);
} : function (obj) {
return obj && typeof _symbol2.default === "function" && obj.constructor === _symbol2.default && obj !== _symbol2.default.prototype ? "symbol" : typeof obj === "undefined" ? "undefined" : _typeof(obj);
};
/***/ },
/* 29 */
/***/ function(module, exports, __webpack_require__) {
module.exports = { "default": __webpack_require__(30), __esModule: true };
/***/ },
/* 30 */
/***/ function(module, exports, __webpack_require__) {
__webpack_require__(31);
__webpack_require__(75);
module.exports = __webpack_require__(79).f('iterator');
/***/ },
/* 31 */
/***/ function(module, exports, __webpack_require__) {
'use strict';
var $at = __webpack_require__(32)(true);
// 21.1.3.27 String.prototype[@@iterator]()
__webpack_require__(35)(String, 'String', function(iterated){
this._t = String(iterated); // target
this._i = 0; // next index
// 21.1.5.2.1 %StringIteratorPrototype%.next()
}, function(){
var O = this._t
, index = this._i
, point;
if(index >= O.length)return {value: undefined, done: true};
point = $at(O, index);
this._i += point.length;
return {value: point, done: false};
});
/***/ },
/* 32 */
/***/ function(module, exports, __webpack_require__) {
var toInteger = __webpack_require__(33)
, defined = __webpack_require__(34);
// true -> String#at
// false -> String#codePointAt
module.exports = function(TO_STRING){
return function(that, pos){
var s = String(defined(that))
, i = toInteger(pos)
, l = s.length
, a, b;
if(i < 0 || i >= l)return TO_STRING ? '' : undefined;
a = s.charCodeAt(i);
return a < 0xd800 || a > 0xdbff || i + 1 === l || (b = s.charCodeAt(i + 1)) < 0xdc00 || b > 0xdfff
? TO_STRING ? s.charAt(i) : a
: TO_STRING ? s.slice(i, i + 2) : (a - 0xd800 << 10) + (b - 0xdc00) + 0x10000;
};
};
/***/ },
/* 33 */
/***/ function(module, exports) {
// 7.1.4 ToInteger
var ceil = Math.ceil
, floor = Math.floor;
module.exports = function(it){
return isNaN(it = +it) ? 0 : (it > 0 ? floor : ceil)(it);
};
/***/ },
/* 34 */
/***/ function(module, exports) {
// 7.2.1 RequireObjectCoercible(argument)
module.exports = function(it){
if(it == undefined)throw TypeError("Can't call method on " + it);
return it;
};
/***/ },
/* 35 */
/***/ function(module, exports, __webpack_require__) {
'use strict';
var LIBRARY = __webpack_require__(36)
, $export = __webpack_require__(37)
, redefine = __webpack_require__(52)
, hide = __webpack_require__(42)
, has = __webpack_require__(53)
, Iterators = __webpack_require__(54)
, $iterCreate = __webpack_require__(55)
, setToStringTag = __webpack_require__(71)
, getPrototypeOf = __webpack_require__(73)
, ITERATOR = __webpack_require__(72)('iterator')
, BUGGY = !([].keys && 'next' in [].keys()) // Safari has buggy iterators w/o `next`
, FF_ITERATOR = '@@iterator'
, KEYS = 'keys'
, VALUES = 'values';
var returnThis = function(){ return this; };
module.exports = function(Base, NAME, Constructor, next, DEFAULT, IS_SET, FORCED){
$iterCreate(Constructor, NAME, next);
var getMethod = function(kind){
if(!BUGGY && kind in proto)return proto[kind];
switch(kind){
case KEYS: return function keys(){ return new Constructor(this, kind); };
case VALUES: return function values(){ return new Constructor(this, kind); };
} return function entries(){ return new Constructor(this, kind); };
};
var TAG = NAME + ' Iterator'
, DEF_VALUES = DEFAULT == VALUES
, VALUES_BUG = false
, proto = Base.prototype
, $native = proto[ITERATOR] || proto[FF_ITERATOR] || DEFAULT && proto[DEFAULT]
, $default = $native || getMethod(DEFAULT)
, $entries = DEFAULT ? !DEF_VALUES ? $default : getMethod('entries') : undefined
, $anyNative = NAME == 'Array' ? proto.entries || $native : $native
, methods, key, IteratorPrototype;
// Fix native
if($anyNative){
IteratorPrototype = getPrototypeOf($anyNative.call(new Base));
if(IteratorPrototype !== Object.prototype){
// Set @@toStringTag to native iterators
setToStringTag(IteratorPrototype, TAG, true);
// fix for some old engines
if(!LIBRARY && !has(IteratorPrototype, ITERATOR))hide(IteratorPrototype, ITERATOR, returnThis);
}
}
// fix Array#{values, @@iterator}.name in V8 / FF
if(DEF_VALUES && $native && $native.name !== VALUES){
VALUES_BUG = true;
$default = function values(){ return $native.call(this); };
}
// Define iterator
if((!LIBRARY || FORCED) && (BUGGY || VALUES_BUG || !proto[ITERATOR])){
hide(proto, ITERATOR, $default);
}
// Plug for library
Iterators[NAME] = $default;
Iterators[TAG] = returnThis;
if(DEFAULT){
methods = {
values: DEF_VALUES ? $default : getMethod(VALUES),
keys: IS_SET ? $default : getMethod(KEYS),
entries: $entries
};
if(FORCED)for(key in methods){
if(!(key in proto))redefine(proto, key, methods[key]);
} else $export($export.P + $export.F * (BUGGY || VALUES_BUG), NAME, methods);
}
return methods;
};
/***/ },
/* 36 */
/***/ function(module, exports) {
module.exports = true;
/***/ },
/* 37 */
/***/ function(module, exports, __webpack_require__) {
var global = __webpack_require__(38)
, core = __webpack_require__(39)
, ctx = __webpack_require__(40)
, hide = __webpack_require__(42)
, PROTOTYPE = 'prototype';
var $export = function(type, name, source){
var IS_FORCED = type & $export.F
, IS_GLOBAL = type & $export.G
, IS_STATIC = type & $export.S
, IS_PROTO = type & $export.P
, IS_BIND = type & $export.B
, IS_WRAP = type & $export.W
, exports = IS_GLOBAL ? core : core[name] || (core[name] = {})
, expProto = exports[PROTOTYPE]
, target = IS_GLOBAL ? global : IS_STATIC ? global[name] : (global[name] || {})[PROTOTYPE]
, key, own, out;
if(IS_GLOBAL)source = name;
for(key in source){
// contains in native
own = !IS_FORCED && target && target[key] !== undefined;
if(own && key in exports)continue;
// export native or passed
out = own ? target[key] : source[key];
// prevent global pollution for namespaces
exports[key] = IS_GLOBAL && typeof target[key] != 'function' ? source[key]
// bind timers to global for call from export context
: IS_BIND && own ? ctx(out, global)
// wrap global constructors for prevent change them in library
: IS_WRAP && target[key] == out ? (function(C){
var F = function(a, b, c){
if(this instanceof C){
switch(arguments.length){
case 0: return new C;
case 1: return new C(a);
case 2: return new C(a, b);
} return new C(a, b, c);
} return C.apply(this, arguments);
};
F[PROTOTYPE] = C[PROTOTYPE];
return F;
// make static versions for prototype methods
})(out) : IS_PROTO && typeof out == 'function' ? ctx(Function.call, out) : out;
// export proto methods to core.%CONSTRUCTOR%.methods.%NAME%
if(IS_PROTO){
(exports.virtual || (exports.virtual = {}))[key] = out;
// export proto methods to core.%CONSTRUCTOR%.prototype.%NAME%
if(type & $export.R && expProto && !expProto[key])hide(expProto, key, out);
}
}
};
// type bitmap
$export.F = 1; // forced
$export.G = 2; // global
$export.S = 4; // static
$export.P = 8; // proto
$export.B = 16; // bind
$export.W = 32; // wrap
$export.U = 64; // safe
$export.R = 128; // real proto method for `library`
module.exports = $export;
/***/ },
/* 38 */
/***/ function(module, exports) {
// https://github.com/zloirock/core-js/issues/86#issuecomment-115759028
var global = module.exports = typeof window != 'undefined' && window.Math == Math
? window : typeof self != 'undefined' && self.Math == Math ? self : Function('return this')();
if(typeof __g == 'number')__g = global; // eslint-disable-line no-undef
/***/ },
/* 39 */
/***/ function(module, exports) {
var core = module.exports = {version: '2.4.0'};
if(typeof __e == 'number')__e = core; // eslint-disable-line no-undef
/***/ },
/* 40 */
/***/ function(module, exports, __webpack_require__) {
// optional / simple context binding
var aFunction = __webpack_require__(41);
module.exports = function(fn, that, length){
aFunction(fn);
if(that === undefined)return fn;
switch(length){
case 1: return function(a){
return fn.call(that, a);
};
case 2: return function(a, b){
return fn.call(that, a, b);
};
case 3: return function(a, b, c){
return fn.call(that, a, b, c);
};
}
return function(/* ...args */){
return fn.apply(that, arguments);
};
};
/***/ },
/* 41 */
/***/ function(module, exports) {
module.exports = function(it){
if(typeof it != 'function')throw TypeError(it + ' is not a function!');
return it;
};
/***/ },
/* 42 */
/***/ function(module, exports, __webpack_require__) {
var dP = __webpack_require__(43)
, createDesc = __webpack_require__(51);
module.exports = __webpack_require__(47) ? function(object, key, value){
return dP.f(object, key, createDesc(1, value));
} : function(object, key, value){
object[key] = value;
return object;
};
/***/ },
/* 43 */
/***/ function(module, exports, __webpack_require__) {
var anObject = __webpack_require__(44)
, IE8_DOM_DEFINE = __webpack_require__(46)
, toPrimitive = __webpack_require__(50)
, dP = Object.defineProperty;
exports.f = __webpack_require__(47) ? Object.defineProperty : function defineProperty(O, P, Attributes){
anObject(O);
P = toPrimitive(P, true);
anObject(Attributes);
if(IE8_DOM_DEFINE)try {
return dP(O, P, Attributes);
} catch(e){ /* empty */ }
if('get' in Attributes || 'set' in Attributes)throw TypeError('Accessors not supported!');
if('value' in Attributes)O[P] = Attributes.value;
return O;
};
/***/ },
/* 44 */
/***/ function(module, exports, __webpack_require__) {
var isObject = __webpack_require__(45);
module.exports = function(it){
if(!isObject(it))throw TypeError(it + ' is not an object!');
return it;
};
/***/ },
/* 45 */
/***/ function(module, exports) {
module.exports = function(it){
return typeof it === 'object' ? it !== null : typeof it === 'function';
};
/***/ },
/* 46 */
/***/ function(module, exports, __webpack_require__) {
module.exports = !__webpack_require__(47) && !__webpack_require__(48)(function(){
return Object.defineProperty(__webpack_require__(49)('div'), 'a', {get: function(){ return 7; }}).a != 7;
});
/***/ },
/* 47 */
/***/ function(module, exports, __webpack_require__) {
// Thank's IE8 for his funny defineProperty
module.exports = !__webpack_require__(48)(function(){
return Object.defineProperty({}, 'a', {get: function(){ return 7; }}).a != 7;
});
/***/ },
/* 48 */
/***/ function(module, exports) {
module.exports = function(exec){
try {
return !!exec();
} catch(e){
return true;
}
};
/***/ },
/* 49 */
/***/ function(module, exports, __webpack_require__) {
var isObject = __webpack_require__(45)
, document = __webpack_require__(38).document
// in old IE typeof document.createElement is 'object'
, is = isObject(document) && isObject(document.createElement);
module.exports = function(it){
return is ? document.createElement(it) : {};
};
/***/ },
/* 50 */
/***/ function(module, exports, __webpack_require__) {
// 7.1.1 ToPrimitive(input [, PreferredType])
var isObject = __webpack_require__(45);
// instead of the ES6 spec version, we didn't implement @@toPrimitive case
// and the second argument - flag - preferred type is a string
module.exports = function(it, S){
if(!isObject(it))return it;
var fn, val;
if(S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it)))return val;
if(typeof (fn = it.valueOf) == 'function' && !isObject(val = fn.call(it)))return val;
if(!S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it)))return val;
throw TypeError("Can't convert object to primitive value");
};
/***/ },
/* 51 */
/***/ function(module, exports) {
module.exports = function(bitmap, value){
return {
enumerable : !(bitmap & 1),
configurable: !(bitmap & 2),
writable : !(bitmap & 4),
value : value
};
};
/***/ },
/* 52 */
/***/ function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(42);
/***/ },
/* 53 */
/***/ function(module, exports) {
var hasOwnProperty = {}.hasOwnProperty;
module.exports = function(it, key){
return hasOwnProperty.call(it, key);
};
/***/ },
/* 54 */
/***/ function(module, exports) {
module.exports = {};
/***/ },
/* 55 */
/***/ function(module, exports, __webpack_require__) {
'use strict';
var create = __webpack_require__(56)
, descriptor = __webpack_require__(51)
, setToStringTag = __webpack_require__(71)
, IteratorPrototype = {};
// 25.1.2.1.1 %IteratorPrototype%[@@iterator]()
__webpack_require__(42)(IteratorPrototype, __webpack_require__(72)('iterator'), function(){ return this; });
module.exports = function(Constructor, NAME, next){
Constructor.prototype = create(IteratorPrototype, {next: descriptor(1, next)});
setToStringTag(Constructor, NAME + ' Iterator');
};
/***/ },
/* 56 */
/***/ function(module, exports, __webpack_require__) {
// 19.1.2.2 / 15.2.3.5 Object.create(O [, Properties])
var anObject = __webpack_require__(44)
, dPs = __webpack_require__(57)
, enumBugKeys = __webpack_require__(69)
, IE_PROTO = __webpack_require__(66)('IE_PROTO')
, Empty = function(){ /* empty */ }
, PROTOTYPE = 'prototype';
// Create object with fake `null` prototype: use iframe Object with cleared prototype
var createDict = function(){
// Thrash, waste and sodomy: IE GC bug
var iframe = __webpack_require__(49)('iframe')
, i = enumBugKeys.length
, lt = '<'
, gt = '>'
, iframeDocument;
iframe.style.display = 'none';
__webpack_require__(70).appendChild(iframe);
iframe.src = 'javascript:'; // eslint-disable-line no-script-url
// createDict = iframe.contentWindow.Object;
// html.removeChild(iframe);
iframeDocument = iframe.contentWindow.document;
iframeDocument.open();
iframeDocument.write(lt + 'script' + gt + 'document.F=Object' + lt + '/script' + gt);
iframeDocument.close();
createDict = iframeDocument.F;
while(i--)delete createDict[PROTOTYPE][enumBugKeys[i]];
return createDict();
};
module.exports = Object.create || function create(O, Properties){
var result;
if(O !== null){
Empty[PROTOTYPE] = anObject(O);
result = new Empty;
Empty[PROTOTYPE] = null;
// add "__proto__" for Object.getPrototypeOf polyfill
result[IE_PROTO] = O;
} else result = createDict();
return Properties === undefined ? result : dPs(result, Properties);
};
/***/ },
/* 57 */
/***/ function(module, exports, __webpack_require__) {
var dP = __webpack_require__(43)
, anObject = __webpack_require__(44)
, getKeys = __webpack_require__(58);
module.exports = __webpack_require__(47) ? Object.defineProperties : function defineProperties(O, Properties){
anObject(O);
var keys = getKeys(Properties)
, length = keys.length
, i = 0
, P;
while(length > i)dP.f(O, P = keys[i++], Properties[P]);
return O;
};
/***/ },
/* 58 */
/***/ function(module, exports, __webpack_require__) {
// 19.1.2.14 / 15.2.3.14 Object.keys(O)
var $keys = __webpack_require__(59)
, enumBugKeys = __webpack_require__(69);
module.exports = Object.keys || function keys(O){
return $keys(O, enumBugKeys);
};
/***/ },
/* 59 */
/***/ function(module, exports, __webpack_require__) {
var has = __webpack_require__(53)
, toIObject = __webpack_require__(60)
, arrayIndexOf = __webpack_require__(63)(false)
, IE_PROTO = __webpack_require__(66)('IE_PROTO');
module.exports = function(object, names){
var O = toIObject(object)
, i = 0
, result = []
, key;
for(key in O)if(key != IE_PROTO)has(O, key) && result.push(key);
// Don't enum bug & hidden keys
while(names.length > i)if(has(O, key = names[i++])){
~arrayIndexOf(result, key) || result.push(key);
}
return result;
};
/***/ },
/* 60 */
/***/ function(module, exports, __webpack_require__) {
// to indexed object, toObject with fallback for non-array-like ES3 strings
var IObject = __webpack_require__(61)
, defined = __webpack_require__(34);
module.exports = function(it){
return IObject(defined(it));
};
/***/ },
/* 61 */
/***/ function(module, exports, __webpack_require__) {
// fallback for non-array-like ES3 and non-enumerable old V8 strings
var cof = __webpack_require__(62);
module.exports = Object('z').propertyIsEnumerable(0) ? Object : function(it){
return cof(it) == 'String' ? it.split('') : Object(it);
};
/***/ },
/* 62 */
/***/ function(module, exports) {
var toString = {}.toString;
module.exports = function(it){
return toString.call(it).slice(8, -1);
};
/***/ },
/* 63 */
/***/ function(module, exports, __webpack_require__) {
// false -> Array#indexOf
// true -> Array#includes
var toIObject = __webpack_require__(60)
, toLength = __webpack_require__(64)
, toIndex = __webpack_require__(65);
module.exports = function(IS_INCLUDES){
return function($this, el, fromIndex){
var O = toIObject($this)
, length = toLength(O.length)
, index = toIndex(fromIndex, length)
, value;
// Array#includes uses SameValueZero equality algorithm
if(IS_INCLUDES && el != el)while(length > index){
value = O[index++];
if(value != value)return true;
// Array#toIndex ignores holes, Array#includes - not
} else for(;length > index; index++)if(IS_INCLUDES || index in O){
if(O[index] === el)return IS_INCLUDES || index || 0;
} return !IS_INCLUDES && -1;
};
};
/***/ },
/* 64 */
/***/ function(module, exports, __webpack_require__) {
// 7.1.15 ToLength
var toInteger = __webpack_require__(33)
, min = Math.min;
module.exports = function(it){
return it > 0 ? min(toInteger(it), 0x1fffffffffffff) : 0; // pow(2, 53) - 1 == 9007199254740991
};
/***/ },
/* 65 */
/***/ function(module, exports, __webpack_require__) {
var toInteger = __webpack_require__(33)
, max = Math.max
, min = Math.min;
module.exports = function(index, length){
index = toInteger(index);
return index < 0 ? max(index + length, 0) : min(index, length);
};
/***/ },
/* 66 */
/***/ function(module, exports, __webpack_require__) {
var shared = __webpack_require__(67)('keys')
, uid = __webpack_require__(68);
module.exports = function(key){
return shared[key] || (shared[key] = uid(key));
};
/***/ },
/* 67 */
/***/ function(module, exports, __webpack_require__) {
var global = __webpack_require__(38)
, SHARED = '__core-js_shared__'
, store = global[SHARED] || (global[SHARED] = {});
module.exports = function(key){
return store[key] || (store[key] = {});
};
/***/ },
/* 68 */
/***/ function(module, exports) {
var id = 0
, px = Math.random();
module.exports = function(key){
return 'Symbol('.concat(key === undefined ? '' : key, ')_', (++id + px).toString(36));
};
/***/ },
/* 69 */
/***/ function(module, exports) {
// IE 8- don't enum bug keys
module.exports = (
'constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf'
).split(',');
/***/ },
/* 70 */
/***/ function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(38).document && document.documentElement;
/***/ },
/* 71 */
/***/ function(module, exports, __webpack_require__) {
var def = __webpack_require__(43).f
, has = __webpack_require__(53)
, TAG = __webpack_require__(72)('toStringTag');
module.exports = function(it, tag, stat){
if(it && !has(it = stat ? it : it.prototype, TAG))def(it, TAG, {configurable: true, value: tag});
};
/***/ },
/* 72 */
/***/ function(module, exports, __webpack_require__) {
var store = __webpack_require__(67)('wks')
, uid = __webpack_require__(68)
, Symbol = __webpack_require__(38).Symbol
, USE_SYMBOL = typeof Symbol == 'function';
var $exports = module.exports = function(name){
return store[name] || (store[name] =
USE_SYMBOL && Symbol[name] || (USE_SYMBOL ? Symbol : uid)('Symbol.' + name));
};
$exports.store = store;
/***/ },
/* 73 */
/***/ function(module, exports, __webpack_require__) {
// 19.1.2.9 / 15.2.3.2 Object.getPrototypeOf(O)
var has = __webpack_require__(53)
, toObject = __webpack_require__(74)
, IE_PROTO = __webpack_require__(66)('IE_PROTO')
, ObjectProto = Object.prototype;
module.exports = Object.getPrototypeOf || function(O){
O = toObject(O);
if(has(O, IE_PROTO))return O[IE_PROTO];
if(typeof O.constructor == 'function' && O instanceof O.constructor){
return O.constructor.prototype;
} return O instanceof Object ? ObjectProto : null;
};
/***/ },
/* 74 */
/***/ function(module, exports, __webpack_require__) {
// 7.1.13 ToObject(argument)
var defined = __webpack_require__(34);
module.exports = function(it){
return Object(defined(it));
};
/***/ },
/* 75 */
/***/ function(module, exports, __webpack_require__) {
__webpack_require__(76);
var global = __webpack_require__(38)
, hide = __webpack_require__(42)
, Iterators = __webpack_require__(54)
, TO_STRING_TAG = __webpack_require__(72)('toStringTag');
for(var collections = ['NodeList', 'DOMTokenList', 'MediaList', 'StyleSheetList', 'CSSRuleList'], i = 0; i < 5; i++){
var NAME = collections[i]
, Collection = global[NAME]
, proto = Collection && Collection.prototype;
if(proto && !proto[TO_STRING_TAG])hide(proto, TO_STRING_TAG, NAME);
Iterators[NAME] = Iterators.Array;
}
/***/ },
/* 76 */
/***/ function(module, exports, __webpack_require__) {
'use strict';
var addToUnscopables = __webpack_require__(77)
, step = __webpack_require__(78)
, Iterators = __webpack_require__(54)
, toIObject = __webpack_require__(60);
// 22.1.3.4 Array.prototype.entries()
// 22.1.3.13 Array.prototype.keys()
// 22.1.3.29 Array.prototype.values()
// 22.1.3.30 Array.prototype[@@iterator]()
module.exports = __webpack_require__(35)(Array, 'Array', function(iterated, kind){
this._t = toIObject(iterated); // target
this._i = 0; // next index
this._k = kind; // kind
// 22.1.5.2.1 %ArrayIteratorPrototype%.next()
}, function(){
var O = this._t
, kind = this._k
, index = this._i++;
if(!O || index >= O.length){
this._t = undefined;
return step(1);
}
if(kind == 'keys' )return step(0, index);
if(kind == 'values')return step(0, O[index]);
return step(0, [index, O[index]]);
}, 'values');
// argumentsList[@@iterator] is %ArrayProto_values% (9.4.4.6, 9.4.4.7)
Iterators.Arguments = Iterators.Array;
addToUnscopables('keys');
addToUnscopables('values');
addToUnscopables('entries');
/***/ },
/* 77 */
/***/ function(module, exports) {
module.exports = function(){ /* empty */ };
/***/ },
/* 78 */
/***/ function(module, exports) {
module.exports = function(done, value){
return {value: value, done: !!done};
};
/***/ },
/* 79 */
/***/ function(module, exports, __webpack_require__) {
exports.f = __webpack_require__(72);
/***/ },
/* 80 */
/***/ function(module, exports, __webpack_require__) {
module.exports = { "default": __webpack_require__(81), __esModule: true };
/***/ },
/* 81 */
/***/ function(module, exports, __webpack_require__) {
__webpack_require__(82);
__webpack_require__(93);
__webpack_require__(94);
__webpack_require__(95);
module.exports = __webpack_require__(39).Symbol;
/***/ },
/* 82 */
/***/ function(module, exports, __webpack_require__) {
'use strict';
// ECMAScript 6 symbols shim
var global = __webpack_require__(38)
, has = __webpack_require__(53)
, DESCRIPTORS = __webpack_require__(47)
, $export = __webpack_require__(37)
, redefine = __webpack_require__(52)
, META = __webpack_require__(83).KEY
, $fails = __webpack_require__(48)
, shared = __webpack_require__(67)
, setToStringTag = __webpack_require__(71)
, uid = __webpack_require__(68)
, wks = __webpack_require__(72)
, wksExt = __webpack_require__(79)
, wksDefine = __webpack_require__(84)
, keyOf = __webpack_require__(85)
, enumKeys = __webpack_require__(86)
, isArray = __webpack_require__(89)
, anObject = __webpack_require__(44)
, toIObject = __webpack_require__(60)
, toPrimitive = __webpack_require__(50)
, createDesc = __webpack_require__(51)
, _create = __webpack_require__(56)
, gOPNExt = __webpack_require__(90)
, $GOPD = __webpack_require__(92)
, $DP = __webpack_require__(43)
, $keys = __webpack_require__(58)
, gOPD = $GOPD.f
, dP = $DP.f
, gOPN = gOPNExt.f
, $Symbol = global.Symbol
, $JSON = global.JSON
, _stringify = $JSON && $JSON.stringify
, PROTOTYPE = 'prototype'
, HIDDEN = wks('_hidden')
, TO_PRIMITIVE = wks('toPrimitive')
, isEnum = {}.propertyIsEnumerable
, SymbolRegistry = shared('symbol-registry')
, AllSymbols = shared('symbols')
, OPSymbols = shared('op-symbols')
, ObjectProto = Object[PROTOTYPE]
, USE_NATIVE = typeof $Symbol == 'function'
, QObject = global.QObject;
// Don't use setters in Qt Script, https://github.com/zloirock/core-js/issues/173
var setter = !QObject || !QObject[PROTOTYPE] || !QObject[PROTOTYPE].findChild;
// fallback for old Android, https://code.google.com/p/v8/issues/detail?id=687
var setSymbolDesc = DESCRIPTORS && $fails(function(){
return _create(dP({}, 'a', {
get: function(){ return dP(this, 'a', {value: 7}).a; }
})).a != 7;
}) ? function(it, key, D){
var protoDesc = gOPD(ObjectProto, key);
if(protoDesc)delete ObjectProto[key];
dP(it, key, D);
if(protoDesc && it !== ObjectProto)dP(ObjectProto, key, protoDesc);
} : dP;
var wrap = function(tag){
var sym = AllSymbols[tag] = _create($Symbol[PROTOTYPE]);
sym._k = tag;
return sym;
};
var isSymbol = USE_NATIVE && typeof $Symbol.iterator == 'symbol' ? function(it){
return typeof it == 'symbol';
} : function(it){
return it instanceof $Symbol;
};
var $defineProperty = function defineProperty(it, key, D){
if(it === ObjectProto)$defineProperty(OPSymbols, key, D);
anObject(it);
key = toPrimitive(key, true);
anObject(D);
if(has(AllSymbols, key)){
if(!D.enumerable){
if(!has(it, HIDDEN))dP(it, HIDDEN, createDesc(1, {}));
it[HIDDEN][key] = true;
} else {
if(has(it, HIDDEN) && it[HIDDEN][key])it[HIDDEN][key] = false;
D = _create(D, {enumerable: createDesc(0, false)});
} return setSymbolDesc(it, key, D);
} return dP(it, key, D);
};
var $defineProperties = function defineProperties(it, P){
anObject(it);
var keys = enumKeys(P = toIObject(P))
, i = 0
, l = keys.length
, key;
while(l > i)$defineProperty(it, key = keys[i++], P[key]);
return it;
};
var $create = function create(it, P){
return P === undefined ? _create(it) : $defineProperties(_create(it), P);
};
var $propertyIsEnumerable = function propertyIsEnumerable(key){
var E = isEnum.call(this, key = toPrimitive(key, true));
if(this === ObjectProto && has(AllSymbols, key) && !has(OPSymbols, key))return false;
return E || !has(this, key) || !has(AllSymbols, key) || has(this, HIDDEN) && this[HIDDEN][key] ? E : true;
};
var $getOwnPropertyDescriptor = function getOwnPropertyDescriptor(it, key){
it = toIObject(it);
key = toPrimitive(key, true);
if(it === ObjectProto && has(AllSymbols, key) && !has(OPSymbols, key))return;
var D = gOPD(it, key);
if(D && has(AllSymbols, key) && !(has(it, HIDDEN) && it[HIDDEN][key]))D.enumerable = true;
return D;
};
var $getOwnPropertyNames = function getOwnPropertyNames(it){
var names = gOPN(toIObject(it))
, result = []
, i = 0
, key;
while(names.length > i){
if(!has(AllSymbols, key = names[i++]) && key != HIDDEN && key != META)result.push(key);
} return result;
};
var $getOwnPropertySymbols = function getOwnPropertySymbols(it){
var IS_OP = it === ObjectProto
, names = gOPN(IS_OP ? OPSymbols : toIObject(it))
, result = []
, i = 0
, key;
while(names.length > i){
if(has(AllSymbols, key = names[i++]) && (IS_OP ? has(ObjectProto, key) : true))result.push(AllSymbols[key]);
} return result;
};
// 19.4.1.1 Symbol([description])
if(!USE_NATIVE){
$Symbol = function Symbol(){
if(this instanceof $Symbol)throw TypeError('Symbol is not a constructor!');
var tag = uid(arguments.length > 0 ? arguments[0] : undefined);
var $set = function(value){
if(this === ObjectProto)$set.call(OPSymbols, value);
if(has(this, HIDDEN) && has(this[HIDDEN], tag))this[HIDDEN][tag] = false;
setSymbolDesc(this, tag, createDesc(1, value));
};
if(DESCRIPTORS && setter)setSymbolDesc(ObjectProto, tag, {configurable: true, set: $set});
return wrap(tag);
};
redefine($Symbol[PROTOTYPE], 'toString', function toString(){
return this._k;
});
$GOPD.f = $getOwnPropertyDescriptor;
$DP.f = $defineProperty;
__webpack_require__(91).f = gOPNExt.f = $getOwnPropertyNames;
__webpack_require__(88).f = $propertyIsEnumerable;
__webpack_require__(87).f = $getOwnPropertySymbols;
if(DESCRIPTORS && !__webpack_require__(36)){
redefine(ObjectProto, 'propertyIsEnumerable', $propertyIsEnumerable, true);
}
wksExt.f = function(name){
return wrap(wks(name));
}
}
$export($export.G + $export.W + $export.F * !USE_NATIVE, {Symbol: $Symbol});
for(var symbols = (
// 19.4.2.2, 19.4.2.3, 19.4.2.4, 19.4.2.6, 19.4.2.8, 19.4.2.9, 19.4.2.10, 19.4.2.11, 19.4.2.12, 19.4.2.13, 19.4.2.14
'hasInstance,isConcatSpreadable,iterator,match,replace,search,species,split,toPrimitive,toStringTag,unscopables'
).split(','), i = 0; symbols.length > i; )wks(symbols[i++]);
for(var symbols = $keys(wks.store), i = 0; symbols.length > i; )wksDefine(symbols[i++]);
$export($export.S + $export.F * !USE_NATIVE, 'Symbol', {
// 19.4.2.1 Symbol.for(key)
'for': function(key){
return has(SymbolRegistry, key += '')
? SymbolRegistry[key]
: SymbolRegistry[key] = $Symbol(key);
},
// 19.4.2.5 Symbol.keyFor(sym)
keyFor: function keyFor(key){
if(isSymbol(key))return keyOf(SymbolRegistry, key);
throw TypeError(key + ' is not a symbol!');
},
useSetter: function(){ setter = true; },
useSimple: function(){ setter = false; }
});
$export($export.S + $export.F * !USE_NATIVE, 'Object', {
// 19.1.2.2 Object.create(O [, Properties])
create: $create,
// 19.1.2.4 Object.defineProperty(O, P, Attributes)
defineProperty: $defineProperty,
// 19.1.2.3 Object.defineProperties(O, Properties)
defineProperties: $defineProperties,
// 19.1.2.6 Object.getOwnPropertyDescriptor(O, P)
getOwnPropertyDescriptor: $getOwnPropertyDescriptor,
// 19.1.2.7 Object.getOwnPropertyNames(O)
getOwnPropertyNames: $getOwnPropertyNames,
// 19.1.2.8 Object.getOwnPropertySymbols(O)
getOwnPropertySymbols: $getOwnPropertySymbols
});
// 24.3.2 JSON.stringify(value [, replacer [, space]])
$JSON && $export($export.S + $export.F * (!USE_NATIVE || $fails(function(){
var S = $Symbol();
// MS Edge converts symbol values to JSON as {}
// WebKit converts symbol values to JSON as null
// V8 throws on boxed symbols
return _stringify([S]) != '[null]' || _stringify({a: S}) != '{}' || _stringify(Object(S)) != '{}';
})), 'JSON', {
stringify: function stringify(it){
if(it === undefined || isSymbol(it))return; // IE8 returns string on undefined
var args = [it]
, i = 1
, replacer, $replacer;
while(arguments.length > i)args.push(arguments[i++]);
replacer = args[1];
if(typeof replacer == 'function')$replacer = replacer;
if($replacer || !isArray(replacer))replacer = function(key, value){
if($replacer)value = $replacer.call(this, key, value);
if(!isSymbol(value))return value;
};
args[1] = replacer;
return _stringify.apply($JSON, args);
}
});
// 19.4.3.4 Symbol.prototype[@@toPrimitive](hint)
$Symbol[PROTOTYPE][TO_PRIMITIVE] || __webpack_require__(42)($Symbol[PROTOTYPE], TO_PRIMITIVE, $Symbol[PROTOTYPE].valueOf);
// 19.4.3.5 Symbol.prototype[@@toStringTag]
setToStringTag($Symbol, 'Symbol');
// 20.2.1.9 Math[@@toStringTag]
setToStringTag(Math, 'Math', true);
// 24.3.3 JSON[@@toStringTag]
setToStringTag(global.JSON, 'JSON', true);
/***/ },
/* 83 */
/***/ function(module, exports, __webpack_require__) {
var META = __webpack_require__(68)('meta')
, isObject = __webpack_require__(45)
, has = __webpack_require__(53)
, setDesc = __webpack_require__(43).f
, id = 0;
var isExtensible = Object.isExtensible || function(){
return true;
};
var FREEZE = !__webpack_require__(48)(function(){
return isExtensible(Object.preventExtensions({}));
});
var setMeta = function(it){
setDesc(it, META, {value: {
i: 'O' + ++id, // object ID
w: {} // weak collections IDs
}});
};
var fastKey = function(it, create){
// return primitive with prefix<|fim▁hole|> if(!isExtensible(it))return 'F';
// not necessary to add metadata
if(!create)return 'E';
// add missing metadata
setMeta(it);
// return object ID
} return it[META].i;
};
var getWeak = function(it, create){
if(!has(it, META)){
// can't set metadata to uncaught frozen object
if(!isExtensible(it))return true;
// not necessary to add metadata
if(!create)return false;
// add missing metadata
setMeta(it);
// return hash weak collections IDs
} return it[META].w;
};
// add metadata on freeze-family methods calling
var onFreeze = function(it){
if(FREEZE && meta.NEED && isExtensible(it) && !has(it, META))setMeta(it);
return it;
};
var meta = module.exports = {
KEY: META,
NEED: false,
fastKey: fastKey,
getWeak: getWeak,
onFreeze: onFreeze
};
/***/ },
/* 84 */
/***/ function(module, exports, __webpack_require__) {
var global = __webpack_require__(38)
, core = __webpack_require__(39)
, LIBRARY = __webpack_require__(36)
, wksExt = __webpack_require__(79)
, defineProperty = __webpack_require__(43).f;
module.exports = function(name){
var $Symbol = core.Symbol || (core.Symbol = LIBRARY ? {} : global.Symbol || {});
if(name.charAt(0) != '_' && !(name in $Symbol))defineProperty($Symbol, name, {value: wksExt.f(name)});
};
/***/ },
/* 85 */
/***/ function(module, exports, __webpack_require__) {
var getKeys = __webpack_require__(58)
, toIObject = __webpack_require__(60);
module.exports = function(object, el){
var O = toIObject(object)
, keys = getKeys(O)
, length = keys.length
, index = 0
, key;
while(length > index)if(O[key = keys[index++]] === el)return key;
};
/***/ },
/* 86 */
/***/ function(module, exports, __webpack_require__) {
// all enumerable object keys, includes symbols
var getKeys = __webpack_require__(58)
, gOPS = __webpack_require__(87)
, pIE = __webpack_require__(88);
module.exports = function(it){
var result = getKeys(it)
, getSymbols = gOPS.f;
if(getSymbols){
var symbols = getSymbols(it)
, isEnum = pIE.f
, i = 0
, key;
while(symbols.length > i)if(isEnum.call(it, key = symbols[i++]))result.push(key);
} return result;
};
/***/ },
/* 87 */
/***/ function(module, exports) {
exports.f = Object.getOwnPropertySymbols;
/***/ },
/* 88 */
/***/ function(module, exports) {
exports.f = {}.propertyIsEnumerable;
/***/ },
/* 89 */
/***/ function(module, exports, __webpack_require__) {
// 7.2.2 IsArray(argument)
var cof = __webpack_require__(62);
module.exports = Array.isArray || function isArray(arg){
return cof(arg) == 'Array';
};
/***/ },
/* 90 */
/***/ function(module, exports, __webpack_require__) {
// fallback for IE11 buggy Object.getOwnPropertyNames with iframe and window
var toIObject = __webpack_require__(60)
, gOPN = __webpack_require__(91).f
, toString = {}.toString;
var windowNames = typeof window == 'object' && window && Object.getOwnPropertyNames
? Object.getOwnPropertyNames(window) : [];
var getWindowNames = function(it){
try {
return gOPN(it);
} catch(e){
return windowNames.slice();
}
};
module.exports.f = function getOwnPropertyNames(it){
return windowNames && toString.call(it) == '[object Window]' ? getWindowNames(it) : gOPN(toIObject(it));
};
/***/ },
/* 91 */
/***/ function(module, exports, __webpack_require__) {
// 19.1.2.7 / 15.2.3.4 Object.getOwnPropertyNames(O)
var $keys = __webpack_require__(59)
, hiddenKeys = __webpack_require__(69).concat('length', 'prototype');
exports.f = Object.getOwnPropertyNames || function getOwnPropertyNames(O){
return $keys(O, hiddenKeys);
};
/***/ },
/* 92 */
/***/ function(module, exports, __webpack_require__) {
var pIE = __webpack_require__(88)
, createDesc = __webpack_require__(51)
, toIObject = __webpack_require__(60)
, toPrimitive = __webpack_require__(50)
, has = __webpack_require__(53)
, IE8_DOM_DEFINE = __webpack_require__(46)
, gOPD = Object.getOwnPropertyDescriptor;
exports.f = __webpack_require__(47) ? gOPD : function getOwnPropertyDescriptor(O, P){
O = toIObject(O);
P = toPrimitive(P, true);
if(IE8_DOM_DEFINE)try {
return gOPD(O, P);
} catch(e){ /* empty */ }
if(has(O, P))return createDesc(!pIE.f.call(O, P), O[P]);
};
/***/ },
/* 93 */
/***/ function(module, exports) {
/***/ },
/* 94 */
/***/ function(module, exports, __webpack_require__) {
__webpack_require__(84)('asyncIterator');
/***/ },
/* 95 */
/***/ function(module, exports, __webpack_require__) {
__webpack_require__(84)('observable');
/***/ },
/* 96 */
/***/ function(module, exports) {
/**
* after : after(el, newEl)
* Inserts a new element `newEl` just after `el`.
*
* var after = require('dom101/after');
* var newNode = document.createElement('div');
* var button = document.querySelector('#submit');
*
* after(button, newNode);
*/
function after (el, newEl) {
if (typeof newEl === 'string') {
return el.insertAdjacentHTML('afterend', newEl);
} else {
var next = el.nextSibling;
if (next) {
return el.parentNode.insertBefore(newEl, next);
} else {
return el.parentNode.appendChild(newEl);
}
}
}
module.exports = after;
/***/ },
/* 97 */
/***/ function(module, exports) {
'use strict';
var browser = {
versions: function () {
var u = window.navigator.userAgent;
return {
      trident: u.indexOf('Trident') > -1, // IE (Trident) engine
      presto: u.indexOf('Presto') > -1, // Opera (Presto) engine
      webKit: u.indexOf('AppleWebKit') > -1, // Apple/Google (WebKit) engine
      gecko: u.indexOf('Gecko') > -1 && u.indexOf('KHTML') == -1, // Firefox (Gecko) engine
      mobile: !!u.match(/AppleWebKit.*Mobile.*/), // mobile device
      ios: !!u.match(/\(i[^;]+;( U;)? CPU.+Mac OS X/), // iOS device
      android: u.indexOf('Android') > -1 || u.indexOf('Linux') > -1, // Android device or UC browser
      iPhone: u.indexOf('iPhone') > -1 || u.indexOf('Mac') > -1, // iPhone or Android QQ browser
      iPad: u.indexOf('iPad') > -1, // iPad
      webApp: u.indexOf('Safari') == -1, // web app (no browser header/footer UI)
      weixin: u.indexOf('MicroMessenger') == -1 // WeChat in-app browser
};
}()
};
module.exports = browser;
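  // Hedged usage sketch (hypothetical consumer code, not part of the original module):
  //   var browser = require('./browser');
  //   if (browser.versions.mobile && browser.versions.ios) {
  //     document.body.classList.add('is-ios'); // e.g. apply iOS-specific styling
  //   }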
/***/ },
/* 98 */
/***/ function(module, exports) {
'use strict';
function init() {
    // Hexo's pagination does not support this, so prettify the page nav manually
var $nav = document.querySelector('#page-nav');
if ($nav && !document.querySelector('#page-nav .extend.prev')) {
$nav.innerHTML = '<a class="extend prev disabled" rel="prev">« Prev</a>' + $nav.innerHTML;
}
if ($nav && !document.querySelector('#page-nav .extend.next')) {
$nav.innerHTML = $nav.innerHTML + '<a class="extend next disabled" rel="next">Next »</a>';
}
    // Open article links in a new window
if (yiliaConfig && yiliaConfig.open_in_new) {
var $a = document.querySelectorAll('.article-entry a:not(.article-more-a)');
$a.forEach(function ($em) {
$em.setAttribute('target', '_blank');
});
}
    // Unescape the "about me" content
var $aboutme = document.querySelector('#js-aboutme');
    if ($aboutme) {
$aboutme.innerHTML = $aboutme.innerText;
}
}
module.exports = {
init: init
};
/***/ }
/******/ ]);<|fim▁end|>
|
if(!isObject(it))return typeof it == 'symbol' ? it : (typeof it == 'string' ? 'S' : 'P') + it;
if(!has(it, META)){
// can't set metadata to uncaught frozen object
|
<|file_name|>test_create_programs.py<|end_file_name|><|fim▁begin|># Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import json
import os<|fim▁hole|>from dataflow.core.utterance_tokenizer import UtteranceTokenizer
from dataflow.multiwoz.create_programs import create_programs_for_trade_dialogue
from dataflow.multiwoz.salience_model import DummySalienceModel, VanillaSalienceModel
def load_test_trade_dialogues(data_dir: str) -> Iterator[Dict[str, Any]]:
"""Returns selected test dialogues.
To extract a dialogue from the TRADE processed json file:
$ jq '.[] | select (.dialogue_idx == "MUL1626.json")' dev_dials.json
"""
multiwoz_2_1_dir = os.path.join(data_dir, "multiwoz_2_1")
for dialogue_id in [
"MUL1626.json",
"PMUL3166.json",
"MUL2258.json",
"MUL2199.json",
"MUL2096.json",
"PMUL3470.json",
"PMUL4478.json",
]:
trade_dialogue_file = os.path.join(multiwoz_2_1_dir, dialogue_id)
trade_dialogue = json.load(open(trade_dialogue_file))
yield trade_dialogue
def test_create_programs_with_dummy_salience_model(data_dir: str):
"""Tests creating programs with a dummy salience model."""
utterance_tokenizer = UtteranceTokenizer()
salience_model = DummySalienceModel()
expected_num_refer_calls = {
"MUL1626.json": 0,
"PMUL3166.json": 0,
"MUL2258.json": 0,
"MUL2199.json": 0,
"MUL2096.json": 0,
"PMUL3470.json": 0,
"PMUL4478.json": 0,
}
for trade_dialogue in load_test_trade_dialogues(data_dir):
dataflow_dialogue, num_refer_calls, _ = create_programs_for_trade_dialogue(
trade_dialogue=trade_dialogue,
keep_all_domains=True,
remove_none=False,
fill_none=False,
salience_model=salience_model,
no_revise=False,
avoid_empty_plan=False,
utterance_tokenizer=utterance_tokenizer,
)
dialogue_id = dataflow_dialogue.dialogue_id
assert (
num_refer_calls == expected_num_refer_calls[dialogue_id]
), "{} failed".format(dialogue_id)
def test_create_programs_without_revise(data_dir: str):
"""Tests creating programs without revise calls.
It should not use refer calls even with a valid salience model.
"""
utterance_tokenizer = UtteranceTokenizer()
salience_model = VanillaSalienceModel()
for trade_dialogue in load_test_trade_dialogues(data_dir):
for avoid_empty_plan in [True, False]:
_, num_refer_calls, _ = create_programs_for_trade_dialogue(
trade_dialogue=trade_dialogue,
keep_all_domains=True,
remove_none=False,
fill_none=False,
salience_model=salience_model,
no_revise=True,
avoid_empty_plan=avoid_empty_plan,
utterance_tokenizer=utterance_tokenizer,
)
assert num_refer_calls == 0
def test_create_programs_with_vanilla_salience_model(data_dir: str):
"""Tests creating programs with a vanilla salience model.
"""
utterance_tokenizer = UtteranceTokenizer()
salience_model = VanillaSalienceModel()
expected_num_refer_calls = {
"MUL1626.json": 1,
"PMUL3166.json": 0,
"MUL2258.json": 1,
"MUL2199.json": 1,
"MUL2096.json": 0,
"PMUL3470.json": 0,
"PMUL4478.json": 0,
}
for trade_dialogue in load_test_trade_dialogues(data_dir):
dataflow_dialogue, num_refer_calls, _ = create_programs_for_trade_dialogue(
trade_dialogue=trade_dialogue,
keep_all_domains=True,
remove_none=False,
fill_none=False,
salience_model=salience_model,
no_revise=False,
avoid_empty_plan=False,
utterance_tokenizer=utterance_tokenizer,
)
dialogue_id = dataflow_dialogue.dialogue_id
assert (
num_refer_calls == expected_num_refer_calls[dialogue_id]
), "{} failed".format(dialogue_id)
def test_create_programs_with_revise(trade_dialogue_1: Dict[str, Any]):
utterance_tokenizer = UtteranceTokenizer()
salience_model = VanillaSalienceModel()
expected_plans: List[str] = [
# turn 1
"""(find (Constraint[Hotel] :name (?= "none") :type (?= "none")))""",
# turn 2
"""(ReviseConstraint :new (Constraint[Hotel] :name (?= "hilton") :pricerange (?= "cheap") :type (?= "guest house")) :oldLocation (Constraint[Constraint[Hotel]]) :rootLocation (roleConstraint #(Path "output")))""",
# turn 3
"""(ReviseConstraint :new (Constraint[Hotel] :name (?= "none")) :oldLocation (Constraint[Constraint[Hotel]]) :rootLocation (roleConstraint #(Path "output")))""",
# turn 4
"""(abandon (Constraint[Hotel]))""",
# turn 5
"""(find (Constraint[Hotel] :area (?= "west")))""",
# turn 6
"""(find (Constraint[Restaurant] :area (refer (Constraint[Area]))))""",
# turn 7
"""(ReviseConstraint :new (Constraint[Restaurant] :pricerange (refer (Constraint[Pricerange]))) :oldLocation (Constraint[Constraint[Restaurant]]) :rootLocation (roleConstraint #(Path "output")))""",
# turn 8
"()",
# turn 9
"""(find (Constraint[Taxi] :departure (?= "none")))""",
# turn 10
"()",
]
dataflow_dialogue, _, _ = create_programs_for_trade_dialogue(
trade_dialogue=trade_dialogue_1,
keep_all_domains=True,
remove_none=False,
fill_none=False,
salience_model=salience_model,
no_revise=False,
avoid_empty_plan=False,
utterance_tokenizer=utterance_tokenizer,
)
for turn, expected_lispress in zip(dataflow_dialogue.turns, expected_plans):
lispress = turn.lispress
assert lispress == expected_lispress
def test_create_programs_with_revise_with_fill_none(trade_dialogue_1: Dict[str, Any]):
utterance_tokenizer = UtteranceTokenizer()
salience_model = VanillaSalienceModel()
expected_plans: List[str] = [
# turn 1
"""(find (Constraint[Hotel] :area (?= "none") :book-day (?= "none") :book-people (?= "none") :book-stay (?= "none") :internet (?= "none") :name (?= "none") :parking (?= "none") :pricerange (?= "none") :stars (?= "none") :type (?= "none")))""",
# turn 2
"""(ReviseConstraint :new (Constraint[Hotel] :name (?= "hilton") :pricerange (?= "cheap") :type (?= "guest house")) :oldLocation (Constraint[Constraint[Hotel]]) :rootLocation (roleConstraint #(Path "output")))""",
# turn 3
"""(ReviseConstraint :new (Constraint[Hotel] :name (?= "none")) :oldLocation (Constraint[Constraint[Hotel]]) :rootLocation (roleConstraint #(Path "output")))""",
# turn 4
"""(abandon (Constraint[Hotel]))""",
# turn 5
"""(find (Constraint[Hotel] :area (?= "west") :book-day (?= "none") :book-people (?= "none") :book-stay (?= "none") :internet (?= "none") :name (?= "none") :parking (?= "none") :pricerange (?= "none") :stars (?= "none") :type (?= "none")))""",
# turn 6
"""(find (Constraint[Restaurant] :area (refer (Constraint[Area])) :book-day (?= "none") :book-people (?= "none") :book-time (?= "none") :food (?= "none") :name (?= "none") :pricerange (?= "none")))""",
# turn 7
"""(ReviseConstraint :new (Constraint[Restaurant] :pricerange (refer (Constraint[Pricerange]))) :oldLocation (Constraint[Constraint[Restaurant]]) :rootLocation (roleConstraint #(Path "output")))""",
# turn 8
"()",
# turn 9
"""(find (Constraint[Taxi] :arriveby (?= "none") :departure (?= "none") :destination (?= "none") :leaveat (?= "none")))""",
# turn 10
"()",
]
dataflow_dialogue, _, _ = create_programs_for_trade_dialogue(
trade_dialogue=trade_dialogue_1,
keep_all_domains=True,
remove_none=False,
fill_none=True,
salience_model=salience_model,
no_revise=False,
avoid_empty_plan=False,
utterance_tokenizer=utterance_tokenizer,
)
for turn, expected_plan in zip(
dataflow_dialogue.turns, expected_plans # pylint: disable=no-member
):
lispress = turn.lispress
assert lispress == expected_plan
def test_create_programs_with_revise_with_avoid_empty_plan(
trade_dialogue_1: Dict[str, Any]
):
utterance_tokenizer = UtteranceTokenizer()
salience_model = VanillaSalienceModel()
expected_plans: List[str] = [
# turn 1
"""(find (Constraint[Hotel] :name (?= "none") :type (?= "none")))""",
# turn 2
"""(ReviseConstraint :new (Constraint[Hotel] :name (?= "hilton") :pricerange (?= "cheap") :type (?= "guest house")) :oldLocation (Constraint[Constraint[Hotel]]) :rootLocation (roleConstraint #(Path "output")))""",
# turn 3
"""(ReviseConstraint :new (Constraint[Hotel] :name (?= "none")) :oldLocation (Constraint[Constraint[Hotel]]) :rootLocation (roleConstraint #(Path "output")))""",
# turn 4
"""(abandon (Constraint[Hotel]))""",
# turn 5
"""(find (Constraint[Hotel] :area (?= "west")))""",
# turn 6
"""(find (Constraint[Restaurant] :area (refer (Constraint[Area]))))""",
# turn 7
"""(ReviseConstraint :new (Constraint[Restaurant] :pricerange (refer (Constraint[Pricerange]))) :oldLocation (Constraint[Constraint[Restaurant]]) :rootLocation (roleConstraint #(Path "output")))""",
# turn 8
"""(ReviseConstraint :new (Constraint[Restaurant] :pricerange (refer (Constraint[Pricerange]))) :oldLocation (Constraint[Constraint[Restaurant]]) :rootLocation (roleConstraint #(Path "output")))""",
# turn 9
"""(find (Constraint[Taxi] :departure (?= "none")))""",
# turn 10
"""(ReviseConstraint :new (Constraint[Taxi] :departure (?= "none")) :oldLocation (Constraint[Constraint[Taxi]]) :rootLocation (roleConstraint #(Path "output")))""",
]
dataflow_dialogue, _, _ = create_programs_for_trade_dialogue(
trade_dialogue=trade_dialogue_1,
keep_all_domains=True,
remove_none=False,
fill_none=False,
salience_model=salience_model,
no_revise=False,
avoid_empty_plan=True,
utterance_tokenizer=utterance_tokenizer,
)
for turn_part, expected_plan in zip(dataflow_dialogue.turns, expected_plans):
lispress = turn_part.lispress
assert lispress == expected_plan<|fim▁end|>
|
from typing import Any, Dict, Iterator, List
|
<|file_name|>expansion-panel.d.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license<|fim▁hole|>import { CdkAccordionItem } from '@angular/cdk/accordion';
import { UniqueSelectionDispatcher } from '@angular/cdk/collections';
import { TemplatePortal } from '@angular/cdk/portal';
import { AfterContentInit, ChangeDetectorRef, OnChanges, OnDestroy, SimpleChanges, ViewContainerRef } from '@angular/core';
import { Subject } from 'rxjs';
import { MatAccordion } from './accordion';
import { MatExpansionPanelContent } from './expansion-panel-content';
/** MatExpansionPanel's states. */
export declare type MatExpansionPanelState = 'expanded' | 'collapsed';
/**
* `<mat-expansion-panel>`
*
* This component can be used as a single element to show expandable content, or as one of
* multiple children of an element with the MatAccordion directive attached.
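 *
 * A minimal template sketch (assumed Angular usage; the header selector below is
 * illustrative and not declared in this file):
 *
 *   <mat-accordion>
 *     <mat-expansion-panel [hideToggle]="false">
 *       <mat-expansion-panel-header>Title</mat-expansion-panel-header>
 *       Panel body content
 *     </mat-expansion-panel>
 *   </mat-accordion>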
*/
export declare class MatExpansionPanel extends CdkAccordionItem implements AfterContentInit, OnChanges, OnDestroy {
private _viewContainerRef;
/** Whether the toggle indicator should be hidden. */
hideToggle: boolean;
private _hideToggle;
/** Stream that emits for changes in `@Input` properties. */
readonly _inputChanges: Subject<SimpleChanges>;
/** Optionally defined accordion the expansion panel belongs to. */
accordion: MatAccordion;
/** Content that will be rendered lazily. */
_lazyContent: MatExpansionPanelContent;
/** Portal holding the user's content. */
_portal: TemplatePortal;
/** ID for the associated header element. Used for a11y labelling. */
_headerId: string;
constructor(accordion: MatAccordion, _changeDetectorRef: ChangeDetectorRef, _uniqueSelectionDispatcher: UniqueSelectionDispatcher, _viewContainerRef: ViewContainerRef);
/** Whether the expansion indicator should be hidden. */
_getHideToggle(): boolean;
/** Determines whether the expansion panel should have spacing between it and its siblings. */
_hasSpacing(): boolean;
/** Gets the expanded state string. */
_getExpandedState(): MatExpansionPanelState;
ngAfterContentInit(): void;
ngOnChanges(changes: SimpleChanges): void;
ngOnDestroy(): void;
_bodyAnimation(event: AnimationEvent): void;
}
export declare class MatExpansionPanelActionRow {
}<|fim▁end|>
|
*/
import { AnimationEvent } from '@angular/animations';
|
<|file_name|>scope.go<|end_file_name|><|fim▁begin|>// Copyright (C) 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package compiler
import (
"github.com/google/gapid/core/codegen"
"github.com/google/gapid/gapil/semantic"
)
// S is a nestable compiler scope.
// A scope holds parameters and local variables.
type S struct {
// The scope can emit any instructions into the current scope block.
*codegen.Builder
// Ctx is a pointer to the active context (context_t*).
Ctx *codegen.Value
// Location is a pointer to the current source code location (uint32_t*).
Location *codegen.Value
// Globals is a pointer to the current API's global state variables.
Globals *codegen.Value
// Arena is a pointer to the current memory arena (arena*).
Arena *codegen.Value
// Parameters is the current function's parameters.<|fim▁hole|> CurrentThread *codegen.Value
// The list of values that will be referenced or released when the scope
// closes.
pendingRefRels pendingRefRels
parent *S
locals map[*semantic.Local]local
locationIdx int
onExitLogic []func()
}
type local struct {
val *codegen.Value
isPtr bool
}
func (s *S) enter(f func(*S)) {
locals := make(map[*semantic.Local]local, len(s.locals))
for l, v := range s.locals {
locals[l] = v
}
child := &S{
Builder: s.Builder,
Ctx: s.Ctx,
Location: s.Location,
Globals: s.Globals,
Arena: s.Arena,
Parameters: s.Parameters,
CurrentThread: s.CurrentThread,
parent: s,
locals: locals,
}
f(child)
child.exit()
}
// Return overrides codegen.Builder.Return to ensure all the scopes are
// popped before emitting the terminating instruction.
func (s *S) Return(val *codegen.Value) {
for s := s; s != nil; s = s.parent {
s.exit()
}
s.Builder.Return(val)
}
// If overrides codegen.Builder.If to ensure all the scopes are popped after
// onTrue reaches its last instruction.
func (s *S) If(cond *codegen.Value, onTrue func(s *S)) {
s.Builder.If(cond, func() { s.enter(onTrue) })
}
// IfElse overrides codegen.Builder.IfElse to ensure all the scopes are
// popped after onTrue and onFalse reach their last instruction.
func (s *S) IfElse(cond *codegen.Value, onTrue, onFalse func(s *S)) {
s.Builder.IfElse(cond,
func() { s.enter(onTrue) },
func() { s.enter(onFalse) },
)
}
// ForN overrides codegen.Builder.ForN to ensure all the scopes are popped after
// cb reaches its last instruction.
func (s *S) ForN(n *codegen.Value, cb func(s *S, iterator *codegen.Value) (cont *codegen.Value)) {
s.Builder.ForN(n, func(iterator *codegen.Value) *codegen.Value {
var cont *codegen.Value
s.enter(func(s *S) { cont = cb(s, iterator) })
return cont
})
}
// SwitchCase is a single condition and block used as a case statement in a
// switch.
type SwitchCase struct {
Conditions func(*S) []*codegen.Value
Block func(*S)
}
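// A hypothetical construction sketch (field names come from the struct above; the
// function bodies are placeholders, not real codegen calls):
//
//	c := SwitchCase{
//		Conditions: func(s *S) []*codegen.Value { return []*codegen.Value{ /* case values */ } },
//		Block:      func(s *S) { /* emit the case body using s */ },
//	}
//	s.Switch([]SwitchCase{c}, func(s *S) { /* default case */ })
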
// Switch overrides codegen.Builder.Switch to ensure all the scopes are
// popped after each condition and block reach their last instruction.
func (s *S) Switch(cases []SwitchCase, defaultCase func(s *S)) {
cs := make([]codegen.SwitchCase, len(cases))
for i, c := range cases {
i, c := i, c
cs[i] = codegen.SwitchCase{
Conditions: func() []*codegen.Value {
var out []*codegen.Value
s.enter(func(s *S) { out = c.Conditions(s) })
return out
},
Block: func() { s.enter(c.Block) },
}
}
var dc func()
if defaultCase != nil {
dc = func() { s.enter(defaultCase) }
}
s.Builder.Switch(cs, dc)
}
func (s *S) onExit(f func()) {
s.onExitLogic = append(s.onExitLogic, f)
}
func (s *S) exit() {
for _, f := range s.onExitLogic {
f()
}
if !s.IsBlockTerminated() {
// The last instruction written to the current block was a
// terminator instruction. This should only happen if we've emitted
// a return statement and the scopes around this statement are
// closing. The logic in Scope.Return() will have already exited
// all the contexts, so we can safely return here.
//
// TODO: This is really icky - more time should be spent thinking
// of ways to avoid special casing return statements like this.
s.pendingRefRels.apply(s)
}
}<|fim▁end|>
|
Parameters map[*semantic.Parameter]*codegen.Value
// The identifier of the currently executing thread.
|
<|file_name|>boilerplate.rs<|end_file_name|><|fim▁begin|>#![allow(clippy::single_match)]
use vangers::{
config::{settings::Terrain, Settings},
render::{ScreenTargets, DEPTH_FORMAT},
};
use futures::executor::{LocalPool, LocalSpawner};
use log::info;
use winit::{
event,
event_loop::{ControlFlow, EventLoop},
window::{Window, WindowBuilder},
};
pub trait Application {
fn on_key(&mut self, input: event::KeyboardInput) -> bool;
fn on_mouse_wheel(&mut self, _delta: event::MouseScrollDelta) {}
fn on_cursor_move(&mut self, _position: (f64, f64)) {}
fn on_mouse_button(&mut self, _state: event::ElementState, _button: event::MouseButton) {}
fn resize(&mut self, _device: &wgpu::Device, _extent: wgpu::Extent3d) {}
fn reload(&mut self, device: &wgpu::Device);
fn update(
&mut self,
device: &wgpu::Device,
delta: f32,
spawner: &LocalSpawner,
) -> Vec<wgpu::CommandBuffer>;
fn draw(
&mut self,
device: &wgpu::Device,
targets: ScreenTargets,
spawner: &LocalSpawner,
) -> wgpu::CommandBuffer;
}
pub struct Harness {
task_pool: LocalPool,
event_loop: EventLoop<()>,
window: Window,
pub device: wgpu::Device,
pub queue: wgpu::Queue,
pub downlevel_caps: wgpu::DownlevelCapabilities,
surface: wgpu::Surface,
pub color_format: wgpu::TextureFormat,
pub extent: wgpu::Extent3d,
reload_on_focus: bool,
depth_target: wgpu::TextureView,
}
pub struct HarnessOptions {
pub title: &'static str,
pub uses_level: bool,
}
impl Harness {
pub fn init(options: HarnessOptions) -> (Self, Settings) {
env_logger::init();
let mut task_pool = LocalPool::new();
info!("Loading the settings");
let settings = Settings::load("config/settings.ron");
let extent = wgpu::Extent3d {
width: settings.window.size[0],
height: settings.window.size[1],
depth_or_array_layers: 1,
};
info!("Initializing the window");
let instance = wgpu::Instance::new(settings.backend.to_wgpu());
let event_loop = EventLoop::new();
let window = WindowBuilder::new()
.with_title(options.title)
.with_inner_size(winit::dpi::PhysicalSize::new(extent.width, extent.height))
.with_resizable(true)
.build(&event_loop)
.unwrap();
let surface = unsafe { instance.create_surface(&window) };
info!("Initializing the device");
let adapter = task_pool
.run_until(instance.request_adapter(&wgpu::RequestAdapterOptions {
power_preference: wgpu::PowerPreference::HighPerformance,
compatible_surface: Some(&surface),
force_fallback_adapter: false,
}))
.expect("Unable to initialize GPU via the selected backend.");
let downlevel_caps = adapter.get_downlevel_properties();
let adapter_limits = adapter.limits();
let mut limits = match settings.render.terrain {
Terrain::RayTraced { .. }
| Terrain::RayMipTraced { .. }
| Terrain::Sliced { .. }
| Terrain::Painted { .. } => wgpu::Limits::downlevel_webgl2_defaults(),
Terrain::Scattered { .. } => wgpu::Limits::default(),
};
if options.uses_level {
let desired_height = 16 << 10;
limits.max_texture_dimension_2d =
if adapter_limits.max_texture_dimension_2d < desired_height {
                    log::warn!(
                        "Adapter only supports {} texture size",
                        adapter_limits.max_texture_dimension_2d
                    );
adapter_limits.max_texture_dimension_2d
} else {
desired_height
};
}
let (device, queue) = task_pool
.run_until(adapter.request_device(
&wgpu::DeviceDescriptor {
label: None,
features: wgpu::Features::empty(),
limits,
},
if settings.render.wgpu_trace_path.is_empty() {
None
} else {
Some(std::path::Path::new(&settings.render.wgpu_trace_path))
},
))
.unwrap();
let config = wgpu::SurfaceConfiguration {
usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
format: surface
.get_preferred_format(&adapter)
.unwrap_or(wgpu::TextureFormat::Bgra8UnormSrgb),
width: extent.width,
height: extent.height,
present_mode: wgpu::PresentMode::Mailbox,
};
surface.configure(&device, &config);
let depth_target = device
.create_texture(&wgpu::TextureDescriptor {
label: Some("Depth"),
size: extent,
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: DEPTH_FORMAT,
usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
})
.create_view(&wgpu::TextureViewDescriptor::default());
let harness = Harness {
task_pool,
event_loop,
window,
device,
downlevel_caps,
queue,
surface,
color_format: config.format,
extent,
reload_on_focus: settings.window.reload_on_focus,
depth_target,
};
(harness, settings)
}
pub fn main_loop<A: 'static + Application>(self, mut app: A) {
use std::time;
let mut last_time = time::Instant::now();
let mut needs_reload = false;
let Harness {
mut task_pool,
event_loop,
window,
device,
queue,
downlevel_caps: _,
surface,
color_format,
mut extent,
reload_on_focus,
mut depth_target,
} = self;
event_loop.run(move |event, _, control_flow| {
let _ = window;
*control_flow = ControlFlow::Poll;
task_pool.run_until_stalled();
match event {
event::Event::WindowEvent {
event: event::WindowEvent::Resized(size),
..
} => {
info!("Resizing to {:?}", size);
extent = wgpu::Extent3d {
width: size.width,
height: size.height,
depth_or_array_layers: 1,
};
let config = wgpu::SurfaceConfiguration {
usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
format: color_format,
width: size.width,
height: size.height,
present_mode: wgpu::PresentMode::Mailbox,
};
surface.configure(&device, &config);
depth_target = device
.create_texture(&wgpu::TextureDescriptor {
label: Some("Depth"),
size: extent,
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: DEPTH_FORMAT,
usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
})
.create_view(&wgpu::TextureViewDescriptor::default());
app.resize(&device, extent);
}
event::Event::WindowEvent { event, .. } => match event {
event::WindowEvent::Focused(false) => {
needs_reload = reload_on_focus;
}
event::WindowEvent::Focused(true) if needs_reload => {
info!("Reloading shaders");
app.reload(&device);
needs_reload = false;
}
event::WindowEvent::CloseRequested => {
*control_flow = ControlFlow::Exit;
}
event::WindowEvent::KeyboardInput { input, .. } => {
if !app.on_key(input) {
*control_flow = ControlFlow::Exit;
}
}
event::WindowEvent::MouseWheel { delta, .. } => app.on_mouse_wheel(delta),
event::WindowEvent::CursorMoved { position, .. } => {
app.on_cursor_move(position.into())
}
event::WindowEvent::MouseInput { state, button, .. } => {
app.on_mouse_button(state, button)
}
_ => {}
},
event::Event::MainEventsCleared => {
let spawner = task_pool.spawner();
let duration = time::Instant::now() - last_time;
last_time += duration;
let delta = duration.as_secs() as f32 + duration.subsec_nanos() as f32 * 1.0e-9;
let update_command_buffers = app.update(&device, delta, &spawner);
if !update_command_buffers.is_empty() {
queue.submit(update_command_buffers);
}
match surface.get_current_texture() {
Ok(frame) => {
let view = frame
.texture
.create_view(&wgpu::TextureViewDescriptor::default());
let targets = ScreenTargets {
extent,<|fim▁hole|> queue.submit(Some(render_command_buffer));
frame.present();
}
Err(_) => {}
};
profiling::finish_frame!();
}
_ => (),
}
});
}
}<|fim▁end|>
|
color: &view,
depth: &depth_target,
};
let render_command_buffer = app.draw(&device, targets, &spawner);
|
<|file_name|>Interactable.cpp<|end_file_name|><|fim▁begin|>#include "Chapter7.h"
#include "Interactable.h"
bool IInteractable::CanInteract_Implementation()
{
return true;<|fim▁hole|>
}<|fim▁end|>
|
}
void IInteractable::PerformInteract_Implementation()
{
|
<|file_name|>cookie-debug.js<|end_file_name|><|fim▁begin|>define("jquery-plugin/cookie/1.3/cookie", ["$"], function (require, exports, module) {<|fim▁hole|> *
* Copyright 2011, Klaus Hartl
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://www.opensource.org/licenses/mit-license.php
* http://www.opensource.org/licenses/GPL-2.0
*/
(function ($, document, undefined) {
var pluses = /\+/g;
function raw(s) {
return s;
}
function decoded(s) {
return decodeURIComponent(s.replace(pluses, ' '));
}
var config = $.cookie = function (key, value, options) {
// write
if (value !== undefined) {
options = $.extend({}, config.defaults, options);
if (value === null) {
options.expires = -1;
}
if (typeof options.expires === 'number') {
var days = options.expires, t = options.expires = new Date();
t.setDate(t.getDate() + days);
}
value = config.json ? JSON.stringify(value) : String(value);
return (document.cookie = [
encodeURIComponent(key), '=', config.raw ? value : encodeURIComponent(value),
options.expires ? '; expires=' + options.expires.toUTCString() : '', // use expires attribute, max-age is not supported by IE
options.path ? '; path=' + options.path : '',
options.domain ? '; domain=' + options.domain : '',
options.secure ? '; secure' : ''
].join(''));
}
// read
var decode = config.raw ? raw : decoded;
var cookies = document.cookie.split('; ');
for (var i = 0, l = cookies.length; i < l; i++) {
var parts = cookies[i].split('=');
if (decode(parts.shift()) === key) {
var cookie = decode(parts.join('='));
return config.json ? JSON.parse(cookie) : cookie;
}
}
return null;
};
config.defaults = {};
$.removeCookie = function (key, options) {
if ($.cookie(key) !== null) {
$.cookie(key, null, options);
return true;
}
return false;
};
})(jQuery, document);
})<|fim▁end|>
|
var jQuery = require('$');
/*!
* jQuery Cookie Plugin v1.3
* https://github.com/carhartl/jquery-cookie
|
<|file_name|>running_mpi_executables.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
__copyright__ = "Copyright 2013-2014, http://radical.rutgers.edu"
__license__ = "MIT"
import sys
import radical.pilot as rp
# READ: The RADICAL-Pilot documentation:
# http://radicalpilot.readthedocs.org/en/latest
#
# Try running this example with RADICAL_PILOT_VERBOSE=debug set if
# you want to see what happens behind the scenes!
#------------------------------------------------------------------------------
#
def pilot_state_cb (pilot, state):
""" this callback is invoked on all pilot state changes """
print "[Callback]: ComputePilot '%s' state: %s." % (pilot.uid, state)
if state == rp.FAILED:
sys.exit (1)
#------------------------------------------------------------------------------
#
def unit_state_cb (unit, state):
""" this callback is invoked on all unit state changes """
print "[Callback]: ComputeUnit '%s' state: %s." % (unit.uid, state)
if state == rp.FAILED:
sys.exit (1)
# ------------------------------------------------------------------------------
#
if __name__ == "__main__":
# we can optionally pass session name to RP
if len(sys.argv) > 1:
session_name = sys.argv[1]
else:
session_name = None
# Create a new session. No need to try/except this: if session creation
# fails, there is not much we can do anyways...
session = rp.Session(name=session_name)
print "session id: %s" % session.uid
# all other pilot code is now tried/excepted. If an exception is caught, we
# can rely on the session object to exist and be valid, and we can thus tear
# the whole RP stack down via a 'session.close()' call in the 'finally'
# clause...
try:
# Add a Pilot Manager. Pilot managers manage one or more ComputePilots.
pmgr = rp.PilotManager(session=session)
# Register our callback with the PilotManager. This callback will get
# called every time any of the pilots managed by the PilotManager
# change their state.
pmgr.register_callback(pilot_state_cb)
        # Define an X-core pilot on Stampede that runs for N minutes and
        # uses $HOME/radical.pilot.sandbox as its sandbox directory.
pdesc = rp.ComputePilotDescription()
pdesc.resource = "xsede.stampede"
pdesc.runtime = 15 # N minutes
pdesc.cores = 16 # X cores
pdesc.project = "TG-MCB090174"
# Launch the pilot.
pilot = pmgr.submit_pilots(pdesc)
cud_list = []
for unit_count in range(0, 4):
cu = rp.ComputeUnitDescription()
cu.pre_exec = ["module load python intel mvapich2 mpi4py"]
cu.executable = "python"
cu.arguments = ["helloworld_mpi.py"]
cu.input_staging = ["helloworld_mpi.py"]
# These two parameters are relevant to MPI execution:
# 'cores' sets the number of cores required by the task
# 'mpi' identifies the task as an MPI taskg
cu.cores = 8
cu.mpi = True
cud_list.append(cu)
# Combine the ComputePilot, the ComputeUnits and a scheduler via
# a UnitManager object.
umgr = rp.UnitManager(
session=session,
scheduler=rp.SCHED_DIRECT_SUBMISSION)
# Register our callback with the UnitManager. This callback will get
# called every time any of the units managed by the UnitManager
# change their state.
umgr.register_callback(unit_state_cb)
# Add the previously created ComputePilot to the UnitManager.
umgr.add_pilots(pilot)
# Submit the previously created ComputeUnit descriptions to the<|fim▁hole|> # Wait for all compute units to reach a terminal state (DONE or FAILED).
umgr.wait_units()
if not isinstance(units, list):
units = [units]
for unit in units:
print "* Task %s - state: %s, exit code: %s, started: %s, finished: %s, stdout: %s" \
% (unit.uid, unit.state, unit.exit_code, unit.start_time, unit.stop_time, unit.stdout)
except Exception as e:
# Something unexpected happened in the pilot code above
print "caught Exception: %s" % e
raise
except (KeyboardInterrupt, SystemExit) as e:
# the callback called sys.exit(), and we can here catch the
# corresponding KeyboardInterrupt exception for shutdown. We also catch
# SystemExit (which gets raised if the main threads exits for some other
# reason).
print "need to exit now: %s" % e
finally:
# always clean up the session, no matter if we caught an exception or
# not.
print "closing session"
session.close ()
# the above is equivalent to
#
# session.close (cleanup=True, terminate=True)
#
# it will thus both clean out the session's database record, and kill
# all remaining pilots (none in our example).
#-------------------------------------------------------------------------------<|fim▁end|>
|
# PilotManager. This will trigger the selected scheduler to start
# assigning ComputeUnits to the ComputePilots.
units = umgr.submit_units(cud_list)
|
<|file_name|>cpp.rs<|end_file_name|><|fim▁begin|>//! Enables the generation of header and source files for using intercom
//! libraries from C++ projects.
extern crate std;
use std::borrow::Cow;
use std::io::Write;
use super::GeneratorError;
use super::{pascal_case, LibraryContext, ModelOptions, TypeSystemOptions};
use intercom::typelib::{
Arg, CoClass, Direction, Interface, InterfaceVariant, Method, TypeInfo, TypeLib,
};
use handlebars::Handlebars;
use serde_derive::Serialize;
#[derive(PartialEq, Serialize, Debug)]
pub struct CppLibrary
{
pub lib_name: String,
pub interfaces: Vec<CppInterface>,
pub coclass_count: usize,
pub coclasses: Vec<CppClass>,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppInterface
{
pub name: String,
pub iid_struct: String,
pub base: Option<String>,
pub methods: Vec<CppMethod>,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppMethod
{
pub name: String,
pub ret_type: String,
pub args: Vec<CppArg>,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppArg
{
pub name: String,
pub arg_type: String,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppClass
{
pub name: String,
pub clsid_struct: String,
pub interface_count: usize,
pub interfaces: Vec<String>,
}
impl CppLibrary
{<|fim▁hole|> let ctx = LibraryContext::try_from(&lib)?;
let mut interfaces = vec![];
let mut coclasses = vec![];
for t in &lib.types {
match t {
TypeInfo::Class(cls) => {
coclasses.push(CppClass::try_from(cls.as_ref(), opts, &ctx)?)
}
TypeInfo::Interface(itf) => {
interfaces.push(CppInterface::gather(itf.as_ref(), opts, &ctx)?)
}
}
}
let interfaces = interfaces
.into_iter()
.flatten()
.collect::<Vec<CppInterface>>();
Ok(Self {
lib_name: lib.name.to_string(),
interfaces,
coclass_count: coclasses.len(),
coclasses,
})
}
}
impl CppInterface
{
fn gather(
itf: &Interface,
opts: &ModelOptions,
ctx: &LibraryContext,
) -> Result<Vec<Self>, GeneratorError>
{
Ok(opts
.type_systems
.iter()
.map(
|ts_opts| match itf.variants.iter().find(|v| v.as_ref().ts == ts_opts.ts) {
Some(v) => Some(CppInterface::try_from(&itf, v.as_ref(), ts_opts, ctx)),
None => None,
},
)
.filter_map(|i| i)
.collect::<Result<Vec<_>, _>>()?)
}
fn try_from(
itf: &Interface,
itf_variant: &InterfaceVariant,
ts_opts: &TypeSystemOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
Ok(Self {
name: Self::final_name(&itf, ts_opts),
iid_struct: guid_as_struct(&itf_variant.iid),
base: Some("IUnknown".to_string()),
methods: itf_variant
.methods
.iter()
.map(|m| CppMethod::try_from(m.as_ref(), ts_opts, ctx))
.collect::<Result<Vec<_>, _>>()?,
})
}
pub fn final_name(itf: &Interface, opts: &TypeSystemOptions) -> String
{
let base_name = if itf.options.class_impl_interface {
Cow::from(format!("I{}", itf.name))
} else {
itf.name.clone()
};
match opts.use_full_name {
true => format!("{}_{:?}", base_name, opts.ts),
false => base_name.to_string(),
}
}
}
impl CppMethod
{
fn try_from(
method: &Method,
opts: &TypeSystemOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
Ok(Self {
name: pascal_case(&method.name),
ret_type: CppArg::cpp_type(&method.return_type, opts, ctx),
args: method
.parameters
.iter()
.map(|arg| CppArg::try_from(arg, opts, ctx))
.collect::<Result<Vec<_>, _>>()?,
})
}
}
impl CppArg
{
fn try_from(
arg: &Arg,
opts: &TypeSystemOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
let mut attrs = vec![];
match arg.direction {
Direction::In => attrs.push("in"),
Direction::Out => attrs.push("out"),
Direction::Retval => {
attrs.push("out");
attrs.push("retval");
}
Direction::Return => {
return Err("Direction::Return is invalid direction for arguments"
.to_string()
.into());
}
}
Ok(Self {
name: arg.name.to_string(),
arg_type: Self::cpp_type(arg, opts, ctx),
})
}
fn cpp_type(arg: &Arg, opts: &TypeSystemOptions, ctx: &LibraryContext) -> String
{
let base_name = ctx
.itfs_by_name
.get(arg.ty.as_ref())
.map(|itf| CppInterface::final_name(itf, opts))
.unwrap_or_else(|| arg.ty.to_string());
let indirection = match arg.direction {
Direction::In | Direction::Return => arg.indirection_level,
Direction::Out | Direction::Retval => arg.indirection_level + 1,
};
let base_name = match base_name.as_ref() {
"std::ffi::c_void" => "void".to_string(),
"HRESULT" => "intercom::HRESULT".to_string(),
other => other.to_string(),
};
format!("{}{}", base_name, "*".repeat(indirection as usize))
}
}
impl CppClass
{
fn try_from(
cls: &CoClass,
opts: &ModelOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
let interfaces = cls
.interfaces
.iter()
.flat_map(|itf_ref| {
opts.type_systems
.iter()
.map(|opt| {
let itf = ctx.itfs_by_ref[itf_ref.name.as_ref()];
CppInterface::final_name(itf, opt)
})
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
Ok(CppClass {
name: cls.name.to_string(),
clsid_struct: guid_as_struct(&cls.clsid),
interface_count: interfaces.len(),
interfaces,
})
}
}
/// Generates the C++ header and source content.
///
/// - `out_header` - Optional writer for the generated header.
/// - `out_source` - Optional writer for the generated source.
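///
/// A minimal, hypothetical call site (variable names are placeholders, not taken
/// from this crate):
///
/// ```ignore
/// write(typelib, opts, Some(&mut header_out), Some(&mut source_out))?;
/// ```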
pub fn write(
lib: intercom::typelib::TypeLib,
opts: ModelOptions,
out_header: Option<&mut dyn Write>,
out_source: Option<&mut dyn Write>,
) -> Result<(), GeneratorError>
{
let mut reg = Handlebars::new();
reg.register_template_string("cpp_header", include_str!("cpp_header.hbs"))
.expect("Error in the built-in C++ template.");
reg.register_template_string("cpp_source", include_str!("cpp_source.hbs"))
.expect("Error in the built-in C++ template.");
let cpp_model = CppLibrary::try_from(lib, &opts)?;
if let Some(out_header) = out_header {
let rendered = reg
.render("cpp_header", &cpp_model)
.expect("Rendering a valid ComCrate to C++ failed");
write!(out_header, "{}", rendered)?;
}
if let Some(out_source) = out_source {
let rendered = reg
.render("cpp_source", &cpp_model)
.expect("Rendering a valid ComCrate to C++ failed");
write!(out_source, "{}", rendered)?;
}
Ok(())
}
/// Converts a GUID to its struct-initializer string representation.
pub fn guid_as_struct(g: &intercom::GUID) -> String
{
format!( "{{0x{:08x},0x{:04x},0x{:04x},{{0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x}}}}}",
g.data1, g.data2, g.data3,
g.data4[0], g.data4[1], g.data4[2], g.data4[3],
g.data4[4], g.data4[5], g.data4[6], g.data4[7] )
}<|fim▁end|>
|
fn try_from(lib: TypeLib, opts: &ModelOptions) -> Result<Self, GeneratorError>
{
|
<|file_name|>ProvisioningValidatorTest.java<|end_file_name|><|fim▁begin|>/*
* #%L
* wcm.io
* %%
* Copyright (C) 2015 wcm.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.wcm.devops.conga.plugins.sling.validator;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.nio.charset.StandardCharsets;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import io.wcm.devops.conga.generator.spi.ValidationException;
import io.wcm.devops.conga.generator.spi.ValidatorPlugin;
import io.wcm.devops.conga.generator.spi.context.FileContext;
import io.wcm.devops.conga.generator.util.PluginManagerImpl;
public class ProvisioningValidatorTest {
private ValidatorPlugin underTest;
@BeforeEach
public void setUp() {<|fim▁hole|> }
@Test
public void testValid() throws Exception {
File file = new File(getClass().getResource("/validProvisioning.txt").toURI());
FileContext fileContext = new FileContext().file(file).charset(StandardCharsets.UTF_8);
assertTrue(underTest.accepts(fileContext, null));
underTest.apply(fileContext, null);
}
@Test
public void testInvalid() throws Exception {
File file = new File(getClass().getResource("/invalidProvisioning.txt").toURI());
FileContext fileContext = new FileContext().file(file).charset(StandardCharsets.UTF_8);
assertTrue(underTest.accepts(fileContext, null));
assertThrows(ValidationException.class, () -> {
underTest.apply(fileContext, null);
});
}
@Test
public void testInvalidFileExtension() throws Exception {
File file = new File(getClass().getResource("/noProvisioning.txt").toURI());
FileContext fileContext = new FileContext().file(file).charset(StandardCharsets.UTF_8);
assertFalse(underTest.accepts(fileContext, null));
}
}<|fim▁end|>
|
underTest = new PluginManagerImpl().get(ProvisioningValidator.NAME, ValidatorPlugin.class);
|
<|file_name|>Log.java<|end_file_name|><|fim▁begin|>package com.glory.model;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
@Entity
public class Log {
@Id
@GeneratedValue
private Long id;
private Long transactionId;
private String message;
public Log() {
}
public Log(Long id, Long transactionId, String message) {
super();
this.id = id;
this.transactionId = transactionId;
this.message = message;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Long getTransactionId() {
return transactionId;
}
public void setTransactionId(Long transactionId) {
this.transactionId = transactionId;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>pollers_test.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import tempfile
import unittest
import mock
import requests
from infra_libs import temporary_directory
from infra.services.mastermon import pollers
class FakePoller(pollers.Poller):
endpoint = '/foo'
def __init__(self, base_url):
super(FakePoller, self).__init__(base_url, {})
self.called_with_data = None
def handle_response(self, data):
self.called_with_data = data
@mock.patch('requests.get')
class PollerTest(unittest.TestCase):
def test_requests_url(self, mock_get):
response = mock_get.return_value
response.json.return_value = {'foo': 'bar'}
response.status_code = 200
p = FakePoller('http://foobar')
self.assertTrue(p.poll())
self.assertEquals(1, mock_get.call_count)
self.assertEquals('http://foobar/json/foo', mock_get.call_args[0][0])
def test_strips_trailing_slashes(self, mock_get):
response = mock_get.return_value
response.json.return_value = {'foo': 'bar'}
response.status_code = 200
p = FakePoller('http://foobar////')
self.assertTrue(p.poll())
self.assertEquals(1, mock_get.call_count)
self.assertEquals('http://foobar/json/foo', mock_get.call_args[0][0])
def test_returns_false_for_non_200(self, mock_get):
response = mock_get.return_value
response.status_code = 404
p = FakePoller('http://foobar')
self.assertFalse(p.poll())
def test_returns_false_for_exception(self, mock_get):
mock_get.side_effect = requests.exceptions.ConnectionError
p = FakePoller('http://foobar')
self.assertFalse(p.poll())
def test_calls_handle_response(self, mock_get):
response = mock_get.return_value
response.json.return_value = {'foo': 'bar'}
response.status_code = 200
p = FakePoller('http://foobar')
self.assertTrue(p.poll())
self.assertEqual({'foo': 'bar'}, p.called_with_data)
def test_handles_invalid_json(self, mock_get):
response = mock_get.return_value
response.json.side_effect = ValueError
response.status_code = 200
p = FakePoller('http://foobar')
self.assertFalse(p.poll())
self.assertIsNone(p.called_with_data)
class VarzPollerTest(unittest.TestCase):
def test_response(self):
p = pollers.VarzPoller('', {'x': 'y'})
p.handle_response({
'server_uptime': 123,
'accepting_builds': True,
'builders': {
'foo': {
'connected_slaves': 1,
'current_builds': 2,
'pending_builds': 3,
'state': "offline",
'total_slaves': 4,
'recent_builds_by_status': {
'0': 1,
'2': 2,
'4': 3,
'building': 4,
},
'recent_finished_build_times': [1, 2, 3],
'recent_successful_build_times': [1, 2, 3],
},
'bar': {
'connected_slaves': 5,
'current_builds': 6,
'pending_builds': 7,
'state': "idle",
'total_slaves': 8,
'recent_builds_by_status': {
'0': 1,
'2': 2,
'4': 3,
'building': 4,
},
'recent_finished_build_times': [1, 2, 3],
'recent_successful_build_times': [1, 2, 3],
},
},
})
self.assertEqual(123, p.uptime.get({'x': 'y'}))
self.assertEqual(True, p.accepting_builds.get({'x': 'y'}))
self.assertEqual(1, p.connected.get({'builder': 'foo', 'x': 'y'}))
self.assertEqual(2, p.current_builds.get({'builder': 'foo', 'x': 'y'}))
self.assertEqual(3, p.pending_builds.get({'builder': 'foo', 'x': 'y'}))
self.assertEqual(4, p.total.get({'builder': 'foo', 'x': 'y'}))
self.assertEqual('offline', p.state.get({'builder': 'foo', 'x': 'y'}))
self.assertEqual(5, p.connected.get({'builder': 'bar', 'x': 'y'}))
self.assertEqual(6, p.current_builds.get({'builder': 'bar', 'x': 'y'}))
self.assertEqual(7, p.pending_builds.get({'builder': 'bar', 'x': 'y'}))
self.assertEqual(8, p.total.get({'builder': 'bar', 'x': 'y'}))
self.assertEqual('idle', p.state.get({'builder': 'bar', 'x': 'y'}))
self.assertEqual(1, p.recent_builds.get(
{'builder': 'foo', 'x': 'y', 'status': 'success'}))
self.assertEqual(4, p.recent_builds.get(
{'builder': 'foo', 'x': 'y', 'status': 'building'}))
self.assertIsNotNone(p.recent_finished_build_times.get(
{'builder': 'foo', 'x': 'y'}))
self.assertIsNotNone(p.recent_successful_build_times.get(
{'builder': 'foo', 'x': 'y'}))
def test_response_with_missing_data(self):
p = pollers.VarzPoller('', {'x': 'y'})
p.handle_response({
'server_uptime': 123,
'accepting_builds': True,
'builders': {
'foo': {
'state': "offline",
'total_slaves': 4,<|fim▁hole|> },
'bar': {
'connected_slaves': 5,
'current_builds': 6,
'pending_builds': 7,
},
},
})
self.assertEqual(123, p.uptime.get({'x': 'y'}))
self.assertEqual(True, p.accepting_builds.get({'x': 'y'}))
self.assertEqual(0, p.connected.get({'builder': 'foo', 'x': 'y'}))
self.assertEqual(0, p.current_builds.get({'builder': 'foo', 'x': 'y'}))
self.assertEqual(0, p.pending_builds.get({'builder': 'foo', 'x': 'y'}))
self.assertEqual(4, p.total.get({'builder': 'foo', 'x': 'y'}))
self.assertEqual('offline', p.state.get({'builder': 'foo', 'x': 'y'}))
self.assertEqual(5, p.connected.get({'builder': 'bar', 'x': 'y'}))
self.assertEqual(6, p.current_builds.get({'builder': 'bar', 'x': 'y'}))
self.assertEqual(7, p.pending_builds.get({'builder': 'bar', 'x': 'y'}))
self.assertEqual(0, p.total.get({'builder': 'bar', 'x': 'y'}))
self.assertEqual('unknown', p.state.get({'builder': 'bar', 'x': 'y'}))
class FilePollerTest(unittest.TestCase):
@staticmethod
def create_data_file(dirname, data_list):
with open(os.path.join(dirname, 'ts_mon.log'), 'w') as f:
for data in data_list:
f.write('%s\n' % json.dumps(data))
return f.name
def test_no_file(self):
with temporary_directory(prefix='poller-test-') as tempdir:
filename = os.path.join(tempdir, 'no-such-file')
p = pollers.FilePoller(filename, {})
self.assertTrue(p.poll())
self.assertFalse(os.path.isfile(pollers.rotated_filename(filename)))
@mock.patch('infra_libs.ts_mon.CounterMetric.increment')
@mock.patch('infra_libs.ts_mon.CumulativeDistributionMetric.add')
def test_file_has_data(self, fake_add, fake_increment):
result1 = {'builder': 'b1', 'slave': 's1',
'result': 'r1', 'project_id': 'chromium'}
result2 = {'builder': 'b1', 'slave': 's1',
'result': 'r1', 'project_id': 'unknown'}
# Check that we've listed all the required metric fields.
self.assertEqual(set(result1), set(pollers.FilePoller.field_keys))
self.assertEqual(set(result2), set(pollers.FilePoller.field_keys))
data1 = result1.copy()
data2 = result2.copy()
data1['random'] = 'value' # Extra field, should be ignored.
del data2['project_id'] # Missing field, should become 'unknown'.
data2['duration_s'] = 5
with temporary_directory(prefix='poller-test-') as tempdir:
filename = self.create_data_file(tempdir, [data1, data2])
p = pollers.FilePoller(filename, {})
self.assertTrue(p.poll())
fake_increment.assert_any_call(result1)
fake_increment.assert_any_call(result2)
fake_add.assert_any_call(data2['duration_s'], result2)
self.assertFalse(os.path.isfile(filename))
# Make sure the rotated file is still there - for debugging.
self.assertTrue(os.path.isfile(pollers.rotated_filename(filename)))
def test_file_has_bad_data(self):
"""Mostly a smoke test: don't crash on bad data."""
with temporary_directory(prefix='poller-test-') as tempdir:
filename = self.create_data_file(tempdir, [])
with open(filename, 'a') as f:
f.write('}')
p = pollers.FilePoller(filename, {})
self.assertTrue(p.poll())
self.assertFalse(os.path.isfile(filename))
# Make sure the rotated file is still there - for debugging.
self.assertTrue(os.path.isfile(pollers.rotated_filename(filename)))
def test_safe_remove_error(self):
"""Smoke test: the function should not raise an exception."""
pollers.safe_remove('nonexistent-file')<|fim▁end|>
| |
<|file_name|>unboxed-closures-infer-fnonce-call-twice.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we are able to infer a suitable kind for this closure
// that is just called (`FnMut`).
use std::mem;<|fim▁hole|> let tick = || mem::drop(counter);
tick();
tick(); //~ ERROR use of moved value: `tick`
}<|fim▁end|>
|
fn main() {
let mut counter: Vec<i32> = Vec::new();
|
<|file_name|>DataTypeTest.java<|end_file_name|><|fim▁begin|>package com.github.gv2011.util.bytes;
import static com.github.gv2011.testutil.Assert.assertThat;
import static com.github.gv2011.testutil.Matchers.is;
import org.junit.Test;
import com.github.gv2011.util.BeanUtils;
<|fim▁hole|>public class DataTypeTest {
@Test
public void test() {
final String encoded =
"multipart/related; boundary=example-2; start=\"<[email protected]>\"; type=\"Text/x-Okie\""
;
final DataType type = DataType.parse(encoded);
assertThat(type.getClass(), is(DataTypeImp.class));
assertThat(type.primaryType(), is("multipart"));
assertThat(type.subType(), is("related"));
assertThat(type.baseType(), is(DataType.parse("multipart/related")));
assertThat(type.parameters(), is(
ICollections.mapBuilder()
.put("boundary", "example-2")
.put("start", "<[email protected]>")
.put("type", "Text/x-Okie")
.build()
));
assertThat(type.toString(), is(encoded));
assertThat(
BeanUtils.typeRegistry().beanType(DataType.class).toJson(type),
is(JsonUtils.jsonFactory().primitive(encoded))
);
}
@Test(expected=IllegalStateException.class)
public void testValidation() {
BeanUtils.beanBuilder(DataType.class)
.set(DataType::primaryType).to("multipart")
.set(DataType::subType).to("@")
.build()
;
}
@Test//(expected=IllegalStateException.class)
public void testValidation2() {
BeanUtils.beanBuilder(DataType.class)
.set(DataType::primaryType).to("multipart")
.set(DataType::subType).to("related")
.build()
;
}
}<|fim▁end|>
|
import com.github.gv2011.util.icol.ICollections;
import com.github.gv2011.util.json.JsonUtils;
|
<|file_name|>opaque_node.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
use gfx::display_list::OpaqueNode;
use libc::{c_void, uintptr_t};
use script::layout_interface::LayoutJS;
use script::layout_interface::Node;
use script::layout_interface::TrustedNodeAddress;
use script_traits::UntrustedNodeAddress;
pub trait OpaqueNodeMethods {
/// Converts a DOM node (script view) to an `OpaqueNode`.
fn from_script_node(node: TrustedNodeAddress) -> Self;
/// Converts a DOM node to an `OpaqueNode'.
fn from_jsmanaged(node: &LayoutJS<Node>) -> Self;
/// Converts this node to an `UntrustedNodeAddress`. An `UntrustedNodeAddress` is just the type
/// of node that script expects to receive in a hit test.
fn to_untrusted_node_address(&self) -> UntrustedNodeAddress;
}
impl OpaqueNodeMethods for OpaqueNode {
fn from_script_node(node: TrustedNodeAddress) -> OpaqueNode {
unsafe {
OpaqueNodeMethods::from_jsmanaged(&LayoutJS::from_trusted_node_address(node))
}<|fim▁hole|>
fn from_jsmanaged(node: &LayoutJS<Node>) -> OpaqueNode {
unsafe {
let ptr: uintptr_t = node.get_jsobject() as uintptr_t;
OpaqueNode(ptr)
}
}
fn to_untrusted_node_address(&self) -> UntrustedNodeAddress {
UntrustedNodeAddress(self.0 as *const c_void)
}
}<|fim▁end|>
|
}
|
<|file_name|>cluster_size_autoscaling.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package e2e
import (
"bytes"
"fmt"
"io/ioutil"
"net/http"
"os/exec"
"strconv"
"strings"
"time"
"k8s.io/kubernetes/pkg/api"
client "k8s.io/kubernetes/pkg/client/unversioned"
"k8s.io/kubernetes/pkg/fields"<|fim▁hole|> "k8s.io/kubernetes/pkg/util/sets"
"k8s.io/kubernetes/test/e2e/framework"
testutils "k8s.io/kubernetes/test/utils"
"github.com/golang/glog"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
const (
defaultTimeout = 3 * time.Minute
resizeTimeout = 5 * time.Minute
scaleUpTimeout = 5 * time.Minute
scaleDownTimeout = 15 * time.Minute
gkeEndpoint = "https://test-container.sandbox.googleapis.com"
gkeUpdateTimeout = 15 * time.Minute
)
var _ = framework.KubeDescribe("Cluster size autoscaling [Slow]", func() {
f := framework.NewDefaultFramework("autoscaling")
var c *client.Client
var nodeCount int
var coresPerNode int
var memCapacityMb int
var originalSizes map[string]int
BeforeEach(func() {
c = f.Client
framework.SkipUnlessProviderIs("gce", "gke")
nodes := framework.GetReadySchedulableNodesOrDie(f.ClientSet)
nodeCount = len(nodes.Items)
Expect(nodeCount).NotTo(BeZero())
cpu := nodes.Items[0].Status.Capacity[api.ResourceCPU]
mem := nodes.Items[0].Status.Capacity[api.ResourceMemory]
coresPerNode = int((&cpu).MilliValue() / 1000)
memCapacityMb = int((&mem).Value() / 1024 / 1024)
originalSizes = make(map[string]int)
sum := 0
for _, mig := range strings.Split(framework.TestContext.CloudConfig.NodeInstanceGroup, ",") {
size, err := GroupSize(mig)
framework.ExpectNoError(err)
By(fmt.Sprintf("Initial size of %s: %d", mig, size))
originalSizes[mig] = size
sum += size
}
Expect(nodeCount).Should(Equal(sum))
if framework.ProviderIs("gke") {
val, err := isAutoscalerEnabled(3)
framework.ExpectNoError(err)
if !val {
err = enableAutoscaler("default-pool", 3, 5)
framework.ExpectNoError(err)
}
}
})
AfterEach(func() {
By(fmt.Sprintf("Restoring initial size of the cluster"))
setMigSizes(originalSizes)
framework.ExpectNoError(framework.WaitForClusterSize(c, nodeCount, scaleDownTimeout))
})
It("shouldn't increase cluster size if pending pod is too large [Feature:ClusterSizeAutoscalingScaleUp]", func() {
By("Creating unschedulable pod")
ReserveMemory(f, "memory-reservation", 1, memCapacityMb, false)
defer framework.DeleteRCAndPods(f.Client, f.ClientSet, f.Namespace.Name, "memory-reservation")
By("Waiting for scale up hoping it won't happen")
// Verify that the appropriate event was generated.
eventFound := false
EventsLoop:
for start := time.Now(); time.Since(start) < scaleUpTimeout; time.Sleep(20 * time.Second) {
By("Waiting for NotTriggerScaleUp event")
events, err := f.Client.Events(f.Namespace.Name).List(api.ListOptions{})
framework.ExpectNoError(err)
for _, e := range events.Items {
if e.InvolvedObject.Kind == "Pod" && e.Reason == "NotTriggerScaleUp" && strings.Contains(e.Message, "it wouldn't fit if a new node is added") {
By("NotTriggerScaleUp event found")
eventFound = true
break EventsLoop
}
}
}
Expect(eventFound).Should(Equal(true))
// Verify that the cluster size has not changed.
framework.ExpectNoError(WaitForClusterSizeFunc(f.Client,
func(size int) bool { return size <= nodeCount }, time.Second))
})
It("should increase cluster size if pending pods are small [Feature:ClusterSizeAutoscalingScaleUp]", func() {
ReserveMemory(f, "memory-reservation", 100, nodeCount*memCapacityMb, false)
defer framework.DeleteRCAndPods(f.Client, f.ClientSet, f.Namespace.Name, "memory-reservation")
// Verify that the cluster size is increased
framework.ExpectNoError(WaitForClusterSizeFunc(f.Client,
func(size int) bool { return size >= nodeCount+1 }, scaleUpTimeout))
framework.ExpectNoError(waitForAllCaPodsReadyInNamespace(f, c))
})
It("should increase cluster size if pending pods are small and there is another node pool that is not autoscaled [Feature:ClusterSizeAutoscalingScaleUp]", func() {
framework.SkipUnlessProviderIs("gke")
By("Creating new node-pool with one n1-standard-4 machine")
const extraPoolName = "extra-pool"
addNodePool(extraPoolName, "n1-standard-4", 1)
defer deleteNodePool(extraPoolName)
framework.ExpectNoError(framework.WaitForClusterSize(c, nodeCount+1, resizeTimeout))
glog.Infof("Not enabling cluster autoscaler for the node pool (on purpose).")
ReserveMemory(f, "memory-reservation", 100, nodeCount*memCapacityMb, false)
defer framework.DeleteRCAndPods(f.Client, f.ClientSet, f.Namespace.Name, "memory-reservation")
// Verify that the cluster size is increased
framework.ExpectNoError(WaitForClusterSizeFunc(f.Client,
func(size int) bool { return size >= nodeCount+1 }, scaleUpTimeout))
framework.ExpectNoError(waitForAllCaPodsReadyInNamespace(f, c))
})
It("should disable node pool autoscaling [Feature:ClusterSizeAutoscalingScaleUp]", func() {
framework.SkipUnlessProviderIs("gke")
By("Creating new node-pool with one n1-standard-4 machine")
const extraPoolName = "extra-pool"
addNodePool(extraPoolName, "n1-standard-4", 1)
defer deleteNodePool(extraPoolName)
framework.ExpectNoError(framework.WaitForClusterSize(c, nodeCount+1, resizeTimeout))
framework.ExpectNoError(enableAutoscaler(extraPoolName, 1, 2))
framework.ExpectNoError(disableAutoscaler(extraPoolName, 1, 2))
})
It("should increase cluster size if pods are pending due to host port conflict [Feature:ClusterSizeAutoscalingScaleUp]", func() {
CreateHostPortPods(f, "host-port", nodeCount+2, false)
defer framework.DeleteRCAndPods(f.Client, f.ClientSet, f.Namespace.Name, "host-port")
framework.ExpectNoError(WaitForClusterSizeFunc(f.Client,
func(size int) bool { return size >= nodeCount+2 }, scaleUpTimeout))
framework.ExpectNoError(waitForAllCaPodsReadyInNamespace(f, c))
})
It("should add node to the particular mig [Feature:ClusterSizeAutoscalingScaleUp]", func() {
labels := map[string]string{"cluster-autoscaling-test.special-node": "true"}
By("Finding the smallest MIG")
minMig := ""
minSize := nodeCount
for mig, size := range originalSizes {
if size <= minSize {
minMig = mig
minSize = size
}
}
removeLabels := func(nodesToClean sets.String) {
By("Removing labels from nodes")
updateNodeLabels(c, nodesToClean, nil, labels)
}
nodes, err := GetGroupNodes(minMig)
ExpectNoError(err)
nodesSet := sets.NewString(nodes...)
defer removeLabels(nodesSet)
By(fmt.Sprintf("Annotating nodes of the smallest MIG(%s): %v", minMig, nodes))
updateNodeLabels(c, nodesSet, labels, nil)
CreateNodeSelectorPods(f, "node-selector", minSize+1, labels, false)
By("Waiting for new node to appear and annotating it")
WaitForGroupSize(minMig, int32(minSize+1))
// Verify that the cluster size is increased
framework.ExpectNoError(WaitForClusterSizeFunc(f.Client,
func(size int) bool { return size >= nodeCount+1 }, scaleUpTimeout))
newNodes, err := GetGroupNodes(minMig)
ExpectNoError(err)
newNodesSet := sets.NewString(newNodes...)
newNodesSet.Delete(nodes...)
defer removeLabels(newNodesSet)
By(fmt.Sprintf("Setting labels for new nodes: %v", newNodesSet.List()))
updateNodeLabels(c, newNodesSet, labels, nil)
framework.ExpectNoError(WaitForClusterSizeFunc(f.Client,
func(size int) bool { return size >= nodeCount+1 }, scaleUpTimeout))
framework.ExpectNoError(waitForAllCaPodsReadyInNamespace(f, c))
framework.ExpectNoError(framework.DeleteRCAndPods(f.Client, f.ClientSet, f.Namespace.Name, "node-selector"))
})
It("should scale up correct target pool [Feature:ClusterSizeAutoscalingScaleUp]", func() {
framework.SkipUnlessProviderIs("gke")
By("Creating new node-pool with one n1-standard-4 machine")
const extraPoolName = "extra-pool"
addNodePool(extraPoolName, "n1-standard-4", 1)
defer deleteNodePool(extraPoolName)
framework.ExpectNoError(framework.WaitForClusterSize(c, nodeCount+1, resizeTimeout))
framework.ExpectNoError(enableAutoscaler(extraPoolName, 1, 2))
By("Creating rc with 2 pods too big to fit default-pool but fitting extra-pool")
ReserveMemory(f, "memory-reservation", 2, 2*memCapacityMb, false)
defer framework.DeleteRCAndPods(f.Client, f.ClientSet, f.Namespace.Name, "memory-reservation")
// Apparently the GKE master is restarted a couple of minutes after the node pool is added,
// resetting all the timers in the scale-down code. Adding 5 extra minutes to work around
// this issue.
// TODO: Remove the extra time when GKE restart is fixed.
framework.ExpectNoError(framework.WaitForClusterSize(c, nodeCount+2, scaleUpTimeout+5*time.Minute))
})
It("should correctly scale down after a node is not needed [Feature:ClusterSizeAutoscalingScaleDown]", func() {
By("Manually increase cluster size")
increasedSize := 0
newSizes := make(map[string]int)
for key, val := range originalSizes {
newSizes[key] = val + 2
increasedSize += val + 2
}
setMigSizes(newSizes)
framework.ExpectNoError(WaitForClusterSizeFunc(f.Client,
func(size int) bool { return size >= increasedSize }, scaleUpTimeout))
By("Some node should be removed")
framework.ExpectNoError(WaitForClusterSizeFunc(f.Client,
func(size int) bool { return size < increasedSize }, scaleDownTimeout))
})
It("should correctly scale down after a node is not needed when there is non autoscaled pool[Feature:ClusterSizeAutoscalingScaleDown]", func() {
framework.SkipUnlessProviderIs("gke")
By("Manually increase cluster size")
increasedSize := 0
newSizes := make(map[string]int)
for key, val := range originalSizes {
newSizes[key] = val + 2
increasedSize += val + 2
}
setMigSizes(newSizes)
framework.ExpectNoError(WaitForClusterSizeFunc(f.Client,
func(size int) bool { return size >= increasedSize }, scaleUpTimeout))
const extraPoolName = "extra-pool"
addNodePool(extraPoolName, "n1-standard-1", 3)
defer deleteNodePool(extraPoolName)
framework.ExpectNoError(WaitForClusterSizeFunc(f.Client,
func(size int) bool { return size >= increasedSize+3 }, scaleUpTimeout))
By("Some node should be removed")
// Apparently the GKE master is restarted a couple of minutes after the node pool is added,
// resetting all the timers in the scale-down code. Adding 10 extra minutes to work around
// this issue.
// TODO: Remove the extra time when GKE restart is fixed.
framework.ExpectNoError(WaitForClusterSizeFunc(f.Client,
func(size int) bool { return size < increasedSize+3 }, scaleDownTimeout+10*time.Minute))
})
})
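// getGKEClusterUrl builds the GKE API URL for the test cluster, embedding an access token obtained via gcloud.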
func getGKEClusterUrl() string {
out, err := exec.Command("gcloud", "auth", "print-access-token").Output()
framework.ExpectNoError(err)
token := strings.Replace(string(out), "\n", "", -1)
return fmt.Sprintf("%s/v1/projects/%s/zones/%s/clusters/%s?access_token=%s",
gkeEndpoint,
framework.TestContext.CloudConfig.ProjectID,
framework.TestContext.CloudConfig.Zone,
framework.TestContext.CloudConfig.Cluster,
token)
}
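// isAutoscalerEnabled fetches the GKE cluster config and reports whether autoscaling is enabled with the expected minimum node count.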
func isAutoscalerEnabled(expectedMinNodeCountInTargetPool int) (bool, error) {
resp, err := http.Get(getGKEClusterUrl())
if err != nil {
return false, err
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return false, err
}
strBody := string(body)
glog.Infof("Cluster config %s", strBody)
if strings.Contains(strBody, "\"minNodeCount\": "+strconv.Itoa(expectedMinNodeCountInTargetPool)) {
return true, nil
}
return false, nil
}
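// enableAutoscaler turns on cluster autoscaling for the given node pool (via gcloud for the default pool, via the GKE API otherwise) and waits until the change is visible.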
func enableAutoscaler(nodePool string, minCount, maxCount int) error {
if nodePool == "default-pool" {
glog.Infof("Using gcloud to enable autoscaling for pool %s", nodePool)
output, err := exec.Command("gcloud", "alpha", "container", "clusters", "update", framework.TestContext.CloudConfig.Cluster,
"--enable-autoscaling",
"--min-nodes="+strconv.Itoa(minCount),
"--max-nodes="+strconv.Itoa(maxCount),
"--node-pool="+nodePool,
"--project="+framework.TestContext.CloudConfig.ProjectID,
"--zone="+framework.TestContext.CloudConfig.Zone).Output()
if err != nil {
return fmt.Errorf("Failed to enable autoscaling: %v", err)
}
glog.Infof("Config update result: %s", output)
} else {
glog.Infof("Using direct api access to enable autoscaling for pool %s", nodePool)
updateRequest := "{" +
" \"update\": {" +
" \"desiredNodePoolId\": \"" + nodePool + "\"," +
" \"desiredNodePoolAutoscaling\": {" +
" \"enabled\": \"true\"," +
" \"minNodeCount\": \"" + strconv.Itoa(minCount) + "\"," +
" \"maxNodeCount\": \"" + strconv.Itoa(maxCount) + "\"" +
" }" +
" }" +
"}"
url := getGKEClusterUrl()
glog.Infof("Using gke api url %s", url)
putResult, err := doPut(url, updateRequest)
if err != nil {
return fmt.Errorf("Failed to put %s: %v", url, err)
}
glog.Infof("Config update result: %s", putResult)
}
for startTime := time.Now(); startTime.Add(gkeUpdateTimeout).After(time.Now()); time.Sleep(30 * time.Second) {
if val, err := isAutoscalerEnabled(minCount); err == nil && val {
return nil
}
}
return fmt.Errorf("autoscaler not enabled")
}
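// disableAutoscaler turns off cluster autoscaling for the given node pool (via gcloud for the default pool, via the GKE API otherwise) and waits until the change is visible.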
func disableAutoscaler(nodePool string, minCount, maxCount int) error {
if nodePool == "default-pool" {
glog.Infof("Using gcloud to disable autoscaling for pool %s", nodePool)
output, err := exec.Command("gcloud", "alpha", "container", "clusters", "update", framework.TestContext.CloudConfig.Cluster,
"--no-enable-autoscaling",
"--node-pool="+nodePool,
"--project="+framework.TestContext.CloudConfig.ProjectID,
"--zone="+framework.TestContext.CloudConfig.Zone).Output()
if err != nil {
return fmt.Errorf("Failed to enable autoscaling: %v", err)
}
glog.Infof("Config update result: %s", output)
} else {
glog.Infof("Using direct api access to disable autoscaling for pool %s", nodePool)
updateRequest := "{" +
" \"update\": {" +
" \"desiredNodePoolId\": \"" + nodePool + "\"," +
" \"desiredNodePoolAutoscaling\": {" +
" \"enabled\": \"false\"," +
" }" +
" }" +
"}"
url := getGKEClusterUrl()
glog.Infof("Using gke api url %s", url)
putResult, err := doPut(url, updateRequest)
if err != nil {
return fmt.Errorf("Failed to put %s: %v", url, err)
}
glog.Infof("Config update result: %s", putResult)
}
for startTime := time.Now(); startTime.Add(gkeUpdateTimeout).After(time.Now()); time.Sleep(30 * time.Second) {
if val, err := isAutoscalerEnabled(minCount); err == nil && !val {
return nil
}
}
return fmt.Errorf("autoscaler still enabled")
}
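// addNodePool creates a new GKE node pool with the given machine type and node count via gcloud.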
func addNodePool(name string, machineType string, numNodes int) {
output, err := exec.Command("gcloud", "alpha", "container", "node-pools", "create", name, "--quiet",
"--machine-type="+machineType,
"--num-nodes="+strconv.Itoa(numNodes),
"--project="+framework.TestContext.CloudConfig.ProjectID,
"--zone="+framework.TestContext.CloudConfig.Zone,
"--cluster="+framework.TestContext.CloudConfig.Cluster).CombinedOutput()
framework.ExpectNoError(err)
glog.Infof("Creating node-pool %s: %s", name, output)
}
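// deleteNodePool removes the given GKE node pool via gcloud; failures are only logged.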
func deleteNodePool(name string) {
glog.Infof("Deleting node pool %s", name)
output, err := exec.Command("gcloud", "alpha", "container", "node-pools", "delete", name, "--quiet",
"--project="+framework.TestContext.CloudConfig.ProjectID,
"--zone="+framework.TestContext.CloudConfig.Zone,
"--cluster="+framework.TestContext.CloudConfig.Cluster).CombinedOutput()
if err != nil {
glog.Infof("Error: %v", err)
}
glog.Infof("Node-pool deletion output: %s", output)
}
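// doPut issues an HTTP PUT with a JSON body to the given URL and returns the response body as a string.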
func doPut(url, content string) (string, error) {
req, err := http.NewRequest("PUT", url, bytes.NewBuffer([]byte(content)))
req.Header.Set("Content-Type", "application/json")
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return "", err
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return "", err
}
strBody := string(body)
return strBody, nil
}
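// CreateNodeSelectorPods runs an RC whose pods reserve a host port and require the special-node label via a node selector.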
func CreateNodeSelectorPods(f *framework.Framework, id string, replicas int, nodeSelector map[string]string, expectRunning bool) {
By(fmt.Sprintf("Running RC which reserves host port and defines node selector"))
config := &testutils.RCConfig{
Client: f.Client,
Name: id,
Namespace: f.Namespace.Name,
Timeout: defaultTimeout,
Image: framework.GetPauseImageName(f.Client),
Replicas: replicas,
HostPorts: map[string]int{"port1": 4321},
NodeSelector: nodeSelector,
}
err := framework.RunRC(*config)
if expectRunning {
framework.ExpectNoError(err)
}
}
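// CreateHostPortPods runs an RC whose pods reserve a fixed host port, forcing at most one pod per node.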
func CreateHostPortPods(f *framework.Framework, id string, replicas int, expectRunning bool) {
By(fmt.Sprintf("Running RC which reserves host port"))
config := &testutils.RCConfig{
Client: f.Client,
Name: id,
Namespace: f.Namespace.Name,
Timeout: defaultTimeout,
Image: framework.GetPauseImageName(f.Client),
Replicas: replicas,
HostPorts: map[string]int{"port1": 4321},
}
err := framework.RunRC(*config)
if expectRunning {
framework.ExpectNoError(err)
}
}
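// ReserveCpu runs an RC whose replicas together request the given number of millicores.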
func ReserveCpu(f *framework.Framework, id string, replicas, millicores int) {
By(fmt.Sprintf("Running RC which reserves %v millicores", millicores))
request := int64(millicores / replicas)
config := &testutils.RCConfig{
Client: f.Client,
Name: id,
Namespace: f.Namespace.Name,
Timeout: defaultTimeout,
Image: framework.GetPauseImageName(f.Client),
Replicas: replicas,
CpuRequest: request,
}
framework.ExpectNoError(framework.RunRC(*config))
}
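// ReserveMemory runs an RC whose replicas together request the given amount of memory in MB.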
func ReserveMemory(f *framework.Framework, id string, replicas, megabytes int, expectRunning bool) {
By(fmt.Sprintf("Running RC which reserves %v MB of memory", megabytes))
request := int64(1024 * 1024 * megabytes / replicas)
config := &testutils.RCConfig{
Client: f.Client,
Name: id,
Namespace: f.Namespace.Name,
Timeout: defaultTimeout,
Image: framework.GetPauseImageName(f.Client),
Replicas: replicas,
MemRequest: request,
}
err := framework.RunRC(*config)
if expectRunning {
framework.ExpectNoError(err)
}
}
// WaitForClusterSizeFunc waits until the number of ready, schedulable nodes satisfies the given size function.
func WaitForClusterSizeFunc(c *client.Client, sizeFunc func(int) bool, timeout time.Duration) error {
for start := time.Now(); time.Since(start) < timeout; time.Sleep(20 * time.Second) {
nodes, err := c.Nodes().List(api.ListOptions{FieldSelector: fields.Set{
"spec.unschedulable": "false",
}.AsSelector()})
if err != nil {
glog.Warningf("Failed to list nodes: %v", err)
continue
}
numNodes := len(nodes.Items)
// Filter out not-ready nodes.
framework.FilterNodes(nodes, func(node api.Node) bool {
return framework.IsNodeConditionSetAsExpected(&node, api.NodeReady, true)
})
numReady := len(nodes.Items)
if numNodes == numReady && sizeFunc(numReady) {
glog.Infof("Cluster has reached the desired size")
return nil
}
glog.Infof("Waiting for cluster, current size %d, not ready nodes %d", numNodes, numNodes-numReady)
}
return fmt.Errorf("timeout waiting %v for appropriate cluster size", timeout)
}
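// waitForAllCaPodsReadyInNamespace waits until every pod in the test namespace reports Ready, dumping pod and node state on timeout.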
func waitForAllCaPodsReadyInNamespace(f *framework.Framework, c *client.Client) error {
var notready []string
for start := time.Now(); time.Now().Before(start.Add(scaleUpTimeout)); time.Sleep(20 * time.Second) {
pods, err := c.Pods(f.Namespace.Name).List(api.ListOptions{})
if err != nil {
return fmt.Errorf("failed to get pods: %v", err)
}
notready = make([]string, 0)
for _, pod := range pods.Items {
ready := false
for _, c := range pod.Status.Conditions {
if c.Type == api.PodReady && c.Status == api.ConditionTrue {
ready = true
}
}
if !ready {
notready = append(notready, pod.Name)
}
}
if len(notready) == 0 {
glog.Infof("All pods ready")
return nil
}
glog.Infof("Some pods are not ready yet: %v", notready)
}
glog.Info("Timeout on waiting for pods being ready")
glog.Info(framework.RunKubectlOrDie("get", "pods", "-o", "json", "--all-namespaces"))
glog.Info(framework.RunKubectlOrDie("get", "nodes", "-o", "json"))
// Some pods are still not running.
return fmt.Errorf("Some pods are still not running: %v", notready)
}
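// setMigSizes resizes each managed instance group to the requested size, skipping groups that already match.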
func setMigSizes(sizes map[string]int) {
for mig, desiredSize := range sizes {
currentSize, err := GroupSize(mig)
framework.ExpectNoError(err)
if desiredSize != currentSize {
By(fmt.Sprintf("Setting size of %s to %d", mig, desiredSize))
err = ResizeGroup(mig, int32(desiredSize))
framework.ExpectNoError(err)
}
}
}<|fim▁end|>
| |
<|file_name|>my_trip.js<|end_file_name|><|fim▁begin|>var express = require('express');
var router = express.Router();
/*
** www.g-trotter.eu/my-trip
*/
router.route('/')
.get(function(req, res) {
res.json({
path: 'www.g-trotter.eu/my-trip'<|fim▁hole|><|fim▁end|>
|
})
})
module.exports = router;
|
<|file_name|>data_source_obmcs_core_route_table.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
package main
import (
"time"
"github.com/MustWin/baremetal-sdk-go"
"github.com/hashicorp/terraform/helper/schema"
"github.com/oracle/terraform-provider-baremetal/options"
"github.com/oracle/terraform-provider-baremetal/client"
"github.com/oracle/terraform-provider-baremetal/crud"
)
func RouteTableDatasource() *schema.Resource {
return &schema.Resource{
Read: readRouteTables,
Schema: map[string]*schema.Schema{
"compartment_id": {
Type: schema.TypeString,
Required: true,
},
"limit": {
Type: schema.TypeInt,
Optional: true,
},
"page": {
Type: schema.TypeString,
Optional: true,
},
"route_tables": {
Type: schema.TypeList,
Computed: true,
Elem: RouteTableResource(),
},
"vcn_id": {
Type: schema.TypeString,
Required: true,
},
},
}
}
func readRouteTables(d *schema.ResourceData, m interface{}) (e error) {
client := m.(client.BareMetalClient)
reader := &RouteTableDatasourceCrud{}
reader.D = d
reader.Client = client
return crud.ReadResource(reader)
}
type RouteTableDatasourceCrud struct {<|fim▁hole|> crud.BaseCrud
Res *baremetal.ListRouteTables
}
func (s *RouteTableDatasourceCrud) Get() (e error) {
compartmentID := s.D.Get("compartment_id").(string)
vcnID := s.D.Get("vcn_id").(string)
opts := &baremetal.ListOptions{}
options.SetListOptions(s.D, opts)
s.Res = &baremetal.ListRouteTables{RouteTables: []baremetal.RouteTable{}}
for {
var list *baremetal.ListRouteTables
if list, e = s.Client.ListRouteTables(compartmentID, vcnID, opts); e != nil {
break
}
s.Res.RouteTables = append(s.Res.RouteTables, list.RouteTables...)
if hasNextPage := options.SetNextPageOption(list.NextPage, &opts.PageListOptions); !hasNextPage {
break
}
}
return
}
func (s *RouteTableDatasourceCrud) SetData() {
if s.Res != nil {
s.D.SetId(time.Now().UTC().String())
resources := []map[string]interface{}{}
for _, v := range s.Res.RouteTables {
rules := []map[string]interface{}{}
for _, val := range v.RouteRules {
rule := map[string]interface{}{
"cidr_block": val.CidrBlock,
"network_entity_id": val.NetworkEntityID,
}
rules = append(rules, rule)
}
res := map[string]interface{}{
"compartment_id": v.CompartmentID,
"display_name": v.DisplayName,
"id": v.ID,
"route_rules": rules,
"time_modified": v.TimeModified.String(),
"state": v.State,
"time_created": v.TimeCreated.String(),
}
resources = append(resources, res)
}
s.D.Set("route_tables", resources)
}
return
}<|fim▁end|>
| |
<|file_name|>constify-module-enums-types.rs<|end_file_name|><|fim▁begin|>#![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
pub mod foo {
pub type Type = ::std::os::raw::c_uint;
pub const THIS: Type = 0;
pub const SHOULD_BE: Type = 1;
pub const A_CONSTANT: Type = 2;
pub const ALSO_THIS: Type = 42;
pub const AND_ALSO_THIS: Type = 42;
}
pub mod anon_enum {
pub type Type = ::std::os::raw::c_uint;
pub const Variant1: Type = 0;
pub const Variant2: Type = 1;
pub const Variant3: Type = 2;
}
pub mod ns1_foo {
pub type Type = ::std::os::raw::c_uint;
pub const THIS: Type = 0;
pub const SHOULD_BE: Type = 1;
pub const A_CONSTANT: Type = 2;
pub const ALSO_THIS: Type = 42;
}
pub mod ns2_Foo {
pub type Type = ::std::os::raw::c_int;
pub const Variant1: Type = 0;
pub const Variant2: Type = 1;
}
pub use self::anon_enum::Type as anon_enum_alias1;
pub use self::anon_enum_alias1 as anon_enum_alias2;
pub use self::anon_enum_alias2 as anon_enum_alias3;
pub use self::foo::Type as foo_alias1;
pub use self::foo_alias1 as foo_alias2;
pub use self::foo_alias2 as foo_alias3;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bar {
pub member1: foo::Type,
pub member2: foo_alias1,
pub member3: foo_alias2,
pub member4: foo_alias3,
pub member5: ns1_foo::Type,
pub member6: *mut ns2_Foo::Type,
pub member7: anon_enum::Type,
pub member8: anon_enum_alias1,
pub member9: anon_enum_alias2,
pub member10: anon_enum_alias3,
}
#[test]
fn bindgen_test_layout_bar() {
assert_eq!(
::std::mem::size_of::<bar>(),
48usize,
concat!("Size of: ", stringify!(bar))
);
assert_eq!(
::std::mem::align_of::<bar>(),
8usize,
concat!("Alignment of ", stringify!(bar))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<bar>())).member1 as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(bar),
"::",
stringify!(member1)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<bar>())).member2 as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(bar),
"::",
stringify!(member2)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<bar>())).member3 as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(bar),
"::",
stringify!(member3)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<bar>())).member4 as *const _ as usize },
12usize,
concat!(
"Offset of field: ",
stringify!(bar),
"::",
stringify!(member4)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<bar>())).member5 as *const _ as usize },
16usize,
concat!(
"Offset of field: ",
stringify!(bar),
"::",
stringify!(member5)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<bar>())).member6 as *const _ as usize },
24usize,
concat!(
"Offset of field: ",
stringify!(bar),
"::",
stringify!(member6)
)
);<|fim▁hole|> 32usize,
concat!(
"Offset of field: ",
stringify!(bar),
"::",
stringify!(member7)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<bar>())).member8 as *const _ as usize },
36usize,
concat!(
"Offset of field: ",
stringify!(bar),
"::",
stringify!(member8)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<bar>())).member9 as *const _ as usize },
40usize,
concat!(
"Offset of field: ",
stringify!(bar),
"::",
stringify!(member9)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<bar>())).member10 as *const _ as usize
},
44usize,
concat!(
"Offset of field: ",
stringify!(bar),
"::",
stringify!(member10)
)
);
}
impl Default for bar {
fn default() -> Self {
let mut s = ::std::mem::MaybeUninit::<Self>::uninit();
unsafe {
::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1);
s.assume_init()
}
}
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct Baz {
pub member1: ns2_Foo::Type,
}
#[test]
fn bindgen_test_layout_Baz() {
assert_eq!(
::std::mem::size_of::<Baz>(),
4usize,
concat!("Size of: ", stringify!(Baz))
);
assert_eq!(
::std::mem::align_of::<Baz>(),
4usize,
concat!("Alignment of ", stringify!(Baz))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<Baz>())).member1 as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(Baz),
"::",
stringify!(member1)
)
);
}
impl Default for Baz {
fn default() -> Self {
let mut s = ::std::mem::MaybeUninit::<Self>::uninit();
unsafe {
::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1);
s.assume_init()
}
}
}
pub mod one_Foo {
pub type Type = ::std::os::raw::c_int;
pub const Variant1: Type = 0;
pub const Variant2: Type = 1;
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct Bar {
pub baz: *mut one_Foo::Type,
}
#[test]
fn bindgen_test_layout_Bar() {
assert_eq!(
::std::mem::size_of::<Bar>(),
8usize,
concat!("Size of: ", stringify!(Bar))
);
assert_eq!(
::std::mem::align_of::<Bar>(),
8usize,
concat!("Alignment of ", stringify!(Bar))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<Bar>())).baz as *const _ as usize },
0usize,
concat!("Offset of field: ", stringify!(Bar), "::", stringify!(baz))
);
}
impl Default for Bar {
fn default() -> Self {
let mut s = ::std::mem::MaybeUninit::<Self>::uninit();
unsafe {
::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1);
s.assume_init()
}
}
}
extern "C" {
#[link_name = "\u{1}_Z5func13fooPS_PS0_"]
pub fn func1(
arg1: foo::Type,
arg2: *mut foo::Type,
arg3: *mut *mut foo::Type,
) -> *mut foo::Type;
}
extern "C" {
#[link_name = "\u{1}_Z5func23fooPS_PS0_"]
pub fn func2(
arg1: foo_alias1,
arg2: *mut foo_alias1,
arg3: *mut *mut foo_alias1,
) -> *mut foo_alias1;
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct Thing<T> {
pub thing: T,
pub _phantom_0: ::std::marker::PhantomData<::std::cell::UnsafeCell<T>>,
}
impl<T> Default for Thing<T> {
fn default() -> Self {
let mut s = ::std::mem::MaybeUninit::<Self>::uninit();
unsafe {
::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1);
s.assume_init()
}
}
}
extern "C" {
#[link_name = "\u{1}_Z5func35ThingI3fooE"]
pub fn func3(arg1: Thing<foo::Type>) -> foo::Type;
}
extern "C" {
#[link_name = "\u{1}_Z5func45ThingIS_I3fooEE"]
pub fn func4(arg1: Thing<Thing<foo::Type>>) -> foo::Type;
}<|fim▁end|>
|
assert_eq!(
unsafe { &(*(::std::ptr::null::<bar>())).member7 as *const _ as usize },
|
<|file_name|>mozmap.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `MozMap` (open-ended dictionary) type.
use crate::dom::bindings::conversions::jsid_to_string;
use crate::dom::bindings::error::report_pending_exception;
use crate::dom::bindings::str::DOMString;
use js::conversions::{ConversionResult, FromJSValConvertible, ToJSValConvertible};
use js::jsapi::JSContext;
use js::jsapi::JS_NewPlainObject;
use js::jsapi::JSITER_HIDDEN;
use js::jsapi::JSITER_OWNONLY;
use js::jsapi::JSITER_SYMBOLS;
use js::jsapi::JSPROP_ENUMERATE;
use js::jsval::ObjectValue;
use js::jsval::UndefinedValue;
use js::rust::wrappers::GetPropertyKeys;
use js::rust::wrappers::JS_DefineUCProperty2;
use js::rust::wrappers::JS_GetPropertyById;
use js::rust::HandleValue;
use js::rust::IdVector;
use js::rust::MutableHandleValue;
use std::collections::HashMap;
use std::ops::Deref;
/// The `MozMap` (open-ended dictionary) type.
#[derive(Clone, JSTraceable)]
pub struct MozMap<T> {
map: HashMap<DOMString, T>,
}
impl<T> MozMap<T> {
/// Create an empty `MozMap`.
pub fn new() -> Self {
MozMap {
map: HashMap::new(),
}
}
}
impl<T> Deref for MozMap<T> {
type Target = HashMap<DOMString, T>;
fn deref(&self) -> &HashMap<DOMString, T> {
&self.map
}
}
impl<T, C> FromJSValConvertible for MozMap<T>
where
T: FromJSValConvertible<Config = C>,
C: Clone,
{
type Config = C;
unsafe fn from_jsval(
cx: *mut JSContext,
value: HandleValue,
config: C,
) -> Result<ConversionResult<Self>, ()> {
if !value.is_object() {
return Ok(ConversionResult::Failure(
"MozMap value was not an object".into(),
));
}
rooted!(in(cx) let object = value.to_object());
let ids = IdVector::new(cx);
if !GetPropertyKeys(<|fim▁hole|> cx,
object.handle(),
JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS,
ids.get(),
) {
// TODO: can GetPropertyKeys fail?
// (it does so if the object has duplicate keys)
// https://github.com/servo/servo/issues/21462
report_pending_exception(cx, false);
return Ok(ConversionResult::Failure(
"Getting MozMap value property keys failed".into(),
));
}
let mut map = HashMap::new();
for id in &*ids {
rooted!(in(cx) let id = *id);
rooted!(in(cx) let mut property = UndefinedValue());
if !JS_GetPropertyById(cx, object.handle(), id.handle(), property.handle_mut()) {
return Err(());
}
let property = match T::from_jsval(cx, property.handle(), config.clone())? {
ConversionResult::Success(property) => property,
ConversionResult::Failure(message) => return Ok(ConversionResult::Failure(message)),
};
// TODO: Is this guaranteed to succeed?
// https://github.com/servo/servo/issues/21463
if let Some(key) = jsid_to_string(cx, id.handle()) {
map.insert(key, property);
}
}
Ok(ConversionResult::Success(MozMap { map: map }))
}
}
impl<T: ToJSValConvertible> ToJSValConvertible for MozMap<T> {
#[inline]
unsafe fn to_jsval(&self, cx: *mut JSContext, mut rval: MutableHandleValue) {
rooted!(in(cx) let js_object = JS_NewPlainObject(cx));
assert!(!js_object.handle().is_null());
rooted!(in(cx) let mut js_value = UndefinedValue());
for (key, value) in &self.map {
let key = key.encode_utf16().collect::<Vec<_>>();
value.to_jsval(cx, js_value.handle_mut());
assert!(JS_DefineUCProperty2(
cx,
js_object.handle(),
key.as_ptr(),
key.len(),
js_value.handle(),
JSPROP_ENUMERATE as u32
));
}
rval.set(ObjectValue(js_object.handle().get()));
}
}<|fim▁end|>
| |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>"""goto_cloud URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/<|fim▁hole|>Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]<|fim▁end|>
| |
<|file_name|>netutil.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Miscellaneous network utility code."""
from __future__ import absolute_import, division, print_function, with_statement
import errno
import os
import re
import socket
import ssl
import stat
from lib.tornado.concurrent import dummy_executor, run_on_executor
from lib.tornado.ioloop import IOLoop
from lib.tornado.platform.auto import set_close_exec
from lib.tornado.util import Configurable
def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None):
"""Creates listening sockets bound to the given port and address.
Returns a list of socket objects (multiple sockets are returned if
the given address maps to multiple IP addresses, which is most common
for mixed IPv4 and IPv6 use).
Address may be either an IP address or hostname. If it's a hostname,
the server will listen on all IP addresses associated with the
name. Address may be an empty string or None to listen on all
available interfaces. Family may be set to either `socket.AF_INET`
or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise
both will be used if available.
The ``backlog`` argument has the same meaning as for
`socket.listen() <socket.socket.listen>`.
``flags`` is a bitmask of AI_* flags to `~socket.getaddrinfo`, like
``socket.AI_PASSIVE | socket.AI_NUMERICHOST``.
"""
sockets = []
if address == "":
address = None
if not socket.has_ipv6 and family == socket.AF_UNSPEC:
# Python can be compiled with --disable-ipv6, which causes
# operations on AF_INET6 sockets to fail, but does not
# automatically exclude those results from getaddrinfo
# results.
# http://bugs.python.org/issue16208
family = socket.AF_INET
if flags is None:
flags = socket.AI_PASSIVE
for res in set(socket.getaddrinfo(address, port, family, socket.SOCK_STREAM,
0, flags)):
af, socktype, proto, canonname, sockaddr = res
sock = socket.socket(af, socktype, proto)
set_close_exec(sock.fileno())
if os.name != 'nt':
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if af == socket.AF_INET6:
# On linux, ipv6 sockets accept ipv4 too by default,
# but this makes it impossible to bind to both
# 0.0.0.0 in ipv4 and :: in ipv6. On other systems,
# separate sockets *must* be used to listen for both ipv4
# and ipv6. For consistency, always disable ipv4 on our
# ipv6 sockets and use a separate ipv4 socket when needed.
#
# Python 2.x on windows doesn't have IPPROTO_IPV6.
if hasattr(socket, "IPPROTO_IPV6"):
sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
sock.setblocking(0)
sock.bind(sockaddr)
sock.listen(backlog)
sockets.append(sock)
return sockets
if hasattr(socket, 'AF_UNIX'):
def bind_unix_socket(file, mode=0o600, backlog=128):
"""Creates a listening unix socket.
If a socket with the given name already exists, it will be deleted.
If any other file with that name exists, an exception will be
raised.
Returns a socket object (not a list of socket objects like
`bind_sockets`)
"""
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
set_close_exec(sock.fileno())
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setblocking(0)
try:
st = os.stat(file)
except OSError as err:
if err.errno != errno.ENOENT:
raise
else:
if stat.S_ISSOCK(st.st_mode):
os.remove(file)
else:
raise ValueError("File %s exists and is not a socket", file)
sock.bind(file)
os.chmod(file, mode)
sock.listen(backlog)
return sock
def add_accept_handler(sock, callback, io_loop=None):
"""Adds an `.IOLoop` event handler to accept new connections on ``sock``.
When a connection is accepted, ``callback(connection, address)`` will
be run (``connection`` is a socket object, and ``address`` is the
address of the other end of the connection). Note that this signature
is different from the ``callback(fd, events)`` signature used for
`.IOLoop` handlers.
"""
if io_loop is None:
io_loop = IOLoop.current()
def accept_handler(fd, events):
while True:
try:
connection, address = sock.accept()
except socket.error as e:
if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
return
raise
callback(connection, address)
io_loop.add_handler(sock.fileno(), accept_handler, IOLoop.READ)
def is_valid_ip(ip):
"""Returns true if the given string is a well-formed IP address.
Supports IPv4 and IPv6.
"""
try:
res = socket.getaddrinfo(ip, 0, socket.AF_UNSPEC,
socket.SOCK_STREAM,
0, socket.AI_NUMERICHOST)
return bool(res)
except socket.gaierror as e:
if e.args[0] == socket.EAI_NONAME:
return False
raise
return True
class Resolver(Configurable):
"""Configurable asynchronous DNS resolver interface.
By default, a blocking implementation is used (which simply calls
`socket.getaddrinfo`). An alternative implementation can be
chosen with the `Resolver.configure <.Configurable.configure>`
class method::
Resolver.configure('tornado.netutil.ThreadedResolver')
The implementations of this interface included with Tornado are
* `tornado.netutil.BlockingResolver`
* `tornado.netutil.ThreadedResolver`
* `tornado.netutil.OverrideResolver`
* `tornado.platform.twisted.TwistedResolver`
* `tornado.platform.caresresolver.CaresResolver`
"""
@classmethod
def configurable_base(cls):
return Resolver
@classmethod
def configurable_default(cls):
return BlockingResolver
def resolve(self, host, port, family=socket.AF_UNSPEC, callback=None):
"""Resolves an address.
The ``host`` argument is a string which may be a hostname or a
literal IP address.
Returns a `.Future` whose result is a list of (family,
address) pairs, where address is a tuple suitable to pass to
`socket.connect <socket.socket.connect>` (i.e. a ``(host,
port)`` pair for IPv4; additional fields may be present for
IPv6). If a ``callback`` is passed, it will be run with the
result as an argument when it is complete.
"""
raise NotImplementedError()
class ExecutorResolver(Resolver):
def initialize(self, io_loop=None, executor=None):
self.io_loop = io_loop or IOLoop.current()<|fim▁hole|>
@run_on_executor
def resolve(self, host, port, family=socket.AF_UNSPEC):
addrinfo = socket.getaddrinfo(host, port, family)
results = []
for family, socktype, proto, canonname, address in addrinfo:
results.append((family, address))
return results
class BlockingResolver(ExecutorResolver):
"""Default `Resolver` implementation, using `socket.getaddrinfo`.
The `.IOLoop` will be blocked during the resolution, although the
callback will not be run until the next `.IOLoop` iteration.
"""
def initialize(self, io_loop=None):
super(BlockingResolver, self).initialize(io_loop=io_loop)
class ThreadedResolver(ExecutorResolver):
"""Multithreaded non-blocking `Resolver` implementation.
Requires the `concurrent.futures` package to be installed
(available in the standard library since Python 3.2,
installable with ``pip install futures`` in older versions).
The thread pool size can be configured with::
Resolver.configure('tornado.netutil.ThreadedResolver',
num_threads=10)
"""
def initialize(self, io_loop=None, num_threads=10):
from concurrent.futures import ThreadPoolExecutor
super(ThreadedResolver, self).initialize(
io_loop=io_loop, executor=ThreadPoolExecutor(num_threads))
class OverrideResolver(Resolver):
"""Wraps a resolver with a mapping of overrides.
This can be used to make local DNS changes (e.g. for testing)
without modifying system-wide settings.
The mapping can contain either host strings or host-port pairs.
"""
def initialize(self, resolver, mapping):
self.resolver = resolver
self.mapping = mapping
def resolve(self, host, port, *args, **kwargs):
if (host, port) in self.mapping:
host, port = self.mapping[(host, port)]
elif host in self.mapping:
host = self.mapping[host]
return self.resolver.resolve(host, port, *args, **kwargs)
# These are the keyword arguments to ssl.wrap_socket that must be translated
# to their SSLContext equivalents (the other arguments are still passed
# to SSLContext.wrap_socket).
_SSL_CONTEXT_KEYWORDS = frozenset(['ssl_version', 'certfile', 'keyfile',
'cert_reqs', 'ca_certs', 'ciphers'])
def ssl_options_to_context(ssl_options):
"""Try to convert an ``ssl_options`` dictionary to an
`~ssl.SSLContext` object.
The ``ssl_options`` dictionary contains keywords to be passed to
`ssl.wrap_socket`. In Python 3.2+, `ssl.SSLContext` objects can
be used instead. This function converts the dict form to its
`~ssl.SSLContext` equivalent, and may be used when a component which
accepts both forms needs to upgrade to the `~ssl.SSLContext` version
to use features like SNI or NPN.
"""
if isinstance(ssl_options, dict):
assert all(k in _SSL_CONTEXT_KEYWORDS for k in ssl_options), ssl_options
if (not hasattr(ssl, 'SSLContext') or
isinstance(ssl_options, ssl.SSLContext)):
return ssl_options
context = ssl.SSLContext(
ssl_options.get('ssl_version', ssl.PROTOCOL_SSLv23))
if 'certfile' in ssl_options:
context.load_cert_chain(ssl_options['certfile'], ssl_options.get('keyfile', None))
if 'cert_reqs' in ssl_options:
context.verify_mode = ssl_options['cert_reqs']
if 'ca_certs' in ssl_options:
context.load_verify_locations(ssl_options['ca_certs'])
if 'ciphers' in ssl_options:
context.set_ciphers(ssl_options['ciphers'])
return context
def ssl_wrap_socket(socket, ssl_options, server_hostname=None, **kwargs):
"""Returns an ``ssl.SSLSocket`` wrapping the given socket.
``ssl_options`` may be either a dictionary (as accepted by
`ssl_options_to_context`) or an `ssl.SSLContext` object.
Additional keyword arguments are passed to ``wrap_socket``
(either the `~ssl.SSLContext` method or the `ssl` module function
as appropriate).
"""
context = ssl_options_to_context(ssl_options)
if hasattr(ssl, 'SSLContext') and isinstance(context, ssl.SSLContext):
if server_hostname is not None and getattr(ssl, 'HAS_SNI'):
# Python doesn't have server-side SNI support so we can't
# really unittest this, but it can be manually tested with
# python3.2 -m tornado.httpclient https://sni.velox.ch
return context.wrap_socket(socket, server_hostname=server_hostname,
**kwargs)
else:
return context.wrap_socket(socket, **kwargs)
else:
return ssl.wrap_socket(socket, **dict(context, **kwargs))
if hasattr(ssl, 'match_hostname'): # python 3.2+
ssl_match_hostname = ssl.match_hostname
SSLCertificateError = ssl.CertificateError
else:
# match_hostname was added to the standard library ssl module in python 3.2.
# The following code was backported for older releases and copied from
# https://bitbucket.org/brandon/backports.ssl_match_hostname
class SSLCertificateError(ValueError):
pass
def _dnsname_to_pat(dn):
pats = []
for frag in dn.split(r'.'):
if frag == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
else:
# Otherwise, '*' matches any dotless fragment.
frag = re.escape(frag)
pats.append(frag.replace(r'\*', '[^.]*'))
return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
def ssl_match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules
are mostly followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_to_pat(value).match(hostname):
return
dnsnames.append(value)
if not san:
# The subject is only checked when subjectAltName is empty
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_to_pat(value).match(hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise SSLCertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise SSLCertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise SSLCertificateError("no appropriate commonName or "
"subjectAltName fields were found")<|fim▁end|>
|
self.executor = executor or dummy_executor
|
<|file_name|>auth0.py<|end_file_name|><|fim▁begin|>import cis_profile
import cis_publisher
import boto3
import botocore
import os
import logging
import json
import time
from auth0.v3.authentication import GetToken
from auth0.v3.management import Auth0
from auth0.v3.exceptions import Auth0Error
from datetime import datetime, timezone, timedelta
from traceback import format_exc
# from http.client import HTTPConnection
logger = logging.getLogger(__name__)
# HTTPConnection.debuglevel = 1
class Auth0Publisher:
def __init__(self, context={}):
self.secret_manager = cis_publisher.secret.Manager()
self.context = context
self.report = None
self.config = cis_publisher.common.get_config()
self.s3_cache = None
self.s3_cache_require_update = False
# Only fields we care about for the user entries
# auth0 field->cis field map
self.az_cis_fields = {
"created_at": "created",
"given_name": "first_name",
"family_name": "last_name",
"name": None,
"nickname": None,
"user_id": "user_id",
"email": "primary_email",
"identities": "identities",
"blocked": "active",
}
self.az_blacklisted_connections = ["Mozilla-LDAP", "Mozilla-LDAP-Dev"]
self.az_whitelisted_connections = ["email", "github", "google-oauth2", "firefoxaccounts"]
self.az_users = None
self.all_cis_user_ids = None
self.user_ids_only = None
def get_s3_cache(self):
"""
If the cache exists and is not older than the configured cache time, return it; otherwise return None
return: dict (parsed JSON) or None
"""
if self.s3_cache is not None:
return self.s3_cache
s3 = boto3.client("s3")
bucket = os.environ.get("CIS_BUCKET_URL")
cache_time = int(os.environ.get("CIS_AUTHZERO_CACHE_TIME_SECONDS", 120))
recent = datetime.now(timezone.utc) - timedelta(seconds=cache_time)
try:
objects = s3.list_objects_v2(Bucket=bucket)
# bucket has zero contents?
if "Contents" not in objects:
logger.info("No S3 cache present")
return None
# Recent file?
for o in objects["Contents"]:
if o["Key"] == "cache.json" and recent > o["LastModified"]:
logger.info(
f"S3 cache too old, not using ({recent} gt {o['LastModified']}"
f", was cached for: {cache_time}s)"
)
return None
response = s3.get_object(Bucket=bucket, Key="cache.json")
data = response["Body"].read()
except botocore.exceptions.ClientError as e:
logger.error("Could not find S3 cache file: {}".format(e))
return None
logger.info("Using S3 cache")
self.s3_cache = json.loads(data)
return self.s3_cache
def save_s3_cache(self, data):
"""
@data dict JSON
"""
if self.s3_cache_require_update is False:
return
s3 = boto3.client("s3")
bucket = os.environ.get("CIS_BUCKET_URL")
s3.put_object(Bucket=bucket, Key="cache.json", Body=json.dumps(data))
logger.info("Wrote S3 cache file")
def publish(self, user_ids=None, chunk_size=100):
"""
Glue to create or fetch cis_profile.User profiles for this publisher
Then pass everything over to the Publisher class
If user_ids is None, ALL profiles are sent.
@user_ids: list of str - user ids to publish. If None, all users are published.
@chunk_size: int when no user_id is selected, this is the size of the chunk/slice we'll create to divide the
work between function calls (to self)
"""
if user_ids is None:
le = "All"
else:
le = len(user_ids)
logger.info("Starting Auth0 Publisher [{} users]".format(le))
# XXX login_method is overridden when posting the user or listing users, i.e. the one here does not matter
publisher = cis_publisher.Publish([], login_method="github", publisher_name="auth0")
# These are the users auth0 knows about
self.az_users = self.fetch_az_users(user_ids)
self.all_cis_user_ids = self.fetch_all_cis_user_ids(publisher)
# Should we fan-out processing to multiple function calls?
if user_ids is None:
# Because we do not care about most attributes update, we only process new users, or users that will be
# deactivated in order to save time. Note that there is (currently) no auth0 hook to notify of new user
# event, so this (the auth0 publisher that is) function needs to be reasonably fast to avoid delays when
# provisioning users
# So first, remove all known users from the requested list
user_ids_to_process_set = set(self.get_az_user_ids()) - set(self.all_cis_user_ids)
az_user_ids_set = set(self.get_az_user_ids())
# Add blocked users so that they get deactivated
logger.info(
"Converting filtering list, size of user_ids_to_process {}".format(len(user_ids_to_process_set))
)
for u in self.az_users:
if u["user_id"] in az_user_ids_set:
if ("blocked" in u.keys()) and (u["blocked"] is True):
user_ids_to_process_set.add(u["user_id"])
logger.info(
"After filtering out known CIS users/in auth0 blocked users, we will process {} users".format(
len(user_ids_to_process_set)
)
)
self.save_s3_cache({"az_users": self.az_users, "all_cis_user_ids": self.all_cis_user_ids})
self.fan_out(publisher, chunk_size, list(user_ids_to_process_set))
else:
# Don't cache auth0 list if we're just getting a single user, so that we get the most up to date data
# and because it's pretty fast for a single user
if len(user_ids) == 1:
os.environ["CIS_AUTHZERO_CACHE_TIME_SECONDS"] = "0"
logger.info("CIS_AUTHZERO_CACHE_TIME_SECONDS was set to 0 (caching disabled) for this run")
self.process(publisher, user_ids)
def fetch_all_cis_user_ids(self, publisher):
"""
Get all known CIS user ids for the whitelisted login methods
This is here because CIS only returns user ids per specific login methods
We also cache this
"""
self.s3_cache = self.get_s3_cache()
if self.s3_cache is not None:
self.all_cis_user_ids = self.s3_cache["all_cis_user_ids"]
return self.all_cis_user_ids
if self.all_cis_user_ids is not None:
return self.all_cis_user_ids
# Not cached, fetch it
self.s3_cache_require_update = True
# These are the users CIS knows about
self.all_cis_user_ids = []
for c in self.az_whitelisted_connections:
# FIXME we're not using the real login method here because
# Code in the CIS Vault matches against the start of `user_id` instead of the actual login method
# This is fine for most methods, except this one... ideally the code should change in the CIS Vault when it
# uses something else than DynamoDB and is able to match efficiently on other attributes
if c == "firefoxaccounts":
c = "oauth2|firefoxaccounts"
publisher.login_method = c
publisher.get_known_cis_users(include_inactive=False)<|fim▁hole|> # Invalidate publisher memory cache
publisher.known_cis_users = None
# XXX in case we got duplicates for some reason, we uniquify
self.all_cis_user_ids = list(set(self.all_cis_user_ids))
logger.info("Got {} known CIS users for all whitelisted login methods".format(len(self.all_cis_user_ids)))
return self.all_cis_user_ids
def get_az_user_ids(self):
"""
Extract a list of user_ids from a dict of auth0 users
return: list of user_ids
"""
if self.user_ids_only is not None:
return self.user_ids_only
self.user_ids_only = []
for u in self.fetch_az_users():
self.user_ids_only.append(u["user_id"])
return self.user_ids_only
def fetch_az_users(self, user_ids=None):
"""
Fetches ALL valid users from auth0's database
Returns list of user attributes
"""
# Memory cached?
if self.az_users is not None:
return self.az_users
# S3 cached?
self.get_s3_cache()
if self.s3_cache is not None:
self.az_users = self.s3_cache["az_users"]
return self.az_users
# Don't use cache for just one user
if self.az_users is not None and (user_ids is not None and len(user_ids) != 1):
return self.az_users
# Not cached, fetch it
if user_ids is not None and len(user_ids) != 1:
self.s3_cache_require_update = True
az_api_url = self.config("AUTHZERO_API", namespace="cis", default="auth-dev.mozilla.auth0.com")
az_client_id = self.secret_manager.secret("az_client_id")
az_client_secret = self.secret_manager.secret("az_client_secret")
az_fields = self.az_cis_fields.keys()
# Build the connection query (excludes LDAP)
# Excluded: "Mozilla-LDAP", "Mozilla-LDAP-Dev"
# Excluded: Old users without any group
# This can also be retrieved from /api/v2/connections
# Ignore non-verified `email` (such as unfinished passwordless flows) as we don't consider these to be valid
# users
max_date = datetime.utcnow() - timedelta(days=31) # maximum login length + 1 day
max_date_str = max_date.strftime("%Y-%m-%d")
exclusion_query = (
f"logins_count:[2 TO *] AND NOT last_login:[* TO {max_date_str}] AND "
'(groups:(everyone) OR (NOT _exists_:"groups"))'
)
az_query = exclusion_query + " AND email_verified:true AND ("
t = ""
for azc in self.az_whitelisted_connections:
az_query = az_query + t + 'identities.connection:"{}"'.format(azc)
t = " OR "
az_query += ")"
# NOTE XXX: There is no way to tell auth0's ES "don't include matches where the first identity.connection is a
# blacklisted connection", so we do this instead. This 100% relies on auth0 user_ids NOT being opaque,
# unfortunately
az_query += ' AND NOT (user_id:"ad|*")'
# Build query for user_ids if some are specified (else it gets all of them)
# NOTE: We can't query all that many users because auth0 uses a GET query which is limited in size by httpd
# (nginx - 8kb by default)
if user_ids and len(user_ids) > 6:
logger.warning(
"Cannot query the requested number of user_ids from auth0, query would be too large. "
"Querying all user_ids instead."
)
user_ids = None
# we had to add this because it gets called by the CIS-New-User hook, where the query wouldn't work
# because exclusion_query excludes users who have only a single login success
elif len(user_ids) == 1:
logger.info("Restricting auth0 user query to single user_id: {}".format(user_ids[0]))
az_query = f'user_id:"{user_ids[0]}"'
elif user_ids:
logger.info("Restricting auth0 user query to user_ids: {}".format(user_ids))
# e.g.: user_id:"email|foo" OR user_id:"email|bar" OR user_id:"ad|Mozilla-LDAP|baz"
or_joined_user_query = " OR ".join([f'user_id:"{u}"' for u in user_ids])
az_query += f" AND ({or_joined_user_query})"
logger.debug("About to get Auth0 user list")
az_getter = GetToken(az_api_url)
az_token = az_getter.client_credentials(az_client_id, az_client_secret, "https://{}/api/v2/".format(az_api_url))
auth0 = Auth0(az_api_url, az_token["access_token"])
# Query the entire thing
logger.info("Querying auth0 user database, query is: {}".format(az_query))
user_list = []
# This is an artificial upper limit of 100*9999 (per_page*page) i.e. 999 900 users max - just in case things
# go wrong
retries = 15
backoff = 20
for p in range(0, 9999):
tmp = None
try:
tmp = auth0.users.list(page=p, per_page=100, fields=az_fields, q=az_query)["users"]
logger.debug("Requesting auth0 user list, at page {}".format(p))
except Auth0Error as e:
# 429 is Rate limit exceeded and we can still retry
if (e.error_code == 429 or e.status_code == 429) and retries > 0:
backoff += 1
logger.debug(
"Rate limit exceeded, backing off for {} seconds, retries left {} error: {}".format(
backoff, retries, e
)
)
retries -= 1
time.sleep(backoff)
else:
logger.warning("Error: {}".format(e))
raise
if tmp == [] or tmp is None:
# stop when our page is empty
logger.debug("Crawled {} pages from auth0 users API".format(p))
break
else:
user_list.extend(tmp)
logger.info("Received {} users from auth0".format(len(user_list)))
self.az_users = user_list
return self.az_users
def convert_az_users(self, az_users):
"""
Convert a list of auth0 user fields to cis_profile Users
@az_users list of dicts with user attributes
Returns [cis_profile.Users]
"""
profiles = []
logger.info("Converting auth0 users into CIS Profiles ({} user(s))".format(len(az_users)))
for u in az_users:
p = cis_profile.User()
# Must have fields
p.user_id.value = u["user_id"]
p.user_id.signature.publisher.name = "access_provider"
p.update_timestamp("user_id")
p.active.value = True
if "blocked" in u.keys():
if u["blocked"]:
p.active.value = False
p.active.signature.publisher.name = "access_provider"
p.update_timestamp("active")
p.primary_email.value = u["email"]
p.primary_email.metadata.display = "private"
p.primary_email.signature.publisher.name = "access_provider"
p.update_timestamp("primary_email")
try:
p.login_method.value = u["identities"][0]["connection"]
p.update_timestamp("login_method")
except IndexError:
logger.critical("Could not find login method for user {}, skipping integration".format(p.user_id.value))
continue
# Should have fields (cannot be "None" or "" but can be " ")
tmp = u.get("given_name", u.get("name", u.get("family_name", u.get("nickname", " "))))
p.first_name.value = tmp
p.first_name.metadata.display = "private"
p.first_name.signature.publisher.name = "access_provider"
p.update_timestamp("first_name")
tmp = u.get("family_name", " ")
p.last_name.value = tmp
p.last_name.metadata.display = "private"
p.last_name.signature.publisher.name = "access_provider"
p.update_timestamp("last_name")
# May have fields (its ok if these are not set)
tmp = u.get("node_id", None)
if tmp is not None:
p.identities.github_id_v4.value = tmp
p.identities.github_id_v4.display = "private"
p.identities.github_id_v4.signature.publisher.name = "access_provider"
p.update_timestamp("identities.github_id_v4")
if "identities" in u.keys():
# If blacklisted connection is in the first entry, skip (first entry = "main" user)
if u["identities"][0].get("connection") in self.az_blacklisted_connections:
logger.warning(
"ad/LDAP account returned from search - this should not happen. User will be skipped."
" User_id: {}".format(p.user_id.value)
)
continue
for ident in u["identities"]:
if ident.get("provider") == "google-oauth2":
p.identities.google_oauth2_id.value = ident.get("user_id")
p.identities.google_oauth2_id.metadata.display = "private"
p.identities.google_oauth2_id.signature.publisher.name = "access_provider"
p.update_timestamp("identities.google_oauth2_id")
p.identities.google_primary_email.value = p.primary_email.value
p.identities.google_primary_email.metadata.display = "private"
p.identities.google_primary_email.signature.publisher.name = "access_provider"
p.update_timestamp("identities.google_primary_email")
elif ident.get("provider") == "oauth2" and ident.get("connection") == "firefoxaccounts":
p.identities.firefox_accounts_id.value = ident.get("user_id")
p.identities.firefox_accounts_id.metadata.display = "private"
p.identities.firefox_accounts_id.signature.publisher.name = "access_provider"
p.update_timestamp("identities.firefox_accounts_id")
p.identities.firefox_accounts_primary_email.value = p.primary_email.value
p.identities.firefox_accounts_primary_email.metadata.display = "private"
p.identities.firefox_accounts_primary_email.signature.publisher.name = "access_provider"
p.update_timestamp("identities.firefox_accounts_primary_email")
elif ident.get("provider") == "github":
if ident.get("nickname") is not None:
# Match the hack in
# https://github.com/mozilla-iam/dino-park-whoami/blob/master/src/update.rs#L42 (see
# function definition at the top of the file as well)
p.usernames.value = {"HACK#GITHUB": ident.get("nickname")}
p.usernames.metadata.display = "private"
p.usernames.signature.publisher.name = "access_provider"
p.identities.github_id_v3.value = ident.get("user_id")
p.identities.github_id_v3.metadata.display = "private"
p.identities.github_id_v3.signature.publisher.name = "access_provider"
p.update_timestamp("identities.github_id_v3")
if "profileData" in ident.keys():
p.identities.github_primary_email.value = ident["profileData"].get("email")
p.identities.github_primary_email.metadata.verified = ident["profileData"].get(
"email_verified", False
)
p.identities.github_primary_email.metadata.display = "private"
p.identities.github_primary_email.signature.publisher.name = "access_provider"
p.update_timestamp("identities.github_primary_email")
p.identities.github_id_v4.value = ident["profileData"].get("node_id")
p.identities.github_id_v4.metadata.display = "private"
p.identities.github_id_v4.signature.publisher.name = "access_provider"
p.update_timestamp("identities.github_id_v4")
# Sign and verify everything
try:
p.sign_all(publisher_name="access_provider")
except Exception as e:
logger.critical(
"Profile data signing failed for user {} - skipped signing, verification "
"WILL FAIL ({})".format(p.primary_email.value, e)
)
logger.debug("Profile data {}".format(p.as_dict()))
try:
p.validate()
except Exception as e:
logger.critical(
"Profile schema validation failed for user {} - skipped validation, verification "
"WILL FAIL({})".format(p.primary_email.value, e)
)
logger.debug("Profile data {}".format(p.as_dict()))
try:
p.verify_all_publishers(cis_profile.User())
except Exception as e:
logger.critical(
"Profile publisher verification failed for user {} - skipped signing, verification "
"WILL FAIL ({})".format(p.primary_email.value, e)
)
logger.debug("Profile data {}".format(p.as_dict()))
logger.debug("Profile signed and ready to publish for user_id {}".format(p.user_id.value))
profiles.append(p)
logger.info("All profiles in this request were converted to CIS Profiles")
return profiles
def process(self, publisher, user_ids):
"""
Process profiles and post them
@publisher object the publisher object to operate on
@user_ids list of user ids to process in this batch
"""
# Only process the requested user_ids from the list of all az users
# as the list is often containing all users, not just the ones we requested
todo_user_ids = list(set(self.get_az_user_ids()) & set(user_ids))
todo_users = []
for u in self.az_users:
if u["user_id"] in todo_user_ids:
todo_users.append(u)
profiles = self.convert_az_users(todo_users)
logger.info("Processing {} profiles".format(len(profiles)))
publisher.profiles = profiles
failures = []
try:
failures = publisher.post_all(user_ids=user_ids, create_users=True)
except Exception as e:
logger.error("Failed to post_all() profiles. Trace: {}".format(format_exc()))
raise e
if len(failures) > 0:
logger.error("Failed to post {} profiles: {}".format(len(failures), failures))
def fan_out(self, publisher, chunk_size, user_ids_to_process):
"""
        Slices all users to process into chunks
        and self-invokes as many times as needed to complete all work in parallel lambda functions
        When self-invoking, this will effectively call self.process() instead of self.fan_out()
        Note: chunk_size should never cause the invoke() argument to exceed 128KB (len(Payload.encode('utf-8')) <
        128KB) as this is the maximum AWS Lambda payload size.
@publisher object the cis_publisher object to operate on
@chunk_size int size of the chunk to process
"""
sliced = [user_ids_to_process[i : i + chunk_size] for i in range(0, len(user_ids_to_process), chunk_size)]
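        # For example (illustrative values): with chunk_size=2, ["id1", "id2", "id3", "id4", "id5"] is sliced
        # into [["id1", "id2"], ["id3", "id4"], ["id5"]], and each slice becomes one self-invocation payload.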
        logger.info(
            "No user_id selected. Creating slices of work, chunk size: {}, slices: {}, total users: {} and "
            "fanning-out work to self".format(chunk_size, len(sliced), len(user_ids_to_process))
)
lambda_client = boto3.client("lambda")
for s in sliced:
lambda_client.invoke(FunctionName=self.context.function_name, InvocationType="Event", Payload=json.dumps(s))
time.sleep(3) # give api calls a chance, otherwise this storms resources
logger.info("Exiting slicing function successfully")<|fim▁end|>
|
self.all_cis_user_ids += publisher.known_cis_users_by_user_id.keys()
|
<|file_name|>blockgen.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package rpctest
import (
"errors"
"math"
"math/big"
"runtime"
"time"
"github.com/ltcsuite/ltcd/blockchain"
"github.com/ltcsuite/ltcd/chaincfg"
"github.com/ltcsuite/ltcd/chaincfg/chainhash"
"github.com/ltcsuite/ltcd/ltcutil"
"github.com/ltcsuite/ltcd/mining"
"github.com/ltcsuite/ltcd/txscript"
"github.com/ltcsuite/ltcd/wire"
)
// solveBlock attempts to find a nonce which makes the passed block header hash
// to a value less than the target difficulty. When a successful solution is
// found true is returned and the nonce field of the passed header is updated
// with the solution. False is returned if no solution exists.
func solveBlock(header *wire.BlockHeader, targetDifficulty *big.Int) bool {
// sbResult is used by the solver goroutines to send results.
type sbResult struct {
found bool
nonce uint32
}
// solver accepts a block header and a nonce range to test. It is
// intended to be run as a goroutine.
quit := make(chan bool)
results := make(chan sbResult)
solver := func(hdr wire.BlockHeader, startNonce, stopNonce uint32) {
// We need to modify the nonce field of the header, so make sure
// we work with a copy of the original header.
for i := startNonce; i >= startNonce && i <= stopNonce; i++ {
select {
case <-quit:
return
default:
hdr.Nonce = i
hash := hdr.PowHash()
if blockchain.HashToBig(&hash).Cmp(targetDifficulty) <= 0 {
select {
case results <- sbResult{true, i}:
return
case <-quit:
return
}
}
}
}
select {
case results <- sbResult{false, 0}:
case <-quit:
return
}
}
startNonce := uint32(0)
stopNonce := uint32(math.MaxUint32)
numCores := uint32(runtime.NumCPU())
noncesPerCore := (stopNonce - startNonce) / numCores
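	// Worked example (illustrative): with 4 cores, noncesPerCore = 0xffffffff/4 = 1073741823, so the
	// solver goroutines scan [0, 1073741822], [1073741823, 2147483645], [2147483646, 3221225468] and
	// [3221225469, 4294967295] (the last stop is widened to stopNonce by the i == numCores-1 case below).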
for i := uint32(0); i < numCores; i++ {
rangeStart := startNonce + (noncesPerCore * i)
rangeStop := startNonce + (noncesPerCore * (i + 1)) - 1
if i == numCores-1 {
rangeStop = stopNonce
}
go solver(*header, rangeStart, rangeStop)
}
for i := uint32(0); i < numCores; i++ {
result := <-results
if result.found {
close(quit)
header.Nonce = result.nonce
return true
}
}
return false
}
// standardCoinbaseScript returns a standard script suitable for use as the
// signature script of the coinbase transaction of a new block. In particular,
// it starts with the block height that is required by version 2 blocks.
func standardCoinbaseScript(nextBlockHeight int32, extraNonce uint64) ([]byte, error) {
return txscript.NewScriptBuilder().AddInt64(int64(nextBlockHeight)).
AddInt64(int64(extraNonce)).Script()
}
// createCoinbaseTx returns a coinbase transaction paying an appropriate
// subsidy based on the passed block height to the provided address.
func createCoinbaseTx(coinbaseScript []byte, nextBlockHeight int32,
addr ltcutil.Address, mineTo []wire.TxOut,
net *chaincfg.Params) (*ltcutil.Tx, error) {
// Create the script to pay to the provided payment address.
pkScript, err := txscript.PayToAddrScript(addr)
if err != nil {
return nil, err
}
tx := wire.NewMsgTx(wire.TxVersion)
tx.AddTxIn(&wire.TxIn{
// Coinbase transactions have no inputs, so previous outpoint is
// zero hash and max index.
PreviousOutPoint: *wire.NewOutPoint(&chainhash.Hash{},
wire.MaxPrevOutIndex),
SignatureScript: coinbaseScript,
Sequence: wire.MaxTxInSequenceNum,
})
if len(mineTo) == 0 {
tx.AddTxOut(&wire.TxOut{
Value: blockchain.CalcBlockSubsidy(nextBlockHeight, net),
PkScript: pkScript,
})
} else {
for i := range mineTo {
tx.AddTxOut(&mineTo[i])
}
}
return ltcutil.NewTx(tx), nil
}
// CreateBlock creates a new block building from the previous block with a<|fim▁hole|>// builds off of the genesis block for the specified chain.
func CreateBlock(prevBlock *ltcutil.Block, inclusionTxs []*ltcutil.Tx,
blockVersion int32, blockTime time.Time, miningAddr ltcutil.Address,
mineTo []wire.TxOut, net *chaincfg.Params) (*ltcutil.Block, error) {
var (
prevHash *chainhash.Hash
blockHeight int32
prevBlockTime time.Time
)
// If the previous block isn't specified, then we'll construct a block
// that builds off of the genesis block for the chain.
if prevBlock == nil {
prevHash = net.GenesisHash
blockHeight = 1
prevBlockTime = net.GenesisBlock.Header.Timestamp.Add(time.Minute)
} else {
prevHash = prevBlock.Hash()
blockHeight = prevBlock.Height() + 1
prevBlockTime = prevBlock.MsgBlock().Header.Timestamp
}
// If a target block time was specified, then use that as the header's
// timestamp. Otherwise, add one second to the previous block unless
// it's the genesis block in which case use the current time.
var ts time.Time
switch {
case !blockTime.IsZero():
ts = blockTime
default:
ts = prevBlockTime.Add(time.Second)
}
extraNonce := uint64(0)
coinbaseScript, err := standardCoinbaseScript(blockHeight, extraNonce)
if err != nil {
return nil, err
}
coinbaseTx, err := createCoinbaseTx(coinbaseScript, blockHeight,
miningAddr, mineTo, net)
if err != nil {
return nil, err
}
// Create a new block ready to be solved.
blockTxns := []*ltcutil.Tx{coinbaseTx}
if inclusionTxs != nil {
blockTxns = append(blockTxns, inclusionTxs...)
}
// We must add the witness commitment to the coinbase if any
// transactions are segwit.
witnessIncluded := false
for i := 1; i < len(blockTxns); i++ {
if blockTxns[i].MsgTx().HasWitness() {
witnessIncluded = true
break
}
}
if witnessIncluded {
_ = mining.AddWitnessCommitment(coinbaseTx, blockTxns)
}
merkles := blockchain.BuildMerkleTreeStore(blockTxns, false)
var block wire.MsgBlock
block.Header = wire.BlockHeader{
Version: blockVersion,
PrevBlock: *prevHash,
MerkleRoot: *merkles[len(merkles)-1],
Timestamp: ts,
Bits: net.PowLimitBits,
}
for _, tx := range blockTxns {
if err := block.AddTransaction(tx.MsgTx()); err != nil {
return nil, err
}
}
found := solveBlock(&block.Header, net.PowLimit)
if !found {
return nil, errors.New("Unable to solve block")
}
utilBlock := ltcutil.NewBlock(&block)
utilBlock.SetHeight(blockHeight)
return utilBlock, nil
}<|fim▁end|>
|
// specified blockversion and timestamp. If the timestamp passed is zero (not
// initialized), then the timestamp of the previous block will be used plus 1
// second is used. Passing nil for the previous block results in a block that
|
<|file_name|>JBoss5JMX.py<|end_file_name|><|fim▁begin|>from src.platform.jboss.interfaces import JMXInterface
<|fim▁hole|> super(FPrint, self).__init__()
self.version = "5.0"<|fim▁end|>
|
class FPrint(JMXInterface):
def __init__(self):
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>fn main() {
let a = 1;
println!("{}", greet());
// place holder for didChange test.
}
fn greet() -> i32 {
42
}
// place holder for didChange test.
mod libs;
use libs::yo;<|fim▁hole|>}
#[cfg(feature = "f42")]
fn conditional_fn() -> i32 {
42
}<|fim▁end|>
|
fn ref_in_main() -> i32 {
yo() + 1
|
<|file_name|>rss.py<|end_file_name|><|fim▁begin|>import sys
import urllib2
import HTMLParser
import xml.etree.ElementTree as ET
from logging import getLogger
class Rss(object):
"""A class for handling RSS feeds"""
def __init__(self,url=None):
if not url:
self.url = ''
self.articles = ''
else:
self.url = url
self.articles = []
self.logger = getLogger(__name__)
def get_rss_into_articles(self):
self.xml = urllib2.urlopen(self.url.encode('utf-8')).read()<|fim▁hole|> try:
title = item.find("title")
link = item.find("link")
descr = item.find("description")
pubDate = item.find("pubDate")
strgDate = str(pubDate.text)
article = Article(title.text,link.text,descr.text, strgDate)
self.articles.append(article)
except Exception as e:
self.logger.error("Error in get_rss routine! Error report: " + e)
return self.articles
class Article(object):
"""A class for handling the details of an article"""
def __init__(self):
self.title = ''
self.link = ''
self.descr = ''
self.pubDate = ''
self.pic_links = []
self.logger = getLogger(__name__)
def __init__(self, title,link,descr,pubDate):
self.title = title
self.link = link
self.descr = descr
self.pubDate = pubDate
self.full_txt = ""
self.pic_links = []
        self.pics = []
        self.logger = getLogger(__name__)
def get_full_txt(self):
try:
response = urllib2.urlopen(self.link).read().decode('utf-8', 'ignore')
parser = RssHTMLParser()
parser.feed(response)
self.pic_links = parser.img_links
self.full_txt = parser.data
except Exception as e:
self.logger.error("Error in get_full_txt() of RssClass.Article Error: " + e)
def get_photos(self, pic_links=None):
pics = []
if pic_links == None:
try:
for link in self.pic_links:
img = urllib2.urlopen(link).read()
#f = open('/home/parallels/Desktop/pic.jpg', 'wb')
#f.write(img)
#f.close()
self.pics.append(img)
except Exception as e:
self.logger.error("Error in RssClass.get_photos() using self.pic_links. Error: " + e)
else:
try:
for link in pic_links:
                    image = urllib2.urlopen(link).read()
pics.append(image)
except Exception as e:
self.logger.error("Error in RssClass.get_photos() using pic_links. Error: " + e)
return pics
class RssHTMLParser(HTMLParser.HTMLParser):
def __init__(self):
HTMLParser.HTMLParser.__init__(self)
self.is_start_p = False
self.is_end_p = False
self.is_start_sp = False
self.data = ""
self.img_links = []
def handle_starttag(self, tag, attrs):
if tag == 'p':
self.is_start_p = True
elif tag == 'span':
self.is_start_sp = True
elif self.is_start_p and tag == 'a':
self.is_start_p = True
elif self.is_start_p and tag == 'img' or self.is_start_sp and tag == 'img':
for attr in attrs:
if attr[0] == 'src':
self.img_links.append(attr[1])
else:
self.is_start_p = False
self.is_start_sp = False
def handle_endtag(self, tag):
if tag == 'p':
self.is_start_p = False
self.is_end_p = True
elif tag == 'a' and self.is_start_p:
self.is_start_p = True
else:
self.is_end_p = False
def handle_data(self, data):
if self.is_start_p:
self.data += data
elif self.is_end_p:
self.data += ' '<|fim▁end|>
|
root = ET.fromstring(self.xml)
for item in root.findall(".//item"):
|
<|file_name|>utils.xml.js<|end_file_name|><|fim▁begin|>var xml = require('xmlbuilder');
var fs = require('fs');
/**
 * Function is used to create a plist file which is required for downloading an iOS app.
* @param {string} name app name
* @param {string} path path to application
* @param {string} title title for alert
* @param {Function} callback function which will be called when plist file is created
*/
function creatPlist(name, path, title, callback){
var d = xml.create('plist', {'version':'1.0'})
.ele('dict')
.ele('key','items').up()
.ele('array')
.ele('dict')
.ele('key','assets').up()
.ele('array')
.ele('dict')
.ele('key','kind').up()
.ele('string','software-package').up()
.ele('key','url').up()
.ele('string',path).up()
.up()
.up()
.ele('key','metadata').up()
.ele('dict')
.ele('key','bundle-identifier').up()
.ele('string', name).up()
.ele('key', 'kind').up()
.ele('string','software').up()
.ele('key','title').up()
.ele('string', title)
.up()
.up()
.up()
.up()
.up()
.end({ pretty: true});
<|fim▁hole|> callback(err,filePath);
});
console.log(xml);
}
//--------------EXPORTS---------------//
exports.creatPlist = creatPlist;<|fim▁end|>
|
//generate unique file path:) use this for now.
var filePath = './processing/file' + new Date().getMilliseconds() + '.plist';
fs.writeFile(filePath, d, function(err){
|
<|file_name|>base.py<|end_file_name|><|fim▁begin|>"""
Django settings for dfiid project.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
from django.core.exceptions import ImproperlyConfigured<|fim▁hole|>
def get_env(setting):
""" Get the environment setting or return exception """
try:
return os.environ[setting]
except KeyError:
error_msg = 'Set the %s env variable' % setting
raise ImproperlyConfigured(error_msg)
SECRET_KEY = get_env('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['*']
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sitemaps',
'nocaptcha_recaptcha',
'core',
'user',
'content',
'notify',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'dfiid.urls'
WSGI_APPLICATION = 'dfiid.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': get_env('DB_NAME'),
'USER': get_env('DB_USER'),
'PASSWORD': get_env('DB_PASSWORD'),
'HOST': get_env('DB_HOST'),
'PORT': get_env('DB_PORT'),
}
}
LANGUAGE_CODE = get_env('LANGUAGE')
TIME_ZONE = 'Atlantic/Canary'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/s/'
STATICFILES_DIRS = ( os.path.join(BASE_DIR, 'static'), )
STATIC_ROOT = os.path.join(BASE_DIR, 's')
MEDIA_URL = '/m/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'm')
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
AUTH_USER_MODEL = 'user.User'
LOGIN_URL = '/login'
LOGIN_REDIRECT_URL = '/'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
NORECAPTCHA_SITE_KEY = get_env('NORECAPTCHA_SITE_KEY')
NORECAPTCHA_SECRET_KEY = get_env('NORECAPTCHA_SECRET_KEY')<|fim▁end|>
| |
<|file_name|>rStarTree.go<|end_file_name|><|fim▁begin|>package storage
/*
An implementation of a 2-dimentional R*-Tree used for storing <lat,long> coordinates of boats. See references [0] and [9] for description of the datastructure (haven't followed the instructions 100%)
Notes:
- MBR - Minimum Bounding Rectangle
- The FindWithin() & FindAll() function returns the coordinates (& the mmsi number?) for the boat. More info about the boats are found when clicking the leafletjs markers/ querying the API
- The height of a node will never change, but its level will increase as the root is split
- All leaf nodes must be on the same level
- Internal nodes contains entries of the form <childNode, mbr>
- Leaf nodes contains entries of the form <mbr, mmsi>
- Wiki: best performance has been experienced with a minimum fill of 30%–40% of the maximum number of entries
- Boats are stored as zero-area rectangles instead of points, because it works better with the R*tree
*/
import (
"errors"
"log"
"sort"
"github.com/tormol/AIS/geo"
)
const RTree_M = 5 //max entries per node.
const RTree_m = 2 //min entries per node. 40% of M is best
// RTree is a two-dimensional R*-tree implementation with float64 positions and uint32 values
type RTree struct {
root *node
numOfBoats int
}
// NumOfBoats return the total number of boats stored in the structure.
func (rt *RTree) NumOfBoats() int {
return rt.numOfBoats
}
// Match is used to store a match found when searching the tree.
type Match struct {
MMSI uint32
Lat float64
Long float64
}
type node struct {
parent *node //Points to parent node
entries []entry //Array of all the node's entries (should have a default length of M+1)
height int //Height of the node ( = number of edges between node and a leafnode)
}
// isLeaf returns true of the node is a leafnode.
func (n *node) isLeaf() bool { return n.height == 0 }
// Needed for node to be sortable [11]:
type byLat []entry // for sorting by Latitude
type byLong []entry // for sorting by Longitude
func (e byLat) Len() int { return len(e) }
func (e byLat) Swap(i, j int) { e[i], e[j] = e[j], e[i] }
func (e byLat) Less(i, j int) bool { //first sorted by min, then if tie, by max
if e[i].mbr.Min().Lat < e[j].mbr.Min().Lat {
return true
} else if e[i].mbr.Min().Lat == e[j].mbr.Min().Lat {
return e[i].mbr.Max().Lat < e[j].mbr.Max().Lat
}
return false
}
func (e byLong) Len() int { return len(e) }
func (e byLong) Swap(i, j int) { e[i], e[j] = e[j], e[i] }
func (e byLong) Less(i, j int) bool { //first sorted by min, then if tie by max
if e[i].mbr.Min().Long < e[j].mbr.Min().Long {
return true
} else if e[i].mbr.Min().Long == e[j].mbr.Min().Long {
return e[i].mbr.Max().Long < e[j].mbr.Max().Long
}
return false
}
/* As described in [9]:
- A non-leaf node contains entries of the form (child_pointer, rectangle)
- A leaf node contains entries of the form (Object_ID, rectangle)
*/
type entry struct {
mbr *geo.Rectangle //Points to the MBR containing all the children of this entry
child *node //Points to the node (only used in internal nodes)
mmsi uint32 //The mmsi number of the boat (only used in leafnode-entries)
dist float64 //The distance from center of mbr to center of parents mbr (used for the reInsert algorithm)
}
/*
Needed for sorting a list of entries by the distance from their center
to the center of the "parent node" mbr. (used by reInsert algorithm)
*/
type byDist []entry
func (e byDist) Len() int { return len(e) }
func (e byDist) Swap(i, j int) { e[i], e[j] = e[j], e[i] }
func (e byDist) Less(i, j int) bool { return e[i].dist < e[j].dist }
// NewRTree returns a pointer to a new R-Tree object.
func NewRTree() *RTree { //TODO could take M (and m) as input?
return &RTree{
root: &node{
parent: nil,
entries: make([]entry, 0, RTree_M+1),
height: 0,
},
}
}
// InsertData inserts a new boat into the tree structure.
func (rt *RTree) InsertData(lat, long float64, mmsi uint32) error {
r, err := geo.NewRectangle(lat, long, lat, long)
if err != nil {
return err
}
newEntry := entry{ //Dont have to set all the parameters... the rest will be set to its null-value
mbr: r,
mmsi: mmsi,
}
//[ID1] Insert starting with the leaf height as parameter
rt.insert(0, newEntry, true)
rt.numOfBoats++
return nil
}
// insert inserts an entry into a node at a given height.
func (rt *RTree) insert(height int, newEntry entry, first bool) { //first is needed in case of overflowTreatment, it should normally be true
//[I1] ChooseSubtree with height as a parameter to find the node N
n := rt.chooseSubtree(newEntry.mbr, height)
//If an internal entry is re-inserted, the node's parent pointer must be updated
if height >= 1 {
newEntry.child.parent = n
}
//[I2] Append newEntry to n if room, else call OverflowTreatment [for reinsertion or split]
n.entries = append(n.entries, newEntry)
if len(n.entries) >= RTree_M+1 { // n is full -> call overflowTreatment
didSplit, nn := rt.overflowTreatment(n, first) //OT finds the appropriate height from n.height
if didSplit {
//[I3] if OverflowTreatment was called and a split was performed: propagate OT upwards
if nn.height == rt.root.height { // if root was split: create a new root
newRoot := node{
parent: nil,
entries: make([]entry, 0, RTree_M+1),
height: rt.root.height + 1,
}
nEntry := entry{mbr: n.recalculateMBR(), child: n}
nnEntry := entry{mbr: nn.recalculateMBR(), child: nn}
newRoot.entries = append(newRoot.entries, nEntry)
newRoot.entries = append(newRoot.entries, nnEntry)
n.parent = &newRoot
nn.parent = &newRoot
rt.root = &newRoot
//fmt.Printf("Root was split...^ new height is %d\n", newRoot.height)
return //The root has no MBR, so there is no need to adjust any MBRs
}
// n was split into n & nn -> insert nn into the tree at the same height
rt.insert(nn.height+1, entry{mbr: nn.recalculateMBR(), child: nn}, true)
}
}
//[I4] Adjust all MBR in the insertion path
for n.height < rt.root.height {
pIdx, err := n.parentEntriesIdx()
CheckErr(err, "insert had some trouble adjusting the MBR...")
n.parent.entries[pIdx].mbr = n.recalculateMBR()
n = n.parent
}
return
}
// overflowTreatment handles the overflowing node n.
// It will first try a reinsert, then do a split.
func (rt *RTree) overflowTreatment(n *node, first bool) (bool, *node) { //returns if n wasSplit, and nn (false -> reInserted )
//[OT1] if height is not root && this is first call of OT in given height during insertion: reInsert. else: split
if first && n.height < rt.root.height {
rt.reInsert(n)
return false, nil
} else { // The entry has been inserted before -> split the node
nn, err := n.split()
CheckErr(err, "overflowTreatment failed to split a node")
return true, nn
}
}
// reInsert is uses to re-insert some of the entries of the node.
// It is used when the node is full.
func (rt *RTree) reInsert(n *node) {
//[RI1] for all M+1 entries: compute distance between their center and the center of the mbr of n
// Finding the center of the MBR of n
i, err := n.parentEntriesIdx()
CheckErr(err, "reInsert had some trouble locating the entry in the parent node")
centerOfMBR := n.parent.entries[i].mbr.Center()
// Computing the distance for all entries in n
for _, ent := range n.entries {
ent.dist = ent.mbr.Center().DistanceTo(centerOfMBR)
}
//[RI2] sort the entries by distance in decreasing order
sort.Sort(sort.Reverse(byDist(n.entries)))
//[RI3] remove the first p entries from n, and adjust mbr of n
f := (RTree_M * 0.3) //30% of M performs best according to [9]
p := int(f)
tmp := make([]entry, p)
copy(tmp, n.entries[:p])
n.entries = n.entries[p:] //TODO now the cap of n.entries is only 8...
newMBR := n.recalculateMBR()
n.parent.entries[i].mbr = newMBR
//[RI4] starting with min distance: invoke insert to reinsert the entries
for k := len(tmp) - 1; k >= 0; k-- {
rt.insert(n.height, tmp[k], false) // "first" is set to false because the entry has previously been inserted
}
}
// chooseSubtree chooses the leaf node (or the best node of a given height) in which to place a new entry.
func (rt *RTree) chooseSubtree(r *geo.Rectangle, height int) *node {
n := rt.root //CS1
for !n.isLeaf() && n.height > height { //CS2 n.height gets lower for every iteration
bestChild := n.entries[0]
pointsToLeaves := false
if n.height == 1 {
pointsToLeaves = true
}
var bestDifference float64 //must be reset for each node n
if pointsToLeaves {
bestDifference = bestChild.overlapChangeWith(r)
} else {
bestDifference = bestChild.mbr.AreaDifference(bestChild.mbr.MBRWith(r))
}
for i := 1; i < len(n.entries); i++ {
e := n.entries[i]
if pointsToLeaves { //childpointer points to leaves -> [Determine the minimum overlap cost]
overlapDifference := e.overlapChangeWith(r)
if overlapDifference <= bestDifference {
if overlapDifference < bestDifference { //strictly smaller
bestDifference = overlapDifference
bestChild = e //CS3 set new bestChild, repeat from CS2
} else { //tie -> choose the entry whose rectangle needs least area enlargement
eNew := e.mbr.MBRWith(r).AreaDifference(e.mbr)
eOld := bestChild.mbr.MBRWith(r).AreaDifference(bestChild.mbr)
if eNew < eOld {
bestDifference = overlapDifference
bestChild = e //CS3 set new bestChild, repeat from CS2
} else if e.mbr.Area() < bestChild.mbr.Area() { //if tie again: -> choose the entry with the smallest MBR
bestDifference = overlapDifference
bestChild = e //CS3 set new bestChild, repeat from CS2
} //else the bestChild is kept
}
}
} else { //childpointer do not point to leaves -> choose the child-node whose rectangle needs least enlargement to include r
newMBR := e.mbr.MBRWith(r)
areaDifference := e.mbr.AreaDifference(newMBR)
if areaDifference <= bestDifference { //we have a new best (or a tie)
if areaDifference < bestDifference {
bestDifference = areaDifference //CS3 set new bestChild, repeat from CS2
bestChild = e
} else if e.mbr.Area() < bestChild.mbr.Area() { // change in MBR is a tie -> keep the rectangle with the smallest area
bestDifference = areaDifference //CS3 set new bestChild, repeat from CS2
bestChild = e
}
}
}
}
n = bestChild.child
}
return n
}
// overlapChangeWith calculates how much overlap enlargement it takes to include the given rectangle.
func (e *entry) overlapChangeWith(r *geo.Rectangle) float64 {
return e.mbr.OverlapWith(r)
}
// split() will split a node in order to add a new entry to a full node (using the R*Tree algorithm)[9].
func (n *node) split() (*node, error) {
// the goal is to partition the set of M+1 entries into two groups
// sorts the entries by the best axis, and finds the best index to split into two distributions
if len(n.entries) != RTree_M+1 {
return nil, errors.New("Cannot split: node n does not contain M+1 entries")
}
k := n.chooseSplitAxis()
group1 := make([]entry, 0, RTree_M+1)
group2 := make([]entry, 0, RTree_M+1)
nn := &node{
parent: n.parent,
entries: []entry{},
height: n.height,<|fim▁hole|> } else {
group2 = append(group2, e)
if e.child != nil { //update the parent pointer if splitting an internal node
e.child.parent = nn
}
}
}
//group1
n.entries = group1
//group2
nn.entries = group2
return nn, nil
}
// chooseSplitAxis() chooses the axis perpendicular to which the split is performed.
func (n *node) chooseSplitAxis() int { //TODO Make the code prettier
//[CSA 1]
//Entries sorted by Latitude
S_lat := 0.000000 //used to determine the best axis to split on
bestK_lat := 0 //used to determine the best distribution
minOverlap_lat := -1.000000
best_area_lat := -1.000000
sortByLat := make([]entry, len(n.entries)) // len(sortByLat) == len(n.entries) is needed for copy to work
copy(sortByLat, n.entries)
sort.Sort(byLat(sortByLat))
//Entries sorted by Longitude
S_long := 0.000000 //used to determine the best axis to split on
bestK_long := 0 //used to determine the best distribution
minOverlap_long := -1.000000
best_area_long := -1.000000
sort.Sort(byLong(n.entries))
//For each axis: M - 2m + 2 distributions of the M+1 entries into two groups are determined
d := (RTree_M - (2 * RTree_m) + 2)
for k := 1; k <= d; k++ {
//By Latitude
LatGroup1 := make([]entry, (RTree_m - 1 + k))
LatGroup2 := make([]entry, (RTree_M - len(LatGroup1) + 1))
copy(LatGroup1, sortByLat[:RTree_m-1+k])
copy(LatGroup2, sortByLat[RTree_m-1+k:])
latGoodness := marginOf(LatGroup1) + marginOf(LatGroup2)
S_lat += latGoodness
// test if this distribution has the best overlap value for latitude
mbr1 := mbrOf(LatGroup1...)
mbr2 := mbrOf(LatGroup2...)
if o := mbr1.OverlapWith(mbr2); o <= minOverlap_lat || minOverlap_lat == -1 {
if o < minOverlap_lat || minOverlap_lat == -1 {
bestK_lat = k //we have a new best
minOverlap_lat = o
best_area_lat = mbr1.Area() + mbr2.Area()
} else { //tie -> keep the distribution with the least area
a_now := mbr1.Area() + mbr2.Area()
if a_now < best_area_lat {
bestK_lat = k //we have a new best
minOverlap_lat = o
best_area_lat = mbr1.Area() + mbr2.Area()
}
}
} //else don't change the value
//By Longitude
LongGroup1 := make([]entry, (RTree_m - 1 + k))
LongGroup2 := make([]entry, (RTree_M - len(LongGroup1) + 1))
copy(LongGroup1, n.entries[:RTree_m-1+k])
copy(LongGroup2, n.entries[RTree_m-1+k:])
longGoodness := marginOf(LongGroup1) + marginOf(LongGroup2)
S_long += longGoodness
// test if this distribution has the best overlap value for longitude
mbr1 = mbrOf(LongGroup1...)
mbr2 = mbrOf(LongGroup2...)
if o := mbr1.OverlapWith(mbr2); o <= minOverlap_long || minOverlap_long == -1 {
if o < minOverlap_long || minOverlap_long == -1 {
bestK_long = k //we have a new best
minOverlap_long = o
best_area_long = mbr1.Area() + mbr2.Area()
} else { //tie -> keep the distribution with the least area
a_now := mbr1.Area() + mbr2.Area()
if a_now < best_area_long {
bestK_long = k //we have a new best
minOverlap_long = o
best_area_long = mbr1.Area() + mbr2.Area()
}
}
} //else don't change the value
}
//CSA2: Choose the axis with the minimum S as split axis
if S_lat < S_long {
n.entries = sortByLat
return bestK_lat
}
return bestK_long
}
// recalculateMBR returns the MBR that contains all the children of n.
func (n *node) recalculateMBR() *geo.Rectangle {
return mbrOf(n.entries...)
}
// marginOf returns the margin of the MBR containing the entries.
func marginOf(entries []entry) float64 {
return mbrOf(entries...).Margin()
}
// mbrOf returns the MBR of some entry-objects.
func mbrOf(entries ...entry) *geo.Rectangle {
nMinLat := entries[0].mbr.Min().Lat
nMinLong := entries[0].mbr.Min().Long
nMaxLat := entries[0].mbr.Max().Lat
nMaxLong := entries[0].mbr.Max().Long
for _, e := range entries {
if e.mbr.Min().Lat < nMinLat {
nMinLat = e.mbr.Min().Lat
}
if e.mbr.Min().Long < nMinLong {
nMinLong = e.mbr.Min().Long
}
if e.mbr.Max().Lat > nMaxLat {
nMaxLat = e.mbr.Max().Lat
}
if e.mbr.Max().Long > nMaxLong {
nMaxLong = e.mbr.Max().Long
}
}
r, err := geo.NewRectangle(nMinLat, nMinLong, nMaxLat, nMaxLong)
CheckErr(err, "mbrOf had some trouble creating a new MBR of the provided entries")
return r
}
// FindWithin returns all the boats that overlaps a given rectangle of the map [0].
func (rt *RTree) FindWithin(r *geo.Rectangle) *[]Match {
n := rt.root
matches := []entry{}
if !n.isLeaf() {
matches = append(matches, n.searchChildren(r, matches)...)
} else { //only need to search the root node
for _, e := range n.entries {
if geo.Overlaps(e.mbr, r) {
matches = append(matches, e)
}
}
}
return rt.toMatches(matches)
}
// searchChildren is the recursive method for finding the nodes whose mbr overlaps the searchBox [0].
func (n *node) searchChildren(searchBox *geo.Rectangle, matches []entry) []entry { //TODO Test performance by searching children concurrently?
if !n.isLeaf() { //Internal node:
for _, e := range n.entries {
if geo.Overlaps(e.mbr, searchBox) {
matches = e.child.searchChildren(searchBox, matches) //recursively search the child node
}
}
} else { //Leaf node:
for _, e := range n.entries {
if geo.Overlaps(e.mbr, searchBox) {
matches = append(matches, e)
}
}
}
return matches
}
// Update is used to update the location of a boat that is already stored in the structure.
// It deletes the old entry, and inserts a new entry.
func (rt *RTree) Update(mmsi uint32, oldLat, oldLong, newLat, newLong float64) error {
// Old coordinates
oldR, err := geo.NewRectangle(oldLat, oldLong, oldLat, oldLong)
if err != nil {
return errors.New("Illegal coordinates, please use <latitude, longitude> coodinates")
}
// Deletes the old coordinates
err = rt.delete(mmsi, oldR)
if err != nil {
return err
}
// Inserts the new coordinates
rt.InsertData(newLat, newLong, mmsi)
return nil
}
// delete removes the Point(zero-area Rectangle) from the RTree [0].
func (rt *RTree) delete(mmsi uint32, r *geo.Rectangle) error {
//D1 [Find node containing record] (and also the index of the entry)
l, idx := rt.root.findLeaf(mmsi, r)
if l != nil && idx >= 0 {
//D2 [Delete record]
l.entries = append(l.entries[:idx], l.entries[idx+1:]...)
//D3 [Propagate changes]
rt.condenseTree(l)
} else {
return errors.New("Failed to delete, could not find the leaf node containing the boat")
}
rt.numOfBoats--
return nil
}
// findLeaf finds the leaf node containing the given rectangle r [0].
func (n *node) findLeaf(mmsi uint32, r *geo.Rectangle) (*node, int) {
if !n.isLeaf() { //FL1
for _, e := range n.entries {
if geo.Overlaps(e.mbr, r) {
l, idx := e.child.findLeaf(mmsi, r) // Searches childnode
if l != nil {
return l, idx // The childnode was the correct leafnode
}
}
}
} else { //FL2 [Search leaf node for record]
for idx, ent := range n.entries {
if geo.Overlaps(ent.mbr, r) && mmsi == ent.mmsi { //locating the exact entry
return n, idx
}
}
}
return nil, -1 // no match found
}
// condenseTree is used when an entry has been deleted from n [0].
// It traverses the tree from the node and up to the root and makes the necessary changes to the nodes.
func (rt *RTree) condenseTree(n *node) {
//CT1 [initialize]
q := []entry{} // Contains orphaned entries
for rt.root != n {
//CT2 [find parent entry]
p := n.parent
idx, err := n.parentEntriesIdx()
CheckErr(err, "Trouble condensing the tree")
en := p.entries[idx] // the entry containing n
//CT3 [eliminate under-full node]
if len(n.entries) < RTree_m {
p.entries = append(p.entries[:idx], p.entries[idx+1:]...) //[8] remove n from its parent
q = append(q, en.child.entries...)
} else {
//CT4 [Adjust MBR] (if n has not been eliminated)
en.mbr = n.recalculateMBR()
}
n = p // CT5 [Move up one height in tree]
}
//CT6 [Re-insert orphaned entries]
for _, e := range q {
if e.child != nil { //inserting an internal
rt.insert(e.child.height+1, e, true) //TODO false or true?
_, err := e.child.parent.parentEntriesIdx()
CheckErr(err, "Cannot find parent of re-inserted orphaned internal entry")
} else { //inserting a leaf entry
rt.insert(0, e, true) //TODO false or true?
}
}
//D4 [Shorten tree] (if root has only 1 child, promote that child to root)
if len(rt.root.entries) == 1 && !rt.root.isLeaf() {
rt.root = rt.root.entries[0].child
rt.root.parent = nil
//fmt.Printf("Promoted a child to root, new height is %d\n", rt.root.height)
}
}
// parentEntriesIdx returns the index of the node in its parent's list of entries.
func (n *node) parentEntriesIdx() (int, error) {
p := n.parent
if p != nil {
for idx, e := range p.entries {
if e.child == n {
return idx, nil
}
}
}
return -1, errors.New("This node is not found in parent's entries")
}
// toMatches returns a slice of Match-objects that can be used to create GeoJSON output
func (rt *RTree) toMatches(matches []entry) *[]Match {
s := []Match{}
for _, m := range matches {
s = append(s, Match{m.mmsi, m.mbr.Max().Lat, m.mbr.Max().Long})
}
return &s
}
// CheckErr is a function for checking an error.
// Takes the error and a message as input and does log.Fatalf() if error.
func CheckErr(err error, message string) {
if err != nil {
log.Fatalf("ERROR: %s \n %s", message, err)
}
}
/*
TODOs:
- The 180th meridian... (~International Date Line)
References:
[0] http://www.cs.jhu.edu/%7Emisha/ReadingSeminar/Papers/Guttman84.pdf
[1] https://en.wikipedia.org/wiki/Tree_%28data_structure%29
https://en.wikipedia.org/wiki/R-tree
https://www.youtube.com/watch?v=39GuS7c4uZI
https://blog.golang.org/go-slices-usage-and-internals
https://blog.golang.org/go-maps-in-action
[7] http://stackoverflow.com/questions/1760757/how-to-efficiently-concatenate-strings-in-go http://herman.asia/efficient-string-concatenation-in-go
[8] http://stackoverflow.com/questions/25025409/delete-element-in-a-slice
[9] http://dbs.mathematik.uni-marburg.de/publications/myPapers/1990/BKSS90.pdf (R* Trees)
[10] https://en.wikipedia.org/wiki/R*_tree
[11] https://golang.org/pkg/sort/
[12] http://www.eng.auburn.edu/~weishinn/Comp7970/Presentation/rstartree.pdf
https://golang.org/ref/spec#Passing_arguments_to_..._parameters
[13] http://geojsonlint.com/
[14] http://stackoverflow.com/questions/7933460/how-do-you-write-multiline-strings-in-go#7933487
*/<|fim▁end|>
|
}
for i, e := range n.entries {
if i < RTree_m-1+k {
group1 = append(group1, e)
|
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>from django.conf import settings
PIPELINE = getattr(settings, 'PIPELINE', not settings.DEBUG)
PIPELINE_ROOT = getattr(settings, 'PIPELINE_ROOT', settings.STATIC_ROOT)
PIPELINE_URL = getattr(settings, 'PIPELINE_URL', settings.STATIC_URL)
PIPELINE_STORAGE = getattr(settings, 'PIPELINE_STORAGE',
'pipeline.storage.PipelineFinderStorage')
PIPELINE_FALLBACK_STORAGE = getattr(settings, 'PIPELINE_FALLBACK_STORAGE',
'pipeline.storage.FallbackStaticStorage')
PIPELINE_CSS_COMPRESSOR = getattr(settings, 'PIPELINE_CSS_COMPRESSOR',
'pipeline.compressors.yui.YUICompressor'
)
PIPELINE_JS_COMPRESSOR = getattr(settings, 'PIPELINE_JS_COMPRESSOR',
'pipeline.compressors.yui.YUICompressor'
)
PIPELINE_COMPILERS = getattr(settings, 'PIPELINE_COMPILERS', [])
PIPELINE_PRECOMPILERS = getattr(settings, 'PIPELINE_PRECOMPILERS', [])
PIPELINE_CSS = getattr(settings, 'PIPELINE_CSS', {})
PIPELINE_JS = getattr(settings, 'PIPELINE_JS', {})
PIPELINE_TEMPLATE_NAMESPACE = getattr(settings, 'PIPELINE_TEMPLATE_NAMESPACE', "window.JST")
PIPELINE_TEMPLATE_EXT = getattr(settings, 'PIPELINE_TEMPLATE_EXT', ".jst")
PIPELINE_TEMPLATE_FUNC = getattr(settings, 'PIPELINE_TEMPLATE_FUNC', "template")
PIPELINE_DISABLE_WRAPPER = getattr(settings, 'PIPELINE_DISABLE_WRAPPER', False)
PIPELINE_CSSTIDY_BINARY = getattr(settings, 'PIPELINE_CSSTIDY_BINARY', '/usr/bin/env csstidy')
PIPELINE_CSSTIDY_ARGUMENTS = getattr(settings, 'PIPELINE_CSSTIDY_ARGUMENTS', '--template=highest')
PIPELINE_YUI_BINARY = getattr(settings, 'PIPELINE_YUI_BINARY', '/usr/bin/env yuicompressor')
PIPELINE_YUI_CSS_ARGUMENTS = getattr(settings, 'PIPELINE_YUI_CSS_ARGUMENTS', '')
PIPELINE_YUI_JS_ARGUMENTS = getattr(settings, 'PIPELINE_YUI_JS_ARGUMENTS', '')
PIPELINE_CLOSURE_BINARY = getattr(settings, 'PIPELINE_CLOSURE_BINARY', '/usr/bin/env closure')
PIPELINE_CLOSURE_ARGUMENTS = getattr(settings, 'PIPELINE_CLOSURE_ARGUMENTS', '')
PIPELINE_UGLIFYJS_BINARY = getattr(settings, 'PIPELINE_UGLIFYJS_BINARY', '/usr/bin/env uglifyjs')
PIPELINE_UGLIFYJS_ARGUMENTS = getattr(settings, 'PIPELINE_UGLIFYJS_ARGUMENTS', '')
PIPELINE_COFFEE_SCRIPT_BINARY = getattr(settings, 'PIPELINE_COFFEE_SCRIPT_BINARY', '/usr/bin/env coffee')
PIPELINE_COFFEE_SCRIPT_ARGUMENTS = getattr(settings, 'PIPELINE_COFFEE_SCRIPT_ARGUMENTS', '')<|fim▁hole|>PIPELINE_STYLUS_BINARY = getattr(settings, 'PIPELINE_STYLUS_BINARY', '/usr/bin/env stylus')
PIPELINE_STYLUS_ARGUMENTS = getattr(settings, 'PIPELINE_STYLUS_ARGUMENTS', '')
PIPELINE_LESS_BINARY = getattr(settings, 'PIPELINE_LESS_BINARY', '/usr/bin/env lessc')
PIPELINE_LESS_ARGUMENTS = getattr(settings, 'PIPELINE_LESS_ARGUMENTS', '')
PIPELINE_MIMETYPES = getattr(settings, 'PIPELINE_MIMETYPES', (
('text/coffeescript', '.coffee'),
('text/less', '.less'),
('text/javascript', '.js'),
('text/x-sass', '.sass'),
('text/x-scss', '.scss')
))
PIPELINE_EMBED_MAX_IMAGE_SIZE = getattr(settings, 'PIPELINE_EMBED_MAX_IMAGE_SIZE', 32700)
PIPELINE_EMBED_PATH = getattr(settings, 'PIPELINE_EMBED_PATH', r'[/]?embed/')
if PIPELINE_COMPILERS is None:
PIPELINE_COMPILERS = []<|fim▁end|>
|
PIPELINE_SASS_BINARY = getattr(settings, 'PIPELINE_SASS_BINARY', '/usr/bin/env sass')
PIPELINE_SASS_ARGUMENTS = getattr(settings, 'PIPELINE_SASS_ARGUMENTS', '')
|
<|file_name|>alife.rs<|end_file_name|><|fim▁begin|>use super::*;
use app::constants::*;
use backend::messagebus::Outbox;
use backend::obj;
use backend::obj::Identified;
use backend::obj::Transformable;
use backend::world;
use backend::world::agent;
use backend::world::alert;
use backend::world::gen;
use backend::world::particle;
use backend::world::segment;
use backend::world::AgentState;
use core::clock::SimulationTimer;
use core::geometry;
use rand;
use serialize::base64::{self, ToBase64};
use std::collections::HashMap;
type StateMap = HashMap<obj::Id, agent::State>;
type GeneMap = HashMap<obj::Id, gen::Dna>;
pub struct AlifeSystem {
dt: Seconds,
simulation_timer: SimulationTimer,
source: Box<[world::Feeder]>,
eaten: StateMap,
touched: GeneMap,
}
impl System for AlifeSystem {
fn clear(&mut self) {
self.source = Box::new([]);
self.eaten.clear();
self.touched.clear();
}
fn import(&mut self, world: &world::World) {
self.source = world.feeders().to_vec().into_boxed_slice();
self.eaten = Self::find_eaten_resources(<|fim▁hole|> );
self.touched =
Self::find_touched_spores(&world.agents(agent::AgentType::Minion), &world.agents(agent::AgentType::Spore));
}
fn update(&mut self, _: &dyn AgentState, dt: Seconds) {
self.dt = dt;
self.simulation_timer.tick(dt);
}
fn export(&self, world: &mut world::World, outbox: &dyn Outbox) {
Self::update_resources(
self.dt,
&self.simulation_timer,
&mut world.agents_mut(agent::AgentType::Resource),
&self.eaten,
);
let MinionEndState(spores, corpses) = Self::update_minions(
outbox,
self.dt,
world.extent,
&mut world.agents_mut(agent::AgentType::Minion),
&self.eaten,
);
let SporeEndState(hatch, fertilised) = Self::update_spores(
self.dt,
&self.simulation_timer,
&mut world.agents_mut(agent::AgentType::Spore),
&self.touched,
);
for (transform, dna) in &*spores {
outbox.post(alert::Alert::NewSpore.into());
world.new_spore(outbox, transform.clone(), dna);
}
for (transform, dna) in &*hatch {
outbox.post(alert::Alert::NewMinion.into());
world.hatch_spore(outbox, transform.clone(), dna);
}
for (transforms, dna) in &*corpses {
outbox.post(alert::Alert::DieMinion.into());
for transform in &**transforms {
world.decay_to_resource(outbox, transform.clone(), dna);
}
}
for _ in 0..fertilised {
outbox.post(alert::Alert::DieMinion.into());
}
}
}
impl Default for AlifeSystem {
fn default() -> Self {
AlifeSystem {
dt: Seconds::new(1. / 60.),
simulation_timer: SimulationTimer::new(),
source: Box::new([]),
eaten: StateMap::new(),
touched: GeneMap::new(),
}
}
}
struct MinionEndState(Box<[(geometry::Transform, gen::Dna)]>, Box<[(Box<[geometry::Transform]>, gen::Dna)]>);
struct SporeEndState(Box<[(geometry::Transform, gen::Dna)]>, usize);
impl AlifeSystem {
fn find_eaten_resources(minions: &agent::AgentMap, resources: &agent::AgentMap) -> StateMap {
let mut eaten = HashMap::new();
for agent in minions.values().filter(|&a| a.state.is_active()) {
for segment in agent.segments.iter().filter(|&s| s.flags.contains(segment::Flags::MOUTH)) {
if let Some(key) = segment.state.last_touched {
if let Some(&agent::Agent { ref state, .. }) = resources.get(&key.id()) {
eaten.insert(key.id(), (*state).clone());
}
}
}
}
eaten
}
fn find_touched_spores(minions: &agent::AgentMap, spores: &agent::AgentMap) -> GeneMap {
let mut touched = HashMap::new();
for spore in spores.values().filter(|&a| a.state.is_active() && !a.state.is_fertilised()) {
for segment in spore.segments.iter() {
if let Some(key) = segment.state.last_touched {
if let Some(ref agent) = minions.get(&key.id()) {
if agent.gender() != spore.gender() {
touched.insert(key.id(), agent.dna().clone());
}
}
}
}
}
touched
}
fn update_minions(
outbox: &dyn Outbox,
dt: Seconds,
extent: geometry::Rect,
minions: &mut agent::AgentMap,
eaten: &StateMap,
) -> MinionEndState {
let mut spawns = Vec::new();
let mut corpses = Vec::new();
for agent in minions.values_mut() {
if agent.state.is_active() {
agent.state.reset_growth();
let segment = agent.segment(0).unwrap().clone();
let id = agent.id();
let maturity = segment.state.maturity();
let livery_color = segment.livery.albedo;
let transform = segment.transform().clone();
if maturity < 1. {
// just grow a bit
let r = GROWTH_COST_RATIO;
if agent.state.consume_ratio(1. - r, r) {
let growth = 1. + r;
agent.state.grow_by(growth);
outbox.post(alert::Alert::GrowMinion.into());
outbox.post(particle::Emitter::for_new_spore(transform, livery_color, id).into());
let zero = agent.segment(0).unwrap().transform.position;
for segment in agent.segments.iter_mut() {
let maturity = segment.state.maturity();
segment.state.set_maturity(maturity * growth);
segment.transform.position = zero + (segment.transform.position - zero) * growth;
}
}
} else if agent.state.consume_ratio(SPAWN_COST_THRESHOLD, SPAWN_COST_RATIO) {
spawns.push((agent.last_segment().transform().clone(), agent.dna().clone()));
}
for segment in agent.segments.iter_mut() {
let p = segment.transform().position;
if p.x < extent.min.x || p.x > extent.max.x || p.y < extent.min.y || p.y > extent.max.y {
agent.state.die();
}
if segment.flags.contains(segment::Flags::MOUTH) {
if let Some(id) = segment.state.last_touched {
if let Some(eaten_state) = eaten.get(&id.id()) {
let energy = eaten_state.energy();
agent.state.absorb(energy);
}
}
}
agent.state.consume(dt * segment.state.charge() * segment.growing_radius());
segment.state.update(dt);
}
if agent.state.energy() < 1. {
let transforms = agent.segments.iter().map(|segment| segment.transform.clone()).collect::<Vec<_>>();
corpses.push((transforms.into_boxed_slice(), agent.dna().clone()));
agent.state.die();
}
if let Some(segment) = agent.first_segment(segment::Flags::TRACKER) {
agent.state.track_position(segment.transform.position);
}
}
}
MinionEndState(spawns.into_boxed_slice(), corpses.into_boxed_slice())
}
fn update_resources(dt: Seconds, timer: &SimulationTimer, resources: &mut agent::AgentMap, eaten: &StateMap) {
for resource in resources.values_mut() {
if eaten.get(&resource.id()).is_some()
|| resource.state.energy() <= 0.
|| resource.state.lifecycle().is_expired(timer)
{
resource.state.die();
} else if resource.state.is_active() {
for segment in resource.segments.iter_mut() {
segment.state.update(dt)
}
}
}
}
fn crossover(dna: &gen::Dna, foreign_dna: &Option<gen::Dna>) -> gen::Dna {
match *foreign_dna {
Some(ref foreign) => gen::Genome::copy_from(&foreign).crossover(&mut rand::thread_rng(), dna).dna_cloned(),
None => dna.clone(),
}
}
fn update_spores(
dt: Seconds,
timer: &SimulationTimer,
spores: &mut agent::AgentMap,
touched: &GeneMap,
) -> SporeEndState {
let mut spawns = Vec::new();
let mut fertilise_count = 0usize;
for (spore_id, spore) in spores.iter_mut() {
if spore.state.lifecycle().is_expired(timer) {
spore.state.die();
spawns.push((spore.transform().clone(), Self::crossover(spore.dna(), spore.state.foreign_dna())))
} else if spore.state.is_active() {
for segment in spore.segments.iter_mut() {
if let Some(key) = segment.state.last_touched {
if let Some(touched_dna) = touched.get(&key.id()) {
debug!(
"fertilised: {} by {} as {}",
spore_id,
key.id(),
touched_dna.to_base64(base64::STANDARD)
);
fertilise_count += 1;
spore.state.fertilise(touched_dna);
}
}
}
for segment in spore.segments.iter_mut() {
segment.state.update(dt)
}
}
}
SporeEndState(spawns.into_boxed_slice(), fertilise_count)
}
}<|fim▁end|>
|
&world.agents(agent::AgentType::Minion),
&world.agents(agent::AgentType::Resource),
|
<|file_name|>SimpleUnitCell.js<|end_file_name|><|fim▁begin|>Clazz.declarePackage ("J.util");
Clazz.load (null, "J.util.SimpleUnitCell", ["java.lang.Float", "J.util.ArrayUtil", "$.Matrix4f", "$.V3"], function () {
c$ = Clazz.decorateAsClass (function () {
this.notionalUnitcell = null;
this.matrixCartesianToFractional = null;
this.matrixFractionalToCartesian = null;
this.na = 0;
this.nb = 0;
this.nc = 0;
this.a = 0;
this.b = 0;
this.c = 0;
this.alpha = 0;
this.beta = 0;
this.gamma = 0;
this.cosAlpha = 0;
this.sinAlpha = 0;
this.cosBeta = 0;
this.sinBeta = 0;
this.cosGamma = 0;
this.sinGamma = 0;
this.volume = 0;
this.cA_ = 0;
this.cB_ = 0;
this.a_ = 0;
this.b_ = 0;
this.c_ = 0;
this.dimension = 0;
this.matrixCtoFAbsolute = null;
this.matrixFtoCAbsolute = null;
Clazz.instantialize (this, arguments);
}, J.util, "SimpleUnitCell");
$_M(c$, "isSupercell",
function () {
return (this.na > 1 || this.nb > 1 || this.nc > 1);
});
c$.isValid = $_M(c$, "isValid",
function (parameters) {
return (parameters != null && (parameters[0] > 0 || parameters.length > 14 && !Float.isNaN (parameters[14])));
}, "~A");
Clazz.makeConstructor (c$,
function () {
});
c$.newA = $_M(c$, "newA",
function (parameters) {
var c = new J.util.SimpleUnitCell ();
c.set (parameters);
return c;
}, "~A");
$_M(c$, "set",
function (parameters) {
if (!J.util.SimpleUnitCell.isValid (parameters)) return;
this.notionalUnitcell = J.util.ArrayUtil.arrayCopyF (parameters, parameters.length);
this.a = parameters[0];
this.b = parameters[1];
this.c = parameters[2];
this.alpha = parameters[3];
this.beta = parameters[4];
this.gamma = parameters[5];
this.na = Math.max (1, parameters.length >= 25 && !Float.isNaN (parameters[22]) ? Clazz.floatToInt (parameters[22]) : 1);
this.nb = Math.max (1, parameters.length >= 25 && !Float.isNaN (parameters[23]) ? Clazz.floatToInt (parameters[23]) : 1);
this.nc = Math.max (1, parameters.length >= 25 && !Float.isNaN (parameters[24]) ? Clazz.floatToInt (parameters[24]) : 1);
if (this.a <= 0) {
var va = J.util.V3.new3 (parameters[6], parameters[7], parameters[8]);
var vb = J.util.V3.new3 (parameters[9], parameters[10], parameters[11]);
var vc = J.util.V3.new3 (parameters[12], parameters[13], parameters[14]);
this.a = va.length ();
this.b = vb.length ();
this.c = vc.length ();
if (this.a == 0) return;
if (this.b == 0) this.b = this.c = -1;
else if (this.c == 0) this.c = -1;
this.alpha = (this.b < 0 || this.c < 0 ? 90 : vb.angle (vc) / 0.017453292);
this.beta = (this.c < 0 ? 90 : va.angle (vc) / 0.017453292);
this.gamma = (this.b < 0 ? 90 : va.angle (vb) / 0.017453292);
if (this.c < 0) {
var n = J.util.ArrayUtil.arrayCopyF (parameters, -1);
if (this.b < 0) {
vb.set (0, 0, 1);
vb.cross (vb, va);
if (vb.length () < 0.001) vb.set (0, 1, 0);
vb.normalize ();
n[9] = vb.x;
n[10] = vb.y;
n[11] = vb.z;
}if (this.c < 0) {
vc.cross (va, vb);
vc.normalize ();
n[12] = vc.x;
n[13] = vc.y;
n[14] = vc.z;
}parameters = n;
}}this.a *= this.na;
if (this.b <= 0) {
this.b = this.c = 1;
this.dimension = 1;
} else if (this.c <= 0) {
this.c = 1;
this.b *= this.nb;
this.dimension = 2;
} else {
this.b *= this.nb;
this.c *= this.nc;
this.dimension = 3;
}this.cosAlpha = Math.cos (0.017453292 * this.alpha);
this.sinAlpha = Math.sin (0.017453292 * this.alpha);
this.cosBeta = Math.cos (0.017453292 * this.beta);
this.sinBeta = Math.sin (0.017453292 * this.beta);
this.cosGamma = Math.cos (0.017453292 * this.gamma);
this.sinGamma = Math.sin (0.017453292 * this.gamma);
var unitVolume = Math.sqrt (this.sinAlpha * this.sinAlpha + this.sinBeta * this.sinBeta + this.sinGamma * this.sinGamma + 2.0 * this.cosAlpha * this.cosBeta * this.cosGamma - 2);
this.volume = this.a * this.b * this.c * unitVolume;
this.cA_ = (this.cosAlpha - this.cosBeta * this.cosGamma) / this.sinGamma;
this.cB_ = unitVolume / this.sinGamma;
this.a_ = this.b * this.c * this.sinAlpha / this.volume;
this.b_ = this.a * this.c * this.sinBeta / this.volume;
this.c_ = this.a * this.b * this.sinGamma / this.volume;
if (parameters.length > 21 && !Float.isNaN (parameters[21])) {
var scaleMatrix = Clazz.newFloatArray (16, 0);
for (var i = 0; i < 16; i++) {
var f;
switch (i % 4) {
case 0:
f = this.na;
break;
case 1:
f = this.nb;
break;
case 2:
f = this.nc;
break;
default:
f = 1;
break;
}
scaleMatrix[i] = parameters[6 + i] * f;
}
this.matrixCartesianToFractional = J.util.Matrix4f.newA (scaleMatrix);
this.matrixFractionalToCartesian = new J.util.Matrix4f ();
this.matrixFractionalToCartesian.invertM (this.matrixCartesianToFractional);
} else if (parameters.length > 14 && !Float.isNaN (parameters[14])) {
var m = this.matrixFractionalToCartesian = new J.util.Matrix4f ();
m.setColumn4 (0, parameters[6] * this.na, parameters[7] * this.na, parameters[8] * this.na, 0);
m.setColumn4 (1, parameters[9] * this.nb, parameters[10] * this.nb, parameters[11] * this.nb, 0);
m.setColumn4 (2, parameters[12] * this.nc, parameters[13] * this.nc, parameters[14] * this.nc, 0);
m.setColumn4 (3, 0, 0, 0, 1);
this.matrixCartesianToFractional = new J.util.Matrix4f ();
this.matrixCartesianToFractional.invertM (this.matrixFractionalToCartesian);
} else {
var m = this.matrixFractionalToCartesian = new J.util.Matrix4f ();
m.setColumn4 (0, this.a, 0, 0, 0);
m.setColumn4 (1, (this.b * this.cosGamma), (this.b * this.sinGamma), 0, 0);
m.setColumn4 (2, (this.c * this.cosBeta), (this.c * (this.cosAlpha - this.cosBeta * this.cosGamma) / this.sinGamma), (this.volume / (this.a * this.b * this.sinGamma)), 0);
m.setColumn4 (3, 0, 0, 0, 1);
this.matrixCartesianToFractional = new J.util.Matrix4f ();
this.matrixCartesianToFractional.invertM (this.matrixFractionalToCartesian);
}this.matrixCtoFAbsolute = this.matrixCartesianToFractional;
this.matrixFtoCAbsolute = this.matrixFractionalToCartesian;
}, "~A");
$_M(c$, "toSupercell",
function (fpt) {
fpt.x /= this.na;
fpt.y /= this.nb;
fpt.z /= this.nc;
return fpt;
}, "J.util.P3");
$_M(c$, "toCartesian",
function (pt, isAbsolute) {
if (this.matrixFractionalToCartesian != null) (isAbsolute ? this.matrixFtoCAbsolute : this.matrixFractionalToCartesian).transform (pt);
}, "J.util.Tuple3f,~B");
$_M(c$, "toFractional",
function (pt, isAbsolute) {
if (this.matrixCartesianToFractional == null) return;
(isAbsolute ? this.matrixCtoFAbsolute : this.matrixCartesianToFractional).transform (pt);
}, "J.util.Tuple3f,~B");
$_M(c$, "isPolymer",
function () {
return (this.dimension == 1);
});
$_M(c$, "isSlab",
function () {
return (this.dimension == 2);
});
$_M(c$, "getNotionalUnitCell",
function () {
return this.notionalUnitcell;
});
$_M(c$, "getUnitCellAsArray",
function (vectorsOnly) {
var m = this.matrixFractionalToCartesian;
return (vectorsOnly ? [m.m00, m.m10, m.m20, m.m01, m.m11, m.m21, m.m02, m.m12, m.m22] : [this.a, this.b, this.c, this.alpha, this.beta, this.gamma, m.m00, m.m10, m.m20, m.m01, m.m11, m.m21, m.m02, m.m12, m.m22, this.dimension, this.volume]);
}, "~B");
$_M(c$, "getInfo",
function (infoType) {
switch (infoType) {
case 0:
return this.a;
case 1:
return this.b;
case 2:
return this.c;
case 3:
return this.alpha;
case 4:
return this.beta;
case 5:
<|fim▁hole|>return NaN;
}, "~N");
c$.ijkToPoint3f = $_M(c$, "ijkToPoint3f",
function (nnn, cell, c) {
c -= 5;
cell.x = Clazz.doubleToInt (nnn / 100) + c;
cell.y = Clazz.doubleToInt ((nnn % 100) / 10) + c;
cell.z = (nnn % 10) + c;
}, "~N,J.util.P3,~N");
Clazz.defineStatics (c$,
"toRadians", 0.017453292,
"INFO_DIMENSIONS", 6,
"INFO_GAMMA", 5,
"INFO_BETA", 4,
"INFO_ALPHA", 3,
"INFO_C", 2,
"INFO_B", 1,
"INFO_A", 0);
});<|fim▁end|>
|
return this.gamma;
case 6:
return this.dimension;
}
|
<|file_name|>Panel.js<|end_file_name|><|fim▁begin|>/**
* @aside guide tabs
* @aside video tabs-toolbars
* @aside example tabs
* @aside example tabs-bottom
*
* Tab Panels are a great way to allow the user to switch between several pages that are all full screen. Each
* Component in the Tab Panel gets its own Tab, which shows the Component when tapped on. Tabs can be positioned at
* the top or the bottom of the Tab Panel, and can optionally accept title and icon configurations.
*
* Here's how we can set up a simple Tab Panel with tabs at the bottom. Use the controls at the top left of the example
* to toggle between code mode and live preview mode (you can also edit the code and see your changes in the live
* preview):
*
* @example miniphone preview
* Ext.create('Ext.TabPanel', {
* fullscreen: true,
* tabBarPosition: 'bottom',
*
* defaults: {
* styleHtmlContent: true
* },
*
* items: [
* {
* title: 'Home',
* iconCls: 'home',
* html: 'Home Screen'
* },
* {
* title: 'Contact',
* iconCls: 'user',
* html: 'Contact Screen'
* }
* ]
* });
* One tab was created for each of the {@link Ext.Panel panels} defined in the items array. Each tab automatically uses
* the title and icon defined on the item configuration, and switches to that item when tapped on. We can also position
* the tab bar at the top, which makes our Tab Panel look like this:
*
* @example miniphone preview
* Ext.create('Ext.TabPanel', {
* fullscreen: true,
*
* defaults: {
* styleHtmlContent: true
* },
*
* items: [
* {
* title: 'Home',
* html: 'Home Screen'
* },
* {
* title: 'Contact',
* html: 'Contact Screen'
* }
* ]
* });
*
*/
Ext.define('Ext.tab.Panel', {
extend: 'Ext.Container',
xtype: 'tabpanel',
alternateClassName: 'Ext.TabPanel',
requires: ['Ext.tab.Bar'],
config: {
/**
* @cfg {String} ui
* Sets the UI of this component.
* Available values are: `light` and `dark`.
* @accessor
*/
ui: 'dark',
/**
* @cfg {Object} tabBar
* An Ext.tab.Bar configuration.
* @accessor
*/
tabBar: true,
/**
* @cfg {String} tabBarPosition
* The docked position for the {@link #tabBar} instance.
* Possible values are 'top' and 'bottom'.
* @accessor
*/
tabBarPosition: 'top',
/**
* @cfg layout
* @inheritdoc
*/
layout: {
type: 'card',
animation: {
type: 'slide',
direction: 'left'
}
},
/**
* @cfg cls
* @inheritdoc
*/
cls: Ext.baseCSSPrefix + 'tabpanel'
/**
* @cfg {Boolean/String/Object} scrollable
* @accessor
* @hide
*/
/**
* @cfg {Boolean/String/Object} scroll
* @hide
*/
},
initialize: function() {
this.callParent();
this.on({
order: 'before',
activetabchange: 'doTabChange',
delegate: '> tabbar',
scope: this
});
this.on({
disabledchange: 'onItemDisabledChange',
delegate: '> component',
scope: this
});
},
platformConfig: [{
theme: ['Blackberry'],
tabBarPosition: 'bottom'
}],<|fim▁hole|> /**
* Tab panels should not be scrollable. Instead, you should add scrollable to any item that
* you want to scroll.
* @private
*/
applyScrollable: function() {
return false;
},
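    // Illustrative sketch, not part of the original source: to let a tab's
    // content scroll, mark the child item itself as scrollable, e.g.
    //
    //     Ext.create('Ext.TabPanel', {
    //         fullscreen: true,
    //         items: [{
    //             title: 'Feed',
    //             scrollable: true,
    //             html: 'A long list of items...'
    //         }]
    //     });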
/**
* Updates the Ui for this component and the {@link #tabBar}.
*/
updateUi: function(newUi, oldUi) {
this.callParent(arguments);
if (this.initialized) {
this.getTabBar().setUi(newUi);
}
},
/**
* @private
*/
doSetActiveItem: function(newActiveItem, oldActiveItem) {
if (newActiveItem) {
var items = this.getInnerItems(),
oldIndex = items.indexOf(oldActiveItem),
newIndex = items.indexOf(newActiveItem),
reverse = oldIndex > newIndex,
animation = this.getLayout().getAnimation(),
tabBar = this.getTabBar(),
oldTab = tabBar.parseActiveTab(oldIndex),
newTab = tabBar.parseActiveTab(newIndex);
if (animation && animation.setReverse) {
animation.setReverse(reverse);
}
this.callParent(arguments);
if (newIndex != -1) {
this.forcedChange = true;
tabBar.setActiveTab(newIndex);
this.forcedChange = false;
if (oldTab) {
oldTab.setActive(false);
}
if (newTab) {
newTab.setActive(true);
}
}
}
},
/**
* Updates this container with the new active item.
* @param {Object} tabBar
* @param {Object} newTab
* @return {Boolean}
*/
doTabChange: function(tabBar, newTab) {
var oldActiveItem = this.getActiveItem(),
newActiveItem;
this.setActiveItem(tabBar.indexOf(newTab));
newActiveItem = this.getActiveItem();
return this.forcedChange || oldActiveItem !== newActiveItem;
},
/**
* Creates a new {@link Ext.tab.Bar} instance using {@link Ext#factory}.
* @param {Object} config
* @return {Object}
* @private
*/
applyTabBar: function(config) {
if (config === true) {
config = {};
}
if (config) {
Ext.applyIf(config, {
ui: this.getUi(),
docked: this.getTabBarPosition()
});
}
return Ext.factory(config, Ext.tab.Bar, this.getTabBar());
},
/**
* Adds the new {@link Ext.tab.Bar} instance into this container.
* @private
*/
updateTabBar: function(newTabBar) {
if (newTabBar) {
this.add(newTabBar);
this.setTabBarPosition(newTabBar.getDocked());
}
},
/**
* Updates the docked position of the {@link #tabBar}.
* @private
*/
updateTabBarPosition: function(position) {
var tabBar = this.getTabBar();
if (tabBar) {
tabBar.setDocked(position);
}
},
onItemAdd: function(card) {
var me = this;
if (!card.isInnerItem()) {
return me.callParent(arguments);
}
var tabBar = me.getTabBar(),
initialConfig = card.getInitialConfig(),
tabConfig = initialConfig.tab || {},
tabTitle = (card.getTitle) ? card.getTitle() : initialConfig.title,
tabIconCls = (card.getIconCls) ? card.getIconCls() : initialConfig.iconCls,
tabHidden = (card.getHidden) ? card.getHidden() : initialConfig.hidden,
tabDisabled = (card.getDisabled) ? card.getDisabled() : initialConfig.disabled,
tabBadgeText = (card.getBadgeText) ? card.getBadgeText() : initialConfig.badgeText,
innerItems = me.getInnerItems(),
index = innerItems.indexOf(card),
tabs = tabBar.getItems(),
activeTab = tabBar.getActiveTab(),
currentTabInstance = (tabs.length >= innerItems.length) && tabs.getAt(index),
tabInstance;
if (tabTitle && !tabConfig.title) {
tabConfig.title = tabTitle;
}
if (tabIconCls && !tabConfig.iconCls) {
tabConfig.iconCls = tabIconCls;
}
if (tabHidden && !tabConfig.hidden) {
tabConfig.hidden = tabHidden;
}
if (tabDisabled && !tabConfig.disabled) {
tabConfig.disabled = tabDisabled;
}
if (tabBadgeText && !tabConfig.badgeText) {
tabConfig.badgeText = tabBadgeText;
}
//<debug warn>
            if (!currentTabInstance && !tabConfig.title && !tabConfig.iconCls) {
                Ext.Logger.error('Adding a card to a tab container without specifying any tab configuration');
            }
//</debug>
tabInstance = Ext.factory(tabConfig, Ext.tab.Tab, currentTabInstance);
if (!currentTabInstance) {
tabBar.insert(index, tabInstance);
}
card.tab = tabInstance;
me.callParent(arguments);
if (!activeTab && activeTab !== 0) {
tabBar.setActiveTab(tabBar.getActiveItem());
}
},
/**
     * If an item gets enabled/disabled and it has a tab, we should also enable/disable that tab.
* @private
*/
onItemDisabledChange: function(item, newDisabled) {
if (item && item.tab) {
item.tab.setDisabled(newDisabled);
}
},
// @private
onItemRemove: function(item, index) {
this.getTabBar().remove(item.tab, this.getAutoDestroy());
this.callParent(arguments);
}
}, function() {
//<deprecated product=touch since=2.0>
/**
* @cfg {Boolean} tabBarDock
* @inheritdoc Ext.tab.Panel#tabBarPosition
* @deprecated 2.0.0 Please use {@link #tabBarPosition} instead.
*/
Ext.deprecateProperty(this, 'tabBarDock', 'tabBarPosition');
//</deprecated>
});<|fim▁end|>
| |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2014 The 6502-rs Developers
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// 3. Neither the names of the copyright holders nor the names of any
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE<|fim▁hole|>// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#![feature(core)]
#![feature(hash)]
#![feature(rustc_private)]
// Needed for debug! / log! macros
#[macro_use]
extern crate log;
#[macro_use]
extern crate rustc_bitflags;
pub mod address;
pub mod instruction;
pub mod machine;
pub mod memory;
pub mod range_incl;
pub mod registers;<|fim▁end|>
|
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>//! # Keystore files (UTC / JSON) module errors
use super::core;
use std::{error, fmt};
/// Keystore file errors
#[derive(Debug)]
pub enum Error {
/// An unsupported cipher
UnsupportedCipher(String),
/// An unsupported key derivation function
UnsupportedKdf(String),
/// An unsupported pseudo-random function
UnsupportedPrf(String),<|fim▁hole|>
/// `keccak256_mac` field validation failed
FailedMacValidation,
/// Core module error wrapper
CoreFault(core::Error),
/// Invalid Kdf depth value
InvalidKdfDepth(String),
}
impl From<core::Error> for Error {
fn from(err: core::Error) -> Self {
Error::CoreFault(err)
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::UnsupportedCipher(ref str) => write!(f, "Unsupported cipher: {}", str),
Error::UnsupportedKdf(ref str) => {
write!(f, "Unsupported key derivation function: {}", str)
}
Error::UnsupportedPrf(ref str) => {
write!(f, "Unsupported pseudo-random function: {}", str)
}
Error::FailedMacValidation => write!(f, "Message authentication code failed"),
Error::CoreFault(ref err) => f.write_str(&err.to_string()),
Error::InvalidKdfDepth(ref str) => write!(f, "Invalid security level: {}", str),
}
}
}
impl error::Error for Error {
fn description(&self) -> &str {
"Keystore file error"
}
fn cause(&self) -> Option<&error::Error> {
match *self {
Error::CoreFault(ref err) => Some(err),
_ => None,
}
}
}<|fim▁end|>
| |
<|file_name|>select2_locale_it-e45548dc93d14ad49b80a69023ecfd28.js<|end_file_name|><|fim▁begin|>/**
* Select2 Italian translation
*/
(function ($) {<|fim▁hole|>
$.extend($.fn.select2.defaults, {
formatNoMatches: function () { return "Nessuna corrispondenza trovata"; },
formatInputTooShort: function (input, min) { var n = min - input.length; return "Inserisci ancora " + n + " caratter" + (n == 1? "e" : "i"); },
formatInputTooLong: function (input, max) { var n = input.length - max; return "Inserisci " + n + " caratter" + (n == 1? "e" : "i") + " in meno"; },
formatSelectionTooBig: function (limit) { return "Puoi selezionare solo " + limit + " element" + (limit == 1 ? "o" : "i"); },
formatLoadMore: function (pageNumber) { return "Caricamento in corso..."; },
formatSearching: function () { return "Ricerca..."; }
});
})(jQuery);<|fim▁end|>
|
"use strict";
|
<|file_name|>pr14814.C<|end_file_name|><|fim▁begin|>/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-forwprop2" } */
/* LLVM LOCAL test not applicable */
/* { dg-require-fdump "" } */
class YY { public:
YY(const YY &v) { e[0] = v.e[0]; e[1] = v.e[1]; e[2] = v.e[2]; }
double &y() { return e[1]; }
double e[3]; };
class XX { public:<|fim▁hole|>
int foo(XX& r) {
if (r.direction().y() < 0.000001) return 0;
return 1; }
/* { dg-final { scan-tree-dump-times "&this" 0 "forwprop2" } } */
/* { dg-final { scan-tree-dump-times "&r" 0 "forwprop2" } } */
/* { dg-final { cleanup-tree-dump "forwprop2" } } */<|fim▁end|>
|
YY direction() const { return v; }
YY v; };
|
<|file_name|>smart_truncate_chars.py<|end_file_name|><|fim▁begin|>from django import template
from djangopress.core.util import smart_truncate_chars as _smart_truncate_chars
register = template.Library()
<|fim▁hole|># truncate chars but leaving last word complete
@register.filter(name='smarttruncatechars')
def smart_truncate_chars(value, max_length):
return _smart_truncate_chars(value, max_length)<|fim▁end|>
| |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>from django.http import HttpRequest
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
try:
from allauth.account import app_settings as allauth_settings
from allauth.utils import (email_address_exists,
get_username_max_length)
from allauth.account.adapter import get_adapter
from allauth.account.utils import setup_user_email
except ImportError:
raise ImportError("allauth needs to be added to INSTALLED_APPS.")
from rest_framework import serializers
from requests.exceptions import HTTPError
# Import is needed only if we are using social login, in which
# case the allauth.socialaccount will be declared
if 'allauth.socialaccount' in settings.INSTALLED_APPS:
from allauth.socialaccount.helpers import complete_social_login
class SocialLoginSerializer(serializers.Serializer):
access_token = serializers.CharField(required=False, allow_blank=True)
code = serializers.CharField(required=False, allow_blank=True)
def _get_request(self):
request = self.context.get('request')
if not isinstance(request, HttpRequest):
request = request._request
return request
def get_social_login(self, adapter, app, token, response):
"""
:param adapter: allauth.socialaccount Adapter subclass.
Usually OAuthAdapter or Auth2Adapter
:param app: `allauth.socialaccount.SocialApp` instance
:param token: `allauth.socialaccount.SocialToken` instance
        :param response: Provider's response for OAuth1. Not used in the
            OAuth2 flow.
        :returns: A populated instance of
            `allauth.socialaccount.models.SocialLogin`
"""
request = self._get_request()
social_login = adapter.complete_login(request, app, token, response=response)
social_login.token = token
return social_login
def validate(self, attrs):
view = self.context.get('view')
request = self._get_request()
if not view:
raise serializers.ValidationError(
_("View is not defined, pass it as a context variable")
)
adapter_class = getattr(view, 'adapter_class', None)
if not adapter_class:
raise serializers.ValidationError(_("Define adapter_class in view"))
adapter = adapter_class(request)
app = adapter.get_provider().get_app(request)
# More info on code vs access_token
# http://stackoverflow.com/questions/8666316/facebook-oauth-2-0-code-and-token
# Case 1: We received the access_token
if attrs.get('access_token'):
access_token = attrs.get('access_token')
# Case 2: We received the authorization code
elif attrs.get('code'):
self.callback_url = getattr(view, 'callback_url', None)
self.client_class = getattr(view, 'client_class', None)
if not self.callback_url:
raise serializers.ValidationError(
_("Define callback_url in view")
)
if not self.client_class:
raise serializers.ValidationError(
_("Define client_class in view")
)
code = attrs.get('code')
provider = adapter.get_provider()
scope = provider.get_scope(request)
client = self.client_class(
request,
app.client_id,
app.secret,
adapter.access_token_method,
adapter.access_token_url,
self.callback_url,
scope
)
token = client.get_access_token(code)
access_token = token['access_token']
else:
raise serializers.ValidationError(
_("Incorrect input. access_token or code is required."))
social_token = adapter.parse_token({'access_token': access_token})
social_token.app = app
try:
login = self.get_social_login(adapter, app, social_token, access_token)
complete_social_login(request, login)
except HTTPError:
raise serializers.ValidationError(_('Incorrect value'))
if not login.is_existing:<|fim▁hole|> login.lookup()
login.save(request, connect=True)
attrs['user'] = login.account.user
return attrs
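    # Illustrative sketch, not part of the original module: a hypothetical
    # provider-specific login view would typically pair this serializer with an
    # allauth OAuth2 adapter, for example:
    #
    #     from allauth.socialaccount.providers.facebook.views import FacebookOAuth2Adapter
    #     from rest_auth.registration.views import SocialLoginView
    #
    #     class FacebookLogin(SocialLoginView):
    #         adapter_class = FacebookOAuth2Adapter
    #         serializer_class = SocialLoginSerializer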
class RegisterSerializer(serializers.Serializer):
username = serializers.CharField(
max_length=get_username_max_length(),
min_length=allauth_settings.USERNAME_MIN_LENGTH,
required=allauth_settings.USERNAME_REQUIRED
)
email = serializers.EmailField(required=allauth_settings.EMAIL_REQUIRED)
password1 = serializers.CharField(write_only=True)
password2 = serializers.CharField(write_only=True)
def validate_username(self, username):
username = get_adapter().clean_username(username)
return username
def validate_email(self, email):
email = get_adapter().clean_email(email)
if allauth_settings.UNIQUE_EMAIL:
if email and email_address_exists(email):
raise serializers.ValidationError(
_("A user is already registered with this e-mail address."))
return email
def validate_password1(self, password):
return get_adapter().clean_password(password)
def validate(self, data):
if data['password1'] != data['password2']:
raise serializers.ValidationError(_("The two password fields didn't match."))
return data
def custom_signup(self, request, user):
pass
def get_cleaned_data(self):
return {
'username': self.validated_data.get('username', ''),
'password1': self.validated_data.get('password1', ''),
'email': self.validated_data.get('email', '')
}
def save(self, request):
adapter = get_adapter()
user = adapter.new_user(request)
self.cleaned_data = self.get_cleaned_data()
adapter.save_user(request, user, self)
self.custom_signup(request, user)
setup_user_email(request, user, [])
return user
class VerifyEmailSerializer(serializers.Serializer):
key = serializers.CharField()<|fim▁end|>
| |
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class InternalFlowException(Exception):
pass
class ReturnException(InternalFlowException):
def __init__(self, value):
self._value = value
@property
def value(self):
return self._value
class BreakException(InternalFlowException):
pass
class ContinueException(InternalFlowException):
pass
class DslInvalidOperationError(Exception):
pass
class NoMethodFound(Exception):
def __init__(self, name):
super(NoMethodFound, self).__init__('Method "%s" is not found' % name)
class NoClassFound(Exception):
def __init__(self, name):
super(NoClassFound, self).__init__('Class "%s" is not found' % name)
class NoPackageFound(Exception):
def __init__(self, name):
super(NoPackageFound, self).__init__(
'Package "%s" is not found' % name)
<|fim▁hole|> super(NoPackageForClassFound, self).__init__('Package for class "%s" '
'is not found' % name)
class NoObjectFoundError(Exception):
def __init__(self, object_id):
super(NoObjectFoundError, self).__init__(
'Object "%s" is not found in object store' % object_id)
class AmbiguousMethodName(Exception):
def __init__(self, name):
super(AmbiguousMethodName, self).__init__(
            'Found more than one method "%s"' % name)
class DslContractSyntaxError(Exception):
pass
class ContractViolationException(Exception):
pass
class ValueIsMissingError(Exception):
pass
class DslSyntaxError(Exception):
pass
class PropertyAccessError(Exception):
pass
class AmbiguousPropertyNameError(PropertyAccessError):
def __init__(self, name):
super(AmbiguousPropertyNameError, self).__init__(
            'Found more than one property "%s"' % name)
class NoWriteAccess(PropertyAccessError):
def __init__(self, name):
super(NoWriteAccess, self).__init__(
'Property "%s" is immutable to the caller' % name)
class NoWriteAccessError(PropertyAccessError):
def __init__(self, name):
super(NoWriteAccessError, self).__init__(
'Property "%s" is immutable to the caller' % name)
class PropertyReadError(PropertyAccessError):
def __init__(self, name, murano_class):
super(PropertyAccessError, self).__init__(
'Property "%s" in class "%s" cannot be read' %
(name, murano_class.name))
class PropertyWriteError(PropertyAccessError):
def __init__(self, name, murano_class):
super(PropertyAccessError, self).__init__(
'Property "%s" in class "%s" cannot be written' %
(name, murano_class.name))
class UninitializedPropertyAccessError(PropertyAccessError):
def __init__(self, name, murano_class):
super(PropertyAccessError, self).__init__(
'Access to uninitialized property '
'"%s" in class "%s" is forbidden' % (name, murano_class.name))<|fim▁end|>
|
class NoPackageForClassFound(Exception):
def __init__(self, name):
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os
import time
import logging
import string
import requests
import unicodedata
import base64
try: import cPickle as pickle
except: import pickle
import datetime
from django.utils import timezone
import json
from pprint import pprint
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.http import HttpResponseForbidden
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import redirect, render
logger = logging.getLogger(__name__)
import boto.ec2
import boto.ec2.cloudwatch
from django.contrib.auth.models import User
from userprofile.models import Profile as userprofile
from userprofile.views import _log_user_activity
from amazon import s3_funcs
from amazon import s3_funcs_shortcuts
from django.contrib.auth.decorators import login_required
from django.template.defaultfilters import filesizeformat, upper
from django.contrib.humanize.templatetags.humanize import naturalday
from cloudly.templatetags.cloud_extras import clean_ps_command
from operator import itemgetter, attrgetter, methodcaller
from cloudly.templatetags.cloud_extras import clear_filename, get_file_extension
from vms.models import Cache
import decimal
from django.db.models.base import ModelState
import pymongo
from pymongo import MongoClient
from pymongo import ASCENDING, DESCENDING
client = MongoClient('mongo', 27017)
mongo = client.cloudly
def date_handler(obj):
return obj.isoformat() if hasattr(obj, 'isoformat') else obj
@login_required()
def update_session(request):
for value in request.POST:
if(value != 'secret'):
request.session[value] = request.POST[value]
request.session.modified = True
return render_to_response('ajax_null.html', locals())
@login_required()
def aws_vm_view(request,vm_name):
print '-- aws_vm_view'
print request.user
user = request.user
profile = userprofile.objects.get(user=request.user)
user.last_login = datetime.datetime.now()
user.save()
aws_access_key = profile.aws_access_key
aws_secret_key = profile.aws_secret_key
ip = request.META['REMOTE_ADDR']
_log_user_activity(profile,"click","/aws/"+vm_name,"aws_vm_view",ip=ip)
vms_cache = Cache.objects.get(user=user)
vm_cache = vms_cache.vms_response
vm_cache = base64.b64decode(vm_cache)
try:
vm_cache = pickle.loads(vm_cache)[vm_name]
except:
return HttpResponse("XXX " + vm_name)
ec2_region = vm_cache['instance']['region']['name']
if(vm_cache['user_id']!=request.user.id):
return HttpResponse("access denied")
if(vms_cache.vms_console_output_cache):
console_output = vms_cache.vms_console_output_cache
else:
aws_access_key = profile.aws_access_key
aws_secret_key = profile.aws_secret_key
aws_ec2_verified = profile.aws_ec2_verified
ec2conn = boto.ec2.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
reservations = ec2conn.get_all_instances(instance_ids=[vm_name,])
instance = reservations[0].instances[0]
console_output = instance.get_console_output()
console_output = console_output.output
if(not console_output):
console_output = ""
vms_cache.vms_console_output_cache = console_output
vms_cache.save()
end = datetime.datetime.utcnow()
start = end - datetime.timedelta(minutes=60)
ec2conn = boto.ec2.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
cloudwatch = boto.ec2.cloudwatch.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
metric = cloudwatch.list_metrics(dimensions={'InstanceId':vm_cache['id']}, metric_name="NetworkIn")[0]
networkin_datapoints = metric.query(start, end, 'Average', '')
metric = cloudwatch.list_metrics(dimensions={'InstanceId':vm_cache['id']}, metric_name="NetworkOut")[0]
networkout_datapoints = metric.query(start, end, 'Average', '')
metric = cloudwatch.list_metrics(dimensions={'InstanceId':vm_cache['id']}, metric_name="DiskReadOps")[0]
disk_readops_datapoints = metric.query(start, end, 'Average', '')
metric = cloudwatch.list_metrics(dimensions={'InstanceId':vm_cache['id']}, metric_name="DiskWriteOps")[0]
disk_writeops_datapoints = metric.query(start, end, 'Average', '')
metric = cloudwatch.list_metrics(dimensions={'InstanceId':vm_cache['id']}, metric_name="DiskReadBytes")[0]
disk_readbytes_datapoints = metric.query(start, end, 'Average', '')
metric = cloudwatch.list_metrics(dimensions={'InstanceId':vm_cache['id']}, metric_name="DiskWriteBytes")[0]
disk_writebytes_datapoints = metric.query(start, end, 'Average', '')
networkin_datapoints = json.dumps(networkin_datapoints,default=date_handler)
networkout_datapoints = json.dumps(networkout_datapoints,default=date_handler)
disk_readops_datapoints = json.dumps(disk_readops_datapoints,default=date_handler)
disk_writeops_datapoints = json.dumps(disk_writeops_datapoints,default=date_handler)
disk_readbytes_datapoints = json.dumps(disk_readbytes_datapoints,default=date_handler)
disk_writebytes_datapoints = json.dumps(disk_writebytes_datapoints,default=date_handler)
return render_to_response('aws_vm.html', {'vm_name':vm_name,'vm_cache':vm_cache,'console_output':console_output,'networkin_datapoints':networkin_datapoints,'networkout_datapoints':networkout_datapoints,'disk_readops_datapoints':disk_readops_datapoints,'disk_writeops_datapoints':disk_writeops_datapoints,'disk_readbytes_datapoints':disk_readbytes_datapoints,'disk_writebytes_datapoints':disk_writebytes_datapoints,}, context_instance=RequestContext(request))
@login_required()
def control_aws_vm(request, vm_name, action):
print request.user
user = request.user
profile = userprofile.objects.get(user=request.user)
user.last_login = datetime.datetime.now()
user.save()
ip = request.META['REMOTE_ADDR']
_log_user_activity(profile,"click","/aws/"+vm_name+"/"+action+"/","control_aws_vm",ip=ip)
vms_cache = Cache.objects.get(user=user)
vm_cache = vms_cache.vms_response
vm_cache = base64.b64decode(vm_cache)
vm_cache = pickle.loads(vm_cache)[vm_name]
if(vm_cache['user_id']!=request.user.id):
return HttpResponse("access denied")
aws_access_key = profile.aws_access_key
aws_secret_key = profile.aws_secret_key
aws_ec2_verified = profile.aws_ec2_verified
ec2_region = vm_cache['instance']['region']['name']
ec2conn = boto.ec2.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
if(action=="reboot"):
ec2conn.reboot_instances([vm_name,])
if(action=="start"):
ec2conn.start_instances([vm_name,])
if(action=="stop"):
ec2conn.stop_instances([vm_name,])
if(action=="terminate"):
ec2conn.terminate_instances([vm_name,])
return HttpResponseRedirect("/")
@login_required()
def server_view(request, hwaddr):
print '-- server_view'
print request.user
user = request.user
profile = userprofile.objects.get(user=request.user)
ip = request.META['REMOTE_ADDR']
_log_user_activity(profile,"click","/server/"+hwaddr,"server_view",ip=ip)
hwaddr_orig = hwaddr
hwaddr = hwaddr.replace('-',':')
server = mongo.servers.find_one({'secret':profile.secret,'uuid':hwaddr,})
server_status = "Running"
if((datetime.datetime.utcnow()-server['last_seen']).total_seconds()>20):
server_status = "Stopped"
if((datetime.datetime.utcnow()-server['last_seen']).total_seconds()>1800):
server_status = "Offline"
try:
uuid = server['uuid']
except:
return HttpResponse("access denied")
disks_usage_ = []
#disks_usage = mongo.disks_usage.find({'uuid':uuid,}).sort('_id',-1).limit(60)
#for i in disks_usage: disks_usage_.append(i)
disks_usage = disks_usage_
networking_ = []
#networking = mongo.networking.find({'uuid':uuid,}).sort('_id',-1).limit(60)
#for i in networking: networking_.append(i)
networking = networking_
mem_usage_ = []
#mem_usage = mongo.memory_usage.find({'uuid':uuid,}).sort('_id',-1).limit(60)
#for i in mem_usage: mem_usage_.append(i)
mem_usage = mem_usage_
loadavg_ = []
#loadavg = mongo.loadavg.find({'uuid':uuid,}).sort('_id',-1).limit(60)
#for i in loadavg: loadavg_.append(i)
loadavg = loadavg_
activity = mongo.activity.find({'uuid':uuid,}).sort('_id',-1).limit(3)
disks = []
disks_ = server[u'disks_usage']
for disk in disks_:
if not disk[5] in disks:
disks.append(disk[5])
return render_to_response('server_detail.html', {'secret':profile.secret,'hwaddr':hwaddr,'hwaddr_orig':hwaddr_orig,'server':server,'server_status':server_status,'disks_usage':disks_usage,'disks':disks,'mem_usage':mem_usage,'loadavg':loadavg,'networking':networking,'activity':activity,}, context_instance=RequestContext(request))
@login_required()
def ajax_update_server_name(request):
response = {}
response["success"] = "true"
response = str(response).replace('u"','"')
response = response.replace("'",'"')
server_ = request.POST['server']
secret = request.POST['secret']
server_ = server_.replace('-', ':')
server = mongo.servers.find_one({'secret':secret,'uuid':server_,})
if request.POST["servername"] == "":
server['name'] = request.POST['server'].replace("-", ":")
else:
server['name'] = request.POST["servername"]
server = mongo.servers.update({'secret':secret, 'uuid':server_}, server)
vms_cache = Cache.objects.get(user=request.user)
vms_cache.delete()
return HttpResponse(response, content_type="application/json")
@login_required()
def ajax_vms_refresh(request):
user = request.user
profile = userprofile.objects.get(user=request.user)
print 'Refreshing', user, 'VMs cache..'
aws_access_key = profile.aws_access_key
aws_secret_key = profile.aws_secret_key
aws_ec2_verified = profile.aws_ec2_verified
virtual_machines = {}
servers = mongo.servers.find({'secret':profile.secret,}).sort('_id',-1)
vms_cache = Cache.objects.get_or_create(user=user)
vms_cache = vms_cache[0]
vms_cache.is_updating = True
vms_cache.save()
if(servers.count()):
print 'servers count', servers.count()
for server in servers:
instance_metrics = {}
instance_metrics['id'] = server['uuid']
instance_metrics['user_id'] = request.user.id
instance_metrics['provider'] = 'agent'
instance_metrics['instance'] = {}
instance_metrics['instance']['user_id'] = request.user.id
instance_metrics['instance']['state'] = {}
instance_metrics['instance']['tags'] = {}
try:
instance_metrics["instance"]['tags']['Name'] = server['name']
#instance_metrics["instance"]['tags']['Name'] = ''.join(x for x in unicodedata.normalize('NFKD', server['hostname']) if x in string.ascii_letters).lower()
except:
instance_metrics["instance"]['tags']['Name'] = server['hostname'].replace('.','-').lower()
uuid = server['uuid']
if((datetime.datetime.utcnow()-server['last_seen']).total_seconds()>20):
instance_metrics['instance']['state']['state'] = "Stopped"
if((datetime.datetime.utcnow()-server['last_seen']).total_seconds()>1800):
instance_metrics['instance']['state']['state'] = "Offline"
else:
instance_metrics['instance']['state']['state'] = "Running"
cpu_usage_ = ""
params = {'start':'2m-ago','m':'sum:' + uuid.replace(':','-') + '.sys.cpu'}
tsdb = requests.get('http://hbase:4242/api/query',params=params)
tsdb_response = json.loads(tsdb.text)
try:
tsdb_response = tsdb_response[0]['dps']
except:
tsdb_response = []
c=0
for i in tsdb_response:
cpu_usage_ += str(round(tsdb_response[i],2))
cpu_usage_ += ","
if(c==60): break
c+=1
cpu_usage = cpu_usage_[:-1]
cpu_usage_reversed = ""
cpu_usage_array_reversed = []
for i in cpu_usage.split(','): cpu_usage_array_reversed.insert(0,i)
for i in cpu_usage_array_reversed: cpu_usage_reversed += str(i)+","
cpu_usage_reversed = cpu_usage_reversed[:-1]
instance_metrics['cpu_utilization_datapoints'] = cpu_usage_reversed
virtual_machines[server['uuid'].replace(':','-')] = instance_metrics
#print 'virtual_machines', virtual_machines
if aws_ec2_verified:
aws_regions = profile.aws_enabled_regions.split(',')
print 'AWS regions', aws_regions
for ec2_region in aws_regions:
if(ec2_region):
ec2conn = boto.ec2.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
cloudwatch = boto.ec2.cloudwatch.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
try:
reservations = ec2conn.get_all_instances()
except:
vms_cache.is_updating = False
vms_cache.vms_response = ""
vms_cache.save()
print vms_cache.is_updating
print vms_cache.vms_response
#return HttpResponse("access denied")
instances = [i for r in reservations for i in r.instances]
for instance in instances:
if not instance: continue
instance_metrics = {}
instance_metrics['instance'] = {}
print '** instance', instance.id, instance.private_ip_address
volumes = []
for volume in ec2conn.get_all_volumes(filters={'attachment.instance-id': instance.id}):
volumes.append([volume.id, volume.iops, volume.size,])
groups = []
for group in instance.__dict__['groups']:
groups.append([group.id, group.name,])
instance_metrics['id'] = instance.id
instance_metrics['user_id'] = request.user.id
instance_metrics['provider'] = "aws-ec2"
instance_metrics['instance']['placement'] = instance.placement
instance_metrics['instance']['user_id'] = request.user.id
instance_metrics['instance']['groups'] = groups
instance_metrics['instance']['block_device_mapping'] = volumes
instance_metrics['instance']['architecture'] = instance.architecture
instance_metrics['instance']['client_token'] = instance.client_token
instance_metrics['instance']['dns_name'] = instance.dns_name
instance_metrics['instance']['private_ip_address'] = instance.private_ip_address
instance_metrics['instance']['hypervisor'] = instance.hypervisor
instance_metrics['instance']['id'] = instance.id
instance_metrics['instance']['image_id'] = instance.image_id
instance_metrics['instance']['instance_type'] = instance.instance_type
instance_metrics['instance']['ip_address'] = instance.ip_address
instance_metrics['instance']['key_name'] = instance.key_name
instance_metrics['instance']['launch_time'] = instance.launch_time
instance_metrics['instance']['monitored'] = instance.monitored
instance_metrics['instance']['persistent'] = instance.persistent
instance_metrics['instance']['ramdisk'] = instance.ramdisk
instance_metrics['instance']['root_device_name'] = instance.root_device_name
instance_metrics['instance']['root_device_type'] = instance.root_device_type
instance_metrics['instance']['tags'] = instance.tags
instance_metrics['instance']['virtualization_type'] = instance.virtualization_type
instance_metrics['instance']['vpc_id'] = instance.vpc_id
instance_metrics['instance']['region'] = {"endpoint":instance.region.endpoint,"name":instance.region.name,}
instance_metrics['instance']['state'] = {"state":instance.state,"code":instance.state_code,"state_reason":instance.state_reason,}
virtual_machines[instance.id] = instance_metrics
print 'Updating', request.user, 'cache..'
print instance.platform, instance.product_codes
try:
ec2conn.monitor_instance(str(instance.id))
except:
print instance.id, 'instance not in a monitorable state!!'.upper()
#pprint(instance_metrics)
continue
# Here is where you define start - end for the Logs...............
end = datetime.datetime.utcnow()
start = end - datetime.timedelta(minutes=60)
# This is how you list all possible values on the response....
# print ec2conn.list_metrics()
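                    # Illustrative note (assumption, not part of the original code): each
                    # datapoint returned by metric.query() is a dict shaped roughly like
                    #     {u'Timestamp': datetime(...), u'Average': 12.5, u'Unit': u'Percent'}
                    # which is why the JSON-building code later reads i["Average"].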
try:
metric = cloudwatch.list_metrics(dimensions={'InstanceId':instance.id}, metric_name="CPUUtilization")[0]
except: continue
cpu_utilization_datapoints = metric.query(start, end, 'Average', 'Percent')
instance_metrics['cpu_utilization_datapoints'] = json.dumps(cpu_utilization_datapoints,default=date_handler)
virtual_machines[instance.id] = instance_metrics
vms_cache.vms_response = base64.b64encode(pickle.dumps(virtual_machines, pickle.HIGHEST_PROTOCOL))
vms_cache.last_seen = timezone.now()
vms_cache.is_updating = False
vms_cache.save()
    print 'VMs cache was successfully updated.'
return HttpResponse("ALLDONE")
@login_required()
def ajax_virtual_machines(request):
print '-- ajax virtual machines'
print request.user
user = request.user
profile = userprofile.objects.get(user=request.user)
try:
vms_cache = Cache.objects.get(user=user)
vm_cache = vms_cache.vms_response
vm_cache = base64.b64decode(vm_cache)
except: vm_cache = {}
try:
vm_cache = pickle.loads(vm_cache)
except: vm_cache = {}
c=0
ajax_vms_response = "{"
for vm in vm_cache:
if(vm_cache[vm]["instance"]["state"]["state"].lower()!="terminated"):
data_median = 0
isotope_filter_classes = " offline linux "
try:
data = ""
cpu_utilization_datapoints = vm_cache[vm]["cpu_utilization_datapoints"]
cpu_utilization_datapoints = json.loads(cpu_utilization_datapoints)
z=0
for i in cpu_utilization_datapoints:
data += str(i["Average"])
try:
data_median += float(i["Average"])
except: pass
if(len(cpu_utilization_datapoints)-1>z):
data += ","
#print data
z+=1
try:
data_median = data_median/z
except: data_median = 0
except:
try:
data = vm_cache[vm]["cpu_utilization_datapoints"]
z = 0
data_median = 0
for i in data.split(','):
z+=1
data_median += float(i)
data_median = data_median/z
except: data = ""
try:
instance_name = vm_cache[vm]["instance"]["tags"]["Name"]
except:
instance_name = vm
print 'instance_name', instance_name
color = "silver "
vm_state = vm_cache[vm]["instance"]["state"]["state"].title()
server_mac_address = vm_cache[vm]['id']
server_mac_address = str(server_mac_address).replace(':','-')
if(vm_state=="Running"):
isotope_filter_classes = " linux "
if(data_median<17):
color = "lightBlue "
if(data_median>=17 and data_median<=35):
color = "green "
isotope_filter_classes += " busy"
if(data_median>35 and data_median<=50):
color = "darkGreen "
isotope_filter_classes += " busy"
if(data_median>50 and data_median<=70):
color = "lightOrange "
isotope_filter_classes += " busy"
if(data_median>70):
isotope_filter_classes += " busy critical"
color = "red "
if data_median>85:
vm_state = "Hot hot hot!"
if(vm_state=="Stopping"):
color = "pink "
if(vm_state=="Pending"):
color = "pink "
if(vm_state=="Shutting-Down"):
color = "pink "
if(vm_state=="Stopped"):
isotope_filter_classes += " offline"
if(vm_cache[vm]['provider']!='agent'):
isotope_filter_classes += " cloud"
ajax_vms_response += "\""
ajax_vms_response += server_mac_address
ajax_vms_response += "\": {"
ajax_vms_response += "\"vmcolor\":\""
ajax_vms_response += color
ajax_vms_response += "\","
ajax_vms_response += "\"vmname\":\""
ajax_vms_response += instance_name
ajax_vms_response += "\","
ajax_vms_response += "\"vmtitle\":\""
ajax_vms_response += isotope_filter_classes
ajax_vms_response += "\","
ajax_vms_response += "\"averge\":\""
ajax_vms_response += data
ajax_vms_response += "\","
ajax_vms_response += "\"state\":\""
ajax_vms_response += vm_state
ajax_vms_response += "\","
ajax_vms_response += "\"link\":\""
if(vm_cache[vm]['provider']=='agent'):
ajax_vms_response += "/server/"+vm+"/"
else:
ajax_vms_response += "/aws/"+vm+"/"
ajax_vms_response += "\""
ajax_vms_response += "},"
if(c==len(vm_cache)-1):
ajax_vms_response += "}"
c+=1
#print '-_'*80
#print vm_cache[vm]["instance"]["state"]["state"].title(), vm
ajax_vms_response = ajax_vms_response.replace(",}","}")
if(not vm_cache): ajax_vms_response = {}
return render_to_response('ajax_virtual_machines.html', {'user':user,'ajax_vms_response':ajax_vms_response,'vms_cached_response':vm_cache,}, context_instance=RequestContext(request))<|fim▁hole|>
@login_required()
def ajax_aws_graphs(request, instance_id, graph_type="all"):
print '-- ajax_aws_graphs', request.user
user = request.user
profile = userprofile.objects.get(user=request.user)
vms_cache = Cache.objects.get(user=user)
vm_cache = vms_cache.vms_response
vm_cache = base64.b64decode(vm_cache)
try:
vm_cache = pickle.loads(vm_cache)[instance_id]
except:
return HttpResponse("XXX " + instance_id)
if(vm_cache['user_id']!=request.user.id):
return HttpResponse("access denied")
aws_access_key = profile.aws_access_key
aws_secret_key = profile.aws_secret_key
aws_ec2_verified = profile.aws_ec2_verified
ec2_region = vm_cache['instance']['region']['name']
ec2conn = boto.ec2.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
cloudwatch = boto.ec2.cloudwatch.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
reservations = ec2conn.get_all_instances(instance_ids=[instance_id,])
instance = reservations[0].instances[0]
end = datetime.datetime.utcnow()
start = end - datetime.timedelta(days=10)
metric = cloudwatch.list_metrics(dimensions={'InstanceId':instance_id}, metric_name="CPUUtilization")[0]
cpu_utilization_datapoints = metric.query(start, end, 'Average', 'Percent',period=3600)
return HttpResponse("data " + instance_id + "=" + str(instance) + " ** " + graph_type.upper())
@login_required()
def ajax_server_graphs(request, hwaddr, graph_type=""):
print '-- ajax_server_graphs, type', graph_type
print request.user
graphs_mixed_respose = []
secret = request.POST['secret']
uuid = request.POST['server']
uuid = uuid.replace('-',':')
server = mongo.servers.find_one({'secret':secret,'uuid':uuid,})
print 'debug', secret, uuid
try:
uuid = server['uuid']
except:
return HttpResponse("access denied")
server_status = "Running"
if((datetime.datetime.utcnow()-server['last_seen']).total_seconds()>20):
server_status = "Stopped"
if((datetime.datetime.utcnow()-server['last_seen']).total_seconds()>1800):
server_status = "Offline"
#activity = mongo.activity.find({'uuid':uuid,}).sort('_id',-1).limit(3)
if(graph_type=="server_info"):
graphs_mixed_respose = {}
graphs_mixed_respose['name'] = server['name']
graphs_mixed_respose['server_info_hostname'] = server['hostname']
graphs_mixed_respose['cpu_used'] = server['cpu_usage']['cpu_used']
graphs_mixed_respose['memory_used'] = server['memory_usage']['memory_used_percentage']
graphs_mixed_respose['swap_used'] = server['memory_usage']['swap_used_percentage']
graphs_mixed_respose['loadavg_used'] = server['loadavg'][1]
graphs_mixed_respose['server_info_uptime'] = server['uptime']
graphs_mixed_respose['server_info_loadavg'] = server['loadavg']
graphs_mixed_respose['server_info_status'] = server_status
graphs_mixed_respose = str(graphs_mixed_respose).replace('u"','"')
graphs_mixed_respose = graphs_mixed_respose.replace("'",'"')
graphs_mixed_respose = str(graphs_mixed_respose).replace('u"','"')
return HttpResponse(graphs_mixed_respose, content_type="application/json")
if(graph_type=="processes"):
processes_ = []
processes = server['processes']
c=0
for line in processes:
if(c>0):
if not line:break
line = line.split(' ')
line_ = []
for i in line:
if i: line_.append(i)
line = line_
process_user = line[0]
process_pid = line[1]
process_cpu = line[2]
process_mem = line[3]
process_vsz = line[4]
process_rss = line[5]
process_tty = line[6]
process_stat = line[7]
process_start_time = line[8]+'-'+line[9]
process_command = line[10:]
process_name = clean_ps_command(process_command[0])
process = {
'pid': process_pid,
'cpu': process_cpu+'%',
'mem': process_mem+'%',
# 'vsz': process_vsz,
# 'rss': process_rss,
# 'tty': process_tty,
# 'stat': process_stat,
# 'start_time': process_start_time,
'process': process_name,
'command': ' '.join(str(x) for x in process_command).replace("[", "").replace("]","")
}
process['user'] = '<span class=\\"label label-success\\">'
if int(float(process_cpu)) > 50:
process['user'] = '<span class=\\"label label-warning\\">'
if int(float(process_cpu)) > 75:
process['user'] = '<span class=\\"label label-danger\\">'
process['user'] += process_user
process['user'] += '</span>'
processes_.append(process)
c+=1
processes = {}
processes['data'] = processes_
processes = str(processes).replace(" u'"," '").replace("[u'","['").replace("'",'"').replace("\\\\", "\\")
return HttpResponse(processes, content_type="application/json")
if(graph_type=="network_connections"):
network_connections_ = []
network_connections = server['network_connections']['listen']
for conn in network_connections:
connection = {}
connection['proto'] = conn[1]
connection['recv-q'] = conn[2]
connection['send-q'] = conn[3]
connection['address'] = conn[4]
if conn[6]:
connection['port'] = conn[5] + "/" + conn[6]
else:
connection['port'] = conn[5]
network_connections_.append(connection)
network_connections = {}
network_connections['data'] = network_connections_
network_connections = str(network_connections).replace(" u'"," '")
network_connections = str(network_connections).replace("'",'"')
return HttpResponse(network_connections, content_type="application/json")
if(graph_type=="active_network_connections"):
active_network_connections_ = []
active_network_connections = server['network_connections']['established']
for conn in active_network_connections:
connection = {}
connection['proto'] = conn[1]
connection['recv-q'] = conn[2]
connection['send-q'] = conn[3]
connection['local-address'] = conn[7]
connection['foreign-address'] = conn[4]
connection['foreign-port'] = conn[5]
active_network_connections_.append(connection)
active_network_connections = {}
active_network_connections['data'] = active_network_connections_
active_network_connections = str(active_network_connections).replace(" u'"," '")
active_network_connections = str(active_network_connections).replace("'",'"')
return HttpResponse(active_network_connections, content_type="application/json")
if(graph_type=="loadavg"):
params = None
graph_interval = request.POST['interval']
graphs_mixed_respose = [[],[],[]]
loadavg_specific_queries = ['1-min','5-mins','15-mins']
count = 0
for i in loadavg_specific_queries:
if(graph_interval=="3m"):
params = {'start':'3m-ago','m':'avg:3s-avg:' + hwaddr + '.sys.loadavg'}
if(graph_interval=="15m"):
params = {'start':'15m-ago','m':'avg:15s-avg:' + hwaddr + '.sys.loadavg'}
if(graph_interval=="1h"):
params = {'start':'1h-ago','m':'avg:1m-avg:' + hwaddr + '.sys.loadavg'}
if(graph_interval=="1d"):
params = {'start':'1d-ago','m':'avg:30m-avg:' + hwaddr + '.sys.loadavg'}
if(graph_interval=="7d"):
params = {'start':'7d-ago','m':'avg:3h-avg:' + hwaddr + '.sys.loadavg'}
if(graph_interval=="30d"):
params = {'start':'30d-ago','m':'avg:12h-avg:' + hwaddr + '.sys.loadavg'}
params_ = params
params_['m'] = params['m'] + "{avg="+i+"}"
tsdb = requests.get('http://hbase:4242/api/query', params=params_)
params = params_
tsdb_response = json.loads(tsdb.text)
tsdb_response = tsdb_response[0]['dps']
for i in tsdb_response:
graphs_mixed_respose[count].append([int(i),round(float(tsdb_response[i]),2)])
graphs_mixed_respose[count] = sorted(graphs_mixed_respose[count], key=itemgetter(0))
graphs_mixed_respose[count] = graphs_mixed_respose[count][::-1]
count += 1
graphs_mixed_respose = str(graphs_mixed_respose).replace("u'","'")
return HttpResponse(graphs_mixed_respose, content_type="application/json")
if(graph_type=="disks"):
print '*'*1000
print request.POST
mount_ponit = request.POST['mountPoint']
graph_interval = request.POST['interval']
graphs_mixed_respose = []
if(graph_interval=="3m"):
params = {'start':'3m-ago','m':'avg:3s-avg:' + hwaddr + '.sys.disks'}
if(graph_interval=="15m"):
params = {'start':'15m-ago','m':'avg:15s-avg:' + hwaddr + '.sys.disks'}
if(graph_interval=="1h"):
params = {'start':'1h-ago','m':'avg:1m-avg:' + hwaddr + '.sys.disks'}
if(graph_interval=="1d"):
params = {'start':'1d-ago','m':'avg:30m-avg:' + hwaddr + '.sys.disks'}
if(graph_interval=="7d"):
params = {'start':'7d-ago','m':'avg:3h-avg:' + hwaddr + '.sys.disks'}
if(graph_interval=="30d"):
params = {'start':'30d-ago','m':'avg:12h-avg:' + hwaddr + '.sys.disks'}
params['m'] += "{mm=disk_used,mount_point="+mount_ponit+"}"
if(params):
tsdb = requests.get('http://hbase:4242/api/query',params=params)
tsdb_response = json.loads(tsdb.text)
tsdb_response = tsdb_response[0]['dps']
for i in tsdb_response:
graphs_mixed_respose.append([int(i),round(float(tsdb_response[i]),2)])
graphs_mixed_respose = sorted(graphs_mixed_respose, key=itemgetter(0))
graphs_mixed_respose = [graphs_mixed_respose[::-1],]
graphs_mixed_respose = str(graphs_mixed_respose).replace("u'","'")
return HttpResponse(graphs_mixed_respose, content_type="application/json")
if(graph_type=="cpu_usage"):
params = None
graph_interval = request.POST['interval']
graphs_mixed_respose = []
if(graph_interval=="3m"):
params = {'start':'3m-ago','m':'avg:3s-avg:' + hwaddr + '.sys.cpu'}
if(graph_interval=="15m"):
params = {'start':'15m-ago','m':'avg:15s-avg:' + hwaddr + '.sys.cpu'}
if(graph_interval=="1h"):
params = {'start':'1h-ago','m':'avg:1m-avg:' + hwaddr + '.sys.cpu'}
if(graph_interval=="1d"):
params = {'start':'1d-ago','m':'avg:30m-avg:' + hwaddr + '.sys.cpu'}
if(graph_interval=="7d"):
params = {'start':'7d-ago','m':'avg:3h-avg:' + hwaddr + '.sys.cpu'}
if(graph_interval=="30d"):
params = {'start':'30d-ago','m':'avg:12h-avg:' + hwaddr + '.sys.cpu'}
if(params):
tsdb = requests.get('http://hbase:4242/api/query',params=params)
tsdb_response = json.loads(tsdb.text)
tsdb_response = tsdb_response[0]['dps']
for i in tsdb_response:
graphs_mixed_respose.append([int(i),round(float(tsdb_response[i]),2)])
graphs_mixed_respose = sorted(graphs_mixed_respose, key=itemgetter(0))
graphs_mixed_respose = [graphs_mixed_respose[::-1],]
graphs_mixed_respose = str(graphs_mixed_respose).replace("u'","'")
return HttpResponse(graphs_mixed_respose, content_type="application/json")
if(graph_type=="mem_usage" or graph_type=="swap_usage"):
params = None
graph_interval = request.POST['interval']
graphs_mixed_respose = []
if(graph_interval=="3m"):
params = {'start':'3m-ago','m':'avg:3s-avg:' + hwaddr + '.sys.memory'}
if(graph_interval=="15m"):
params = {'start':'15m-ago','m':'avg:15s-avg:' + hwaddr + '.sys.memory'}
if(graph_interval=="1h"):
params = {'start':'1h-ago','m':'avg:1m-avg:' + hwaddr + '.sys.memory'}
if(graph_interval=="1d"):
params = {'start':'1d-ago','m':'avg:30m-avg:' + hwaddr + '.sys.memory'}
if(graph_interval=="7d"):
params = {'start':'7d-ago','m':'avg:3h-avg:' + hwaddr + '.sys.memory'}
if(graph_interval=="30d"):
params = {'start':'30d-ago','m':'avg:12h-avg:' + hwaddr + '.sys.memory'}
if(graph_type=="mem_usage"):
params['m'] += "{mm=memory_used}"
if(graph_type=="swap_usage"):
params['m'] += "{mm=swap_used}"
if(params):
tsdb = requests.get('http://hbase:4242/api/query',params=params)
tsdb_response = json.loads(tsdb.text)
tsdb_response = tsdb_response[0]['dps']
for i in tsdb_response:
graphs_mixed_respose.append([int(i),round(float(tsdb_response[i]),2)])
graphs_mixed_respose = sorted(graphs_mixed_respose, key=itemgetter(0))
graphs_mixed_respose = [graphs_mixed_respose[::-1],]
graphs_mixed_respose = str(graphs_mixed_respose).replace("u'","'")
return HttpResponse(graphs_mixed_respose, content_type="application/json")
if(graph_type=="network_input_packets" or graph_type=="inbound_traffic" or graph_type=="network_output_packets" or graph_type=="outbound_traffic"):
params = None
graph_interval = request.POST['interval']
graphs_mixed_respose = []
if(graph_interval=="3m"):
params = {'start':'3m-ago','m':'avg:3s-avg:' + hwaddr + '.sys.network'}
if(graph_interval=="15m"):
params = {'start':'15m-ago','m':'avg:15s-avg:' + hwaddr + '.sys.network'}
if(graph_interval=="1h"):
params = {'start':'1h-ago','m':'avg:1m-avg:' + hwaddr + '.sys.network'}
if(graph_interval=="1d"):
params = {'start':'1d-ago','m':'avg:30m-avg:' + hwaddr + '.sys.network'}
if(graph_interval=="7d"):
params = {'start':'7d-ago','m':'avg:3h-avg:' + hwaddr + '.sys.network'}
if(graph_interval=="30d"):
params = {'start':'30d-ago','m':'avg:12h-avg:' + hwaddr + '.sys.network'}
if(graph_type=="network_input_packets"):
params['m'] += "{mm=input_accept_packets}"
if(graph_type=="network_input_bytes"):
params['m'] += "{mm=input_accept_bytes}"
if(graph_type=="network_output_packets"):
params['m'] += "{mm=output_accept_packets}"
if(graph_type=="network_output_bytes"):
params['m'] += "{mm=output_accept_bytes}"
if(params):
tsdb = requests.get('http://hbase:4242/api/query',params=params)
tsdb_response = json.loads(tsdb.text)
tsdb_response = tsdb_response[0]['dps']
for i in tsdb_response:
graphs_mixed_respose.append([int(i),round(float(tsdb_response[i]),2)])
graphs_mixed_respose = sorted(graphs_mixed_respose, key=itemgetter(0))
graphs_mixed_respose = [graphs_mixed_respose[::-1],]
graphs_mixed_respose = str(graphs_mixed_respose).replace("u'","'")
return HttpResponse(graphs_mixed_respose, content_type="application/json")
return HttpResponse("I'm sorry I don't understand")
def ajax_virtual_machines_box(request):
return render_to_response('ajax_virtual_machines_box.html', locals(), context_instance=RequestContext(request))<|fim▁end|>
| |
<|file_name|>DirectoryMetadata.py<|end_file_name|><|fim▁begin|>""" DIRAC FileCatalog mix-in class to manage directory metadata
"""
# pylint: disable=protected-access
import six
import os
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Utilities.Time import queryTime
class DirectoryMetadata(object):
def __init__(self, database=None):
self.db = database
def setDatabase(self, database):
self.db = database
##############################################################################
#
# Manage Metadata fields
#
def addMetadataField(self, pName, pType, credDict):
"""Add a new metadata parameter to the Metadata Database.
:param str pName: parameter name
:param str pType: parameter type in the MySQL notation
:return: S_OK/S_ERROR, Value - comment on a positive result
"""
result = self.db.fmeta.getFileMetadataFields(credDict)
if not result["OK"]:
return result
if pName in result["Value"]:
return S_ERROR("The metadata %s is already defined for Files" % pName)
result = self._getMetadataFields(credDict)
if not result["OK"]:
return result
if pName in result["Value"]:
if pType.lower() == result["Value"][pName].lower():
return S_OK("Already exists")
return S_ERROR(
"Attempt to add an existing metadata with different type: %s/%s" % (pType, result["Value"][pName])
)
valueType = pType
if pType.lower()[:3] == "int":
valueType = "INT"
elif pType.lower() == "string":
valueType = "VARCHAR(128)"
elif pType.lower() == "float":
valueType = "FLOAT"
elif pType.lower() == "date":
valueType = "DATETIME"
elif pType == "MetaSet":
valueType = "VARCHAR(64)"
req = "CREATE TABLE FC_Meta_%s ( DirID INTEGER NOT NULL, Value %s, PRIMARY KEY (DirID), INDEX (Value) )" % (
pName,
valueType,
)
result = self.db._query(req)
if not result["OK"]:
return result
result = self.db.insertFields("FC_MetaFields", ["MetaName", "MetaType"], [pName, pType])
if not result["OK"]:
return result
metadataID = result["lastRowId"]
result = self.__transformMetaParameterToData(pName)
if not result["OK"]:
return result
return S_OK("Added new metadata: %d" % metadataID)
def deleteMetadataField(self, pName, credDict):
"""Remove metadata field
:param str pName: meta parameter name
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR
"""
req = "DROP TABLE FC_Meta_%s" % pName
result = self.db._update(req)
error = ""
if not result["OK"]:
error = result["Message"]
req = "DELETE FROM FC_MetaFields WHERE MetaName='%s'" % pName
result = self.db._update(req)
if not result["OK"]:
if error:
result["Message"] = error + "; " + result["Message"]
return result
def getMetadataFields(self, credDict):
"""Get all the defined metadata fields
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR, Value is the metadata:metadata type dictionary
"""
return self._getMetadataFields(credDict)
def _getMetadataFields(self, credDict):
"""Get all the defined metadata fields as they are defined in the database
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR, Value is the metadata:metadata type dictionary
"""
req = "SELECT MetaName,MetaType FROM FC_MetaFields"
result = self.db._query(req)
if not result["OK"]:
return result
metaDict = {}
for row in result["Value"]:
metaDict[row[0]] = row[1]
return S_OK(metaDict)
def addMetadataSet(self, metaSetName, metaSetDict, credDict):
"""Add a new metadata set with the contents from metaSetDict
:param str metaSetName: metaSet name
:param dict metaSetDict: contents of the meta set definition
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR
"""
result = self._getMetadataFields(credDict)
if not result["OK"]:
return result
metaTypeDict = result["Value"]
# Check the sanity of the metadata set contents
for key in metaSetDict:
if key not in metaTypeDict:
return S_ERROR("Unknown key %s" % key)
result = self.db.insertFields("FC_MetaSetNames", ["MetaSetName"], [metaSetName])
if not result["OK"]:
return result
metaSetID = result["lastRowId"]
req = "INSERT INTO FC_MetaSets (MetaSetID,MetaKey,MetaValue) VALUES %s"
vList = []
for key, value in metaSetDict.items():
vList.append("(%d,'%s','%s')" % (metaSetID, key, str(value)))
vString = ",".join(vList)
result = self.db._update(req % vString)
return result
def getMetadataSet(self, metaSetName, expandFlag, credDict):
"""Get fully expanded contents of the metadata set
:param str metaSetName: metaSet name
        :param bool expandFlag: flag indicating whether to expand the metaset recursively
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR, Value dictionary of the meta set definition contents
"""
result = self._getMetadataFields(credDict)
if not result["OK"]:
return result
metaTypeDict = result["Value"]
req = "SELECT S.MetaKey,S.MetaValue FROM FC_MetaSets as S, FC_MetaSetNames as N "
req += "WHERE N.MetaSetName='%s' AND N.MetaSetID=S.MetaSetID" % metaSetName
result = self.db._query(req)
if not result["OK"]:
return result
if not result["Value"]:
return S_OK({})
resultDict = {}
for key, value in result["Value"]:
if key not in metaTypeDict:
return S_ERROR("Unknown key %s" % key)
if expandFlag:
if metaTypeDict[key] == "MetaSet":
result = self.getMetadataSet(value, expandFlag, credDict)
if not result["OK"]:
return result
resultDict.update(result["Value"])
else:
resultDict[key] = value
else:
resultDict[key] = value
return S_OK(resultDict)
#############################################################################################
#
# Set and get directory metadata
#
#############################################################################################
def setMetadata(self, dPath, metaDict, credDict):
"""Set the value of a given metadata field for the the given directory path
:param str dPath: directory path
:param dict metaDict: dictionary with metadata
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR
"""
result = self._getMetadataFields(credDict)
if not result["OK"]:
return result
metaFields = result["Value"]
result = self.db.dtree.findDir(dPath)
if not result["OK"]:
return result
if not result["Value"]:
return S_ERROR("Path not found: %s" % dPath)
dirID = result["Value"]
dirmeta = self.getDirectoryMetadata(dPath, credDict, ownData=False)
if not dirmeta["OK"]:
return dirmeta
for metaName, metaValue in metaDict.items():
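            # Metadata not declared as a searchable field is stored as a plain directory parameter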
if metaName not in metaFields:
result = self.setMetaParameter(dPath, metaName, metaValue, credDict)
if not result["OK"]:
return result
continue
# Check that the metadata is not defined for the parent directories
if metaName in dirmeta["Value"]:
return S_ERROR("Metadata conflict detected for %s for directory %s" % (metaName, dPath))
result = self.db.insertFields("FC_Meta_%s" % metaName, ["DirID", "Value"], [dirID, metaValue])
if not result["OK"]:
if result["Message"].find("Duplicate") != -1:
req = "UPDATE FC_Meta_%s SET Value='%s' WHERE DirID=%d" % (metaName, metaValue, dirID)
result = self.db._update(req)
if not result["OK"]:
return result
else:
return result
return S_OK()
def removeMetadata(self, dPath, metaData, credDict):
"""Remove the specified metadata for the given directory
:param str dPath: directory path
        :param list metaData: list of metadata names to be removed
:param dict credDict: client credential dictionary
:return: standard Dirac result object
"""
result = self._getMetadataFields(credDict)
if not result["OK"]:
return result
metaFields = result["Value"]
result = self.db.dtree.findDir(dPath)
if not result["OK"]:
return result
if not result["Value"]:
return S_ERROR("Path not found: %s" % dPath)
dirID = result["Value"]
failedMeta = {}
for meta in metaData:
if meta in metaFields:
# Indexed meta case
req = "DELETE FROM FC_Meta_%s WHERE DirID=%d" % (meta, dirID)
result = self.db._update(req)
if not result["OK"]:
                    failedMeta[meta] = result["Message"]
else:
# Meta parameter case
req = "DELETE FROM FC_DirMeta WHERE MetaKey='%s' AND DirID=%d" % (meta, dirID)
result = self.db._update(req)
if not result["OK"]:
                    failedMeta[meta] = result["Message"]
        if failedMeta:
            metaExample = list(failedMeta)[0]
            result = S_ERROR("Failed to remove %d metadata, e.g. %s" % (len(failedMeta), failedMeta[metaExample]))
            result["FailedMetadata"] = failedMeta
            return result
        return S_OK()
def setMetaParameter(self, dPath, metaName, metaValue, credDict):
"""Set an meta parameter - metadata which is not used in the the data
search operations
:param str dPath: directory name
:param str metaName: meta parameter name
:param str metaValue: meta parameter value
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR
"""
result = self.db.dtree.findDir(dPath)
if not result["OK"]:
return result
if not result["Value"]:
return S_ERROR("Path not found: %s" % dPath)
dirID = result["Value"]
result = self.db.insertFields(
"FC_DirMeta", ["DirID", "MetaKey", "MetaValue"], [dirID, metaName, str(metaValue)]
)
return result
def getDirectoryMetaParameters(self, dpath, credDict, inherited=True):
"""Get meta parameters for the given directory
        :param str dpath: directory name
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR, Value dictionary of meta parameters
"""
if inherited:
result = self.db.dtree.getPathIDs(dpath)
if not result["OK"]:
return result
pathIDs = result["Value"]
dirID = pathIDs[-1]
else:
result = self.db.dtree.findDir(dpath)
if not result["OK"]:
return result
if not result["Value"]:
return S_ERROR("Path not found: %s" % dpath)
dirID = result["Value"]
pathIDs = [dirID]
if len(pathIDs) > 1:
pathString = ",".join([str(x) for x in pathIDs])
req = "SELECT DirID,MetaKey,MetaValue from FC_DirMeta where DirID in (%s)" % pathString
else:
req = "SELECT DirID,MetaKey,MetaValue from FC_DirMeta where DirID=%d " % dirID
result = self.db._query(req)
if not result["OK"]:
return result
if not result["Value"]:
return S_OK({})
metaDict = {}
for _dID, key, value in result["Value"]:
if key in metaDict:
if isinstance(metaDict[key], list):
metaDict[key].append(value)
else:
                    metaDict[key] = [metaDict[key], value]
else:
metaDict[key] = value
return S_OK(metaDict)
def getDirectoryMetadata(self, path, credDict, inherited=True, ownData=True):
"""Get metadata for the given directory aggregating metadata for the directory itself
and for all the parent directories if inherited flag is True. Get also the non-indexed
metadata parameters.
:param str path: directory name
:param dict credDict: client credential dictionary
:param bool inherited: flag to include metadata from the parent directories
:param bool ownData: flag to include metadata for the directory itself
:return: S_OK/S_ERROR, Value dictionary of metadata
"""
result = self.db.dtree.getPathIDs(path)
if not result["OK"]:
return result
pathIDs = result["Value"]
result = self._getMetadataFields(credDict)
if not result["OK"]:
return result
metaFields = result["Value"]
metaDict = {}
metaOwnerDict = {}
metaTypeDict = {}
dirID = pathIDs[-1]
if not inherited:
pathIDs = pathIDs[-1:]
if not ownData:
pathIDs = pathIDs[:-1]
pathString = ",".join([str(x) for x in pathIDs])
for meta in metaFields:
req = "SELECT Value,DirID FROM FC_Meta_%s WHERE DirID in (%s)" % (meta, pathString)
result = self.db._query(req)
if not result["OK"]:
return result
if len(result["Value"]) > 1:
return S_ERROR("Metadata conflict for %s for directory %s" % (meta, path))
if result["Value"]:
metaDict[meta] = result["Value"][0][0]
if int(result["Value"][0][1]) == dirID:
metaOwnerDict[meta] = "OwnMetadata"
else:
metaOwnerDict[meta] = "ParentMetadata"
metaTypeDict[meta] = metaFields[meta]
# Get also non-searchable data
result = self.getDirectoryMetaParameters(path, credDict, inherited)
if result["OK"]:
metaDict.update(result["Value"])
for meta in result["Value"]:
metaOwnerDict[meta] = "OwnParameter"
result = S_OK(metaDict)
result["MetadataOwner"] = metaOwnerDict
result["MetadataType"] = metaTypeDict
return result
def __transformMetaParameterToData(self, metaName):
"""Relocate the meta parameters of all the directories to the corresponding
indexed metadata table
:param str metaName: name of the parameter to transform
:return: S_OK/S_ERROR
"""
req = "SELECT DirID,MetaValue from FC_DirMeta WHERE MetaKey='%s'" % metaName
result = self.db._query(req)
if not result["OK"]:
return result
if not result["Value"]:
return S_OK()
dirDict = {}
for dirID, meta in result["Value"]:
dirDict[dirID] = meta
dirList = list(dirDict)
# Exclude child directories from the list
        # Iterate over a copy since child entries are removed from dirList inside the loop
        for dirID in list(dirList):
result = self.db.dtree.getSubdirectoriesByID(dirID)
if not result["OK"]:
return result
if not result["Value"]:
continue
childIDs = list(result["Value"])
for childID in childIDs:
if childID in dirList:
del dirList[dirList.index(childID)]
insertValueList = []
for dirID in dirList:
insertValueList.append("( %d,'%s' )" % (dirID, dirDict[dirID]))
req = "INSERT INTO FC_Meta_%s (DirID,Value) VALUES %s" % (metaName, ", ".join(insertValueList))
result = self.db._update(req)
if not result["OK"]:
return result
req = "DELETE FROM FC_DirMeta WHERE MetaKey='%s'" % metaName
result = self.db._update(req)
return result
############################################################################################
#
# Find directories corresponding to the metadata
#
def __createMetaSelection(self, value, table=""):
"""Create an SQL selection element for the given meta value
:param dict value: dictionary with selection instructions suitable for the database search
:param str table: table name
:return: selection string
"""
if isinstance(value, dict):
selectList = []
for operation, operand in value.items():
if operation in [">", "<", ">=", "<="]:
if isinstance(operand, list):
return S_ERROR("Illegal query: list of values for comparison operation")
if isinstance(operand, six.integer_types):
selectList.append("%sValue%s%d" % (table, operation, operand))
elif isinstance(operand, float):
selectList.append("%sValue%s%f" % (table, operation, operand))
else:
selectList.append("%sValue%s'%s'" % (table, operation, operand))
elif operation == "in" or operation == "=":
if isinstance(operand, list):
vString = ",".join(["'" + str(x) + "'" for x in operand])
selectList.append("%sValue IN (%s)" % (table, vString))
else:
selectList.append("%sValue='%s'" % (table, operand))
elif operation == "nin" or operation == "!=":
if isinstance(operand, list):
vString = ",".join(["'" + str(x) + "'" for x in operand])
selectList.append("%sValue NOT IN (%s)" % (table, vString))
else:
selectList.append("%sValue!='%s'" % (table, operand))
selectString = " AND ".join(selectList)
elif isinstance(value, list):
vString = ",".join(["'" + str(x) + "'" for x in value])
selectString = "%sValue in (%s)" % (table, vString)
else:
if value == "Any":
selectString = ""
else:
selectString = "%sValue='%s' " % (table, value)
return S_OK(selectString)
def __findSubdirByMeta(self, metaName, value, pathSelection="", subdirFlag=True):
"""Find directories for the given metaName datum. If the the metaName datum type is a list,
combine values in OR. In case the metaName datum is 'Any', finds all the subdirectories
for which the metaName datum is defined at all.
:param str metaName: metadata name
:param dict,list value: dictionary with selection instructions suitable for the database search
:param str pathSelection: directory path selection string
        :param bool subdirFlag: flag to include subdirectories
:return: S_OK/S_ERROR, Value list of found directories
"""
result = self.__createMetaSelection(value, "M.")
if not result["OK"]:
return result
selectString = result["Value"]
req = " SELECT M.DirID FROM FC_Meta_%s AS M" % metaName
if pathSelection:
req += " JOIN ( %s ) AS P WHERE M.DirID=P.DirID" % pathSelection
if selectString:
if pathSelection:
req += " AND %s" % selectString
else:
req += " WHERE %s" % selectString
result = self.db._query(req)
if not result["OK"]:
return result
if not result["Value"]:
return S_OK([])
dirList = []
for row in result["Value"]:
dirID = row[0]
dirList.append(dirID)
# if subdirFlag:
# result = self.db.dtree.getSubdirectoriesByID( dirID )
# if not result['OK']:
# return result
# dirList += result['Value']
if subdirFlag:
result = self.db.dtree.getAllSubdirectoriesByID(dirList)
if not result["OK"]:
return result
dirList += result["Value"]
return S_OK(dirList)
def __findSubdirMissingMeta(self, metaName, pathSelection):
"""Find directories not having the given meta datum defined
:param str metaName: metadata name
:param str pathSelection: directory path selection string
        :return: S_OK/S_ERROR, Value list of directories
"""
result = self.__findSubdirByMeta(metaName, "Any", pathSelection)
if not result["OK"]:
return result
dirList = result["Value"]
table = self.db.dtree.getTreeTable()
dirString = ",".join([str(x) for x in dirList])
if dirList:
req = "SELECT DirID FROM %s WHERE DirID NOT IN ( %s )" % (table, dirString)
else:
req = "SELECT DirID FROM %s" % table
result = self.db._query(req)
if not result["OK"]:
return result
if not result["Value"]:
return S_OK([])
dirList = [x[0] for x in result["Value"]]
return S_OK(dirList)
def __expandMetaDictionary(self, metaDict, credDict):
"""Update the dictionary with metadata query by expand metaSet type metadata
:param dict metaDict: metaDict to be expanded
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR , Value dictionary of metadata
"""
result = self._getMetadataFields(credDict)
if not result["OK"]:
return result
metaTypeDict = result["Value"]
resultDict = {}
extraDict = {}
for key, value in metaDict.items():
if key not in metaTypeDict:
# return S_ERROR( 'Unknown metadata field %s' % key )
extraDict[key] = value
continue
keyType = metaTypeDict[key]
if keyType != "MetaSet":
resultDict[key] = value
else:
result = self.getMetadataSet(value, True, credDict)
if not result["OK"]:
return result
mDict = result["Value"]
for mk, mv in mDict.items():
if mk in resultDict:
return S_ERROR("Contradictory query for key %s" % mk)
else:
resultDict[mk] = mv
result = S_OK(resultDict)
result["ExtraMetadata"] = extraDict
return result
def __checkDirsForMetadata(self, metaName, value, pathString):
"""Check if any of the given directories conform to the given metadata
        :param str metaName: metadata name
:param dict,list value: dictionary with selection instructions suitable for the database search
:param str pathString: string of comma separated directory names
:return: S_OK/S_ERROR, Value directory ID
"""
result = self.__createMetaSelection(value, "M.")
if not result["OK"]:
return result
selectString = result["Value"]
if selectString:
req = "SELECT M.DirID FROM FC_Meta_%s AS M WHERE %s AND M.DirID IN (%s)" % (
metaName,
selectString,
pathString,
)
else:
req = "SELECT M.DirID FROM FC_Meta_%s AS M WHERE M.DirID IN (%s)" % (metaName, pathString)
result = self.db._query(req)
if not result["OK"]:
return result
elif not result["Value"]:
return S_OK(None)
elif len(result["Value"]) > 1:
return S_ERROR("Conflict in the directory metadata hierarchy")
else:
return S_OK(result["Value"][0][0])
@queryTime
def findDirIDsByMetadata(self, queryDict, path, credDict):
"""Find Directories satisfying the given metadata and being subdirectories of
the given path
:param dict queryDict: dictionary containing query data
:param str path: starting directory path
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR, Value list of selected directory IDs
"""
pathDirList = []
pathDirID = 0
pathString = "0"
if path != "/":
result = self.db.dtree.getPathIDs(path)
if not result["OK"]:
# as result[Value] is already checked in getPathIDs
return result
pathIDs = result["Value"]
pathDirID = pathIDs[-1]
pathString = ",".join([str(x) for x in pathIDs])
result = self.__expandMetaDictionary(queryDict, credDict)
if not result["OK"]:
return result
metaDict = result["Value"]
# Now check the meta data for the requested directory and its parents
finalMetaDict = dict(metaDict)
for meta in metaDict:
result = self.__checkDirsForMetadata(meta, metaDict[meta], pathString)
if not result["OK"]:
return result
elif result["Value"] is not None:
# Some directory in the parent hierarchy is already conforming with the
# given metadata, no need to check it further
del finalMetaDict[meta]
if finalMetaDict:
pathSelection = ""
if pathDirID:
result = self.db.dtree.getSubdirectoriesByID(pathDirID, includeParent=True, requestString=True)
if not result["OK"]:
return result
pathSelection = result["Value"]
dirList = []
first = True
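            # A directory must satisfy every remaining metadata constraint, so intersect the per-metadata results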
for meta, value in finalMetaDict.items():
if value == "Missing":
result = self.__findSubdirMissingMeta(meta, pathSelection)
else:
result = self.__findSubdirByMeta(meta, value, pathSelection)
if not result["OK"]:
return result
mList = result["Value"]
if first:
dirList = mList
first = False
else:
newList = []
for d in dirList:
if d in mList:
newList.append(d)
dirList = newList
else:
if pathDirID:
result = self.db.dtree.getSubdirectoriesByID(pathDirID, includeParent=True)
if not result["OK"]:
return result
pathDirList = list(result["Value"])
finalList = []
dirSelect = False
if finalMetaDict:
dirSelect = True
finalList = dirList
if pathDirList:
finalList = list(set(dirList) & set(pathDirList))
else:
if pathDirList:
dirSelect = True
finalList = pathDirList
result = S_OK(finalList)
if finalList:
result["Selection"] = "Done"
elif dirSelect:
result["Selection"] = "None"
else:
result["Selection"] = "All"
return result
@queryTime
def findDirectoriesByMetadata(self, queryDict, path, credDict):
"""Find Directory names satisfying the given metadata and being subdirectories of
the given path
:param dict queryDict: dictionary containing query data
:param str path: starting directory path
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR, Value list of selected directory paths
"""
result = self.findDirIDsByMetadata(queryDict, path, credDict)
if not result["OK"]:
return result
dirIDList = result["Value"]
dirNameDict = {}
if dirIDList:
result = self.db.dtree.getDirectoryPaths(dirIDList)
if not result["OK"]:
return result
dirNameDict = result["Value"]
elif result["Selection"] == "None":
dirNameDict = {0: "None"}
elif result["Selection"] == "All":
dirNameDict = {0: "All"}
return S_OK(dirNameDict)
def findFilesByMetadata(self, metaDict, path, credDict):
"""Find Files satisfying the given metadata
:param dict metaDict: dictionary with the selection metadata
:param str path: starting directory path
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR, Value list files in selected directories
"""
result = self.findDirectoriesByMetadata(metaDict, path, credDict)
if not result["OK"]:
return result
dirDict = result["Value"]
dirList = list(dirDict)
fileList = []
result = self.db.dtree.getFilesInDirectory(dirList, credDict)
if not result["OK"]:
return result
for _fileID, dirID, fname in result["Value"]:
fileList.append(dirDict[dirID] + "/" + os.path.basename(fname))
return S_OK(fileList)
def findFileIDsByMetadata(self, metaDict, path, credDict, startItem=0, maxItems=25):
"""Find Files satisfying the given metadata
:param dict metaDict: dictionary with the selection metadata
:param str path: starting directory path
:param dict credDict: client credential dictionary
:param int startItem: offset in the file list
        :param int maxItems: max number of files to return
:return: S_OK/S_ERROR, Value list file IDs in selected directories
"""
result = self.findDirIDsByMetadata(metaDict, path, credDict)
if not result["OK"]:
return result
dirList = result["Value"]
return self.db.dtree.getFileIDsInDirectoryWithLimits(dirList, credDict, startItem, maxItems)
################################################################################################
#
# Find metadata compatible with other metadata in order to organize dynamically updated metadata selectors
def __findCompatibleDirectories(self, metaName, value, fromDirs):
"""Find directories compatible with the given metaName datum.
Optionally limit the list of compatible directories to only those in the
fromDirs list
:param str metaName: metadata name
:param dict,list value: dictionary with selection instructions suitable for the database search
:param list fromDirs: list of directories to choose from
:return: S_OK/S_ERROR, Value list of selected directories
"""
# The directories compatible with the given metaName datum are:
# - directory for which the datum is defined
# - all the subdirectories of the above directory
# - all the directories in the parent hierarchy of the above directory
# Find directories defining the metaName datum and their subdirectories
result = self.__findSubdirByMeta(metaName, value, subdirFlag=False)
if not result["OK"]:
return result
selectedDirs = result["Value"]
if not selectedDirs:
return S_OK([])
result = self.db.dtree.getAllSubdirectoriesByID(selectedDirs)
if not result["OK"]:
return result
subDirs = result["Value"]
# Find parent directories of the directories defining the metaName datum
parentDirs = []
for psub in selectedDirs:
result = self.db.dtree.getPathIDsByID(psub)
if not result["OK"]:
return result
parentDirs += result["Value"]
# Constrain the output to only those that are present in the input list
resDirs = parentDirs + subDirs + selectedDirs
if fromDirs:
resDirs = list(set(resDirs) & set(fromDirs))
return S_OK(resDirs)
def __findDistinctMetadata(self, metaList, dList):
"""Find distinct metadata values defined for the list of the input directories.
Limit the search for only metadata in the input list
:param list metaList: list of metadata names
:param list dList: list of directories to limit the selection
:return: S_OK/S_ERROR, Value dictionary of metadata
"""
if dList:
dString = ",".join([str(x) for x in dList])
else:
dString = None
metaDict = {}
for meta in metaList:
req = "SELECT DISTINCT(Value) FROM FC_Meta_%s" % meta
if dString:
req += " WHERE DirID in (%s)" % dString
result = self.db._query(req)
if not result["OK"]:
return result
if result["Value"]:
metaDict[meta] = []
for row in result["Value"]:
metaDict[meta].append(row[0])
return S_OK(metaDict)
def getCompatibleMetadata(self, queryDict, path, credDict):
"""Get distinct metadata values compatible with the given already defined metadata
:param dict queryDict: dictionary containing query data
:param str path: starting directory path
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR, Value dictionary of metadata
"""
pathDirID = 0
if path != "/":
result = self.db.dtree.findDir(path)
if not result["OK"]:
return result
if not result["Value"]:
return S_ERROR("Path not found: %s" % path)
pathDirID = int(result["Value"])
pathDirs = []
if pathDirID:
result = self.db.dtree.getSubdirectoriesByID(pathDirID, includeParent=True)
if not result["OK"]:
return result
if result["Value"]:
pathDirs = list(result["Value"])
result = self.db.dtree.getPathIDsByID(pathDirID)
if not result["OK"]:
return result
if result["Value"]:
pathDirs += result["Value"]
# Get the list of metadata fields to inspect
result = self._getMetadataFields(credDict)
if not result["OK"]:
return result
metaFields = result["Value"]
comFields = list(metaFields)
# Commented out to return compatible data also for selection metadata
# for m in metaDict:
# if m in comFields:
# del comFields[comFields.index( m )]
result = self.__expandMetaDictionary(queryDict, credDict)
if not result["OK"]:
return result
metaDict = result["Value"]
fromList = pathDirs
anyMeta = True
if metaDict:
anyMeta = False
for meta, value in metaDict.items():
result = self.__findCompatibleDirectories(meta, value, fromList)
if not result["OK"]:
return result
cdirList = result["Value"]
if cdirList:
fromList = cdirList
else:
fromList = []
break
if anyMeta or fromList:
result = self.__findDistinctMetadata(comFields, fromList)
else:
result = S_OK({})
return result
def removeMetadataForDirectory(self, dirList, credDict):
"""Remove all the metadata for the given directory list
:param list dirList: list of directory paths
:param dict credDict: client credential dictionary
:return: S_OK/S_ERROR, Value Successful/Failed dictionaries
"""
if not dirList:
return S_OK({"Successful": {}, "Failed": {}})
failed = {}
successful = {}
dirs = dirList
if not isinstance(dirList, list):
dirs = [dirList]
dirListString = ",".join([str(d) for d in dirs])
# Get the list of metadata fields to inspect
result = self._getMetadataFields(credDict)<|fim▁hole|> if not result["OK"]:
return result
metaFields = result["Value"]
for meta in metaFields:
req = "DELETE FROM FC_Meta_%s WHERE DirID in ( %s )" % (meta, dirListString)
result = self.db._query(req)
if not result["OK"]:
failed[meta] = result["Message"]
else:
successful[meta] = "OK"
return S_OK({"Successful": successful, "Failed": failed})<|fim▁end|>
| |
<|file_name|>0010_remove_unneeded_objects.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
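        # Layer rows are now identified by their ResourceBase pointer rather than a separate id column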
# Deleting field id
db.delete_column('layers_layer', 'id')
# set new primary key for layers_layer
db.create_primary_key('layers_layer', ['resourcebase_ptr_id'])
def backwards(self, orm):
raise RuntimeError("Cannot reverse this migration.")
models = {<|fim▁hole|> 'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': u"orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 1, 10, 5, 46, 57, 679891)'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 1, 10, 5, 46, 57, 688538)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 1, 10, 5, 46, 57, 688151)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'base.contactrole': {
'Meta': {'unique_together': "(('contact', 'resource', 'role'),)", 'object_name': 'ContactRole'},
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['people.Profile']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.ResourceBase']"}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['people.Role']"})
},
u'base.region': {
'Meta': {'ordering': "('name',)", 'object_name': 'Region'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'base.resourcebase': {
'Meta': {'object_name': 'ResourceBase'},
'abstract': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'bbox_x0': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
'bbox_x1': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
'bbox_y0': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
'bbox_y1': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.TopicCategory']", 'null': 'True', 'blank': 'True'}),
'constraints_other': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'contacts': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['people.Profile']", 'through': u"orm['base.ContactRole']", 'symmetrical': 'False'}),
'csw_anytext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'csw_insert_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'csw_mdsource': ('django.db.models.fields.CharField', [], {'default': "'local'", 'max_length': '256'}),
'csw_schema': ('django.db.models.fields.CharField', [], {'default': "'http://www.isotc211.org/2005/gmd'", 'max_length': '64'}),
'csw_type': ('django.db.models.fields.CharField', [], {'default': "'dataset'", 'max_length': '32'}),
'csw_typename': ('django.db.models.fields.CharField', [], {'default': "'gmd:MD_Metadata'", 'max_length': '32'}),
'csw_wkt_geometry': ('django.db.models.fields.TextField', [], {'default': "'POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))'"}),
'data_quality_statement': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_type': ('django.db.models.fields.CharField', [], {'default': "'publication'", 'max_length': '255'}),
'distribution_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'distribution_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'edition': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'eng'", 'max_length': '3'}),
'maintenance_frequency': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'metadata_uploaded': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'metadata_xml': ('django.db.models.fields.TextField', [], {'default': '\'<gmd:MD_Metadata xmlns:gmd="http://www.isotc211.org/2005/gmd"/>\'', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'purpose': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'regions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['base.Region']", 'symmetrical': 'False', 'blank': 'True'}),
'restriction_code_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.RestrictionCodeType']", 'null': 'True', 'blank': 'True'}),
'spatial_representation_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.SpatialRepresentationType']", 'null': 'True', 'blank': 'True'}),
'srid': ('django.db.models.fields.CharField', [], {'default': "'EPSG:4326'", 'max_length': '255'}),
'supplemental_information': ('django.db.models.fields.TextField', [], {'default': "u'No information provided'"}),
'temporal_extent_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'temporal_extent_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'thumbnail': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.Thumbnail']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36'})
},
u'base.restrictioncodetype': {
'Meta': {'ordering': "('identifier',)", 'object_name': 'RestrictionCodeType'},
'description': ('django.db.models.fields.TextField', [], {'max_length': '255'}),
'gn_description': ('django.db.models.fields.TextField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'is_choice': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'base.spatialrepresentationtype': {
'Meta': {'ordering': "('identifier',)", 'object_name': 'SpatialRepresentationType'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'gn_description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'is_choice': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'base.thumbnail': {
'Meta': {'object_name': 'Thumbnail'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'thumb_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'thumb_spec': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True'})
},
u'base.topiccategory': {
'Meta': {'ordering': "('identifier',)", 'object_name': 'TopicCategory'},
'description': ('django.db.models.fields.TextField', [], {}),
'gn_description': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'default': "'location'", 'max_length': '255'}),
'is_choice': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'layers.attribute': {
'Meta': {'object_name': 'Attribute'},
'attribute': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'attribute_label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'attribute_type': ('django.db.models.fields.CharField', [], {'default': "'xsd:string'", 'max_length': '50'}),
'average': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '255', 'null': 'True'}),
'count': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'display_order': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_stats_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_set'", 'to': u"orm['layers.Layer']"}),
'max': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '255', 'null': 'True'}),
'median': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '255', 'null': 'True'}),
'min': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '255', 'null': 'True'}),
'stddev': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '255', 'null': 'True'}),
'sum': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '255', 'null': 'True'}),
'unique_values': ('django.db.models.fields.TextField', [], {'default': "'NA'", 'null': 'True', 'blank': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'layers.layer': {
'Meta': {'object_name': 'Layer', '_ormbases': [u'base.ResourceBase']},
'default_style': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'layer_default_style'", 'null': 'True', 'to': u"orm['layers.Style']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'popular_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'resourcebase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['base.ResourceBase']", 'unique': 'True', 'primary_key': 'True'}),
'share_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'store': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'storeType': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'styles': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'layer_styles'", 'symmetrical': 'False', 'to': u"orm['layers.Style']"}),
'typename': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'workspace': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'layers.style': {
'Meta': {'object_name': 'Style'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'sld_body': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'sld_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'sld_url': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'}),
'sld_version': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'workspace': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'people.profile': {
'Meta': {'object_name': 'Profile'},
'area': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'delivery': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'profile': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'profile'", 'unique': 'True', 'null': 'True', 'to': u"orm['auth.User']"}),
'voice': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'people.role': {
'Meta': {'object_name': 'Role'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
u'taggit.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
u'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
}
}
complete_apps = ['layers']<|fim▁end|>
|
u'actstream.action': {
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.core.cache import cache
from django.shortcuts import render
from django.http import Http404
from styleguide.utils import (Styleguide, STYLEGUIDE_DIR_NAME,
STYLEGUIDE_DEBUG, STYLEGUIDE_CACHE_NAME,
STYLEGUIDE_ACCESS)
def index(request, module_name=None, component_name=None):
if not STYLEGUIDE_ACCESS(request.user):
raise Http404()
styleguide = None
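    # Outside debug mode, reuse the styleguide parsed on a previous request from the cache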
if not STYLEGUIDE_DEBUG:
styleguide = cache.get(STYLEGUIDE_CACHE_NAME)
if styleguide is None:
styleguide = Styleguide()
cache.set(STYLEGUIDE_CACHE_NAME, styleguide, None)
if module_name is not None:
styleguide.set_current_module(module_name)
<|fim▁hole|><|fim▁end|>
|
context = {'styleguide': styleguide}
index_path = "%s/index.html" % STYLEGUIDE_DIR_NAME
return render(request, index_path, context)
|
<|file_name|>poxliff.py<|end_file_name|><|fim▁begin|>#
# Copyright 2006-2009 Zuza Software Foundation
#
# This file is part of the Translate Toolkit.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""XLIFF classes specifically suited for handling the PO representation in
XLIFF.
This way the API supports plurals as if it was a PO file, for example.
"""
import re
from lxml import etree
from translate.misc.multistring import multistring
from translate.misc.xml_helpers import setXMLspace
from translate.storage import base, lisa, poheader, xliff
from translate.storage.placeables import general
def hasplurals(thing):
if not isinstance(thing, multistring):
return False
return len(thing.strings) > 1
class PoXliffUnit(xliff.xliffunit):
"""A class to specifically handle the plural units created from a po file."""
rich_parsers = general.parsers
def __init__(self, source=None, empty=False, **kwargs):
self._rich_source = None
self._rich_target = None
self._state_n = 0
self.units = []
if empty:
return
if not hasplurals(source):
super().__init__(source)
return
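        # A plural entry is represented as an XLIFF <group> element marked with restype "x-gettext-plurals"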
self.xmlelement = etree.Element(self.namespaced("group"))
self.xmlelement.set("restype", "x-gettext-plurals")
self.source = source
def __eq__(self, other):
if isinstance(other, PoXliffUnit):
if len(self.units) != len(other.units):
return False
if not super().__eq__(other):
return False
for i in range(len(self.units) - 1):
if not self.units[i + 1] == other.units[i + 1]:
return False
return True
if len(self.units) <= 1:
if isinstance(other, lisa.LISAunit):
return super().__eq__(other)<|fim▁hole|> return self.source == other.source and self.target == other.target
return False
# XXX: We don't return language nodes correctly at the moment
# def getlanguageNodes(self):
# if not self.hasplural():
# return super().getlanguageNodes()
# else:
# return self.units[0].getlanguageNodes()
@property
def source(self):
if not self.hasplural():
return super().source
return multistring([unit.source for unit in self.units])
@source.setter
def source(self, source):
self.setsource(source, sourcelang="en")
def setsource(self, source, sourcelang="en"):
# TODO: consider changing from plural to singular, etc.
self._rich_source = None
if not hasplurals(source):
super().setsource(source, sourcelang)
else:
target = self.target
for unit in self.units:
try:
self.xmlelement.remove(unit.xmlelement)
except ValueError:
pass
self.units = []
for s in source.strings:
newunit = xliff.xliffunit(s)
# newunit.namespace = self.namespace #XXX?necessary?
self.units.append(newunit)
self.xmlelement.append(newunit.xmlelement)
self.target = target
# We don't support any rich strings yet
multistring_to_rich = base.TranslationUnit.multistring_to_rich
rich_to_multistring = base.TranslationUnit.rich_to_multistring
rich_source = base.TranslationUnit.rich_source
rich_target = base.TranslationUnit.rich_target
def gettarget(self, lang=None):
if self.hasplural():
strings = [unit.target for unit in self.units]
if strings:
return multistring(strings)
else:
return None
else:
return super().gettarget(lang)
def settarget(self, target, lang="xx", append=False):
self._rich_target = None
if self.target == target:
return
if not self.hasplural():
super().settarget(target, lang, append)
return
if not isinstance(target, multistring):
target = multistring(target)
source = self.source
sourcel = len(source.strings)
targetl = len(target.strings)
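        # Pad the shorter side so the number of source and target plural forms always match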
if sourcel < targetl:
sources = source.strings + [source.strings[-1]] * (targetl - sourcel)
targets = target.strings
id = self.getid()
self.source = multistring(sources)
self.setid(id)
elif targetl < sourcel:
targets = target.strings + [""] * (sourcel - targetl)
else:
targets = target.strings
for i in range(len(self.units)):
self.units[i].target = targets[i]
def addnote(self, text, origin=None, position="append"):
"""Add a note specifically in a "note" tag"""
note = etree.SubElement(self.xmlelement, self.namespaced("note"))
note.text = text
if origin:
note.set("from", origin)
for unit in self.units[1:]:
unit.addnote(text, origin)
def getnotes(self, origin=None):
# NOTE: We support both <context> and <note> tags in xliff files for comments
if origin == "translator":
notes = super().getnotes("translator")
trancomments = self.gettranslatorcomments()
if notes == trancomments or trancomments.find(notes) >= 0:
notes = ""
elif notes.find(trancomments) >= 0:
trancomments = notes
notes = ""
return trancomments + notes
elif origin in ["programmer", "developer", "source code"]:
devcomments = super().getnotes("developer")
autocomments = self.getautomaticcomments()
if devcomments == autocomments or autocomments.find(devcomments) >= 0:
devcomments = ""
elif devcomments.find(autocomments) >= 0:
autocomments = devcomments
devcomments = ""
return autocomments
else:
return super().getnotes(origin)
def markfuzzy(self, value=True):
super().markfuzzy(value)
for unit in self.units[1:]:
unit.markfuzzy(value)
def marktranslated(self):
super().marktranslated()
for unit in self.units[1:]:
unit.marktranslated()
def setid(self, id):
super().setid(id)
if len(self.units) > 1:
for i in range(len(self.units)):
self.units[i].setid("%s[%d]" % (id, i))
def getlocations(self):
"""Returns all the references (source locations)"""
groups = self.getcontextgroups("po-reference")
references = []
for group in groups:
sourcefile = ""
linenumber = ""
for (type, text) in group:
if type == "sourcefile":
sourcefile = text
elif type == "linenumber":
linenumber = text
assert sourcefile
if linenumber:
sourcefile = sourcefile + ":" + linenumber
references.append(sourcefile)
return references
def getautomaticcomments(self):
"""Returns the automatic comments (x-po-autocomment), which corresponds
to the #. style po comments.
"""
def hasautocomment(grp):
return grp[0] == "x-po-autocomment"
groups = self.getcontextgroups("po-entry")
comments = []
for group in groups:
commentpairs = filter(hasautocomment, group)
for (type, text) in commentpairs:
comments.append(text)
return "\n".join(comments)
def gettranslatorcomments(self):
"""Returns the translator comments (x-po-trancomment), which
corresponds to the # style po comments.
"""
def hastrancomment(grp):
return grp[0] == "x-po-trancomment"
groups = self.getcontextgroups("po-entry")
comments = []
for group in groups:
commentpairs = filter(hastrancomment, group)
for (type, text) in commentpairs:
comments.append(text)
return "\n".join(comments)
def isheader(self):
return "gettext-domain-header" in (self.getrestype() or "")
def istranslatable(self):
return super().istranslatable() and not self.isheader()
@classmethod
def createfromxmlElement(cls, element, namespace=None):
if element.tag.endswith("trans-unit"):
object = cls(None, empty=True)
object.xmlelement = element
object.namespace = namespace
return object
assert element.tag.endswith("group")
group = cls(None, empty=True)
group.xmlelement = element
group.namespace = namespace
units = list(element.iterdescendants(group.namespaced("trans-unit")))
for unit in units:
subunit = xliff.xliffunit.createfromxmlElement(unit)
subunit.namespace = namespace
group.units.append(subunit)
return group
def hasplural(self):
return self.xmlelement.tag == self.namespaced("group")
class PoXliffFile(xliff.xlifffile, poheader.poheader):
"""a file for the po variant of Xliff files"""
UnitClass = PoXliffUnit
def __init__(self, *args, **kwargs):
if "sourcelanguage" not in kwargs:
kwargs["sourcelanguage"] = "en-US"
xliff.xlifffile.__init__(self, *args, **kwargs)
def createfilenode(self, filename, sourcelanguage="en-US", datatype="po"):
# Let's ignore the sourcelanguage parameter opting for the internal
# one. PO files will probably be one language
return super().createfilenode(
filename, sourcelanguage=self.sourcelanguage, datatype="po"
)
def _insert_header(self, header):
header.xmlelement.set("restype", "x-gettext-domain-header")
header.xmlelement.set("approved", "no")
setXMLspace(header.xmlelement, "preserve")
self.addunit(header)
def addheaderunit(self, target, filename):
unit = self.addsourceunit(target, filename, True)
unit.target = target
unit.xmlelement.set("restype", "x-gettext-domain-header")
unit.xmlelement.set("approved", "no")
setXMLspace(unit.xmlelement, "preserve")
return unit
def addplural(self, source, target, filename, createifmissing=False):
"""This method should now be unnecessary, but is left for reference"""
assert isinstance(source, multistring)
if not isinstance(target, multistring):
target = multistring(target)
sourcel = len(source.strings)
targetl = len(target.strings)
if sourcel < targetl:
            sources = source.strings + [source.strings[-1]] * (targetl - sourcel)
targets = target.strings
else:
sources = source.strings
targets = target.strings
self._messagenum += 1
pluralnum = 0
        group = self.creategroup(filename, True, restype="x-gettext-plurals")
for (src, tgt) in zip(sources, targets):
unit = self.UnitClass(src)
unit.target = tgt
unit.setid("%d[%d]" % (self._messagenum, pluralnum))
pluralnum += 1
group.append(unit.xmlelement)
self.units.append(unit)
if pluralnum < sourcel:
for string in sources[pluralnum:]:
                unit = self.UnitClass(string)
unit.xmlelement.set("translate", "no")
unit.setid("%d[%d]" % (self._messagenum, pluralnum))
pluralnum += 1
group.append(unit.xmlelement)
self.units.append(unit)
return self.units[-pluralnum]
def parse(self, xml):
"""Populates this object from the given xml string"""
# TODO: Make more robust
def ispluralgroup(node):
"""determines whether the xml node refers to a getttext plural"""
return node.get("restype") == "x-gettext-plurals"
def isnonpluralunit(node):
"""determindes whether the xml node contains a plural like id.
We want to filter out all the plural nodes, except the very first
one in each group.
"""
return re.match(r".+\[[123456]\]$", node.get("id") or "") is None
def pluralunits(pluralgroups):
for pluralgroup in pluralgroups:
yield self.UnitClass.createfromxmlElement(
pluralgroup, namespace=self.namespace
)
self.filename = getattr(xml, "name", "")
if hasattr(xml, "read"):
xml.seek(0)
xmlsrc = xml.read()
xml = xmlsrc
parser = etree.XMLParser(resolve_entities=False)
self.document = etree.fromstring(xml, parser).getroottree()
self.initbody()
root_node = self.document.getroot()
assert root_node.tag == self.namespaced(self.rootNode)
groups = root_node.iterdescendants(self.namespaced("group"))
pluralgroups = filter(ispluralgroup, groups)
termEntries = root_node.iterdescendants(
self.namespaced(self.UnitClass.rootNode)
)
singularunits = list(filter(isnonpluralunit, termEntries))
if len(singularunits) == 0:
return
pluralunit_iter = pluralunits(pluralgroups)
nextplural = next(pluralunit_iter, None)
for entry in singularunits:
term = self.UnitClass.createfromxmlElement(entry, namespace=self.namespace)
if nextplural and str(term.getid()) == ("%s[0]" % nextplural.getid()):
self.addunit(nextplural, new=False)
nextplural = next(pluralunit_iter, None)
else:
self.addunit(term, new=False)<|fim▁end|>
|
else:
|
<|file_name|>tifffile.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# tifffile.py
# Copyright (c) 2008-2014, Christoph Gohlke
# Copyright (c) 2008-2014, The Regents of the University of California
# Produced at the Laboratory for Fluorescence Dynamics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holders nor the names of any
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Read and write image data from and to TIFF files.
Image and metadata can be read from TIFF, BigTIFF, OME-TIFF, STK, LSM, NIH,
SGI, ImageJ, MicroManager, FluoView, SEQ and GEL files.
Only a subset of the TIFF specification is supported, mainly uncompressed
and losslessly compressed 2**(0 to 6) bit integer, 16, 32 and 64-bit float,
grayscale and RGB(A) images, which are commonly used in bio-scientific imaging.
Specifically, reading JPEG and CCITT compressed image data or EXIF, IPTC, GPS,
and XMP metadata is not implemented.
Only primary info records are read for STK, FluoView, MicroManager, and
NIH image formats.
TIFF, the Tagged Image File Format, is under the control of Adobe Systems.
BigTIFF allows for files greater than 4 GB. STK, LSM, FluoView, SGI, SEQ, GEL,
and OME-TIFF, are custom extensions defined by Molecular Devices (Universal
Imaging Corporation), Carl Zeiss MicroImaging, Olympus, Silicon Graphics
International, Media Cybernetics, Molecular Dynamics, and the Open Microscopy
Environment consortium respectively.
For command line usage run ``python tifffile.py --help``
:Author:
`Christoph Gohlke <http://www.lfd.uci.edu/~gohlke/>`_
:Organization:
Laboratory for Fluorescence Dynamics, University of California, Irvine
:Version: 2014.08.24
Requirements
------------
* `CPython 2.7 or 3.4 <http://www.python.org>`_
* `Numpy 1.8.2 <http://www.numpy.org>`_
* `Matplotlib 1.4 <http://www.matplotlib.org>`_ (optional for plotting)
* `Tifffile.c 2013.11.05 <http://www.lfd.uci.edu/~gohlke/>`_
(recommended for faster decoding of PackBits and LZW encoded strings)
Notes
-----
The API is not stable yet and might change between revisions.
Tested on little-endian platforms only.
Other Python packages and modules for reading bio-scientific TIFF files:
* `Imread <http://luispedro.org/software/imread>`_
* `PyLibTiff <http://code.google.com/p/pylibtiff>`_
* `SimpleITK <http://www.simpleitk.org>`_
* `PyLSM <https://launchpad.net/pylsm>`_
* `PyMca.TiffIO.py <http://pymca.sourceforge.net/>`_ (same as fabio.TiffIO)
* `BioImageXD.Readers <http://www.bioimagexd.net/>`_
* `Cellcognition.io <http://cellcognition.org/>`_
* `CellProfiler.bioformats
<https://github.com/CellProfiler/python-bioformats>`_
Acknowledgements
----------------
* Egor Zindy, University of Manchester, for cz_lsm_scan_info specifics.
* Wim Lewis for a bug fix and some read_cz_lsm functions.
* Hadrien Mary for help on reading MicroManager files.
References
----------
(1) TIFF 6.0 Specification and Supplements. Adobe Systems Incorporated.
http://partners.adobe.com/public/developer/tiff/
(2) TIFF File Format FAQ. http://www.awaresystems.be/imaging/tiff/faq.html
(3) MetaMorph Stack (STK) Image File Format.
http://support.meta.moleculardevices.com/docs/t10243.pdf
(4) Image File Format Description LSM 5/7 Release 6.0 (ZEN 2010).
Carl Zeiss MicroImaging GmbH. BioSciences. May 10, 2011
(5) File Format Description - LSM 5xx Release 2.0.
http://ibb.gsf.de/homepage/karsten.rodenacker/IDL/Lsmfile.doc
(6) The OME-TIFF format.
http://www.openmicroscopy.org/site/support/file-formats/ome-tiff
(7) UltraQuant(r) Version 6.0 for Windows Start-Up Guide.
http://www.ultralum.com/images%20ultralum/pdf/UQStart%20Up%20Guide.pdf
(8) Micro-Manager File Formats.
http://www.micro-manager.org/wiki/Micro-Manager_File_Formats
(9) Tags for TIFF and Related Specifications. Digital Preservation.
http://www.digitalpreservation.gov/formats/content/tiff_tags.shtml
Examples
--------
>>> data = numpy.random.rand(5, 301, 219)
>>> imsave('temp.tif', data)
>>> image = imread('temp.tif')
>>> numpy.testing.assert_array_equal(image, data)
>>> with TiffFile('temp.tif') as tif:
... images = tif.asarray()
... for page in tif:
... for tag in page.tags.values():
... t = tag.name, tag.value
... image = page.asarray()
"""
import sys
import os
import re
import glob
import math
import zlib
import time
import json
import struct
import warnings
import tempfile
import datetime
import collections
from fractions import Fraction
from xml.etree import cElementTree as etree
import numpy
try:
import _tifffile
except ImportError:
warnings.warn(
"failed to import the optional _tifffile C extension module.\n"
"Loading of some compressed images will be slow.\n"
"Tifffile.c can be obtained at http://www.lfd.uci.edu/~gohlke/")
__version__ = '2014.08.24'
__docformat__ = 'restructuredtext en'
__all__ = ('imsave', 'imread', 'imshow', 'TiffFile', 'TiffWriter',
'TiffSequence')
def imsave(filename, data, **kwargs):
"""Write image data to TIFF file.
Refer to the TiffWriter class and member functions for documentation.
Parameters
----------
filename : str
Name of file to write.
data : array_like
Input image. The last dimensions are assumed to be image depth,
height, width, and samples.
kwargs : dict
Parameters 'byteorder', 'bigtiff', and 'software' are passed to
the TiffWriter class.
Parameters 'photometric', 'planarconfig', 'resolution',
'description', 'compress', 'volume', and 'extratags' are passed to
the TiffWriter.save function.
Examples
--------
>>> data = numpy.random.rand(2, 5, 3, 301, 219)
>>> description = '{"shape": %s}' % str(list(data.shape))
>>> imsave('temp.tif', data, compress=6,
... extratags=[(270, 's', 0, description, True)])
"""
tifargs = {}
for key in ('byteorder', 'bigtiff', 'software', 'writeshape'):
if key in kwargs:
tifargs[key] = kwargs[key]
del kwargs[key]
if 'writeshape' not in kwargs:
kwargs['writeshape'] = True
if 'bigtiff' not in tifargs and data.size * \
data.dtype.itemsize > 2000 * 2 ** 20:
tifargs['bigtiff'] = True
with TiffWriter(filename, **tifargs) as tif:
tif.save(data, **kwargs)
class TiffWriter(object):
"""Write image data to TIFF file.
TiffWriter instances must be closed using the close method, which is
automatically called when using the 'with' statement.
Examples
--------
>>> data = numpy.random.rand(2, 5, 3, 301, 219)
>>> with TiffWriter('temp.tif', bigtiff=True) as tif:
... for i in range(data.shape[0]):
... tif.save(data[i], compress=6)
"""
TYPES = {'B': 1, 's': 2, 'H': 3, 'I': 4, '2I': 5, 'b': 6,
'h': 8, 'i': 9, 'f': 11, 'd': 12, 'Q': 16, 'q': 17}
TAGS = {
'new_subfile_type': 254, 'subfile_type': 255,
'image_width': 256, 'image_length': 257, 'bits_per_sample': 258,
'compression': 259, 'photometric': 262, 'fill_order': 266,
'document_name': 269, 'image_description': 270, 'strip_offsets': 273,
'orientation': 274, 'samples_per_pixel': 277, 'rows_per_strip': 278,
'strip_byte_counts': 279, 'x_resolution': 282, 'y_resolution': 283,
'planar_configuration': 284, 'page_name': 285, 'resolution_unit': 296,
'software': 305, 'datetime': 306, 'predictor': 317, 'color_map': 320,
'tile_width': 322, 'tile_length': 323, 'tile_offsets': 324,
'tile_byte_counts': 325, 'extra_samples': 338, 'sample_format': 339,
'image_depth': 32997, 'tile_depth': 32998}
def __init__(self, filename, bigtiff=False, byteorder=None,
software='tifffile.py'):
"""Create a new TIFF file for writing.
Use bigtiff=True when creating files greater than 2 GB.
Parameters
----------
filename : str
Name of file to write.
bigtiff : bool
If True, the BigTIFF format is used.
byteorder : {'<', '>'}
The endianness of the data in the file.
By default this is the system's native byte order.
software : str
Name of the software used to create the image.
Saved with the first page only.
"""
if byteorder not in (None, '<', '>'):
raise ValueError("invalid byteorder %s" % byteorder)
if byteorder is None:
byteorder = '<' if sys.byteorder == 'little' else '>'
self._byteorder = byteorder
self._software = software
self._fh = open(filename, 'wb')
self._fh.write({'<': b'II', '>': b'MM'}[byteorder])
if bigtiff:
self._bigtiff = True
self._offset_size = 8
self._tag_size = 20
self._numtag_format = 'Q'
self._offset_format = 'Q'
self._val_format = '8s'
self._fh.write(struct.pack(byteorder + 'HHH', 43, 8, 0))
else:
self._bigtiff = False
self._offset_size = 4
self._tag_size = 12
self._numtag_format = 'H'
self._offset_format = 'I'
self._val_format = '4s'
self._fh.write(struct.pack(byteorder + 'H', 42))
# first IFD
self._ifd_offset = self._fh.tell()
self._fh.write(struct.pack(byteorder + self._offset_format, 0))
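    # For illustration, the header written above looks like this for a
    # little-endian file:
    #   classic TIFF:  b'II' + pack('<H', 42) + pack('<I', first_ifd_offset)
    #   BigTIFF:       b'II' + pack('<HHH', 43, 8, 0) + pack('<Q', first_ifd_offset)
    # The first IFD offset is written as 0 here and patched by save() once the
    # first page directory has been written.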
def save(self, data, photometric=None, planarconfig=None, resolution=None,
description=None, volume=False, writeshape=False, compress=0,
extratags=()):
"""Write image data to TIFF file.
Image data are written in one stripe per plane.
Dimensions larger than 2 to 4 (depending on photometric mode, planar
configuration, and SGI mode) are flattened and saved as separate pages.
The 'sample_format' and 'bits_per_sample' TIFF tags are derived from
the data type.
Parameters
----------
data : array_like
Input image. The last dimensions are assumed to be image depth,
height, width, and samples.
photometric : {'minisblack', 'miniswhite', 'rgb'}
The color space of the image data.
By default this setting is inferred from the data shape.
planarconfig : {'contig', 'planar'}
Specifies if samples are stored contiguous or in separate planes.
By default this setting is inferred from the data shape.
'contig': last dimension contains samples.
'planar': third last dimension contains samples.
resolution : (float, float) or ((int, int), (int, int))
X and Y resolution in dots per inch as float or rational numbers.
description : str
The subject of the image. Saved with the first page only.
compress : int
Values from 0 to 9 controlling the level of zlib compression.
If 0, data are written uncompressed (default).
volume : bool
If True, volume data are stored in one tile (if applicable) using
the SGI image_depth and tile_depth tags.
Image width and depth must be multiple of 16.
Few software can read this format, e.g. MeVisLab.
writeshape : bool
If True, write the data shape to the image_description tag
if necessary and no other description is given.
extratags: sequence of tuples
Additional tags as [(code, dtype, count, value, writeonce)].
code : int
The TIFF tag Id.
dtype : str
Data type of items in 'value' in Python struct format.
One of B, s, H, I, 2I, b, h, i, f, d, Q, or q.
count : int
Number of data values. Not used for string values.
value : sequence
'Count' values compatible with 'dtype'.
writeonce : bool
If True, the tag is written to the first page only.
"""
if photometric not in (None, 'minisblack', 'miniswhite', 'rgb'):
raise ValueError("invalid photometric %s" % photometric)
if planarconfig not in (None, 'contig', 'planar'):
raise ValueError("invalid planarconfig %s" % planarconfig)
if not 0 <= compress <= 9:
raise ValueError("invalid compression level %s" % compress)
fh = self._fh
byteorder = self._byteorder
numtag_format = self._numtag_format
val_format = self._val_format
offset_format = self._offset_format
offset_size = self._offset_size
tag_size = self._tag_size
data = numpy.asarray(
data,
dtype=byteorder +
data.dtype.char,
order='C')
data_shape = shape = data.shape
data = numpy.atleast_2d(data)
# normalize shape of data
samplesperpixel = 1
extrasamples = 0
if volume and data.ndim < 3:
volume = False
if photometric is None:
if planarconfig:
photometric = 'rgb'
elif data.ndim > 2 and shape[-1] in (3, 4):
photometric = 'rgb'
elif volume and data.ndim > 3 and shape[-4] in (3, 4):
photometric = 'rgb'
elif data.ndim > 2 and shape[-3] in (3, 4):
photometric = 'rgb'
else:
photometric = 'minisblack'
if planarconfig and len(shape) <= (3 if volume else 2):
planarconfig = None
photometric = 'minisblack'
if photometric == 'rgb':
if len(shape) < 3:
raise ValueError("not a RGB(A) image")
if len(shape) < 4:
volume = False
if planarconfig is None:
if shape[-1] in (3, 4):
planarconfig = 'contig'
elif shape[-4 if volume else -3] in (3, 4):
planarconfig = 'planar'
elif shape[-1] > shape[-4 if volume else -3]:
planarconfig = 'planar'
else:
planarconfig = 'contig'
if planarconfig == 'contig':
data = data.reshape((-1, 1) + shape[(-4 if volume else -3):])
samplesperpixel = data.shape[-1]
else:
data = data.reshape(
(-1,) + shape[(-4 if volume else -3):] + (1,))
samplesperpixel = data.shape[1]
if samplesperpixel > 3:
extrasamples = samplesperpixel - 3
elif planarconfig and len(shape) > (3 if volume else 2):
if planarconfig == 'contig':
data = data.reshape((-1, 1) + shape[(-4 if volume else -3):])
samplesperpixel = data.shape[-1]
else:
data = data.reshape(
(-1,) + shape[(-4 if volume else -3):] + (1,))
samplesperpixel = data.shape[1]
extrasamples = samplesperpixel - 1
else:
planarconfig = None
# remove trailing 1s
while len(shape) > 2 and shape[-1] == 1:
shape = shape[:-1]
if len(shape) < 3:
volume = False
if False and (
len(shape) > (3 if volume else 2) and shape[-1] < 5 and
all(shape[-1] < i
for i in shape[(-4 if volume else -3):-1])):
# DISABLED: non-standard TIFF, e.g. (220, 320, 2)
planarconfig = 'contig'
samplesperpixel = shape[-1]
data = data.reshape((-1, 1) + shape[(-4 if volume else -3):])
else:
data = data.reshape(
(-1, 1) + shape[(-3 if volume else -2):] + (1,))
if samplesperpixel == 2:
warnings.warn("writing non-standard TIFF (samplesperpixel 2)")
if volume and (data.shape[-2] % 16 or data.shape[-3] % 16):
warnings.warn("volume width or length are not multiple of 16")
volume = False
data = numpy.swapaxes(data, 1, 2)
data = data.reshape(
(data.shape[0] * data.shape[1],) + data.shape[2:])
# data.shape is now normalized 5D or 6D, depending on volume
# (pages, planar_samples, (depth,) height, width, contig_samples)
assert len(data.shape) in (5, 6)
shape = data.shape
        bytestr = bytes if sys.version[0] == '2' else (
            lambda x: bytes(x, 'utf-8') if isinstance(x, str) else x)
tags = [] # list of (code, ifdentry, ifdvalue, writeonce)
if volume:
# use tiles to save volume data
tag_byte_counts = TiffWriter.TAGS['tile_byte_counts']
tag_offsets = TiffWriter.TAGS['tile_offsets']
else:
# else use strips
tag_byte_counts = TiffWriter.TAGS['strip_byte_counts']
tag_offsets = TiffWriter.TAGS['strip_offsets']
def pack(fmt, *val):
return struct.pack(byteorder + fmt, *val)
def addtag(code, dtype, count, value, writeonce=False):
# Compute ifdentry & ifdvalue bytes from code, dtype, count, value.
# Append (code, ifdentry, ifdvalue, writeonce) to tags list.
code = int(TiffWriter.TAGS.get(code, code))
try:
tifftype = TiffWriter.TYPES[dtype]
except KeyError:
raise ValueError("unknown dtype %s" % dtype)
rawcount = count
if dtype == 's':
value = bytestr(value) + b'\0'
count = rawcount = len(value)
value = (value, )
if len(dtype) > 1:
count *= int(dtype[:-1])
dtype = dtype[-1]
ifdentry = [pack('HH', code, tifftype),
pack(offset_format, rawcount)]
ifdvalue = None
if count == 1:
if isinstance(value, (tuple, list)):
value = value[0]
ifdentry.append(pack(val_format, pack(dtype, value)))
elif struct.calcsize(dtype) * count <= offset_size:
ifdentry.append(pack(val_format,
pack(str(count) + dtype, *value)))
else:
ifdentry.append(pack(offset_format, 0))
ifdvalue = pack(str(count) + dtype, *value)
tags.append((code, b''.join(ifdentry), ifdvalue, writeonce))
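        # Each entry assembled by addtag() follows the TIFF IFD entry layout:
        # 2-byte tag code, 2-byte type id, a count field and a value/offset
        # field (4 bytes each for classic TIFF, 8 bytes each for BigTIFF).
        # Values that do not fit into the value field are kept in 'ifdvalue'
        # and written after the IFD, with the entry pointing at their offset.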
def rational(arg, max_denominator=1000000):
# return nominator and denominator from float or two integers
try:
f = Fraction.from_float(arg)
except TypeError:
f = Fraction(arg[0], arg[1])
f = f.limit_denominator(max_denominator)
return f.numerator, f.denominator
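        # For example (illustrative values only):
        #   rational(300.0)   -> (300, 1)
        #   rational(2.5)     -> (5, 2)
        #   rational((72, 1)) -> (72, 1)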
if self._software:
addtag('software', 's', 0, self._software, writeonce=True)
self._software = None # only save to first page
if description:
addtag('image_description', 's', 0, description, writeonce=True)
elif writeshape and shape[0] > 1 and shape != data_shape:
addtag('image_description', 's', 0,
"shape=(%s)" % (",".join('%i' % i for i in data_shape)),
writeonce=True)
addtag('datetime', 's', 0,
datetime.datetime.now().strftime("%Y:%m:%d %H:%M:%S"),
writeonce=True)
addtag('compression', 'H', 1, 32946 if compress else 1)
addtag('orientation', 'H', 1, 1)
addtag('image_width', 'I', 1, shape[-2])
addtag('image_length', 'I', 1, shape[-3])
if volume:
addtag('image_depth', 'I', 1, shape[-4])
addtag('tile_depth', 'I', 1, shape[-4])
addtag('tile_width', 'I', 1, shape[-2])
addtag('tile_length', 'I', 1, shape[-3])
addtag('new_subfile_type', 'I', 1, 0 if shape[0] == 1 else 2)
addtag('sample_format', 'H', 1,
{'u': 1, 'i': 2, 'f': 3, 'c': 6}[data.dtype.kind])
addtag('photometric', 'H', 1,
{'miniswhite': 0, 'minisblack': 1, 'rgb': 2}[photometric])
addtag('samples_per_pixel', 'H', 1, samplesperpixel)
if planarconfig and samplesperpixel > 1:
addtag('planar_configuration', 'H', 1, 1
if planarconfig == 'contig' else 2)
addtag('bits_per_sample', 'H', samplesperpixel,
(data.dtype.itemsize * 8, ) * samplesperpixel)
else:
addtag('bits_per_sample', 'H', 1, data.dtype.itemsize * 8)
if extrasamples:
if photometric == 'rgb' and extrasamples == 1:
addtag('extra_samples', 'H', 1, 1) # associated alpha channel
else:
addtag('extra_samples', 'H', extrasamples, (0,) * extrasamples)
if resolution:
addtag('x_resolution', '2I', 1, rational(resolution[0]))
addtag('y_resolution', '2I', 1, rational(resolution[1]))
addtag('resolution_unit', 'H', 1, 2)
addtag('rows_per_strip', 'I', 1,
shape[-3] * (shape[-4] if volume else 1))
# use one strip or tile per plane
strip_byte_counts = (data[0, 0].size * data.dtype.itemsize,) * shape[1]
addtag(tag_byte_counts, offset_format, shape[1], strip_byte_counts)
addtag(tag_offsets, offset_format, shape[1], (0, ) * shape[1])
# add extra tags from users
for t in extratags:
addtag(*t)
# the entries in an IFD must be sorted in ascending order by tag code
tags = sorted(tags, key=lambda x: x[0])
if not self._bigtiff and (fh.tell() + data.size * data.dtype.itemsize
> 2 ** 31 - 1):
raise ValueError("data too large for non-bigtiff file")
for pageindex in range(shape[0]):
# update pointer at ifd_offset
pos = fh.tell()
fh.seek(self._ifd_offset)
fh.write(pack(offset_format, pos))
fh.seek(pos)
# write ifdentries
fh.write(pack(numtag_format, len(tags)))
tag_offset = fh.tell()
fh.write(b''.join(t[1] for t in tags))
self._ifd_offset = fh.tell()
fh.write(pack(offset_format, 0)) # offset to next IFD
# write tag values and patch offsets in ifdentries, if necessary
for tagindex, tag in enumerate(tags):
if tag[2]:
pos = fh.tell()
fh.seek(tag_offset + tagindex * tag_size + offset_size + 4)
fh.write(pack(offset_format, pos))
fh.seek(pos)
if tag[0] == tag_offsets:
strip_offsets_offset = pos
elif tag[0] == tag_byte_counts:
strip_byte_counts_offset = pos
fh.write(tag[2])
# write image data
data_offset = fh.tell()
if compress:
strip_byte_counts = []
for plane in data[pageindex]:
plane = zlib.compress(plane, compress)
strip_byte_counts.append(len(plane))
fh.write(plane)
else:
# if this fails try update Python/numpy
data[pageindex].tofile(fh)
fh.flush()
# update strip and tile offsets and byte_counts if necessary
pos = fh.tell()
for tagindex, tag in enumerate(tags):
if tag[0] == tag_offsets: # strip or tile offsets
if tag[2]:
fh.seek(strip_offsets_offset)
strip_offset = data_offset
for size in strip_byte_counts:
fh.write(pack(offset_format, strip_offset))
strip_offset += size
else:
fh.seek(tag_offset + tagindex * tag_size +
offset_size + 4)
fh.write(pack(offset_format, data_offset))
elif tag[0] == tag_byte_counts: # strip or tile byte_counts
if compress:
if tag[2]:
fh.seek(strip_byte_counts_offset)
for size in strip_byte_counts:
fh.write(pack(offset_format, size))
else:
fh.seek(tag_offset + tagindex * tag_size +
offset_size + 4)
fh.write(pack(offset_format, strip_byte_counts[0]))
break
fh.seek(pos)
fh.flush()
# remove tags that should be written only once
if pageindex == 0:
tags = [t for t in tags if not t[-1]]
def close(self):
self._fh.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def imread(files, **kwargs):
"""Return image data from TIFF file(s) as numpy array.
The first image series is returned if no arguments are provided.
Parameters
----------
files : str or list
File name, glob pattern, or list of file names.
key : int, slice, or sequence of page indices
Defines which pages to return as array.
series : int
Defines which series of pages in file to return as array.
multifile : bool
If True (default), OME-TIFF data may include pages from multiple files.
pattern : str
Regular expression pattern that matches axes names and indices in
file names.
kwargs : dict
Additional parameters passed to the TiffFile or TiffSequence asarray
function.
Examples
--------
>>> im = imread('test.tif', key=0)
>>> im.shape
(256, 256, 4)
>>> ims = imread(['test.tif', 'test.tif'])
>>> ims.shape
(2, 256, 256, 4)
"""
kwargs_file = {}
if 'multifile' in kwargs:
kwargs_file['multifile'] = kwargs['multifile']
del kwargs['multifile']
else:
kwargs_file['multifile'] = True
kwargs_seq = {}
if 'pattern' in kwargs:
kwargs_seq['pattern'] = kwargs['pattern']
del kwargs['pattern']
if isinstance(files, str) and any(i in files for i in '?*'):
files = glob.glob(files)
if not files:
raise ValueError('no files found')
if len(files) == 1:
files = files[0]
if isinstance(files, str):
with TiffFile(files, **kwargs_file) as tif:
return tif.asarray(**kwargs)
else:
with TiffSequence(files, **kwargs_seq) as imseq:
return imseq.asarray(**kwargs)
class lazyattr(object):
"""Lazy object attribute whose value is computed on first access."""
__slots__ = ('func', )
def __init__(self, func):
self.func = func
def __get__(self, instance, owner):
if instance is None:
return self
value = self.func(instance)
if value is NotImplemented:
return getattr(super(owner, instance), self.func.__name__)
setattr(instance, self.func.__name__, value)
return value
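# Illustrative sketch of lazyattr caching (hypothetical class, not part of the
# module API):
#
#     class Demo(object):
#         @lazyattr
#         def value(self):
#             return compute_expensive_result()
#
#     d = Demo()
#     d.value   # calls the decorated function once and stores the result
#     d.value   # returns the cached instance attribute; the descriptor is bypassed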
class TiffFile(object):
"""Read image and metadata from TIFF, STK, LSM, and FluoView files.
TiffFile instances must be closed using the close method, which is
automatically called when using the 'with' statement.
Attributes
----------
pages : list
All TIFF pages in file.
series : list of Records(shape, dtype, axes, TiffPages)
TIFF pages with compatible shapes and types.
micromanager_metadata: dict
Extra MicroManager non-TIFF metadata in the file, if exists.
All attributes are read-only.
Examples
--------
>>> with TiffFile('test.tif') as tif:
... data = tif.asarray()
... data.shape
(256, 256, 4)
"""
def __init__(self, arg, name=None, offset=None, size=None,
multifile=True, multifile_close=True):
"""Initialize instance from file.
Parameters
----------
arg : str or open file
Name of file or open file object.
The file objects are closed in TiffFile.close().
name : str
Optional name of file in case 'arg' is a file handle.
offset : int
Optional start position of embedded file. By default this is
the current file position.
size : int
Optional size of embedded file. By default this is the number
of bytes from the 'offset' to the end of the file.
multifile : bool
If True (default), series may include pages from multiple files.
Currently applies to OME-TIFF only.
multifile_close : bool
If True (default), keep the handles of other files in multifile
series closed. This is inefficient when few files refer to
many pages. If False, the C runtime may run out of resources.
"""
self._fh = FileHandle(arg, name=name, offset=offset, size=size)
self.offset_size = None
self.pages = []
self._multifile = bool(multifile)
self._multifile_close = bool(multifile_close)
self._files = {self._fh.name: self} # cache of TiffFiles
try:
self._fromfile()
except Exception:
self._fh.close()
raise
@property
def filehandle(self):
"""Return file handle."""
return self._fh
@property
def filename(self):
"""Return name of file handle."""
return self._fh.name
def close(self):
"""Close open file handle(s)."""
for tif in self._files.values():
tif._fh.close()
self._files = {}
def _fromfile(self):
"""Read TIFF header and all page records from file."""
self._fh.seek(0)
try:
self.byteorder = {b'II': '<', b'MM': '>'}[self._fh.read(2)]
except KeyError:
raise ValueError("not a valid TIFF file")
version = struct.unpack(self.byteorder + 'H', self._fh.read(2))[0]
if version == 43: # BigTiff
self.offset_size, zero = struct.unpack(self.byteorder + 'HH',
self._fh.read(4))
if zero or self.offset_size != 8:
raise ValueError("not a valid BigTIFF file")
elif version == 42:
self.offset_size = 4
else:
raise ValueError("not a TIFF file")
self.pages = []
while True:
try:
page = TiffPage(self)
self.pages.append(page)
except StopIteration:
break
if not self.pages:
raise ValueError("empty TIFF file")
if self.is_micromanager:
# MicroManager files contain metadata not stored in TIFF tags.
self.micromanager_metadata = read_micromanager_metadata(self._fh)
if self.is_lsm:
self._fix_lsm_strip_offsets()
self._fix_lsm_strip_byte_counts()
def _fix_lsm_strip_offsets(self):
"""Unwrap strip offsets for LSM files greater than 4 GB."""
for series in self.series:
wrap = 0
previous_offset = 0
for page in series.pages:
strip_offsets = []
for current_offset in page.strip_offsets:
if current_offset < previous_offset:
wrap += 2 ** 32
strip_offsets.append(current_offset + wrap)
previous_offset = current_offset
page.strip_offsets = tuple(strip_offsets)
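    # Illustration of the unwrapping above with made-up offsets:
    #   stored 32-bit offsets: [4294967000, 200, 500]
    #   unwrapped offsets:     [4294967000, 4294967496, 4294967796]
    # Whenever an offset is smaller than its predecessor another 2**32 is
    # added, so offsets keep increasing across the 4 GB boundary.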
def _fix_lsm_strip_byte_counts(self):
"""Set strip_byte_counts to size of compressed data.
The strip_byte_counts tag in LSM files contains the number of bytes
for the uncompressed data.
"""
if not self.pages:
return
strips = {}
for page in self.pages:
assert len(page.strip_offsets) == len(page.strip_byte_counts)
for offset, bytecount in zip(page.strip_offsets,
page.strip_byte_counts):
strips[offset] = bytecount
offsets = sorted(strips.keys())
offsets.append(min(offsets[-1] + strips[offsets[-1]], self._fh.size))
for i, offset in enumerate(offsets[:-1]):
strips[offset] = min(strips[offset], offsets[i + 1] - offset)
for page in self.pages:
if page.compression:
page.strip_byte_counts = tuple(
strips[offset] for offset in page.strip_offsets)
@lazyattr
def series(self):
"""Return series of TiffPage with compatible shape and properties."""
if not self.pages:
return []
series = []
page0 = self.pages[0]
if self.is_ome:
series = self._omeseries()
elif self.is_fluoview:
dims = {b'X': 'X', b'Y': 'Y', b'Z': 'Z', b'T': 'T',
b'WAVELENGTH': 'C', b'TIME': 'T', b'XY': 'R',
b'EVENT': 'V', b'EXPOSURE': 'L'}
mmhd = list(reversed(page0.mm_header.dimensions))
series = [Record(
axes=''.join(dims.get(i[0].strip().upper(), 'Q')
for i in mmhd if i[1] > 1),
shape=tuple(int(i[1]) for i in mmhd if i[1] > 1),
pages=self.pages, dtype=numpy.dtype(page0.dtype))]
elif self.is_lsm:
lsmi = page0.cz_lsm_info
axes = CZ_SCAN_TYPES[lsmi.scan_type]
if page0.is_rgb:
axes = axes.replace('C', '').replace('XY', 'XYC')
axes = axes[::-1]
shape = tuple(getattr(lsmi, CZ_DIMENSIONS[i]) for i in axes)
pages = [p for p in self.pages if not p.is_reduced]
series = [Record(axes=axes, shape=shape, pages=pages,
dtype=numpy.dtype(pages[0].dtype))]
if len(pages) != len(self.pages): # reduced RGB pages
pages = [p for p in self.pages if p.is_reduced]
cp = 1
i = 0
while cp < len(pages) and i < len(shape) - 2:
cp *= shape[i]
i += 1
shape = shape[:i] + pages[0].shape
axes = axes[:i] + 'CYX'
series.append(Record(axes=axes, shape=shape, pages=pages,
dtype=numpy.dtype(pages[0].dtype)))
elif self.is_imagej:
shape = []
axes = []
ij = page0.imagej_tags
if 'frames' in ij:
shape.append(ij['frames'])
axes.append('T')
if 'slices' in ij:
shape.append(ij['slices'])
axes.append('Z')
if 'channels' in ij and not self.is_rgb:
shape.append(ij['channels'])
axes.append('C')
remain = len(self.pages) // (product(shape) if shape else 1)
if remain > 1:
shape.append(remain)
axes.append('I')
shape.extend(page0.shape)
axes.extend(page0.axes)
axes = ''.join(axes)
series = [Record(pages=self.pages, shape=tuple(shape), axes=axes,
dtype=numpy.dtype(page0.dtype))]
elif self.is_nih:
if len(self.pages) == 1:
shape = page0.shape
axes = page0.axes
else:
shape = (len(self.pages),) + page0.shape
axes = 'I' + page0.axes
series = [Record(pages=self.pages, shape=shape, axes=axes,
dtype=numpy.dtype(page0.dtype))]
elif page0.is_shaped:
# TODO: shaped files can contain multiple series
shape = page0.tags['image_description'].value[7:-1]
shape = tuple(int(i) for i in shape.split(b','))
series = [Record(pages=self.pages, shape=shape,
axes='Q' * len(shape),
dtype=numpy.dtype(page0.dtype))]
# generic detection of series
if not series:
shapes = []
pages = {}
for page in self.pages:
if not page.shape:
continue
shape = page.shape + (page.axes,
page.compression in TIFF_DECOMPESSORS)
if shape not in pages:
shapes.append(shape)
pages[shape] = [page]
else:
pages[shape].append(page)
series = [Record(pages=pages[s],
axes=(('I' + s[-2])
if len(pages[s]) > 1 else s[-2]),
dtype=numpy.dtype(pages[s][0].dtype),
shape=((len(pages[s]), ) + s[:-2]
if len(pages[s]) > 1 else s[:-2]))
for s in shapes]
# remove empty series, e.g. in MD Gel files
series = [s for s in series if sum(s.shape) > 0]
return series
def asarray(self, key=None, series=None, memmap=False):
"""Return image data from multiple TIFF pages as numpy array.
By default the first image series is returned.
Parameters
----------
key : int, slice, or sequence of page indices
Defines which pages to return as array.
series : int
Defines which series of pages to return as array.
memmap : bool
If True, return an array stored in a binary file on disk
if possible.
"""
if key is None and series is None:
series = 0
if series is not None:
pages = self.series[series].pages
else:
pages = self.pages
if key is None:
pass
elif isinstance(key, int):
pages = [pages[key]]
elif isinstance(key, slice):
pages = pages[key]
elif isinstance(key, collections.Iterable):
pages = [pages[k] for k in key]
else:
raise TypeError("key must be an int, slice, or sequence")
if not len(pages):
raise ValueError("no pages selected")
if self.is_nih:
if pages[0].is_palette:
result = stack_pages(pages, colormapped=False, squeeze=False)
result = numpy.take(pages[0].color_map, result, axis=1)
result = numpy.swapaxes(result, 0, 1)
else:
result = stack_pages(pages, memmap=memmap,
colormapped=False, squeeze=False)
elif len(pages) == 1:
return pages[0].asarray(memmap=memmap)
elif self.is_ome:
assert not self.is_palette, "color mapping disabled for ome-tiff"
if any(p is None for p in pages):
# zero out missing pages
firstpage = next(p for p in pages if p)
nopage = numpy.zeros_like(
firstpage.asarray(memmap=False))
s = self.series[series]
if memmap:
with tempfile.NamedTemporaryFile() as fh:
result = numpy.memmap(fh, dtype=s.dtype, shape=s.shape)
result = result.reshape(-1)
else:
result = numpy.empty(s.shape, s.dtype).reshape(-1)
index = 0
class KeepOpen:
# keep Tiff files open between consecutive pages
def __init__(self, parent, close):
self.master = parent
self.parent = parent
self._close = close
def open(self, page):
if self._close and page and page.parent != self.parent:
if self.parent != self.master:
self.parent.filehandle.close()
self.parent = page.parent
self.parent.filehandle.open()
def close(self):
if self._close and self.parent != self.master:
self.parent.filehandle.close()
keep = KeepOpen(self, self._multifile_close)
for page in pages:
keep.open(page)
if page:
a = page.asarray(memmap=False, colormapped=False,
reopen=False)
else:
a = nopage
try:
result[index:index + a.size] = a.reshape(-1)
except ValueError as e:
warnings.warn("ome-tiff: %s" % e)
break
index += a.size
keep.close()
else:
result = stack_pages(pages, memmap=memmap)
if key is None:
try:
result.shape = self.series[series].shape
except ValueError:
try:
warnings.warn("failed to reshape %s to %s" % (
result.shape, self.series[series].shape))
# try series of expected shapes
result.shape = (-1,) + self.series[series].shape
except ValueError:
# revert to generic shape
result.shape = (-1,) + pages[0].shape
else:
result.shape = (-1,) + pages[0].shape
return result
def _omeseries(self):
"""Return image series in OME-TIFF file(s)."""
root = etree.fromstring(self.pages[0].tags['image_description'].value)
uuid = root.attrib.get('UUID', None)
self._files = {uuid: self}
dirname = self._fh.dirname
modulo = {}
result = []
for element in root:
if element.tag.endswith('BinaryOnly'):
warnings.warn("ome-xml: not an ome-tiff master file")
break
if element.tag.endswith('StructuredAnnotations'):
for annot in element:
if not annot.attrib.get('Namespace',
'').endswith('modulo'):
continue
for value in annot:
for modul in value:
for along in modul:
if not along.tag[:-1].endswith('Along'):
continue
axis = along.tag[-1]
newaxis = along.attrib.get('Type', 'other')
newaxis = AXES_LABELS[newaxis]
if 'Start' in along.attrib:
labels = range(
int(along.attrib['Start']),
int(along.attrib['End']) + 1,
int(along.attrib.get('Step', 1)))
else:
labels = [label.text for label in along
if label.tag.endswith('Label')]
modulo[axis] = (newaxis, labels)
if not element.tag.endswith('Image'):
continue
for pixels in element:
if not pixels.tag.endswith('Pixels'):
continue
atr = pixels.attrib
dtype = atr.get('Type', None)
axes = ''.join(reversed(atr['DimensionOrder']))
shape = list(int(atr['Size' + ax]) for ax in axes)
size = product(shape[:-2])
ifds = [None] * size
for data in pixels:
if not data.tag.endswith('TiffData'):
continue
atr = data.attrib
ifd = int(atr.get('IFD', 0))
num = int(atr.get('NumPlanes', 1 if 'IFD' in atr else 0))
num = int(atr.get('PlaneCount', num))
idx = [int(atr.get('First' + ax, 0)) for ax in axes[:-2]]
try:
idx = numpy.ravel_multi_index(idx, shape[:-2])
except ValueError:
# ImageJ produces invalid ome-xml when cropping
warnings.warn("ome-xml: invalid TiffData index")
continue
for uuid in data:
if not uuid.tag.endswith('UUID'):
continue
if uuid.text not in self._files:
if not self._multifile:
# abort reading multifile OME series
# and fall back to generic series
return []
fname = uuid.attrib['FileName']
try:
tif = TiffFile(os.path.join(dirname, fname))
except (IOError, ValueError):
tif.close()
warnings.warn(
"ome-xml: failed to read '%s'" % fname)
break
self._files[uuid.text] = tif
if self._multifile_close:
tif.close()
pages = self._files[uuid.text].pages
try:
for i in range(num if num else len(pages)):
ifds[idx + i] = pages[ifd + i]
except IndexError:
warnings.warn("ome-xml: index out of range")
# only process first uuid
break
else:
pages = self.pages
try:
for i in range(num if num else len(pages)):
ifds[idx + i] = pages[ifd + i]
except IndexError:
warnings.warn("ome-xml: index out of range")
if all(i is None for i in ifds):
# skip images without data
continue
dtype = next(i for i in ifds if i).dtype
result.append(Record(axes=axes, shape=shape, pages=ifds,
dtype=numpy.dtype(dtype)))
for record in result:
for axis, (newaxis, labels) in modulo.items():
i = record.axes.index(axis)
size = len(labels)
if record.shape[i] == size:
record.axes = record.axes.replace(axis, newaxis, 1)
else:
record.shape[i] //= size
record.shape.insert(i + 1, size)
record.axes = record.axes.replace(axis, axis + newaxis, 1)
record.shape = tuple(record.shape)
# squeeze dimensions
for record in result:
record.shape, record.axes = squeeze_axes(record.shape, record.axes)
return result
def __len__(self):
"""Return number of image pages in file."""
return len(self.pages)
def __getitem__(self, key):
"""Return specified page."""
return self.pages[key]
def __iter__(self):
"""Return iterator over pages."""
return iter(self.pages)
def __str__(self):
"""Return string containing information about file."""
result = [
self._fh.name.capitalize(),
format_size(self._fh.size),
{'<': 'little endian', '>': 'big endian'}[self.byteorder]]
if self.is_bigtiff:
result.append("bigtiff")
if len(self.pages) > 1:
result.append("%i pages" % len(self.pages))
if len(self.series) > 1:
result.append("%i series" % len(self.series))
if len(self._files) > 1:
result.append("%i files" % (len(self._files)))
return ", ".join(result)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
@lazyattr
def fstat(self):
try:
return os.fstat(self._fh.fileno())
except Exception: # io.UnsupportedOperation
return None
@lazyattr
def is_bigtiff(self):
return self.offset_size != 4
@lazyattr
def is_rgb(self):
return all(p.is_rgb for p in self.pages)
@lazyattr
def is_palette(self):
return all(p.is_palette for p in self.pages)
@lazyattr
def is_mdgel(self):
return any(p.is_mdgel for p in self.pages)
@lazyattr
def is_mediacy(self):
return any(p.is_mediacy for p in self.pages)
@lazyattr
def is_stk(self):
return all(p.is_stk for p in self.pages)
@lazyattr
def is_lsm(self):
return self.pages[0].is_lsm
@lazyattr
def is_imagej(self):
return self.pages[0].is_imagej
@lazyattr
def is_micromanager(self):
return self.pages[0].is_micromanager
@lazyattr
def is_nih(self):
return self.pages[0].is_nih
@lazyattr
def is_fluoview(self):
return self.pages[0].is_fluoview
@lazyattr
def is_ome(self):
return self.pages[0].is_ome
class TiffPage(object):
"""A TIFF image file directory (IFD).
Attributes
----------
index : int
Index of page in file.
dtype : str {TIFF_SAMPLE_DTYPES}
Data type of image, colormapped if applicable.
shape : tuple
Dimensions of the image array in TIFF page,
colormapped and with one alpha channel if applicable.
axes : str
Axes label codes:
'X' width, 'Y' height, 'S' sample, 'I' image series|page|plane,
'Z' depth, 'C' color|em-wavelength|channel, 'E' ex-wavelength|lambda,
'T' time, 'R' region|tile, 'A' angle, 'P' phase, 'H' lifetime,
'L' exposure, 'V' event, 'Q' unknown, '_' missing
tags : TiffTags
Dictionary of tags in page.
Tag values are also directly accessible as attributes.
color_map : numpy array
Color look up table, if exists.
cz_lsm_scan_info: Record(dict)
LSM scan info attributes, if exists.
imagej_tags: Record(dict)
Consolidated ImageJ description and metadata tags, if exists.
uic_tags: Record(dict)
Consolidated MetaMorph STK/UIC tags, if exists.
All attributes are read-only.
Notes
-----
The internal, normalized '_shape' attribute is 6 dimensional:
0. number planes (stk)
1. planar samples_per_pixel
2. image_depth Z (sgi)
3. image_length Y
4. image_width X
5. contig samples_per_pixel
"""
def __init__(self, parent):
"""Initialize instance from file."""
self.parent = parent
self.index = len(parent.pages)
self.shape = self._shape = ()
self.dtype = self._dtype = None
self.axes = ""
self.tags = TiffTags()
self._fromfile()
self._process_tags()
def _fromfile(self):
"""Read TIFF IFD structure and its tags from file.
File cursor must be at storage position of IFD offset and is left at
offset to next IFD.
Raises StopIteration if offset (first bytes read) is 0.
"""
fh = self.parent.filehandle
byteorder = self.parent.byteorder
offset_size = self.parent.offset_size
fmt = {4: 'I', 8: 'Q'}[offset_size]
offset = struct.unpack(byteorder + fmt, fh.read(offset_size))[0]
if not offset:
raise StopIteration()
# read standard tags
tags = self.tags
fh.seek(offset)
fmt, size = {4: ('H', 2), 8: ('Q', 8)}[offset_size]
try:
numtags = struct.unpack(byteorder + fmt, fh.read(size))[0]
except Exception:
warnings.warn("corrupted page list")
raise StopIteration()
tagcode = 0
for _ in range(numtags):
try:
tag = TiffTag(self.parent)
# print(tag)
except TiffTag.Error as e:
warnings.warn(str(e))
continue
if tagcode > tag.code:
# expected for early LSM and tifffile versions
warnings.warn("tags are not ordered by code")
tagcode = tag.code
if tag.name not in tags:
tags[tag.name] = tag
else:
# some files contain multiple IFD with same code
# e.g. MicroManager files contain two image_description
i = 1
while True:
name = "%s_%i" % (tag.name, i)
if name not in tags:
tags[name] = tag
break
pos = fh.tell()
if self.is_lsm or (self.index and self.parent.is_lsm):
# correct non standard LSM bitspersample tags
self.tags['bits_per_sample']._correct_lsm_bitspersample(self)
if self.is_lsm:
# read LSM info subrecords
for name, reader in CZ_LSM_INFO_READERS.items():
try:
offset = self.cz_lsm_info['offset_' + name]
except KeyError:
continue
if offset < 8:
# older LSM revision
continue
fh.seek(offset)
try:
setattr(self, 'cz_lsm_' + name, reader(fh))
except ValueError:
pass
elif self.is_stk and 'uic1tag' in tags and not tags['uic1tag'].value:
# read uic1tag now that plane count is known
uic1tag = tags['uic1tag']
fh.seek(uic1tag.value_offset)
tags['uic1tag'].value = Record(
read_uic1tag(fh, byteorder, uic1tag.dtype, uic1tag.count,
tags['uic2tag'].count))
fh.seek(pos)
def _process_tags(self):
"""Validate standard tags and initialize attributes.
Raise ValueError if tag values are not supported.
"""
tags = self.tags
for code, (name, default, dtype, count, validate) in TIFF_TAGS.items():
if not (name in tags or default is None):
tags[name] = TiffTag(code, dtype=dtype, count=count,
value=default, name=name)
if name in tags and validate:
try:
if tags[name].count == 1:
setattr(self, name, validate[tags[name].value])
else:
setattr(self, name, tuple(
validate[value] for value in tags[name].value))
except KeyError:
raise ValueError("%s.value (%s) not supported" %
(name, tags[name].value))
tag = tags['bits_per_sample']
if tag.count == 1:
self.bits_per_sample = tag.value
else:
# LSM might list more items than samples_per_pixel
value = tag.value[:self.samples_per_pixel]
if any((v - value[0] for v in value)):
self.bits_per_sample = value
else:
self.bits_per_sample = value[0]
tag = tags['sample_format']
if tag.count == 1:
self.sample_format = TIFF_SAMPLE_FORMATS[tag.value]
else:
value = tag.value[:self.samples_per_pixel]
if any((v - value[0] for v in value)):
self.sample_format = [TIFF_SAMPLE_FORMATS[v] for v in value]
else:
self.sample_format = TIFF_SAMPLE_FORMATS[value[0]]
if 'photometric' not in tags:
self.photometric = None
if 'image_depth' not in tags:
self.image_depth = 1
if 'image_length' in tags:
self.strips_per_image = int(math.floor(
float(self.image_length + self.rows_per_strip - 1) /
self.rows_per_strip))
else:
self.strips_per_image = 0
key = (self.sample_format, self.bits_per_sample)
self.dtype = self._dtype = TIFF_SAMPLE_DTYPES.get(key, None)
if 'image_length' not in self.tags or 'image_width' not in self.tags:
# some GEL file pages are missing image data
self.image_length = 0
self.image_width = 0
self.image_depth = 0
self.strip_offsets = 0
self._shape = ()
self.shape = ()
self.axes = ''
if self.is_palette:
self.dtype = self.tags['color_map'].dtype[1]
self.color_map = numpy.array(self.color_map, self.dtype)
dmax = self.color_map.max()
if dmax < 256:
self.dtype = numpy.uint8
self.color_map = self.color_map.astype(self.dtype)
# else:
# self.dtype = numpy.uint8
# self.color_map >>= 8
# self.color_map = self.color_map.astype(self.dtype)
self.color_map.shape = (3, -1)
# determine shape of data
image_length = self.image_length
image_width = self.image_width
image_depth = self.image_depth
samples_per_pixel = self.samples_per_pixel
if self.is_stk:
assert self.image_depth == 1
planes = self.tags['uic2tag'].count
if self.is_contig:
self._shape = (planes, 1, 1, image_length, image_width,
samples_per_pixel)
if samples_per_pixel == 1:
self.shape = (planes, image_length, image_width)
self.axes = 'YX'
else:
self.shape = (planes, image_length, image_width,
samples_per_pixel)
self.axes = 'YXS'
else:
self._shape = (planes, samples_per_pixel, 1, image_length,
image_width, 1)
if samples_per_pixel == 1:
self.shape = (planes, image_length, image_width)
self.axes = 'YX'
else:
self.shape = (planes, samples_per_pixel, image_length,
image_width)
self.axes = 'SYX'
# detect type of series
if planes == 1:
self.shape = self.shape[1:]
elif numpy.all(self.uic2tag.z_distance != 0):
self.axes = 'Z' + self.axes
elif numpy.all(numpy.diff(self.uic2tag.time_created) != 0):
self.axes = 'T' + self.axes
else:
self.axes = 'I' + self.axes
# DISABLED
if self.is_palette:
assert False, "color mapping disabled for stk"
elif self.is_palette:
samples = 1
if 'extra_samples' in self.tags:
samples += len(self.extra_samples)
if self.is_contig:
self._shape = (1, 1, image_depth, image_length, image_width,
samples)
else:
self._shape = (1, samples, image_depth, image_length,
image_width, 1)
if self.color_map.shape[1] >= 2 ** self.bits_per_sample:
if image_depth == 1:
self.shape = (3, image_length, image_width)
self.axes = 'CYX'
else:
self.shape = (3, image_depth, image_length, image_width)
self.axes = 'CZYX'
else:
warnings.warn("palette cannot be applied")
self.is_palette = False
if image_depth == 1:
self.shape = (image_length, image_width)
self.axes = 'YX'
else:
self.shape = (image_depth, image_length, image_width)
self.axes = 'ZYX'
elif self.is_rgb or samples_per_pixel > 1:
if self.is_contig:
self._shape = (1, 1, image_depth, image_length, image_width,
samples_per_pixel)
if image_depth == 1:
self.shape = (image_length, image_width, samples_per_pixel)
self.axes = 'YXS'
else:
self.shape = (image_depth, image_length, image_width,
samples_per_pixel)
self.axes = 'ZYXS'
else:
self._shape = (1, samples_per_pixel, image_depth,
image_length, image_width, 1)
if image_depth == 1:
self.shape = (samples_per_pixel, image_length, image_width)
self.axes = 'SYX'
else:
self.shape = (samples_per_pixel, image_depth,
image_length, image_width)
self.axes = 'SZYX'
if False and self.is_rgb and 'extra_samples' in self.tags:
# DISABLED: only use RGB and first alpha channel if exists
extra_samples = self.extra_samples
if self.tags['extra_samples'].count == 1:
extra_samples = (extra_samples, )
for exs in extra_samples:
if exs in ('unassalpha', 'assocalpha', 'unspecified'):
if self.is_contig:
self.shape = self.shape[:-1] + (4,)
else:
self.shape = (4,) + self.shape[1:]
break
else:
self._shape = (1, 1, image_depth, image_length, image_width, 1)
if image_depth == 1:
self.shape = (image_length, image_width)
self.axes = 'YX'
else:
self.shape = (image_depth, image_length, image_width)
self.axes = 'ZYX'
if not self.compression and 'strip_byte_counts' not in tags:
self.strip_byte_counts = (
product(self.shape) * (self.bits_per_sample // 8), )
assert len(self.shape) == len(self.axes)
def asarray(self, squeeze=True, colormapped=True, rgbonly=False,
scale_mdgel=False, memmap=False, reopen=True):
"""Read image data from file and return as numpy array.
Raise ValueError if format is unsupported.
If any of 'squeeze', 'colormapped', or 'rgbonly' are not the default,
the shape of the returned array might be different from the page shape.
Parameters
----------
squeeze : bool
If True, all length-1 dimensions (except X and Y) are
squeezed out from result.
colormapped : bool
If True, color mapping is applied for palette-indexed images.
rgbonly : bool
If True, return RGB(A) image without additional extra samples.
memmap : bool
If True, use numpy.memmap to read arrays from file if possible.
For use on 64 bit systems and files with few huge contiguous data.
reopen : bool
If True and the parent file handle is closed, the file is
temporarily re-opened (and closed if no exception occurs).
scale_mdgel : bool
If True, MD Gel data will be scaled according to the private
metadata in the second TIFF page. The dtype will be float32.
"""
if not self._shape:
return
if self.dtype is None:
raise ValueError("data type not supported: %s%i" % (
self.sample_format, self.bits_per_sample))
if self.compression not in TIFF_DECOMPESSORS:
raise ValueError("cannot decompress %s" % self.compression)
tag = self.tags['sample_format']
if tag.count != 1 and any((i - tag.value[0] for i in tag.value)):
raise ValueError("sample formats don't match %s" % str(tag.value))
fh = self.parent.filehandle
closed = fh.closed
if closed:
if reopen:
fh.open()
else:
raise IOError("file handle is closed")
dtype = self._dtype
shape = self._shape
image_width = self.image_width
image_length = self.image_length
image_depth = self.image_depth
typecode = self.parent.byteorder + dtype
bits_per_sample = self.bits_per_sample
if self.is_tiled:
if 'tile_offsets' in self.tags:
byte_counts = self.tile_byte_counts
offsets = self.tile_offsets
else:
byte_counts = self.strip_byte_counts
offsets = self.strip_offsets
tile_width = self.tile_width
tile_length = self.tile_length
tile_depth = self.tile_depth if 'tile_depth' in self.tags else 1
tw = (image_width + tile_width - 1) // tile_width
tl = (image_length + tile_length - 1) // tile_length
td = (image_depth + tile_depth - 1) // tile_depth
shape = (shape[0], shape[1],
td * tile_depth, tl * tile_length, tw * tile_width, shape[-1])
tile_shape = (tile_depth, tile_length, tile_width, shape[-1])
runlen = tile_width
else:
byte_counts = self.strip_byte_counts
offsets = self.strip_offsets
runlen = image_width
if any(o < 2 for o in offsets):
raise ValueError("corrupted page")
if memmap and self._is_memmappable(rgbonly, colormapped):
result = fh.memmap_array(typecode, shape, offset=offsets[0])
elif self.is_contiguous:
fh.seek(offsets[0])
result = fh.read_array(typecode, product(shape))
result = result.astype('=' + dtype)
else:
if self.is_contig:
runlen *= self.samples_per_pixel
if bits_per_sample in (8, 16, 32, 64, 128):
if (bits_per_sample * runlen) % 8:
raise ValueError("data and sample size mismatch")
def unpack(x):
try:
return numpy.fromstring(x, typecode)
except ValueError as e:
# strips may be missing EOI
warnings.warn("unpack: %s" % e)
xlen = ((len(x) // (bits_per_sample // 8))
* (bits_per_sample // 8))
return numpy.fromstring(x[:xlen], typecode)
elif isinstance(bits_per_sample, tuple):
def unpack(x):
return unpackrgb(x, typecode, bits_per_sample)
else:
def unpack(x):
return unpackints(x, typecode, bits_per_sample, runlen)
decompress = TIFF_DECOMPESSORS[self.compression]
if self.compression == 'jpeg':
table = self.jpeg_tables if 'jpeg_tables' in self.tags else b''
decompress = lambda x: decodejpg(x, table, self.photometric)
if self.is_tiled:
result = numpy.empty(shape, dtype)
tw, tl, td, pl = 0, 0, 0, 0
for offset, bytecount in zip(offsets, byte_counts):
fh.seek(offset)
tile = unpack(decompress(fh.read(bytecount)))
tile.shape = tile_shape
if self.predictor == 'horizontal':
numpy.cumsum(tile, axis=-2, dtype=dtype, out=tile)
result[0, pl, td:td + tile_depth,
tl:tl + tile_length, tw:tw + tile_width, :] = tile
del tile
tw += tile_width
if tw >= shape[4]:
tw, tl = 0, tl + tile_length
if tl >= shape[3]:
tl, td = 0, td + tile_depth
if td >= shape[2]:
td, pl = 0, pl + 1
result = result[...,
:image_depth, :image_length, :image_width, :]
else:
strip_size = (self.rows_per_strip * self.image_width *
self.samples_per_pixel)
result = numpy.empty(shape, dtype).reshape(-1)
index = 0
for offset, bytecount in zip(offsets, byte_counts):
fh.seek(offset)
strip = fh.read(bytecount)
strip = decompress(strip)
strip = unpack(strip)
size = min(result.size, strip.size, strip_size,
result.size - index)
result[index:index + size] = strip[:size]
del strip
index += size
result.shape = self._shape
if self.predictor == 'horizontal' and not (self.is_tiled and not
self.is_contiguous):
# work around bug in LSM510 software
if not (self.parent.is_lsm and not self.compression):
numpy.cumsum(result, axis=-2, dtype=dtype, out=result)
if colormapped and self.is_palette:
if self.color_map.shape[1] >= 2 ** bits_per_sample:
# FluoView and LSM might fail here
result = numpy.take(self.color_map,
result[:, 0, :, :, :, 0], axis=1)
elif rgbonly and self.is_rgb and 'extra_samples' in self.tags:
# return only RGB and first alpha channel if exists
extra_samples = self.extra_samples
if self.tags['extra_samples'].count == 1:
extra_samples = (extra_samples, )
for i, exs in enumerate(extra_samples):
if exs in ('unassalpha', 'assocalpha', 'unspecified'):
if self.is_contig:
result = result[..., [0, 1, 2, 3 + i]]
else:
result = result[:, [0, 1, 2, 3 + i]]
break
else:
if self.is_contig:
result = result[..., :3]
else:
result = result[:, :3]
if squeeze:
try:
result.shape = self.shape
except ValueError:
warnings.warn("failed to reshape from %s to %s" % (
str(result.shape), str(self.shape)))
if scale_mdgel and self.parent.is_mdgel:
# MD Gel stores private metadata in the second page
tags = self.parent.pages[1]
if tags.md_file_tag in (2, 128):
scale = tags.md_scale_pixel
scale = scale[0] / scale[1] # rational
result = result.astype('float32')
if tags.md_file_tag == 2:
                    result **= 2  # square root data format
result *= scale
if closed:
# TODO: file remains open if an exception occurred above
fh.close()
return result
def _is_memmappable(self, rgbonly, colormapped):
"""Return if image data in file can be memory mapped."""
if not self.parent.filehandle.is_file or not self.is_contiguous:
return False
return not (self.predictor or
(rgbonly and 'extra_samples' in self.tags) or
(colormapped and self.is_palette) or
({'big': '>', 'little': '<'}[sys.byteorder] !=
self.parent.byteorder))
@lazyattr
def is_contiguous(self):
"""Return offset and size of contiguous data, else None.
Excludes prediction and colormapping.
"""
if self.compression or self.bits_per_sample not in (8, 16, 32, 64):
return
if self.is_tiled:
if (self.image_width != self.tile_width or
self.image_length % self.tile_length or
self.tile_width % 16 or self.tile_length % 16):
return
if ('image_depth' in self.tags and 'tile_depth' in self.tags and
(self.image_length != self.tile_length or
self.image_depth % self.tile_depth)):
return
offsets = self.tile_offsets
byte_counts = self.tile_byte_counts
else:
offsets = self.strip_offsets
byte_counts = self.strip_byte_counts
if len(offsets) == 1:
return offsets[0], byte_counts[0]
if self.is_stk or all(offsets[i] + byte_counts[i] == offsets[i + 1]
# no data/ignore offset
or byte_counts[i + 1] == 0
for i in range(len(offsets) - 1)):
return offsets[0], sum(byte_counts)
def __str__(self):
"""Return string containing information about page."""
s = ', '.join(s for s in (
' x '.join(str(i) for i in self.shape),
str(numpy.dtype(self.dtype)),
'%s bit' % str(self.bits_per_sample),
self.photometric if 'photometric' in self.tags else '',
self.compression if self.compression else 'raw',
'|'.join(t[3:] for t in (
'is_stk', 'is_lsm', 'is_nih', 'is_ome', 'is_imagej',
'is_micromanager', 'is_fluoview', 'is_mdgel', 'is_mediacy',
'is_sgi', 'is_reduced', 'is_tiled',
'is_contiguous') if getattr(self, t))) if s)
return "Page %i: %s" % (self.index, s)
def __getattr__(self, name):
"""Return tag value."""
if name in self.tags:
value = self.tags[name].value
setattr(self, name, value)
return value
raise AttributeError(name)
@lazyattr
def uic_tags(self):
"""Consolidate UIC tags."""
if not self.is_stk:
raise AttributeError("uic_tags")
tags = self.tags
result = Record()
result.number_planes = tags['uic2tag'].count
if 'image_description' in tags:
result.plane_descriptions = self.image_description.split(b'\x00')
if 'uic1tag' in tags:
result.update(tags['uic1tag'].value)
if 'uic3tag' in tags:
result.update(tags['uic3tag'].value) # wavelengths
if 'uic4tag' in tags:
result.update(tags['uic4tag'].value) # override uic1 tags
uic2tag = tags['uic2tag'].value
result.z_distance = uic2tag.z_distance
result.time_created = uic2tag.time_created
result.time_modified = uic2tag.time_modified
try:
result.datetime_created = [
julian_datetime(*dt) for dt in
zip(uic2tag.date_created, uic2tag.time_created)]
result.datetime_modified = [
julian_datetime(*dt) for dt in
zip(uic2tag.date_modified, uic2tag.time_modified)]
except ValueError as e:
warnings.warn("uic_tags: %s" % e)
return result
@lazyattr
def imagej_tags(self):
"""Consolidate ImageJ metadata."""
if not self.is_imagej:
raise AttributeError("imagej_tags")
tags = self.tags
if 'image_description_1' in tags:
# MicroManager
result = imagej_description(tags['image_description_1'].value)
else:
result = imagej_description(tags['image_description'].value)
if 'imagej_metadata' in tags:
try:
result.update(imagej_metadata(
tags['imagej_metadata'].value,
tags['imagej_byte_counts'].value,
self.parent.byteorder))
except Exception as e:
warnings.warn(str(e))
return Record(result)
@lazyattr
def is_rgb(self):
"""True if page contains a RGB image."""
return ('photometric' in self.tags and
self.tags['photometric'].value == 2)
@lazyattr
def is_contig(self):
"""True if page contains a contiguous image."""
return ('planar_configuration' in self.tags and
self.tags['planar_configuration'].value == 1)
@lazyattr
def is_palette(self):
"""True if page contains a palette-colored image and not OME or STK."""
try:
# turn off color mapping for OME-TIFF and STK
if self.is_stk or self.is_ome or self.parent.is_ome:
return False
except IndexError:
pass # OME-XML not found in first page
return ('photometric' in self.tags and
self.tags['photometric'].value == 3)
@lazyattr
def is_tiled(self):
"""True if page contains tiled image."""
return 'tile_width' in self.tags
@lazyattr
def is_reduced(self):
"""True if page is a reduced image of another image."""
return bool(self.tags['new_subfile_type'].value & 1)
@lazyattr
def is_mdgel(self):
"""True if page contains md_file_tag tag."""
return 'md_file_tag' in self.tags
@lazyattr
def is_mediacy(self):
"""True if page contains Media Cybernetics Id tag."""
return ('mc_id' in self.tags and
self.tags['mc_id'].value.startswith(b'MC TIFF'))
@lazyattr
def is_stk(self):
"""True if page contains UIC2Tag tag."""
return 'uic2tag' in self.tags
@lazyattr
def is_lsm(self):
"""True if page contains LSM CZ_LSM_INFO tag."""
return 'cz_lsm_info' in self.tags
@lazyattr
def is_fluoview(self):
"""True if page contains FluoView MM_STAMP tag."""
return 'mm_stamp' in self.tags
@lazyattr
def is_nih(self):
"""True if page contains NIH image header."""
return 'nih_image_header' in self.tags
@lazyattr
def is_sgi(self):
"""True if page contains SGI image and tile depth tags."""
return 'image_depth' in self.tags and 'tile_depth' in self.tags
@lazyattr
def is_ome(self):
"""True if page contains OME-XML in image_description tag."""
return ('image_description' in self.tags and self.tags[
'image_description'].value.startswith(b'<?xml version='))
@lazyattr
def is_shaped(self):
"""True if page contains shape in image_description tag."""
return ('image_description' in self.tags and self.tags[
'image_description'].value.startswith(b'shape=('))
@lazyattr
def is_imagej(self):
"""True if page contains ImageJ description."""
return (
('image_description' in self.tags and
self.tags['image_description'].value.startswith(b'ImageJ=')) or
('image_description_1' in self.tags and # Micromanager
self.tags['image_description_1'].value.startswith(b'ImageJ=')))
@lazyattr
def is_micromanager(self):
"""True if page contains Micro-Manager metadata."""
return 'micromanager_metadata' in self.tags
class TiffTag(object):
"""A TIFF tag structure.
Attributes
----------
name : string
Attribute name of tag.
code : int
Decimal code of tag.
dtype : str
Datatype of tag data. One of TIFF_DATA_TYPES.
count : int
Number of values.
value : various types
Tag data as Python object.
value_offset : int
Location of value in file, if any.
All attributes are read-only.
"""
__slots__ = ('code', 'name', 'count', 'dtype', 'value', 'value_offset',
'_offset', '_value', '_type')
class Error(Exception):
pass
def __init__(self, arg, **kwargs):
"""Initialize instance from file or arguments."""
self._offset = None
if hasattr(arg, '_fh'):
self._fromfile(arg)
else:
self._fromdata(arg, **kwargs)
def _fromdata(self, code, dtype, count, value, name=None):
"""Initialize instance from arguments."""
self.code = int(code)
self.name = name if name else str(code)
self.dtype = TIFF_DATA_TYPES[dtype]
self.count = int(count)
self.value = value
self._value = value
self._type = dtype
def _fromfile(self, parent):
"""Read tag structure from open file. Advance file cursor."""
fh = parent.filehandle
byteorder = parent.byteorder
self._offset = fh.tell()
self.value_offset = self._offset + parent.offset_size + 4
fmt, size = {4: ('HHI4s', 12), 8: ('HHQ8s', 20)}[parent.offset_size]
data = fh.read(size)
code, dtype = struct.unpack(byteorder + fmt[:2], data[:4])
count, value = struct.unpack(byteorder + fmt[2:], data[4:])
self._value = value
self._type = dtype
if code in TIFF_TAGS:
name = TIFF_TAGS[code][0]
elif code in CUSTOM_TAGS:
name = CUSTOM_TAGS[code][0]
else:
name = str(code)
try:
dtype = TIFF_DATA_TYPES[self._type]
except KeyError:
raise TiffTag.Error("unknown tag data type %i" % self._type)
fmt = '%s%i%s' % (byteorder, count * int(dtype[0]), dtype[1])
size = struct.calcsize(fmt)
if size > parent.offset_size or code in CUSTOM_TAGS:
pos = fh.tell()
tof = {4: 'I', 8: 'Q'}[parent.offset_size]
self.value_offset = offset = struct.unpack(
byteorder +
tof,
value)[0]
if offset < 0 or offset > parent.filehandle.size:
raise TiffTag.Error("corrupt file - invalid tag value offset")
elif offset < 4:
raise TiffTag.Error("corrupt value offset for tag %i" % code)
fh.seek(offset)
if code in CUSTOM_TAGS:
readfunc = CUSTOM_TAGS[code][1]
value = readfunc(fh, byteorder, dtype, count)
if isinstance(value, dict): # numpy.core.records.record
value = Record(value)
elif code in TIFF_TAGS or dtype[-1] == 's':
value = struct.unpack(fmt, fh.read(size))
else:
value = read_numpy(fh, byteorder, dtype, count)
fh.seek(pos)
else:
value = struct.unpack(fmt, value[:size])
if code not in CUSTOM_TAGS and code not in (273, 279, 324, 325):
# scalar value if not strip/tile offsets/byte_counts
if len(value) == 1:
value = value[0]
if (dtype.endswith('s') and isinstance(value, bytes)
and self._type != 7):
# TIFF ASCII fields can contain multiple strings,
# each terminated with a NUL
value = stripascii(value)
self.code = code
self.name = name
self.dtype = dtype
self.count = count
self.value = value
def _correct_lsm_bitspersample(self, parent):
"""Correct LSM bitspersample tag.
Old LSM writers may use a separate region for two 16-bit values,
although they fit into the tag value element of the tag.
"""
if self.code == 258 and self.count == 2:
# TODO: test this. Need example file.
warnings.warn("correcting LSM bitspersample tag")
fh = parent.filehandle
tof = {4: '<I', 8: '<Q'}[parent.offset_size]
self.value_offset = struct.unpack(tof, self._value)[0]
fh.seek(self.value_offset)
self.value = struct.unpack("<HH", fh.read(4))
def as_str(self):
"""Return value as human readable string."""
return ((str(self.value).split('\n', 1)[0]) if (self._type != 7)
else '<undefined>')
def __str__(self):
"""Return string containing information about tag."""
return ' '.join(str(getattr(self, s)) for s in self.__slots__)
class TiffSequence(object):
"""Sequence of image files.
The data shape and dtype of all files must match.
Properties
----------
files : list
List of file names.
shape : tuple
Shape of image sequence.
axes : str
Labels of axes in shape.
Examples
--------
>>> tifs = TiffSequence("test.oif.files/*.tif")
>>> tifs.shape, tifs.axes
((2, 100), 'CT')
>>> data = tifs.asarray()
>>> data.shape
(2, 100, 256, 256)
"""
_patterns = {
'axes': r"""
# matches Olympus OIF and Leica TIFF series
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
"""}
class ParseError(Exception):
pass
def __init__(self, files, imread=TiffFile, pattern='axes',
*args, **kwargs):
"""Initialize instance from multiple files.
Parameters
----------
files : str, or sequence of str
Glob pattern or sequence of file names.
imread : function or class
Image read function or class with asarray function returning numpy
array from single file.
pattern : str
Regular expression pattern that matches axes names and sequence
indices in file names.
By default this matches Olympus OIF and Leica TIFF series.
"""
if isinstance(files, str):
files = natural_sorted(glob.glob(files))
files = list(files)
if not files:
raise ValueError("no files found")
# if not os.path.isfile(files[0]):
# raise ValueError("file not found")
self.files = files
if hasattr(imread, 'asarray'):
# redefine imread
_imread = imread
def imread(fname, *args, **kwargs):
with _imread(fname) as im:
return im.asarray(*args, **kwargs)
self.imread = imread
self.pattern = self._patterns.get(pattern, pattern)
try:
self._parse()
if not self.axes:
self.axes = 'I'
except self.ParseError:
self.axes = 'I'
self.shape = (len(files),)
self._start_index = (0,)
self._indices = tuple((i,) for i in range(len(files)))
def __str__(self):
"""Return string with information about image sequence."""
return "\n".join([
self.files[0],
'* files: %i' % len(self.files),
'* axes: %s' % self.axes,
'* shape: %s' % str(self.shape)])
def __len__(self):
return len(self.files)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def close(self):
pass
def asarray(self, memmap=False, *args, **kwargs):
"""Read image data from all files and return as single numpy array.
If memmap is True, return an array stored in a binary file on disk.
The args and kwargs parameters are passed to the imread function.
Raise IndexError or ValueError if image shapes don't match.
"""
im = self.imread(self.files[0], *args, **kwargs)
shape = self.shape + im.shape
if memmap:
with tempfile.NamedTemporaryFile() as fh:
result = numpy.memmap(fh, dtype=im.dtype, shape=shape)
else:
result = numpy.zeros(shape, dtype=im.dtype)
result = result.reshape(-1, *im.shape)
for index, fname in zip(self._indices, self.files):
index = [i - j for i, j in zip(index, self._start_index)]
index = numpy.ravel_multi_index(index, self.shape)
im = self.imread(fname, *args, **kwargs)
result[index] = im
result.shape = shape
return result
def _parse(self):
"""Get axes and shape from file names."""
if not self.pattern:
raise self.ParseError("invalid pattern")
pattern = re.compile(self.pattern, re.IGNORECASE | re.VERBOSE)
matches = pattern.findall(self.files[0])
if not matches:
raise self.ParseError("pattern doesn't match file names")
matches = matches[-1]
if len(matches) % 2:
raise self.ParseError("pattern doesn't match axis name and index")
axes = ''.join(m for m in matches[::2] if m)
if not axes:
raise self.ParseError("pattern doesn't match file names")
indices = []
for fname in self.files:
matches = pattern.findall(fname)[-1]
if axes != ''.join(m for m in matches[::2] if m):
raise ValueError("axes don't match within the image sequence")
indices.append([int(m) for m in matches[1::2] if m])
shape = tuple(numpy.max(indices, axis=0))
start_index = tuple(numpy.min(indices, axis=0))
shape = tuple(i - j + 1 for i, j in zip(shape, start_index))
if product(shape) != len(self.files):
warnings.warn("files are missing. Missing data are zeroed")
self.axes = axes.upper()
self.shape = shape
self._indices = indices
self._start_index = start_index
class Record(dict):
"""Dictionary with attribute access.
Can also be initialized with numpy.core.records.record.
"""
__slots__ = ()
def __init__(self, arg=None, **kwargs):
if kwargs:
arg = kwargs
elif arg is None:
arg = {}
try:
dict.__init__(self, arg)
except (TypeError, ValueError):
for i, name in enumerate(arg.dtype.names):
v = arg[i]
self[name] = v if v.dtype.char != 'S' else stripnull(v)
def __getattr__(self, name):
return self[name]
def __setattr__(self, name, value):
self.__setitem__(name, value)
def __str__(self):
"""Pretty print Record."""
s = []
lists = []
for k in sorted(self):
try:
if k.startswith('_'): # does not work with byte
continue
except AttributeError:
pass
v = self[k]
if isinstance(v, (list, tuple)) and len(v):
if isinstance(v[0], Record):
lists.append((k, v))
continue
elif isinstance(v[0], TiffPage):
v = [i.index for i in v if i]
s.append(
("* %s: %s" % (k, str(v))).split("\n", 1)[0]
[:PRINT_LINE_LEN].rstrip())
for k, v in lists:
l = []
for i, w in enumerate(v):
l.append("* %s[%i]\n %s" % (k, i,
str(w).replace("\n", "\n ")))
s.append('\n'.join(l))
return '\n'.join(s)
class TiffTags(Record):
"""Dictionary of TiffTag with attribute access."""
def __str__(self):
"""Return string with information about all tags."""
s = []
for tag in sorted(self.values(), key=lambda x: x.code):
typecode = "%i%s" % (tag.count * int(tag.dtype[0]), tag.dtype[1])
line = "* %i %s (%s) %s" % (
tag.code, tag.name, typecode, tag.as_str())
s.append(line[:PRINT_LINE_LEN].lstrip())
return '\n'.join(s)
class FileHandle(object):
"""Binary file handle.
* Handle embedded files (for CZI within CZI files).
* Allow to re-open closed files (for multi file formats such as OME-TIFF).
* Read numpy arrays and records from file like objects.
Only binary read, seek, tell, and close are supported on embedded files.
When initialized from another file handle, do not use it unless this
FileHandle is closed.
Attributes
----------
name : str
Name of the file.
path : str
Absolute path to file.
size : int
Size of file in bytes.
is_file : bool
        If True, file has a fileno and can be memory mapped.
All attributes are read-only.
"""
__slots__ = ('_fh', '_arg', '_mode', '_name', '_dir',
'_offset', '_size', '_close', 'is_file')
def __init__(self, arg, mode='rb', name=None, offset=None, size=None):
"""Initialize file handle from file name or another file handle.
Parameters
----------
arg : str, File, or FileHandle
File name or open file handle.
mode : str
File open mode in case 'arg' is a file name.
name : str
Optional name of file in case 'arg' is a file handle.
offset : int
Optional start position of embedded file. By default this is
the current file position.
size : int
Optional size of embedded file. By default this is the number
of bytes from the 'offset' to the end of the file.
"""
self._fh = None
self._arg = arg
self._mode = mode
self._name = name
self._dir = ''
self._offset = offset
self._size = size
self._close = True
self.is_file = False
self.open()
def open(self):
"""Open or re-open file."""
if self._fh:
return # file is open
if isinstance(self._arg, str):
# file name
self._arg = os.path.abspath(self._arg)
self._dir, self._name = os.path.split(self._arg)
self._fh = open(self._arg, self._mode)
self._close = True
if self._offset is None:
self._offset = 0
elif isinstance(self._arg, FileHandle):
# FileHandle
self._fh = self._arg._fh
if self._offset is None:
self._offset = 0
self._offset += self._arg._offset
self._close = False
if not self._name:
if self._offset:
name, ext = os.path.splitext(self._arg._name)
self._name = "%s@%i%s" % (name, self._offset, ext)
else:
self._name = self._arg._name
self._dir = self._arg._dir
else:
# open file object
self._fh = self._arg
if self._offset is None:
self._offset = self._arg.tell()
self._close = False
if not self._name:
try:
self._dir, self._name = os.path.split(self._fh.name)
except AttributeError:
self._name = "Unnamed stream"
if self._offset:
self._fh.seek(self._offset)
if self._size is None:
pos = self._fh.tell()
self._fh.seek(self._offset, 2)
self._size = self._fh.tell()
self._fh.seek(pos)
try:
self._fh.fileno()
self.is_file = True
except Exception:
self.is_file = False
def read(self, size=-1):
"""Read 'size' bytes from file, or until EOF is reached."""
if size < 0 and self._offset:
size = self._size
return self._fh.read(size)
def memmap_array(self, dtype, shape, offset=0, mode='r', order='C'):
"""Return numpy.memmap of data stored in file."""
if not self.is_file:
raise ValueError("Can not memory map file without fileno.")
return numpy.memmap(self._fh, dtype=dtype, mode=mode,
offset=self._offset + offset,
shape=shape, order=order)
def read_array(self, dtype, count=-1, sep=""):
"""Return numpy array from file.
Work around numpy issue #2230, "numpy.fromfile does not accept
StringIO object" https://github.com/numpy/numpy/issues/2230.
"""
try:
return numpy.fromfile(self._fh, dtype, count, sep)
except IOError:
if count < 0:
size = self._size
else:
size = count * numpy.dtype(dtype).itemsize
data = self._fh.read(size)
return numpy.fromstring(data, dtype, count, sep)
def read_record(self, dtype, shape=1, byteorder=None):
"""Return numpy record from file."""
try:
rec = numpy.rec.fromfile(self._fh, dtype, shape,
byteorder=byteorder)
except Exception:
dtype = numpy.dtype(dtype)
if shape is None:
shape = self._size // dtype.itemsize
size = product(sequence(shape)) * dtype.itemsize
data = self._fh.read(size)
return numpy.rec.fromstring(data, dtype, shape,
byteorder=byteorder)
return rec[0] if shape == 1 else rec
def tell(self):
"""Return file's current position."""
return self._fh.tell() - self._offset
def seek(self, offset, whence=0):
"""Set file's current position."""
if self._offset:
if whence == 0:
self._fh.seek(self._offset + offset, whence)
return
elif whence == 2:
self._fh.seek(self._offset + self._size + offset, 0)
return
self._fh.seek(offset, whence)
def close(self):
"""Close file."""
if self._close and self._fh:
self._fh.close()
self._fh = None
self.is_file = False
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def __getattr__(self, name):
"""Return attribute from underlying file object."""
if self._offset:
warnings.warn(
"FileHandle: '%s' not implemented for embedded files" % name)
return getattr(self._fh, name)
@property
def name(self):
return self._name
@property
def dirname(self):
return self._dir
@property
def path(self):
return os.path.join(self._dir, self._name)
@property
def size(self):
return self._size
@property
def closed(self):
return self._fh is None
def read_bytes(fh, byteorder, dtype, count):
"""Read tag data from file and return as byte string."""
dtype = 'b' if dtype[-1] == 's' else byteorder + dtype[-1]
return fh.read_array(dtype, count).tostring()
def read_numpy(fh, byteorder, dtype, count):
"""Read tag data from file and return as numpy array."""
dtype = 'b' if dtype[-1] == 's' else byteorder + dtype[-1]
return fh.read_array(dtype, count)
def read_json(fh, byteorder, dtype, count):
"""Read JSON tag data from file and return as object."""
data = fh.read(count)
try:
return json.loads(str(stripnull(data), 'utf-8'))
except ValueError:
warnings.warn("invalid JSON `%s`" % data)
def read_mm_header(fh, byteorder, dtype, count):
"""Read MM_HEADER tag from file and return as numpy.rec.array."""
return fh.read_record(MM_HEADER, byteorder=byteorder)
def read_mm_stamp(fh, byteorder, dtype, count):
"""Read MM_STAMP tag from file and return as numpy.array."""
return fh.read_array(byteorder + 'f8', 8)
def read_uic1tag(fh, byteorder, dtype, count, plane_count=None):
"""Read MetaMorph STK UIC1Tag from file and return as dictionary.
Return empty dictionary if plane_count is unknown.
"""
assert dtype in ('2I', '1I') and byteorder == '<'
result = {}
if dtype == '2I':
# pre MetaMorph 2.5 (not tested)
values = fh.read_array('<u4', 2 * count).reshape(count, 2)
result = {'z_distance': values[:, 0] / values[:, 1]}
elif plane_count:
for i in range(count):
tagid = struct.unpack('<I', fh.read(4))[0]
if tagid in (28, 29, 37, 40, 41):
# silently skip unexpected tags
fh.read(4)
continue
name, value = read_uic_tag(fh, tagid, plane_count, offset=True)
result[name] = value
return result
def read_uic2tag(fh, byteorder, dtype, plane_count):
"""Read MetaMorph STK UIC2Tag from file and return as dictionary."""
assert dtype == '2I' and byteorder == '<'
values = fh.read_array('<u4', 6 * plane_count).reshape(plane_count, 6)
return {
'z_distance': values[:, 0] / values[:, 1],
'date_created': values[:, 2], # julian days
'time_created': values[:, 3], # milliseconds
'date_modified': values[:, 4], # julian days
'time_modified': values[:, 5], # milliseconds
}
def read_uic3tag(fh, byteorder, dtype, plane_count):
"""Read MetaMorph STK UIC3Tag from file and return as dictionary."""
assert dtype == '2I' and byteorder == '<'
values = fh.read_array('<u4', 2 * plane_count).reshape(plane_count, 2)
return {'wavelengths': values[:, 0] / values[:, 1]}
def read_uic4tag(fh, byteorder, dtype, plane_count):
"""Read MetaMorph STK UIC4Tag from file and return as dictionary."""
assert dtype == '1I' and byteorder == '<'
result = {}
while True:
tagid = struct.unpack('<H', fh.read(2))[0]
if tagid == 0:
break
name, value = read_uic_tag(fh, tagid, plane_count, offset=False)
result[name] = value
return result
def read_uic_tag(fh, tagid, plane_count, offset):
"""Read a single UIC tag value from file and return tag name and value.
UIC1Tags use an offset.
"""
def read_int(count=1):
value = struct.unpack('<%iI' % count, fh.read(4 * count))
return value[0] if count == 1 else value
try:
name, dtype = UIC_TAGS[tagid]
except KeyError:
# unknown tag
return '_tagid_%i' % tagid, read_int()
if offset:
pos = fh.tell()
if dtype not in (int, None):
off = read_int()
if off < 8:
warnings.warn("invalid offset for uic tag '%s': %i"
% (name, off))
return name, off
fh.seek(off)
if dtype is None:
# skip
name = '_' + name
value = read_int()
elif dtype is int:
# int
value = read_int()
elif dtype is Fraction:
# fraction
value = read_int(2)
value = value[0] / value[1]
elif dtype is julian_datetime:
# datetime
value = julian_datetime(*read_int(2))
elif dtype is read_uic_image_property:
# ImagePropertyEx
value = read_uic_image_property(fh)
elif dtype is str:
# pascal string
size = read_int()
if 0 <= size < 2 ** 10:
value = struct.unpack('%is' % size, fh.read(size))[0][:-1]
value = stripnull(value)
elif offset:
value = ''
warnings.warn("corrupt string in uic tag '%s'" % name)
else:
raise ValueError("invalid string size %i" % size)
elif dtype == '%ip':
# sequence of pascal strings
value = []
for i in range(plane_count):
size = read_int()
if 0 <= size < 2 ** 10:
string = struct.unpack('%is' % size, fh.read(size))[0][:-1]
string = stripnull(string)
value.append(string)
elif offset:
warnings.warn("corrupt string in uic tag '%s'" % name)
else:
raise ValueError("invalid string size %i" % size)
else:
# struct or numpy type
dtype = '<' + dtype
if '%i' in dtype:
dtype = dtype % plane_count
if '(' in dtype:
# numpy type
value = fh.read_array(dtype, 1)[0]
if value.shape[-1] == 2:
# assume fractions
value = value[..., 0] / value[..., 1]
else:
# struct format
value = struct.unpack(dtype, fh.read(struct.calcsize(dtype)))
if len(value) == 1:
value = value[0]
if offset:
fh.seek(pos + 4)
return name, value
def read_uic_image_property(fh):
"""Read UIC ImagePropertyEx tag from file and return as dict."""
# TODO: test this
size = struct.unpack('B', fh.read(1))[0]
name = struct.unpack('%is' % size, fh.read(size))[0][:-1]
flags, prop = struct.unpack('<IB', fh.read(5))
if prop == 1:
value = struct.unpack('II', fh.read(8))
value = value[0] / value[1]
else:
size = struct.unpack('B', fh.read(1))[0]
value = struct.unpack('%is' % size, fh.read(size))[0]
return dict(name=name, flags=flags, value=value)
def read_cz_lsm_info(fh, byteorder, dtype, count):
"""Read CS_LSM_INFO tag from file and return as numpy.rec.array."""
assert byteorder == '<'
magic_number, structure_size = struct.unpack('<II', fh.read(8))
if magic_number not in (50350412, 67127628):
raise ValueError("not a valid CS_LSM_INFO structure")
fh.seek(-8, 1)
if structure_size < numpy.dtype(CZ_LSM_INFO).itemsize:
# adjust structure according to structure_size
cz_lsm_info = []
size = 0
for name, dtype in CZ_LSM_INFO:
size += numpy.dtype(dtype).itemsize
if size > structure_size:
break
cz_lsm_info.append((name, dtype))
else:
cz_lsm_info = CZ_LSM_INFO
return fh.read_record(cz_lsm_info, byteorder=byteorder)
def read_cz_lsm_floatpairs(fh):
"""Read LSM sequence of float pairs from file and return as list."""
size = struct.unpack('<i', fh.read(4))[0]
return fh.read_array('<2f8', count=size)
def read_cz_lsm_positions(fh):
"""Read LSM positions from file and return as list."""
size = struct.unpack('<I', fh.read(4))[0]
return fh.read_array('<2f8', count=size)
def read_cz_lsm_time_stamps(fh):
"""Read LSM time stamps from file and return as list."""
size, count = struct.unpack('<ii', fh.read(8))
if size != (8 + 8 * count):
raise ValueError("lsm_time_stamps block is too short")
# return struct.unpack('<%dd' % count, fh.read(8*count))
return fh.read_array('<f8', count=count)
def read_cz_lsm_event_list(fh):
"""Read LSM events from file and return as list of (time, type, text)."""
count = struct.unpack('<II', fh.read(8))[1]
events = []
while count > 0:
esize, etime, etype = struct.unpack('<IdI', fh.read(16))
etext = stripnull(fh.read(esize - 16))
events.append((etime, etype, etext))
count -= 1
return events
def read_cz_lsm_scan_info(fh):
"""Read LSM scan information from file and return as Record."""
block = Record()
blocks = [block]
unpack = struct.unpack
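    # scan info is a tree of (entry, dtype, size) records; entries listed in
    # CZ_LSM_SCAN_INFO_ARRAYS/STRUCTS open nested sub-blocks and the
    # 0xffffffff entry closes the current one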
if 0x10000000 != struct.unpack('<I', fh.read(4))[0]:
# not a Recording sub block
raise ValueError("not a lsm_scan_info structure")
fh.read(8)
while True:
entry, dtype, size = unpack('<III', fh.read(12))
if dtype == 2:
# ascii
value = stripnull(fh.read(size))
elif dtype == 4:
# long
value = unpack('<i', fh.read(4))[0]
elif dtype == 5:
# rational
value = unpack('<d', fh.read(8))[0]
else:
value = 0
if entry in CZ_LSM_SCAN_INFO_ARRAYS:
blocks.append(block)
name = CZ_LSM_SCAN_INFO_ARRAYS[entry]
newobj = []
setattr(block, name, newobj)
block = newobj
elif entry in CZ_LSM_SCAN_INFO_STRUCTS:
blocks.append(block)
newobj = Record()
block.append(newobj)
block = newobj
elif entry in CZ_LSM_SCAN_INFO_ATTRIBUTES:
name = CZ_LSM_SCAN_INFO_ATTRIBUTES[entry]
setattr(block, name, value)
elif entry == 0xffffffff:
# end sub block
block = blocks.pop()
else:
# unknown entry
setattr(block, "entry_0x%x" % entry, value)
if not blocks:
break
return block
def read_nih_image_header(fh, byteorder, dtype, count):
"""Read NIH_IMAGE_HEADER tag from file and return as numpy.rec.array."""
a = fh.read_record(NIH_IMAGE_HEADER, byteorder=byteorder)
a = a.newbyteorder(byteorder)
a.xunit = a.xunit[:a._xunit_len]
a.um = a.um[:a._um_len]
return a
def read_micromanager_metadata(fh):
"""Read MicroManager non-TIFF settings from open file and return as dict.
The settings can be used to read image data without parsing the TIFF file.
Raise ValueError if file does not contain valid MicroManager metadata.
"""
fh.seek(0)
try:
byteorder = {b'II': '<', b'MM': '>'}[fh.read(2)]
except IndexError:
raise ValueError("not a MicroManager TIFF file")
results = {}
fh.seek(8)
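    # the 32 bytes at offset 8 hold four (magic number, offset or length)
    # pairs locating the index map, display settings, comments and summary
    # metadata blocks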
(index_header, index_offset, display_header, display_offset,
comments_header, comments_offset, summary_header, summary_length
) = struct.unpack(byteorder + "IIIIIIII", fh.read(32))
if summary_header != 2355492:
raise ValueError("invalid MicroManager summary_header")
results['summary'] = read_json(fh, byteorder, None, summary_length)
if index_header != 54773648:
raise ValueError("invalid MicroManager index_header")
fh.seek(index_offset)
header, count = struct.unpack(byteorder + "II", fh.read(8))
if header != 3453623:
raise ValueError("invalid MicroManager index_header")
data = struct.unpack(byteorder + "IIIII" * count, fh.read(20 * count))
results['index_map'] = {
'channel': data[::5], 'slice': data[1::5], 'frame': data[2::5],
'position': data[3::5], 'offset': data[4::5]}
if display_header != 483765892:
raise ValueError("invalid MicroManager display_header")
fh.seek(display_offset)
header, count = struct.unpack(byteorder + "II", fh.read(8))
if header != 347834724:
raise ValueError("invalid MicroManager display_header")
results['display_settings'] = read_json(fh, byteorder, None, count)
if comments_header != 99384722:
raise ValueError("invalid MicroManager comments_header")
fh.seek(comments_offset)
header, count = struct.unpack(byteorder + "II", fh.read(8))
if header != 84720485:
raise ValueError("invalid MicroManager comments_header")
results['comments'] = read_json(fh, byteorder, None, count)
return results
def imagej_metadata(data, bytecounts, byteorder):
"""Return dict from ImageJ metadata tag value."""
_str = str if sys.version_info[0] < 3 else lambda x: str(x, 'cp1252')
def read_string(data, byteorder):
return _str(stripnull(data[0 if byteorder == '<' else 1::2]))
def read_double(data, byteorder):
return struct.unpack(byteorder + ('d' * (len(data) // 8)), data)
def read_bytes(data, byteorder):
# return struct.unpack('b' * len(data), data)
return numpy.fromstring(data, 'uint8')
metadata_types = { # big endian
b'info': ('info', read_string),
b'labl': ('labels', read_string),
b'rang': ('ranges', read_double),
b'luts': ('luts', read_bytes),
b'roi ': ('roi', read_bytes),
b'over': ('overlays', read_bytes)}
metadata_types.update( # little endian
dict((k[::-1], v) for k, v in metadata_types.items()))
if not bytecounts:
raise ValueError("no ImageJ metadata")
if not data[:4] in (b'IJIJ', b'JIJI'):
raise ValueError("invalid ImageJ metadata")
header_size = bytecounts[0]
if header_size < 12 or header_size > 804:
raise ValueError("invalid ImageJ metadata header size")
ntypes = (header_size - 4) // 8
header = struct.unpack(byteorder + '4sI' * ntypes, data[4:4 + ntypes * 8])
pos = 4 + ntypes * 8
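    # the header consists of (4-byte type id, count) pairs; the data blocks
    # follow in the same order, with sizes given by the remaining bytecounts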
counter = 0
result = {}
for mtype, count in zip(header[::2], header[1::2]):
values = []
name, func = metadata_types.get(mtype, (_str(mtype), read_bytes))
for _ in range(count):
counter += 1
pos1 = pos + bytecounts[counter]
values.append(func(data[pos:pos1], byteorder))
pos = pos1
result[name.strip()] = values[0] if count == 1 else values
return result
def imagej_description(description):
"""Return dict from ImageJ image_description tag."""
def _bool(val):
return {b'true': True, b'false': False}[val.lower()]
_str = str if sys.version_info[0] < 3 else lambda x: str(x, 'cp1252')
result = {}
for line in description.splitlines():
try:
key, val = line.split(b'=')
except Exception:
continue
key = key.strip()
val = val.strip()
for dtype in (int, float, _bool, _str):
try:
val = dtype(val)
break
except Exception:
pass
result[_str(key)] = val
return result
def _replace_by(module_function, package=None, warn=False):
"""Try replace decorated function by module.function."""
try:
from importlib import import_module
except ImportError:
warnings.warn('could not import module importlib')
return lambda func: func
def decorate(func, module_function=module_function, warn=warn):
try:
module, function = module_function.split('.')
if not package:
module = import_module(module)
else:
module = import_module('.' + module, package=package)
func, oldfunc = getattr(module, function), func
globals()['__old_' + func.__name__] = oldfunc
except Exception:
if warn:
warnings.warn("failed to import %s" % module_function)
return func
return decorate
def decodejpg(encoded, tables=b'', photometric=None,
ycbcr_subsampling=None, ycbcr_positioning=None):
"""Decode JPEG encoded byte string (using _czifile extension module)."""
import _czifile
image = _czifile.decodejpg(encoded, tables)
if photometric == 'rgb' and ycbcr_subsampling and ycbcr_positioning:
# TODO: convert YCbCr to RGB
pass
return image.tostring()
@_replace_by('_tifffile.decodepackbits')
def decodepackbits(encoded):
"""Decompress PackBits encoded byte string.
PackBits is a simple byte-oriented run-length compression scheme.
"""
func = ord if sys.version[0] == '2' else lambda x: x
result = []
result_extend = result.extend
i = 0
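    # PackBits: for each control byte c, n = c + 1; n < 129 copies the next
    # n bytes verbatim, n > 129 repeats the following byte 258 - n times,
    # and n == 129 (c == 128) is a no-op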
try:
while True:
n = func(encoded[i]) + 1
i += 1
if n < 129:
result_extend(encoded[i:i + n])
i += n
elif n > 129:
result_extend(encoded[i:i + 1] * (258 - n))
i += 1
except IndexError:
pass
return b''.join(result) if sys.version[0] == '2' else bytes(result)
@_replace_by('_tifffile.decodelzw')
def decodelzw(encoded):
"""Decompress LZW (Lempel-Ziv-Welch) encoded TIFF strip (byte string).
The strip must begin with a CLEAR code and end with an EOI code.
This is an implementation of the LZW decoding algorithm described in (1).
It is not compatible with old style LZW compressed files like quad-lzw.tif.
"""
len_encoded = len(encoded)
bitcount_max = len_encoded * 8
unpack = struct.unpack
if sys.version[0] == '2':
newtable = [chr(i) for i in range(256)]
else:
newtable = [bytes([i]) for i in range(256)]
newtable.extend((0, 0))
def next_code():
"""Return integer of `bitw` bits at `bitcount` position in encoded."""
start = bitcount // 8
s = encoded[start:start + 4]
try:
code = unpack('>I', s)[0]
except Exception:
code = unpack('>I', s + b'\x00' * (4 - len(s)))[0]
code <<= bitcount % 8
code &= mask
return code >> shr
switchbitch = { # code: bit-width, shr-bits, bit-mask
255: (9, 23, int(9 * '1' + '0' * 23, 2)),
511: (10, 22, int(10 * '1' + '0' * 22, 2)),
1023: (11, 21, int(11 * '1' + '0' * 21, 2)),
2047: (12, 20, int(12 * '1' + '0' * 20, 2)), }
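    # the 9-bit entry seeds the initial code width; the width steps up to
    # 10/11/12 bits when the string table grows to 511/1023/2047 entries,
    # using the precomputed shift and mask consumed by next_code()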
bitw, shr, mask = switchbitch[255]
bitcount = 0
if len_encoded < 4:
raise ValueError("strip must be at least 4 characters long")
if next_code() != 256:
raise ValueError("strip must begin with CLEAR code")
code = 0
oldcode = 0
result = []
result_append = result.append
while True:
code = next_code() # ~5% faster when inlining this function
bitcount += bitw
if code == 257 or bitcount >= bitcount_max: # EOI
break
if code == 256: # CLEAR
table = newtable[:]
table_append = table.append
lentable = 258
bitw, shr, mask = switchbitch[255]
code = next_code()
bitcount += bitw
if code == 257: # EOI
break
result_append(table[code])
else:
if code < lentable:
decoded = table[code]
newcode = table[oldcode] + decoded[:1]
else:
newcode = table[oldcode]
newcode += newcode[:1]
decoded = newcode
result_append(decoded)
table_append(newcode)
lentable += 1
oldcode = code
if lentable in switchbitch:
bitw, shr, mask = switchbitch[lentable]
if code != 257:
warnings.warn("unexpected end of lzw stream (code %i)" % code)
return b''.join(result)
@_replace_by('_tifffile.unpackints')
def unpackints(data, dtype, itemsize, runlen=0):
"""Decompress byte string to array of integers of any bit size <= 32.
Parameters
----------
data : byte str
Data to decompress.
dtype : numpy.dtype or str
A numpy boolean or integer type.
itemsize : int
Number of bits per integer.
runlen : int
Number of consecutive integers, after which to start at next byte.
"""
if itemsize == 1: # bitarray
data = numpy.fromstring(data, '|B')
data = numpy.unpackbits(data)
if runlen % 8:
data = data.reshape(-1, runlen + (8 - runlen % 8))
data = data[:, :runlen].reshape(-1)
return data.astype(dtype)
dtype = numpy.dtype(dtype)
if itemsize in (8, 16, 32, 64):
return numpy.fromstring(data, dtype)
if itemsize < 1 or itemsize > 32:
raise ValueError("itemsize out of range: %i" % itemsize)
if dtype.kind not in "biu":
raise ValueError("invalid dtype")
itembytes = next(i for i in (1, 2, 4, 8) if 8 * i >= itemsize)
if itembytes != dtype.itemsize:
raise ValueError("dtype.itemsize too small")
if runlen == 0:
runlen = len(data) // itembytes
skipbits = runlen * itemsize % 8
if skipbits:
skipbits = 8 - skipbits
shrbits = itembytes * 8 - itemsize
bitmask = int(itemsize * '1' + '0' * shrbits, 2)
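    # each sample is read by taking itembytes bytes at the current bit
    # offset, shifting left to discard leading bits, masking, and shifting
    # right by shrbits to right-align the value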
dtypestr = '>' + dtype.char # dtype always big endian?
unpack = struct.unpack
l = runlen * (len(data) * 8 // (runlen * itemsize + skipbits))
result = numpy.empty((l, ), dtype)
bitcount = 0
for i in range(len(result)):
start = bitcount // 8
s = data[start:start + itembytes]
try:
code = unpack(dtypestr, s)[0]
except Exception:
code = unpack(dtypestr, s + b'\x00' * (itembytes - len(s)))[0]
code <<= bitcount % 8
code &= bitmask
result[i] = code >> shrbits
bitcount += itemsize
if (i + 1) % runlen == 0:
bitcount += skipbits
return result
def unpackrgb(data, dtype='<B', bitspersample=(5, 6, 5), rescale=True):
"""Return array from byte string containing packed samples.
Use to unpack RGB565 or RGB555 to RGB888 format.
Parameters
----------
data : byte str
The data to be decoded. Samples in each pixel are stored consecutively.
Pixels are aligned to 8, 16, or 32 bit boundaries.
dtype : numpy.dtype
The sample data type. The byteorder applies also to the data stream.
bitspersample : tuple
Number of bits for each sample in a pixel.
rescale : bool
Upscale samples to the number of bits in dtype.
Returns
-------
result : ndarray
Flattened array of unpacked samples of native dtype.
Examples
--------
>>> data = struct.pack('BBBB', 0x21, 0x08, 0xff, 0xff)
>>> print(unpackrgb(data, '<B', (5, 6, 5), False))
[ 1 1 1 31 63 31]
>>> print(unpackrgb(data, '<B', (5, 6, 5)))
[ 8 4 8 255 255 255]
>>> print(unpackrgb(data, '<B', (5, 5, 5)))
[ 16 8 8 255 255 255]
"""
dtype = numpy.dtype(dtype)
bits = int(numpy.sum(bitspersample))
if not (bits <= 32 and all(
i <= dtype.itemsize * 8 for i in bitspersample)):
raise ValueError("sample size not supported %s" % str(bitspersample))
dt = next(i for i in 'BHI' if numpy.dtype(i).itemsize * 8 >= bits)
data = numpy.fromstring(data, dtype.byteorder + dt)
result = numpy.empty((data.size, len(bitspersample)), dtype.char)
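    # for each channel: shift out the lower-order channels, mask to bps bits,
    # and optionally rescale the value to the full range of the output dtype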
for i, bps in enumerate(bitspersample):
t = data >> int(numpy.sum(bitspersample[i + 1:]))
t &= int('0b' + '1' * bps, 2)
if rescale:
o = ((dtype.itemsize * 8) // bps + 1) * bps
if o > data.dtype.itemsize * 8:
t = t.astype('I')
t *= (2 ** o - 1) // (2 ** bps - 1)
t //= 2 ** (o - (dtype.itemsize * 8))
result[:, i] = t
return result.reshape(-1)
def reorient(image, orientation):
"""Return reoriented view of image array.
Parameters
----------
image : numpy array
Non-squeezed output of asarray() functions.
Axes -3 and -2 must be image length and width respectively.
orientation : int or str
One of TIFF_ORIENTATIONS keys or values.
"""
o = TIFF_ORIENTATIONS.get(orientation, orientation)
if o == 'top_left':
return image
elif o == 'top_right':
return image[..., ::-1, :]
elif o == 'bottom_left':
return image[..., ::-1, :, :]
elif o == 'bottom_right':
return image[..., ::-1, ::-1, :]
elif o == 'left_top':
return numpy.swapaxes(image, -3, -2)
elif o == 'right_top':
return numpy.swapaxes(image, -3, -2)[..., ::-1, :]
elif o == 'left_bottom':
return numpy.swapaxes(image, -3, -2)[..., ::-1, :, :]
elif o == 'right_bottom':
return numpy.swapaxes(image, -3, -2)[..., ::-1, ::-1, :]
def squeeze_axes(shape, axes, skip='XY'):
"""Return shape and axes with single-dimensional entries removed.
Remove unused dimensions unless their axes are listed in 'skip'.
>>> squeeze_axes((5, 1, 2, 1, 1), 'TZYXC')
((5, 2, 1), 'TYX')
"""
if len(shape) != len(axes):
raise ValueError("dimensions of axes and shape don't match")
shape, axes = zip(*(i for i in zip(shape, axes)
if i[0] > 1 or i[1] in skip))
return shape, ''.join(axes)
def transpose_axes(data, axes, asaxes='CTZYX'):
"""Return data with its axes permuted to match specified axes.
A view is returned if possible.
>>> transpose_axes(numpy.zeros((2, 3, 4, 5)), 'TYXC', asaxes='CTZYX').shape
(5, 2, 1, 3, 4)
"""
for ax in axes:
if ax not in asaxes:
raise ValueError("unknown axis %s" % ax)
# add missing axes to data
shape = data.shape
for ax in reversed(asaxes):
if ax not in axes:
axes = ax + axes
shape = (1,) + shape
data = data.reshape(shape)
# transpose axes
data = data.transpose([axes.index(ax) for ax in asaxes])
return data
def stack_pages(pages, memmap=False, *args, **kwargs):
"""Read data from sequence of TiffPage and stack them vertically.
If memmap is True, return an array stored in a binary file on disk.
    Additional parameters are passed to the page asarray function.
"""
if len(pages) == 0:
raise ValueError("no pages")
if len(pages) == 1:
return pages[0].asarray(memmap=memmap, *args, **kwargs)
result = pages[0].asarray(*args, **kwargs)
shape = (len(pages),) + result.shape
if memmap:
with tempfile.NamedTemporaryFile() as fh:
result = numpy.memmap(fh, dtype=result.dtype, shape=shape)
else:
result = numpy.empty(shape, dtype=result.dtype)
for i, page in enumerate(pages):
result[i] = page.asarray(*args, **kwargs)
return result
def stripnull(string):
"""Return string truncated at first null character.
Clean NULL terminated C strings.
>>> stripnull(b'string\\x00')
b'string'
"""
i = string.find(b'\x00')
return string if (i < 0) else string[:i]
def stripascii(string):
"""Return string truncated at last byte that is 7bit ASCII.
Clean NULL separated and terminated TIFF strings.
>>> stripascii(b'string\\x00string\\n\\x01\\x00')
b'string\\x00string\\n'
>>> stripascii(b'\\x00')
b''
"""
# TODO: pythonize this
ord_ = ord if sys.version_info[0] < 3 else lambda x: x
i = len(string)
while i:
i -= 1
if 8 < ord_(string[i]) < 127:
break
else:
i = -1
return string[:i + 1]
def format_size(size):
"""Return file size as string from byte size."""
for unit in ('B', 'KB', 'MB', 'GB', 'TB'):
if size < 2048:
return "%.f %s" % (size, unit)
size /= 1024.0
def sequence(value):
"""Return tuple containing value if value is not a sequence.
>>> sequence(1)
(1,)
>>> sequence([1])
[1]
"""
try:
len(value)
return value
except TypeError:
return value,
def product(iterable):
"""Return product of sequence of numbers.
Equivalent of functools.reduce(operator.mul, iterable, 1).
>>> product([2**8, 2**30])
274877906944
>>> product([])
1
"""
prod = 1
for i in iterable:
prod *= i
return prod
def natural_sorted(iterable):
"""Return human sorted list of strings.
E.g. for sorting file names.
>>> natural_sorted(['f1', 'f2', 'f10'])
['f1', 'f2', 'f10']
"""
def sortkey(x):
return [(int(c) if c.isdigit() else c) for c in re.split(numbers, x)]
numbers = re.compile(r'(\d+)')
return sorted(iterable, key=sortkey)
def excel_datetime(timestamp, epoch=datetime.datetime.fromordinal(693594)):
"""Return datetime object from timestamp in Excel serial format.
Convert LSM time stamps.
>>> excel_datetime(40237.029999999795)
datetime.datetime(2010, 2, 28, 0, 43, 11, 999982)
"""
return epoch + datetime.timedelta(timestamp)
def julian_datetime(julianday, milisecond=0):
"""Return datetime from days since 1/1/4713 BC and ms since midnight.
Convert Julian dates according to MetaMorph.
>>> julian_datetime(2451576, 54362783)
datetime.datetime(2000, 2, 2, 15, 6, 2, 783)
"""
if julianday <= 1721423:
# no datetime before year 1
return None
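    # standard Julian day number to calendar date conversion; the alpha term
    # applies the Gregorian calendar correction for days after JD 2299160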
a = julianday + 1
if a > 2299160:
alpha = math.trunc((a - 1867216.25) / 36524.25)
a += 1 + alpha - alpha // 4
b = a + (1524 if a > 1721423 else 1158)
c = math.trunc((b - 122.1) / 365.25)
d = math.trunc(365.25 * c)
e = math.trunc((b - d) / 30.6001)
day = b - d - math.trunc(30.6001 * e)
month = e - (1 if e < 13.5 else 13)
year = c - (4716 if month > 2.5 else 4715)
hour, milisecond = divmod(milisecond, 1000 * 60 * 60)
minute, milisecond = divmod(milisecond, 1000 * 60)
second, milisecond = divmod(milisecond, 1000)
return datetime.datetime(year, month, day,
hour, minute, second, milisecond)
def test_tifffile(directory='testimages', verbose=True):
"""Read all images in directory.
Print error message on failure.
>>> test_tifffile(verbose=False)
"""
successful = 0
failed = 0
start = time.time()
for f in glob.glob(os.path.join(directory, '*.*')):
if verbose:
print("\n%s>\n" % f.lower(), end='')
t0 = time.time()
try:
tif = TiffFile(f, multifile=True)
except Exception as e:
if not verbose:
print(f, end=' ')
print("ERROR:", e)
failed += 1
continue
try:
img = tif.asarray()
except ValueError:
try:
img = tif[0].asarray()
except Exception as e:
if not verbose:
print(f, end=' ')
print("ERROR:", e)
failed += 1
continue
finally:
tif.close()
successful += 1
if verbose:
print("%s, %s %s, %s, %.0f ms" % (
str(tif), str(img.shape), img.dtype, tif[0].compression,
(time.time() - t0) * 1e3))
if verbose:
print("\nSuccessfully read %i of %i files in %.3f s\n" % (
successful, successful + failed, time.time() - start))
class TIFF_SUBFILE_TYPES(object):
def __getitem__(self, key):
result = []
if key & 1:
result.append('reduced_image')
if key & 2:
result.append('page')
if key & 4:
result.append('mask')
return tuple(result)
TIFF_PHOTOMETRICS = {
0: 'miniswhite',
1: 'minisblack',
2: 'rgb',
3: 'palette',
4: 'mask',
5: 'separated', # CMYK
6: 'ycbcr',
8: 'cielab',
9: 'icclab',
10: 'itulab',
32803: 'cfa', # Color Filter Array
32844: 'logl',
32845: 'logluv',
34892: 'linear_raw'
}
TIFF_COMPESSIONS = {
1: None,
2: 'ccittrle',
3: 'ccittfax3',
4: 'ccittfax4',
5: 'lzw',
6: 'ojpeg',
7: 'jpeg',
8: 'adobe_deflate',
9: 't85',
10: 't43',
32766: 'next',
32771: 'ccittrlew',
32773: 'packbits',
32809: 'thunderscan',
32895: 'it8ctpad',
32896: 'it8lw',
32897: 'it8mp',
32898: 'it8bl',
32908: 'pixarfilm',
32909: 'pixarlog',
32946: 'deflate',
32947: 'dcs',
34661: 'jbig',
34676: 'sgilog',
34677: 'sgilog24',
34712: 'jp2000',
34713: 'nef',
}
TIFF_DECOMPESSORS = {
None: lambda x: x,
'adobe_deflate': zlib.decompress,
'deflate': zlib.decompress,
'packbits': decodepackbits,
'lzw': decodelzw,
# 'jpeg': decodejpg
}
TIFF_DATA_TYPES = {
1: '1B', # BYTE 8-bit unsigned integer.
2: '1s', # ASCII 8-bit byte that contains a 7-bit ASCII code;
# the last byte must be NULL (binary zero).
3: '1H', # SHORT 16-bit (2-byte) unsigned integer
4: '1I', # LONG 32-bit (4-byte) unsigned integer.
5: '2I', # RATIONAL Two LONGs: the first represents the numerator of
# a fraction; the second, the denominator.
6: '1b', # SBYTE An 8-bit signed (twos-complement) integer.
7: '1s', # UNDEFINED An 8-bit byte that may contain anything,
# depending on the definition of the field.
8: '1h', # SSHORT A 16-bit (2-byte) signed (twos-complement) integer.
9: '1i', # SLONG A 32-bit (4-byte) signed (twos-complement) integer.
10: '2i', # SRATIONAL Two SLONGs: the first represents the numerator
# of a fraction, the second the denominator.
11: '1f', # FLOAT Single precision (4-byte) IEEE format.
12: '1d', # DOUBLE Double precision (8-byte) IEEE format.
13: '1I', # IFD unsigned 4 byte IFD offset.
# 14: '', # UNICODE
# 15: '', # COMPLEX
16: '1Q', # LONG8 unsigned 8 byte integer (BigTiff)
17: '1q', # SLONG8 signed 8 byte integer (BigTiff)
18: '1Q', # IFD8 unsigned 8 byte IFD offset (BigTiff)
}
TIFF_SAMPLE_FORMATS = {
1: 'uint',
2: 'int',
3: 'float',
# 4: 'void',
# 5: 'complex_int',
6: 'complex',
}
TIFF_SAMPLE_DTYPES = {
('uint', 1): '?', # bitmap
('uint', 2): 'B',
('uint', 3): 'B',
('uint', 4): 'B',
('uint', 5): 'B',
('uint', 6): 'B',
('uint', 7): 'B',
('uint', 8): 'B',
('uint', 9): 'H',
('uint', 10): 'H',
('uint', 11): 'H',
('uint', 12): 'H',
('uint', 13): 'H',
('uint', 14): 'H',
('uint', 15): 'H',
('uint', 16): 'H',
('uint', 17): 'I',
('uint', 18): 'I',
('uint', 19): 'I',
('uint', 20): 'I',
('uint', 21): 'I',
('uint', 22): 'I',
('uint', 23): 'I',
('uint', 24): 'I',
('uint', 25): 'I',
('uint', 26): 'I',
('uint', 27): 'I',
('uint', 28): 'I',
('uint', 29): 'I',
('uint', 30): 'I',
('uint', 31): 'I',
('uint', 32): 'I',
('uint', 64): 'Q',
('int', 8): 'b',
('int', 16): 'h',
('int', 32): 'i',
('int', 64): 'q',
('float', 16): 'e',
('float', 32): 'f',
('float', 64): 'd',
('complex', 64): 'F',
('complex', 128): 'D',
('uint', (5, 6, 5)): 'B',
}
TIFF_ORIENTATIONS = {
1: 'top_left',
2: 'top_right',
3: 'bottom_right',
4: 'bottom_left',
5: 'left_top',
6: 'right_top',
7: 'right_bottom',
8: 'left_bottom',
}
# TODO: is there a standard for character axes labels?
AXES_LABELS = {
'X': 'width',
'Y': 'height',
'Z': 'depth',
'S': 'sample', # rgb(a)
'I': 'series', # general sequence, plane, page, IFD
'T': 'time',
'C': 'channel', # color, emission wavelength
'A': 'angle',
'P': 'phase', # formerly F # P is Position in LSM!
'R': 'tile', # region, point, mosaic
'H': 'lifetime', # histogram
'E': 'lambda', # excitation wavelength
'L': 'exposure', # lux
'V': 'event',
'Q': 'other',
# 'M': 'mosaic', # LSM 6
}
AXES_LABELS.update(dict((v, k) for k, v in AXES_LABELS.items()))
# Map OME pixel types to numpy dtype
OME_PIXEL_TYPES = {
'int8': 'i1',
'int16': 'i2',
'int32': 'i4',
'uint8': 'u1',
'uint16': 'u2',
'uint32': 'u4',
'float': 'f4',
# 'bit': 'bit',
'double': 'f8',
'complex': 'c8',
'double-complex': 'c16',
}
# NIH Image PicHeader v1.63
NIH_IMAGE_HEADER = [
('fileid', 'a8'),
('nlines', 'i2'),
('pixelsperline', 'i2'),
('version', 'i2'),
('oldlutmode', 'i2'),
('oldncolors', 'i2'),
('colors', 'u1', (3, 32)),
('oldcolorstart', 'i2'),
('colorwidth', 'i2'),
('extracolors', 'u2', (6, 3)),
('nextracolors', 'i2'),
('foregroundindex', 'i2'),
('backgroundindex', 'i2'),
('xscale', 'f8'),
('_x0', 'i2'),
('_x1', 'i2'),
('units_t', 'i2'), # NIH_UNITS_TYPE
('p1', [('x', 'i2'), ('y', 'i2')]),
('p2', [('x', 'i2'), ('y', 'i2')]),
('curvefit_t', 'i2'), # NIH_CURVEFIT_TYPE
('ncoefficients', 'i2'),
('coeff', 'f8', 6),
('_um_len', 'u1'),
('um', 'a15'),
('_x2', 'u1'),
('binarypic', 'b1'),
('slicestart', 'i2'),
('sliceend', 'i2'),
('scalemagnification', 'f4'),
('nslices', 'i2'),
('slicespacing', 'f4'),
('currentslice', 'i2'),
('frameinterval', 'f4'),
('pixelaspectratio', 'f4'),
('colorstart', 'i2'),
('colorend', 'i2'),
('ncolors', 'i2'),
('fill1', '3u2'),
('fill2', '3u2'),
('colortable_t', 'u1'), # NIH_COLORTABLE_TYPE
('lutmode_t', 'u1'), # NIH_LUTMODE_TYPE
('invertedtable', 'b1'),
('zeroclip', 'b1'),
('_xunit_len', 'u1'),
('xunit', 'a11'),
('stacktype_t', 'i2'), # NIH_STACKTYPE_TYPE
]
NIH_COLORTABLE_TYPE = (
'CustomTable', 'AppleDefault', 'Pseudo20', 'Pseudo32', 'Rainbow',
'Fire1', 'Fire2', 'Ice', 'Grays', 'Spectrum')
NIH_LUTMODE_TYPE = (
'PseudoColor', 'OldAppleDefault', 'OldSpectrum', 'GrayScale',
'ColorLut', 'CustomGrayscale')
NIH_CURVEFIT_TYPE = (
'StraightLine', 'Poly2', 'Poly3', 'Poly4', 'Poly5', 'ExpoFit',
'PowerFit', 'LogFit', 'RodbardFit', 'SpareFit1', 'Uncalibrated',
'UncalibratedOD')
NIH_UNITS_TYPE = (
'Nanometers', 'Micrometers', 'Millimeters', 'Centimeters', 'Meters',
'Kilometers', 'Inches', 'Feet', 'Miles', 'Pixels', 'OtherUnits')
NIH_STACKTYPE_TYPE = (
'VolumeStack', 'RGBStack', 'MovieStack', 'HSVStack')
# Map Universal Imaging Corporation MetaMorph internal tag ids to name and type
UIC_TAGS = {
0: ('auto_scale', int),
1: ('min_scale', int),
2: ('max_scale', int),
3: ('spatial_calibration', int),
4: ('x_calibration', Fraction),
5: ('y_calibration', Fraction),
6: ('calibration_units', str),
7: ('name', str),
8: ('thresh_state', int),
9: ('thresh_state_red', int),
10: ('tagid_10', None), # undefined
11: ('thresh_state_green', int),
12: ('thresh_state_blue', int),
13: ('thresh_state_lo', int),
14: ('thresh_state_hi', int),
15: ('zoom', int),
16: ('create_time', julian_datetime),
17: ('last_saved_time', julian_datetime),
18: ('current_buffer', int),
19: ('gray_fit', None),
20: ('gray_point_count', None),
21: ('gray_x', Fraction),
22: ('gray_y', Fraction),
23: ('gray_min', Fraction),
24: ('gray_max', Fraction),
25: ('gray_unit_name', str),
26: ('standard_lut', int),
27: ('wavelength', int),
28: ('stage_position', '(%i,2,2)u4'), # N xy positions as fractions
29: ('camera_chip_offset', '(%i,2,2)u4'), # N xy offsets as fractions
30: ('overlay_mask', None),
31: ('overlay_compress', None),
32: ('overlay', None),
33: ('special_overlay_mask', None),
34: ('special_overlay_compress', None),
35: ('special_overlay', None),
36: ('image_property', read_uic_image_property),
37: ('stage_label', '%ip'), # N str
38: ('autoscale_lo_info', Fraction),
39: ('autoscale_hi_info', Fraction),
40: ('absolute_z', '(%i,2)u4'), # N fractions
41: ('absolute_z_valid', '(%i,)u4'), # N long
42: ('gamma', int),
43: ('gamma_red', int),
44: ('gamma_green', int),
45: ('gamma_blue', int),
46: ('camera_bin', int),
47: ('new_lut', int),
48: ('image_property_ex', None),
49: ('plane_property', int),
50: ('user_lut_table', '(256,3)u1'),
51: ('red_autoscale_info', int),
52: ('red_autoscale_lo_info', Fraction),
53: ('red_autoscale_hi_info', Fraction),
54: ('red_minscale_info', int),
55: ('red_maxscale_info', int),
56: ('green_autoscale_info', int),
57: ('green_autoscale_lo_info', Fraction),
58: ('green_autoscale_hi_info', Fraction),
59: ('green_minscale_info', int),
60: ('green_maxscale_info', int),
61: ('blue_autoscale_info', int),
62: ('blue_autoscale_lo_info', Fraction),
63: ('blue_autoscale_hi_info', Fraction),
64: ('blue_min_scale_info', int),
65: ('blue_max_scale_info', int),
# 66: ('overlay_plane_color', read_uic_overlay_plane_color),
}
# Olympus FluoView
MM_DIMENSION = [
('name', 'a16'),
('size', 'i4'),
('origin', 'f8'),
('resolution', 'f8'),
('unit', 'a64'),
]
MM_HEADER = [
('header_flag', 'i2'),
('image_type', 'u1'),
('image_name', 'a257'),
('offset_data', 'u4'),
('palette_size', 'i4'),
('offset_palette0', 'u4'),
('offset_palette1', 'u4'),
('comment_size', 'i4'),
('offset_comment', 'u4'),
('dimensions', MM_DIMENSION, 10),
('offset_position', 'u4'),
('map_type', 'i2'),
('map_min', 'f8'),
('map_max', 'f8'),
('min_value', 'f8'),
('max_value', 'f8'),
('offset_map', 'u4'),
('gamma', 'f8'),
('offset', 'f8'),
('gray_channel', MM_DIMENSION),
('offset_thumbnail', 'u4'),
('voice_field', 'i4'),
('offset_voice_field', 'u4'),
]
# Carl Zeiss LSM
CZ_LSM_INFO = [
('magic_number', 'u4'),
('structure_size', 'i4'),
('dimension_x', 'i4'),
('dimension_y', 'i4'),
('dimension_z', 'i4'),
('dimension_channels', 'i4'),
('dimension_time', 'i4'),
('data_type', 'i4'), # CZ_DATA_TYPES
('thumbnail_x', 'i4'),
('thumbnail_y', 'i4'),
('voxel_size_x', 'f8'),
('voxel_size_y', 'f8'),
('voxel_size_z', 'f8'),
('origin_x', 'f8'),
('origin_y', 'f8'),
('origin_z', 'f8'),
('scan_type', 'u2'),
('spectral_scan', 'u2'),
('type_of_data', 'u4'), # CZ_TYPE_OF_DATA
('offset_vector_overlay', 'u4'),
('offset_input_lut', 'u4'),
('offset_output_lut', 'u4'),
('offset_channel_colors', 'u4'),
('time_interval', 'f8'),
('offset_channel_data_types', 'u4'),
('offset_scan_info', 'u4'), # CZ_LSM_SCAN_INFO
('offset_ks_data', 'u4'),
('offset_time_stamps', 'u4'),
('offset_event_list', 'u4'),
('offset_roi', 'u4'),
('offset_bleach_roi', 'u4'),
('offset_next_recording', 'u4'),
# LSM 2.0 ends here
('display_aspect_x', 'f8'),
('display_aspect_y', 'f8'),
('display_aspect_z', 'f8'),
('display_aspect_time', 'f8'),
('offset_mean_of_roi_overlay', 'u4'),
('offset_topo_isoline_overlay', 'u4'),
('offset_topo_profile_overlay', 'u4'),
('offset_linescan_overlay', 'u4'),
('offset_toolbar_flags', 'u4'),
('offset_channel_wavelength', 'u4'),
('offset_channel_factors', 'u4'),
('objective_sphere_correction', 'f8'),
('offset_unmix_parameters', 'u4'),
# LSM 3.2, 4.0 end here
('offset_acquisition_parameters', 'u4'),
('offset_characteristics', 'u4'),
('offset_palette', 'u4'),
('time_difference_x', 'f8'),
('time_difference_y', 'f8'),
('time_difference_z', 'f8'),
('internal_use_1', 'u4'),
('dimension_p', 'i4'),
('dimension_m', 'i4'),
('dimensions_reserved', '16i4'),
('offset_tile_positions', 'u4'),
('reserved_1', '9u4'),
('offset_positions', 'u4'),
('reserved_2', '21u4'), # must be 0
]
# Import functions for LSM_INFO sub-records
CZ_LSM_INFO_READERS = {
'scan_info': read_cz_lsm_scan_info,
'time_stamps': read_cz_lsm_time_stamps,
'event_list': read_cz_lsm_event_list,
'channel_colors': read_cz_lsm_floatpairs,
'positions': read_cz_lsm_floatpairs,
'tile_positions': read_cz_lsm_floatpairs,
}
# Map cz_lsm_info.scan_type to dimension order
CZ_SCAN_TYPES = {
0: 'XYZCT', # x-y-z scan
1: 'XYZCT', # z scan (x-z plane)
2: 'XYZCT', # line scan
3: 'XYTCZ', # time series x-y
4: 'XYZTC', # time series x-z
5: 'XYTCZ', # time series 'Mean of ROIs'
6: 'XYZTC', # time series x-y-z
7: 'XYCTZ', # spline scan
8: 'XYCZT', # spline scan x-z
9: 'XYTCZ', # time series spline plane x-z
10: 'XYZCT', # point mode
}
# Map dimension codes to cz_lsm_info attribute
CZ_DIMENSIONS = {
'X': 'dimension_x',
'Y': 'dimension_y',
'Z': 'dimension_z',
'C': 'dimension_channels',
'T': 'dimension_time',
}
# Description of cz_lsm_info.data_type
CZ_DATA_TYPES = {
0: 'varying data types',
1: '8 bit unsigned integer',
2: '12 bit unsigned integer',
5: '32 bit float',
}
# Description of cz_lsm_info.type_of_data
CZ_TYPE_OF_DATA = {
0: 'Original scan data',
1: 'Calculated data',
2: '3D reconstruction',
3: 'Topography height map',
}
CZ_LSM_SCAN_INFO_ARRAYS = {
0x20000000: "tracks",
0x30000000: "lasers",
0x60000000: "detection_channels",
0x80000000: "illumination_channels",
0xa0000000: "beam_splitters",
0xc0000000: "data_channels",
0x11000000: "timers",
0x13000000: "markers",
}
CZ_LSM_SCAN_INFO_STRUCTS = {
# 0x10000000: "recording",
0x40000000: "track",
0x50000000: "laser",
0x70000000: "detection_channel",
0x90000000: "illumination_channel",
0xb0000000: "beam_splitter",
0xd0000000: "data_channel",
0x12000000: "timer",
0x14000000: "marker",
}
CZ_LSM_SCAN_INFO_ATTRIBUTES = {
# recording
0x10000001: "name",
0x10000002: "description",
0x10000003: "notes",
0x10000004: "objective",
0x10000005: "processing_summary",
0x10000006: "special_scan_mode",
0x10000007: "scan_type",
0x10000008: "scan_mode",
0x10000009: "number_of_stacks",
0x1000000a: "lines_per_plane",
0x1000000b: "samples_per_line",
0x1000000c: "planes_per_volume",
0x1000000d: "images_width",
0x1000000e: "images_height",
0x1000000f: "images_number_planes",
0x10000010: "images_number_stacks",
0x10000011: "images_number_channels",
0x10000012: "linscan_xy_size",
0x10000013: "scan_direction",
0x10000014: "time_series",
0x10000015: "original_scan_data",
0x10000016: "zoom_x",<|fim▁hole|> 0x1000001b: "sample_0z",
0x1000001c: "sample_spacing",
0x1000001d: "line_spacing",
0x1000001e: "plane_spacing",
0x1000001f: "plane_width",
0x10000020: "plane_height",
0x10000021: "volume_depth",
0x10000023: "nutation",
0x10000034: "rotation",
0x10000035: "precession",
0x10000036: "sample_0time",
0x10000037: "start_scan_trigger_in",
0x10000038: "start_scan_trigger_out",
0x10000039: "start_scan_event",
0x10000040: "start_scan_time",
0x10000041: "stop_scan_trigger_in",
0x10000042: "stop_scan_trigger_out",
0x10000043: "stop_scan_event",
0x10000044: "stop_scan_time",
0x10000045: "use_rois",
0x10000046: "use_reduced_memory_rois",
0x10000047: "user",
0x10000048: "use_bc_correction",
0x10000049: "position_bc_correction1",
0x10000050: "position_bc_correction2",
0x10000051: "interpolation_y",
0x10000052: "camera_binning",
0x10000053: "camera_supersampling",
0x10000054: "camera_frame_width",
0x10000055: "camera_frame_height",
0x10000056: "camera_offset_x",
0x10000057: "camera_offset_y",
0x10000059: "rt_binning",
0x1000005a: "rt_frame_width",
0x1000005b: "rt_frame_height",
0x1000005c: "rt_region_width",
0x1000005d: "rt_region_height",
0x1000005e: "rt_offset_x",
0x1000005f: "rt_offset_y",
0x10000060: "rt_zoom",
0x10000061: "rt_line_period",
0x10000062: "prescan",
0x10000063: "scan_direction_z",
# track
0x40000001: "multiplex_type", # 0 after line; 1 after frame
0x40000002: "multiplex_order",
0x40000003: "sampling_mode", # 0 sample; 1 line average; 2 frame average
0x40000004: "sampling_method", # 1 mean; 2 sum
0x40000005: "sampling_number",
0x40000006: "acquire",
0x40000007: "sample_observation_time",
0x4000000b: "time_between_stacks",
0x4000000c: "name",
0x4000000d: "collimator1_name",
0x4000000e: "collimator1_position",
0x4000000f: "collimator2_name",
0x40000010: "collimator2_position",
0x40000011: "is_bleach_track",
0x40000012: "is_bleach_after_scan_number",
0x40000013: "bleach_scan_number",
0x40000014: "trigger_in",
0x40000015: "trigger_out",
0x40000016: "is_ratio_track",
0x40000017: "bleach_count",
0x40000018: "spi_center_wavelength",
0x40000019: "pixel_time",
0x40000021: "condensor_frontlens",
0x40000023: "field_stop_value",
0x40000024: "id_condensor_aperture",
0x40000025: "condensor_aperture",
0x40000026: "id_condensor_revolver",
0x40000027: "condensor_filter",
0x40000028: "id_transmission_filter1",
0x40000029: "id_transmission1",
0x40000030: "id_transmission_filter2",
0x40000031: "id_transmission2",
0x40000032: "repeat_bleach",
0x40000033: "enable_spot_bleach_pos",
0x40000034: "spot_bleach_posx",
0x40000035: "spot_bleach_posy",
0x40000036: "spot_bleach_posz",
0x40000037: "id_tubelens",
0x40000038: "id_tubelens_position",
0x40000039: "transmitted_light",
0x4000003a: "reflected_light",
0x4000003b: "simultan_grab_and_bleach",
0x4000003c: "bleach_pixel_time",
# laser
0x50000001: "name",
0x50000002: "acquire",
0x50000003: "power",
# detection_channel
0x70000001: "integration_mode",
0x70000002: "special_mode",
0x70000003: "detector_gain_first",
0x70000004: "detector_gain_last",
0x70000005: "amplifier_gain_first",
0x70000006: "amplifier_gain_last",
0x70000007: "amplifier_offs_first",
0x70000008: "amplifier_offs_last",
0x70000009: "pinhole_diameter",
0x7000000a: "counting_trigger",
0x7000000b: "acquire",
0x7000000c: "point_detector_name",
0x7000000d: "amplifier_name",
0x7000000e: "pinhole_name",
0x7000000f: "filter_set_name",
0x70000010: "filter_name",
0x70000013: "integrator_name",
0x70000014: "channel_name",
0x70000015: "detector_gain_bc1",
0x70000016: "detector_gain_bc2",
0x70000017: "amplifier_gain_bc1",
0x70000018: "amplifier_gain_bc2",
0x70000019: "amplifier_offset_bc1",
0x70000020: "amplifier_offset_bc2",
0x70000021: "spectral_scan_channels",
0x70000022: "spi_wavelength_start",
0x70000023: "spi_wavelength_stop",
0x70000026: "dye_name",
0x70000027: "dye_folder",
# illumination_channel
0x90000001: "name",
0x90000002: "power",
0x90000003: "wavelength",
0x90000004: "aquire",
0x90000005: "detchannel_name",
0x90000006: "power_bc1",
0x90000007: "power_bc2",
# beam_splitter
0xb0000001: "filter_set",
0xb0000002: "filter",
0xb0000003: "name",
# data_channel
0xd0000001: "name",
0xd0000003: "acquire",
0xd0000004: "color",
0xd0000005: "sample_type",
0xd0000006: "bits_per_sample",
0xd0000007: "ratio_type",
0xd0000008: "ratio_track1",
0xd0000009: "ratio_track2",
0xd000000a: "ratio_channel1",
0xd000000b: "ratio_channel2",
0xd000000c: "ratio_const1",
0xd000000d: "ratio_const2",
0xd000000e: "ratio_const3",
0xd000000f: "ratio_const4",
0xd0000010: "ratio_const5",
0xd0000011: "ratio_const6",
0xd0000012: "ratio_first_images1",
0xd0000013: "ratio_first_images2",
0xd0000014: "dye_name",
0xd0000015: "dye_folder",
0xd0000016: "spectrum",
0xd0000017: "acquire",
# timer
0x12000001: "name",
0x12000002: "description",
0x12000003: "interval",
0x12000004: "trigger_in",
0x12000005: "trigger_out",
0x12000006: "activation_time",
0x12000007: "activation_number",
# marker
0x14000001: "name",
0x14000002: "description",
0x14000003: "trigger_in",
0x14000004: "trigger_out",
}
# Map TIFF tag code to attribute name, default value, type, count, validator
TIFF_TAGS = {
254: ('new_subfile_type', 0, 4, 1, TIFF_SUBFILE_TYPES()),
255: ('subfile_type', None, 3, 1,
{0: 'undefined', 1: 'image', 2: 'reduced_image', 3: 'page'}),
256: ('image_width', None, 4, 1, None),
257: ('image_length', None, 4, 1, None),
258: ('bits_per_sample', 1, 3, 1, None),
259: ('compression', 1, 3, 1, TIFF_COMPESSIONS),
262: ('photometric', None, 3, 1, TIFF_PHOTOMETRICS),
266: ('fill_order', 1, 3, 1, {1: 'msb2lsb', 2: 'lsb2msb'}),
269: ('document_name', None, 2, None, None),
270: ('image_description', None, 2, None, None),
271: ('make', None, 2, None, None),
272: ('model', None, 2, None, None),
273: ('strip_offsets', None, 4, None, None),
274: ('orientation', 1, 3, 1, TIFF_ORIENTATIONS),
277: ('samples_per_pixel', 1, 3, 1, None),
278: ('rows_per_strip', 2 ** 32 - 1, 4, 1, None),
279: ('strip_byte_counts', None, 4, None, None),
280: ('min_sample_value', None, 3, None, None),
281: ('max_sample_value', None, 3, None, None), # 2**bits_per_sample
282: ('x_resolution', None, 5, 1, None),
283: ('y_resolution', None, 5, 1, None),
284: ('planar_configuration', 1, 3, 1, {1: 'contig', 2: 'separate'}),
285: ('page_name', None, 2, None, None),
286: ('x_position', None, 5, 1, None),
287: ('y_position', None, 5, 1, None),
296: ('resolution_unit', 2, 4, 1, {1: 'none', 2: 'inch', 3: 'centimeter'}),
297: ('page_number', None, 3, 2, None),
305: ('software', None, 2, None, None),
306: ('datetime', None, 2, None, None),
315: ('artist', None, 2, None, None),
316: ('host_computer', None, 2, None, None),
317: ('predictor', 1, 3, 1, {1: None, 2: 'horizontal'}),
318: ('white_point', None, 5, 2, None),
319: ('primary_chromaticities', None, 5, 6, None),
320: ('color_map', None, 3, None, None),
322: ('tile_width', None, 4, 1, None),
323: ('tile_length', None, 4, 1, None),
324: ('tile_offsets', None, 4, None, None),
325: ('tile_byte_counts', None, 4, None, None),
338: ('extra_samples', None, 3, None,
{0: 'unspecified', 1: 'assocalpha', 2: 'unassalpha'}),
339: ('sample_format', 1, 3, 1, TIFF_SAMPLE_FORMATS),
340: ('smin_sample_value', None, None, None, None),
341: ('smax_sample_value', None, None, None, None),
347: ('jpeg_tables', None, 7, None, None),
530: ('ycbcr_subsampling', 1, 3, 2, None),
531: ('ycbcr_positioning', 1, 3, 1, None),
32996: ('sgi_datatype', None, None, 1, None), # use sample_format
32997: ('image_depth', None, 4, 1, None),
32998: ('tile_depth', None, 4, 1, None),
33432: ('copyright', None, 1, None, None),
33445: ('md_file_tag', None, 4, 1, None),
33446: ('md_scale_pixel', None, 5, 1, None),
33447: ('md_color_table', None, 3, None, None),
33448: ('md_lab_name', None, 2, None, None),
33449: ('md_sample_info', None, 2, None, None),
33450: ('md_prep_date', None, 2, None, None),
33451: ('md_prep_time', None, 2, None, None),
33452: ('md_file_units', None, 2, None, None),
33550: ('model_pixel_scale', None, 12, 3, None),
33922: ('model_tie_point', None, 12, None, None),
34665: ('exif_ifd', None, None, 1, None),
34735: ('geo_key_directory', None, 3, None, None),
34736: ('geo_double_params', None, 12, None, None),
34737: ('geo_ascii_params', None, 2, None, None),
34853: ('gps_ifd', None, None, 1, None),
37510: ('user_comment', None, None, None, None),
42112: ('gdal_metadata', None, 2, None, None),
42113: ('gdal_nodata', None, 2, None, None),
50289: ('mc_xy_position', None, 12, 2, None),
50290: ('mc_z_position', None, 12, 1, None),
50291: ('mc_xy_calibration', None, 12, 3, None),
50292: ('mc_lens_lem_na_n', None, 12, 3, None),
50293: ('mc_channel_name', None, 1, None, None),
50294: ('mc_ex_wavelength', None, 12, 1, None),
50295: ('mc_time_stamp', None, 12, 1, None),
50838: ('imagej_byte_counts', None, None, None, None),
65200: ('flex_xml', None, 2, None, None),
# code: (attribute name, default value, type, count, validator)
}
# Map custom TIFF tag codes to attribute names and import functions
CUSTOM_TAGS = {
700: ('xmp', read_bytes),
34377: ('photoshop', read_numpy),
33723: ('iptc', read_bytes),
34675: ('icc_profile', read_bytes),
33628: ('uic1tag', read_uic1tag), # Universal Imaging Corporation STK
33629: ('uic2tag', read_uic2tag),
33630: ('uic3tag', read_uic3tag),
33631: ('uic4tag', read_uic4tag),
34361: ('mm_header', read_mm_header), # Olympus FluoView
34362: ('mm_stamp', read_mm_stamp),
34386: ('mm_user_block', read_bytes),
34412: ('cz_lsm_info', read_cz_lsm_info), # Carl Zeiss LSM
43314: ('nih_image_header', read_nih_image_header),
# 40001: ('mc_ipwinscal', read_bytes),
40100: ('mc_id_old', read_bytes),
50288: ('mc_id', read_bytes),
50296: ('mc_frame_properties', read_bytes),
50839: ('imagej_metadata', read_bytes),
51123: ('micromanager_metadata', read_json),
}
# Max line length of printed output
PRINT_LINE_LEN = 79
def imshow(data, title=None, vmin=0, vmax=None, cmap=None,
bitspersample=None, photometric='rgb', interpolation='nearest',
dpi=96, figure=None, subplot=111, maxdim=8192, **kwargs):
"""Plot n-dimensional images using matplotlib.pyplot.
Return figure, subplot and plot axis.
Requires pyplot already imported ``from matplotlib import pyplot``.
Parameters
----------
bitspersample : int or None
Number of bits per channel in integer RGB images.
photometric : {'miniswhite', 'minisblack', 'rgb', or 'palette'}
The color space of the image data.
title : str
Window and subplot title.
figure : matplotlib.figure.Figure (optional)
Matplotlib figure to use for plotting.
subplot : int
A matplotlib.pyplot.subplot axis.
maxdim : int
Maximum image size in any dimension.
kwargs : optional
Arguments for matplotlib.pyplot.imshow.
"""
# if photometric not in ('miniswhite', 'minisblack', 'rgb', 'palette'):
# raise ValueError("Can't handle %s photometrics" % photometric)
# TODO: handle photometric == 'separated' (CMYK)
isrgb = photometric in ('rgb', 'palette')
data = numpy.atleast_2d(data.squeeze())
data = data[(slice(0, maxdim), ) * len(data.shape)]
dims = data.ndim
if dims < 2:
raise ValueError("not an image")
elif dims == 2:
dims = 0
isrgb = False
else:
if isrgb and data.shape[-3] in (3, 4):
data = numpy.swapaxes(data, -3, -2)
data = numpy.swapaxes(data, -2, -1)
elif not isrgb and (data.shape[-1] < data.shape[-2] // 16 and
data.shape[-1] < data.shape[-3] // 16 and
data.shape[-1] < 5):
data = numpy.swapaxes(data, -3, -1)
data = numpy.swapaxes(data, -2, -1)
isrgb = isrgb and data.shape[-1] in (3, 4)
dims -= 3 if isrgb else 2
if photometric == 'palette' and isrgb:
datamax = data.max()
if datamax > 255:
data >>= 8 # possible precision loss
data = data.astype('B')
elif data.dtype.kind in 'ui':
if not (isrgb and data.dtype.itemsize <= 1) or bitspersample is None:
try:
bitspersample = int(math.ceil(math.log(data.max(), 2)))
except Exception:
bitspersample = data.dtype.itemsize * 8
elif not isinstance(bitspersample, int):
# bitspersample can be tuple, e.g. (5, 6, 5)
bitspersample = data.dtype.itemsize * 8
datamax = 2 ** bitspersample
if isrgb:
if bitspersample < 8:
data <<= 8 - bitspersample
elif bitspersample > 8:
data >>= bitspersample - 8 # precision loss
data = data.astype('B')
elif data.dtype.kind == 'f':
datamax = data.max()
if isrgb and datamax > 1.0:
if data.dtype.char == 'd':
data = data.astype('f')
data /= datamax
elif data.dtype.kind == 'b':
datamax = 1
elif data.dtype.kind == 'c':
raise NotImplementedError("complex type") # TODO: handle complex types
if not isrgb:
if vmax is None:
vmax = datamax
if vmin is None:
if data.dtype.kind == 'i':
dtmin = numpy.iinfo(data.dtype).min
vmin = numpy.min(data)
if vmin == dtmin:
vmin = numpy.min(data > dtmin)
if data.dtype.kind == 'f':
dtmin = numpy.finfo(data.dtype).min
vmin = numpy.min(data)
if vmin == dtmin:
vmin = numpy.min(data > dtmin)
else:
vmin = 0
pyplot = sys.modules['matplotlib.pyplot']
if figure is None:
pyplot.rc('font', family='sans-serif', weight='normal', size=8)
figure = pyplot.figure(dpi=dpi, figsize=(10.3, 6.3), frameon=True,
facecolor='1.0', edgecolor='w')
try:
figure.canvas.manager.window.title(title)
except Exception:
pass
pyplot.subplots_adjust(bottom=0.03 * (dims + 2), top=0.9,
left=0.1, right=0.95, hspace=0.05, wspace=0.0)
subplot = pyplot.subplot(subplot)
if title:
try:
title = str(title, 'Windows-1252')
except TypeError:
pass
pyplot.title(title, size=11)
if cmap is None:
if data.dtype.kind in 'ubf' or vmin == 0:
cmap = 'cubehelix'
else:
cmap = 'coolwarm'
if photometric == 'miniswhite':
cmap += '_r'
image = pyplot.imshow(data[(0, ) * dims].squeeze(), vmin=vmin, vmax=vmax,
cmap=cmap, interpolation=interpolation, **kwargs)
if not isrgb:
pyplot.colorbar() # panchor=(0.55, 0.5), fraction=0.05
def format_coord(x, y):
# callback function to format coordinate display in toolbar
x = int(x + 0.5)
y = int(y + 0.5)
try:
if dims:
return "%s @ %s [%4i, %4i]" % (cur_ax_dat[1][y, x],
current, x, y)
else:
return "%s @ [%4i, %4i]" % (data[y, x], x, y)
except IndexError:
return ""
pyplot.gca().format_coord = format_coord
if dims:
current = list((0, ) * dims)
cur_ax_dat = [0, data[tuple(current)].squeeze()]
sliders = [pyplot.Slider(
pyplot.axes([0.125, 0.03 * (axis + 1), 0.725, 0.025]),
'Dimension %i' % axis, 0, data.shape[axis] - 1, 0, facecolor='0.5',
valfmt='%%.0f [%i]' % data.shape[axis]) for axis in range(dims)]
for slider in sliders:
slider.drawon = False
def set_image(current, sliders=sliders, data=data):
# change image and redraw canvas
cur_ax_dat[1] = data[tuple(current)].squeeze()
image.set_data(cur_ax_dat[1])
for ctrl, index in zip(sliders, current):
ctrl.eventson = False
ctrl.set_val(index)
ctrl.eventson = True
figure.canvas.draw()
def on_changed(index, axis, data=data, current=current):
# callback function for slider change event
index = int(round(index))
cur_ax_dat[0] = axis
if index == current[axis]:
return
if index >= data.shape[axis]:
index = 0
elif index < 0:
index = data.shape[axis] - 1
current[axis] = index
set_image(current)
def on_keypressed(event, data=data, current=current):
# callback function for key press event
key = event.key
axis = cur_ax_dat[0]
if str(key) in '0123456789':
on_changed(key, axis)
elif key == 'right':
on_changed(current[axis] + 1, axis)
elif key == 'left':
on_changed(current[axis] - 1, axis)
elif key == 'up':
cur_ax_dat[0] = 0 if axis == len(data.shape) - 1 else axis + 1
elif key == 'down':
cur_ax_dat[0] = len(data.shape) - 1 if axis == 0 else axis - 1
elif key == 'end':
on_changed(data.shape[axis] - 1, axis)
elif key == 'home':
on_changed(0, axis)
figure.canvas.mpl_connect('key_press_event', on_keypressed)
for axis, ctrl in enumerate(sliders):
ctrl.on_changed(lambda k, a=axis: on_changed(k, a))
return figure, subplot, image
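# Editor's sketch (illustrative, not part of the original module): typical use of
# imshow() above, assuming `data` is a numpy array obtained elsewhere, e.g. via
# TiffFile('test.tif').asarray(); the file name is a placeholder.
#
#     from matplotlib import pyplot
#     figure, subplot, image = imshow(data, title='example', photometric='minisblack')
#     pyplot.show()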
def _app_show():
"""Block the GUI. For use as skimage plugin."""
pyplot = sys.modules['matplotlib.pyplot']
pyplot.show()
def main(argv=None):
"""Command line usage main function."""
if float(sys.version[0:3]) < 2.6:
print("This script requires Python version 2.6 or better.")
print("This is Python version %s" % sys.version)
return 0
if argv is None:
argv = sys.argv
import optparse
parser = optparse.OptionParser(
usage="usage: %prog [options] path",
description="Display image data in TIFF files.",
version="%%prog %s" % __version__)
opt = parser.add_option
opt('-p', '--page', dest='page', type='int', default=-1,
help="display single page")
opt('-s', '--series', dest='series', type='int', default=-1,
help="display series of pages of same shape")
opt('--nomultifile', dest='nomultifile', action='store_true',
default=False, help="don't read OME series from multiple files")
opt('--noplot', dest='noplot', action='store_true', default=False,
help="don't display images")
opt('--interpol', dest='interpol', metavar='INTERPOL', default='bilinear',
help="image interpolation method")
opt('--dpi', dest='dpi', type='int', default=96,
help="set plot resolution")
opt('--debug', dest='debug', action='store_true', default=False,
help="raise exception on failures")
opt('--test', dest='test', action='store_true', default=False,
help="try read all images in path")
opt('--doctest', dest='doctest', action='store_true', default=False,
help="runs the docstring examples")
opt('-v', '--verbose', dest='verbose', action='store_true', default=True)
opt('-q', '--quiet', dest='verbose', action='store_false')
settings, path = parser.parse_args()
path = ' '.join(path)
if settings.doctest:
import doctest
doctest.testmod()
return 0
if not path:
parser.error("No file specified")
if settings.test:
test_tifffile(path, settings.verbose)
return 0
if any(i in path for i in '?*'):
path = glob.glob(path)
if not path:
print('no files match the pattern')
return 0
# TODO: handle image sequences
# if len(path) == 1:
path = path[0]
print("Reading file structure...", end=' ')
start = time.time()
try:
tif = TiffFile(path, multifile=not settings.nomultifile)
except Exception as e:
if settings.debug:
raise
else:
print("\n", e)
sys.exit(0)
print("%.3f ms" % ((time.time() - start) * 1e3))
if tif.is_ome:
settings.norgb = True
images = [(None, tif[0 if settings.page < 0 else settings.page])]
if not settings.noplot:
print("Reading image data... ", end=' ')
def notnone(x):
return next(i for i in x if i is not None)
start = time.time()
try:
if settings.page >= 0:
images = [(tif.asarray(key=settings.page),
tif[settings.page])]
elif settings.series >= 0:
images = [(tif.asarray(series=settings.series),
notnone(tif.series[settings.series].pages))]
else:
images = []
for i, s in enumerate(tif.series):
try:
images.append(
(tif.asarray(series=i), notnone(s.pages)))
except ValueError as e:
images.append((None, notnone(s.pages)))
if settings.debug:
raise
else:
print("\n* series %i failed: %s... " % (i, e),
end='')
print("%.3f ms" % ((time.time() - start) * 1e3))
except Exception as e:
if settings.debug:
raise
else:
print(e)
tif.close()
print("\nTIFF file:", tif)
print()
for i, s in enumerate(tif.series):
print("Series %i" % i)
print(s)
print()
for i, page in images:
print(page)
print(page.tags)
if page.is_palette:
print("\nColor Map:", page.color_map.shape, page.color_map.dtype)
for attr in ('cz_lsm_info', 'cz_lsm_scan_info', 'uic_tags',
'mm_header', 'imagej_tags', 'micromanager_metadata',
'nih_image_header'):
if hasattr(page, attr):
print("", attr.upper(), Record(getattr(page, attr)), sep="\n")
print()
if page.is_micromanager:
print('MICROMANAGER_FILE_METADATA')
print(Record(tif.micromanager_metadata))
if images and not settings.noplot:
try:
import matplotlib
matplotlib.use('TkAgg')
from matplotlib import pyplot
except ImportError as e:
warnings.warn("failed to import matplotlib.\n%s" % e)
else:
for img, page in images:
if img is None:
continue
vmin, vmax = None, None
if 'gdal_nodata' in page.tags:
try:
vmin = numpy.min(img[img > float(page.gdal_nodata)])
except ValueError:
pass
if page.is_stk:
try:
vmin = page.uic_tags['min_scale']
vmax = page.uic_tags['max_scale']
except KeyError:
pass
else:
if vmax <= vmin:
vmin, vmax = None, None
title = "%s\n %s" % (str(tif), str(page))
imshow(img, title=title, vmin=vmin, vmax=vmax,
bitspersample=page.bits_per_sample,
photometric=page.photometric,
interpolation=settings.interpol,
dpi=settings.dpi)
pyplot.show()
TIFFfile = TiffFile # backwards compatibility
if sys.version_info[0] > 2:
basestring = str, bytes
unicode = str
if __name__ == "__main__":
sys.exit(main())<|fim▁end|>
|
0x10000017: "zoom_y",
0x10000018: "zoom_z",
0x10000019: "sample_0x",
0x1000001a: "sample_0y",
|
<|file_name|>traits.rs<|end_file_name|><|fim▁begin|>use image::{imageops, DynamicImage, GenericImageView, GrayImage, ImageBuffer, Pixel};
use std::borrow::Cow;
use std::ops;
/// Interface for types used for storing hash data.
///
/// This is implemented for `Vec<u8>`, `Box<[u8]>` and arrays that are multiples/combinations of
/// useful x86 bytewise SIMD register widths (64, 128, 256, 512 bits).
///
/// Please feel free to open a pull request [on Github](https://github.com/abonander/img_hash)
/// if you need this implemented for a different array size.
pub trait HashBytes {
/// Construct this type from an iterator of bytes.
///
/// If this type has a finite capacity (i.e. an array) then it can ignore extra data
/// (the hash API will not create a hash larger than this type can contain). Unused capacity
/// **must** be zeroed.
fn from_iter<I: Iterator<Item = u8>>(iter: I) -> Self where Self: Sized;
/// Return the maximum capacity of this type, in bits.
///
/// If this type has an arbitrary/theoretically infinite capacity, return `usize::max_value()`.
fn max_bits() -> usize;
/// Get the hash bytes as a slice.
fn as_slice(&self) -> &[u8];
}
impl HashBytes for Box<[u8]> {
fn from_iter<I: Iterator<Item = u8>>(iter: I) -> Self {
// stable in 1.32, effectively the same thing
// iter.collect()
iter.collect::<Vec<u8>>().into_boxed_slice()
}
fn max_bits() -> usize {
usize::max_value()
}
fn as_slice(&self) -> &[u8] { self }
}
impl HashBytes for Vec<u8> {
fn from_iter<I: Iterator<Item=u8>>(iter: I) -> Self {
iter.collect()
}
fn max_bits() -> usize {
usize::max_value()
}
fn as_slice(&self) -> &[u8] { self }
}
macro_rules! hash_bytes_array {
($($n:expr),*) => {$(
impl HashBytes for [u8; $n] {
fn from_iter<I: Iterator<Item=u8>>(mut iter: I) -> Self {
// optimizer should eliminate this zeroing
let mut out = [0; $n];
for (src, dest) in iter.by_ref().zip(out.as_mut()) {
*dest = src;
}
out
}
fn max_bits() -> usize {
$n * 8
}
fn as_slice(&self) -> &[u8] { self }
}
)*}
}
hash_bytes_array!(8, 16, 24, 32, 40, 48, 56, 64);
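// Editor's illustration (not part of the original crate): with the macro invocation
// above, fixed-size arrays can hold hash data; unused capacity stays zeroed as the
// trait contract requires.
//
//     let bytes = <[u8; 8]>::from_iter([0xAB_u8, 0xCD].iter().copied());
//     assert_eq!(&bytes.as_slice()[..2], &[0xAB, 0xCD]);
//     assert_eq!(<[u8; 8]>::max_bits(), 64);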
struct BoolsToBytes<I> {
iter: I,
}
impl<I> Iterator for BoolsToBytes<I> where I: Iterator<Item=bool> {
type Item = u8;
fn next(&mut self) -> Option<<Self as Iterator>::Item> {
// starts at the LSB and works up
self.iter.by_ref().take(8).enumerate().fold(None, |accum, (n, val)| {
accum.or(Some(0)).map(|accum| accum | ((val as u8) << n))
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
let (lower, upper) = self.iter.size_hint();
(
lower / 8,
// if the upper bound doesn't evenly divide by `8` then we will yield an extra item
upper.map(|upper| if upper % 8 == 0 { upper / 8 } else { upper / 8 + 1})
)
}
}
pub(crate) trait BitSet: HashBytes {
fn from_bools<I: Iterator<Item = bool>>(iter: I) -> Self where Self: Sized {
Self::from_iter(BoolsToBytes { iter })
}
fn hamming(&self, other: &Self) -> u32 {
self.as_slice().iter().zip(other.as_slice()).map(|(l, r)| (l ^ r).count_ones()).sum()
}
}
impl<T: HashBytes> BitSet for T {}
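// Editor's sketch (illustrative only): the blanket impl above gives any HashBytes
// type bit-packing and Hamming-distance helpers. Bits are packed least-significant
// bit first, eight per byte.
//
//     let a = Vec::from_bools([true, false, true, false].iter().copied());
//     let b = Vec::from_bools([true, true, true, false].iter().copied());
//     assert_eq!(a.hamming(&b), 1); // the two packed hashes differ in one bit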
/// Shorthand trait bound for APIs in this crate.
///
/// Currently only implemented for the types provided by `image` with 8-bit channels.
pub trait Image: GenericImageView + 'static {
/// The equivalent `ImageBuffer` type for this container.
type Buf: Image + DiffImage;
/// Grayscale the image, reducing to 8 bit depth and dropping the alpha channel.
fn to_grayscale(&self) -> Cow<GrayImage>;
/// Blur the image with the given `Gaussian` sigma.
fn blur(&self, sigma: f32) -> Self::Buf;
/// Iterate over the image, passing each pixel's coordinates and values in `u8` to the closure.<|fim▁hole|> /// The iteration order is unspecified but each pixel **must** be visited exactly _once_.
///
/// If the pixel's channels are wider than 8 bits then the values should be scaled to
/// `[0, 255]`, not truncated.
///
/// ### Note
/// If the pixel data length is 2 or 4, the last index is assumed to be the alpha channel.
/// A pixel data length outside of `[1, 4]` will cause a panic.
fn foreach_pixel8<F>(&self, foreach: F) where F: FnMut(u32, u32, &[u8]);
}
/// Image types that can be diffed.
pub trait DiffImage {
/// Subtract the pixel values of `other` from `self` in-place.
fn diff_inplace(&mut self, other: &Self);
}
#[cfg(not(feature = "nightly"))]
impl<P: 'static, C: 'static> Image for ImageBuffer<P, C>
where P: Pixel<Subpixel = u8>, C: ops::Deref<Target=[u8]> {
type Buf = ImageBuffer<P, Vec<u8>>;
fn to_grayscale(&self) -> Cow<GrayImage> {
Cow::Owned(imageops::grayscale(self))
}
fn blur(&self, sigma: f32) -> Self::Buf { imageops::blur(self, sigma) }
fn foreach_pixel8<F>(&self, mut foreach: F) where F: FnMut(u32, u32, &[u8]) {
self.enumerate_pixels().for_each(|(x, y, px)| foreach(x, y, px.channels()))
}
}
#[cfg(feature = "nightly")]
impl<P: 'static, C: 'static> Image for ImageBuffer<P, C>
where P: Pixel<Subpixel = u8>, C: ops::Deref<Target=[u8]> {
type Buf = ImageBuffer<P, Vec<u8>>;
default fn to_grayscale(&self) -> Cow<GrayImage> {
Cow::Owned(imageops::grayscale(self))
}
default fn blur(&self, sigma: f32) -> Self::Buf { imageops::blur(self, sigma) }
default fn foreach_pixel8<F>(&self, mut foreach: F) where F: FnMut(u32, u32, &[u8]) {
self.enumerate_pixels().for_each(|(x, y, px)| foreach(x, y, px.channels()))
}
}
impl<P: 'static> DiffImage for ImageBuffer<P, Vec<u8>> where P: Pixel<Subpixel = u8> {
fn diff_inplace(&mut self, other: &Self) {
self.iter_mut().zip(other.iter()).for_each(|(l, r)| *l -= r);
}
}
impl Image for DynamicImage {
type Buf = image::RgbaImage;
fn to_grayscale(&self) -> Cow<GrayImage> {
self.as_luma8().map_or_else(|| Cow::Owned(self.to_luma()), Cow::Borrowed)
}
fn blur(&self, sigma: f32) -> Self::Buf { imageops::blur(self, sigma) }
fn foreach_pixel8<F>(&self, mut foreach: F) where F: FnMut(u32, u32, &[u8]) {
self.pixels().for_each(|(x, y, px)| foreach(x, y, px.channels()))
}
}
#[cfg(feature = "nightly")]
impl Image for GrayImage {
// type Buf = GrayImage;
// Avoids copying
fn to_grayscale(&self) -> Cow<GrayImage> {
Cow::Borrowed(self)
}
}
#[test]
fn test_bools_to_bytes() {
let bools = (0 .. 16).map(|x| x & 1 == 0);
let bytes = Vec::from_bools(bools.clone());
assert_eq!(*bytes, [0b01010101; 2]);
let bools_to_bytes = BoolsToBytes { iter: bools };
assert_eq!(bools_to_bytes.size_hint(), (2, Some(2)));
}<|fim▁end|>
|
///
|
<|file_name|>ipy.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-<|fim▁hole|># © 2016 Chris Ferrie ([email protected]) and
# Christopher E. Granade ([email protected])
#
# This file is a part of the Qinfer project.
# Licensed under the AGPL version 3.
##
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##
## FEATURES ###################################################################
from __future__ import absolute_import
from __future__ import division
## EXPORTS ###################################################################
__all__ = ['IPythonProgressBar']
## IMPORTS ####################################################################
try:
from IPython.display import display
import ipywidgets as ipw
except:
display = None
ipw = None
## CLASSES ###################################################################
class IPythonProgressBar(object):
"""
Represents a progress bar as an IPython widget. If the widget
is closed by the user, or by calling ``finished()``, any further
operations will be ignored.
.. note::
This progress bar is compatible with QuTiP progress bar
classes.
"""
def __init__(self):
if ipw is None:
raise ImportError("IPython support requires the ipywidgets package.")
self.widget = ipw.FloatProgress(
value=0.0, min=0.0, max=100.0, step=0.5,
description=""
)
@property
def description(self):
"""
Text description for the progress bar widget,
or ``None`` if the widget has been closed.
:type: `str`
"""
try:
return self.widget.description
except:
return None
@description.setter
def description(self, value):
try:
self.widget.description = value
except:
pass
def start(self, max):
"""
Displays the progress bar for a given maximum value.
:param float max: Maximum value of the progress bar.
"""
try:
self.widget.max = max
display(self.widget)
except:
pass
def update(self, n):
"""
Updates the progress bar to display a new value.
"""
try:
self.widget.value = n
except:
pass
def finished(self):
"""
Destroys the progress bar.
"""
try:
self.widget.close()
except:
pass<|fim▁end|>
|
##
# ipy.py: Interaction with IPython and Jupyter.
##
|
<|file_name|>Production5650.java<|end_file_name|><|fim▁begin|>package org.gradle.test.performance.mediummonolithicjavaproject.p282;
public class Production5650 {
private String property0;
public String getProperty0() {
return property0;
}
public void setProperty0(String value) {
property0 = value;
}
private String property1;
public String getProperty1() {
return property1;
}
public void setProperty1(String value) {
property1 = value;
}
private String property2;
public String getProperty2() {
return property2;
}
public void setProperty2(String value) {
property2 = value;
}
private String property3;
public String getProperty3() {
return property3;
}
public void setProperty3(String value) {
property3 = value;
}
private String property4;
public String getProperty4() {
return property4;
}<|fim▁hole|> property4 = value;
}
private String property5;
public String getProperty5() {
return property5;
}
public void setProperty5(String value) {
property5 = value;
}
private String property6;
public String getProperty6() {
return property6;
}
public void setProperty6(String value) {
property6 = value;
}
private String property7;
public String getProperty7() {
return property7;
}
public void setProperty7(String value) {
property7 = value;
}
private String property8;
public String getProperty8() {
return property8;
}
public void setProperty8(String value) {
property8 = value;
}
private String property9;
public String getProperty9() {
return property9;
}
public void setProperty9(String value) {
property9 = value;
}
}<|fim▁end|>
|
public void setProperty4(String value) {
|
<|file_name|>whitelist_parser.go<|end_file_name|><|fim▁begin|>package sanitize
import(
"os"
"encoding/json"<|fim▁hole|>func WhitelistFromFile(filepath string) (*Whitelist, error) {
bytes, err := readFileToBytes(filepath)
if err != nil {
return nil, err
}
whitelist, err := NewWhitelist(bytes)
return whitelist, err
}
// helper function to read entirety of provided file into byte slice
func readFileToBytes(filepath string) ([]byte, error) {
f, err := os.Open(filepath)
if err != nil {
return nil, err
}
defer f.Close()
// prepare byte slice to read json file into
fileInfo, err := f.Stat()
if err != nil {
return nil, err
}
bytes := make([]byte, fileInfo.Size())
_, err = f.Read(bytes)
return bytes, err
}
// Create a new whitelist from JSON configuration
func NewWhitelist(jsonData []byte) (*Whitelist, error) {
// unmarshal json file into contract-free interface
configuration := &Whitelist{}
err := json.Unmarshal(jsonData, configuration)
return configuration, err
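// Editor's sketch (illustrative, not part of the original package): loading a
// whitelist from disk; the file name below is a placeholder.
//
//	wl, err := WhitelistFromFile("whitelist.json")
//	if err != nil {
//		// handle the error
//	}
//	_ = wl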
}<|fim▁end|>
|
)
// Load a new whitelist from a JSON file
|
<|file_name|>.eslintrc.client.js<|end_file_name|><|fim▁begin|>module.exports = {
'extends': ['google', 'plugin:react/recommended'],
'parserOptions': {
'ecmaVersion': 6,
'sourceType': 'module',<|fim▁hole|> 'env': {
'browser': true,
},
'plugins': [
'react'
]
};<|fim▁end|>
|
'ecmaFeatures': {
'jsx': true
}
},
|
<|file_name|>cancel-orders.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import sys
import btceapi
# This sample shows use of a KeyHandler. For each API key in the file
# passed in as the first argument, all pending orders for the specified
# pair and type will be canceled.
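# Example invocation (editor's illustration; the key file name is a placeholder):
#   python cancel-orders.py keys.txt btc_usd sell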
if len(sys.argv) < 4:
print "Usage: cancel_orders.py <key file> <pair> <order type>"
print " key file - Path to a file containing key/secret/nonce data"
print " pair - A currency pair, such as btc_usd"
print " order type - Type of orders to process, either 'buy' or 'sell'"
sys.exit(1)
key_file = sys.argv[1]
pair = sys.argv[2]
order_type = unicode(sys.argv[3])
handler = btceapi.KeyHandler(key_file)
for key in handler.keys:
print "Canceling orders for key %s" % key
t = btceapi.TradeAPI(key, handler)
try:<|fim▁hole|> # with the correct order type.
orders = t.orderList(pair = pair)
for o in orders:
if o.type == order_type:
print " Canceling %s %s order for %f @ %f" % (pair, order_type,
o.amount, o.rate)
t.cancelOrder(o.order_id)
if not orders:
print " There are no %s %s orders" % (pair, order_type)
except Exception as e:
print " An error occurred: %s" % e<|fim▁end|>
|
# Get a list of orders for the given pair, and cancel the ones
|
<|file_name|>throwIfEmpty-spec.ts<|end_file_name|><|fim▁begin|>/** @prettier */
import { expect } from 'chai';
import { EMPTY, of, EmptyError, defer, throwError, Observable } from 'rxjs';
import { throwIfEmpty, mergeMap, retry, take } from 'rxjs/operators';
import { TestScheduler } from 'rxjs/testing';
import { observableMatcher } from '../helpers/observableMatcher';
/** @test {throwIfEmpty} */
describe('throwIfEmpty', () => {
let rxTestScheduler: TestScheduler;
beforeEach(() => {
rxTestScheduler = new TestScheduler(observableMatcher);
});
describe('with errorFactory', () => {
it('should error when empty', () => {
rxTestScheduler.run(({ cold, expectObservable }) => {
const source = cold('----|');
const expected = ' ----#';
const result = source.pipe(throwIfEmpty(() => new Error('test')));
expectObservable(result).toBe(expected, undefined, new Error('test'));
});
});
it('should throw if empty', () => {
const error = new Error('So empty inside');
let thrown: any;
EMPTY.pipe(throwIfEmpty(() => error)).subscribe({
error(err) {
thrown = err;
},
});
expect(thrown).to.equal(error);
});
it('should NOT throw if NOT empty', () => {
const error = new Error('So empty inside');
let thrown: any;
of('test')
.pipe(throwIfEmpty(() => error))
.subscribe({
error(err) {
thrown = err;
},
});
expect(thrown).to.be.undefined;
});
it('should pass values through', () => {
rxTestScheduler.run(({ cold, expectObservable, expectSubscriptions }) => {
const source = cold('----a---b---c---|');
const sub1 = ' ^---------------!';
const expected = ' ----a---b---c---|';
const result = source.pipe(throwIfEmpty(() => new Error('test')));
expectObservable(result).toBe(expected);
expectSubscriptions(source.subscriptions).toBe([sub1]);
});
});
it('should never when never', () => {
rxTestScheduler.run(({ cold, expectObservable, expectSubscriptions }) => {
const source = cold('-');
const sub1 = ' ^';
const expected = ' -';
const result = source.pipe(throwIfEmpty(() => new Error('test')));
expectObservable(result).toBe(expected);
expectSubscriptions(source.subscriptions).toBe([sub1]);
});
});
it('should error when empty', () => {
rxTestScheduler.run(({ cold, expectObservable, expectSubscriptions }) => {
const source = cold('----|');
const sub1 = ' ^---!';
const expected = ' ----#';
<|fim▁hole|> expectObservable(result).toBe(expected, undefined, new Error('test'));
expectSubscriptions(source.subscriptions).toBe([sub1]);
});
});
it('should throw if empty after retry', () => {
const error = new Error('So empty inside');
let thrown: any;
let sourceIsEmpty = false;
const source = defer(() => {
if (sourceIsEmpty) {
return EMPTY;
}
sourceIsEmpty = true;
return of(1, 2);
});
source
.pipe(
throwIfEmpty(() => error),
mergeMap((value) => {
if (value > 1) {
return throwError(() => new Error());
}
return of(value);
}),
retry(1)
)
.subscribe({
error(err) {
thrown = err;
},
});
expect(thrown).to.equal(error);
});
});
describe('without errorFactory', () => {
it('should throw EmptyError if empty', () => {
let thrown: any;
EMPTY.pipe(throwIfEmpty()).subscribe({
error(err) {
thrown = err;
},
});
expect(thrown).to.be.instanceof(EmptyError);
});
it('should NOT throw if NOT empty', () => {
let thrown: any;
of('test')
.pipe(throwIfEmpty())
.subscribe({
error(err) {
thrown = err;
},
});
expect(thrown).to.be.undefined;
});
it('should pass values through', () => {
rxTestScheduler.run(({ cold, expectObservable, expectSubscriptions }) => {
const source = cold('----a---b---c---|');
const sub1 = ' ^---------------!';
const expected = ' ----a---b---c---|';
const result = source.pipe(throwIfEmpty());
expectObservable(result).toBe(expected);
expectSubscriptions(source.subscriptions).toBe([sub1]);
});
});
it('should never when never', () => {
rxTestScheduler.run(({ cold, expectObservable, expectSubscriptions }) => {
const source = cold('-');
const sub1 = ' ^';
const expected = ' -';
const result = source.pipe(throwIfEmpty());
expectObservable(result).toBe(expected);
expectSubscriptions(source.subscriptions).toBe([sub1]);
});
});
it('should error when empty', () => {
rxTestScheduler.run(({ cold, expectObservable, expectSubscriptions }) => {
const source = cold('----|');
const sub1 = ' ^---!';
const expected = ' ----#';
const result = source.pipe(throwIfEmpty());
expectObservable(result).toBe(expected, undefined, new EmptyError());
expectSubscriptions(source.subscriptions).toBe([sub1]);
});
});
it('should throw if empty after retry', () => {
let thrown: any;
let sourceIsEmpty = false;
const source = defer(() => {
if (sourceIsEmpty) {
return EMPTY;
}
sourceIsEmpty = true;
return of(1, 2);
});
source
.pipe(
throwIfEmpty(),
mergeMap((value) => {
if (value > 1) {
return throwError(() => new Error());
}
return of(value);
}),
retry(1)
)
.subscribe({
error(err) {
thrown = err;
},
});
expect(thrown).to.be.instanceof(EmptyError);
});
});
it('should stop listening to a synchronous observable when unsubscribed', () => {
const sideEffects: number[] = [];
const synchronousObservable = new Observable<number>((subscriber) => {
// This will check to see if the subscriber was closed on each loop
// when the unsubscribe hits (from the `take`), it should be closed
for (let i = 0; !subscriber.closed && i < 10; i++) {
sideEffects.push(i);
subscriber.next(i);
}
});
synchronousObservable.pipe(throwIfEmpty(), take(3)).subscribe(() => {
/* noop */
});
expect(sideEffects).to.deep.equal([0, 1, 2]);
});
});<|fim▁end|>
|
const result = source.pipe(throwIfEmpty(() => new Error('test')));
|
<|file_name|>EditUserTest.java<|end_file_name|><|fim▁begin|>package userstoreauth.servlets;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import userstoreauth.model.UserVer2;
import userstoreauth.service.UserStoreMb;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
class EditUserTest {
@BeforeEach
void setUp() {
UserStoreMb us = new UserStoreMb();
us.deleteAll();
}
@Test<|fim▁hole|> void editUser() throws ServletException, IOException {
EditUser editUser = new EditUser();
UserStoreMb us = new UserStoreMb();
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
when(request.getParameter("login")).thenReturn("login");
when(request.getParameter("password")).thenReturn("password0");
when(request.getParameter("name")).thenReturn("name0");
when(request.getParameter("email")).thenReturn("email0");
when(request.getParameter("role")).thenReturn("admin");
when(request.getParameter("country")).thenReturn("Россия");
when(request.getParameter("city")).thenReturn("Москва");
UserVer2 user = new UserVer2("login", "password", "name", "email", "Россия", "Москва", Timestamp.valueOf(LocalDateTime.now()), "user");
us.addUser(user);
assertEquals(user, us.getByLogin("login"));
editUser.doPost(request, response);
user.setPassword("password0");
user.setName("name0");
user.setEmail("email0");
user.setRole("admin");
assertEquals(user, us.getByLogin("login"));
}
}<|fim▁end|>
| |
<|file_name|>streaming.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyspark_cassandra.util import as_java_object, as_java_array
from pyspark.streaming.dstream import DStream
from pyspark_cassandra.conf import WriteConf
from pyspark_cassandra.util import helper
from pyspark.serializers import AutoBatchedSerializer, PickleSerializer
def saveToCassandra(dstream, keyspace, table, columns=None, row_format=None, keyed=None,
write_conf=None, **write_conf_kwargs):
ctx = dstream._ssc._sc
gw = ctx._gateway
# create write config as map
write_conf = WriteConf.build(write_conf, **write_conf_kwargs)
write_conf = as_java_object(gw, write_conf.settings())
# convert the columns to a string array
columns = as_java_array(gw, "String", columns) if columns else None
return helper(ctx).saveToCassandra(dstream._jdstream, keyspace, table, columns, row_format,
keyed, write_conf)
def joinWithCassandraTable(dstream, keyspace, table, selected_columns=None, join_columns=None):
"""Joins a DStream (a stream of RDDs) with a Cassandra table
Arguments:
@param dstream(DStream)
The DStream to join. Equal to self when invoking joinWithCassandraTable on a monkey
patched DStream.
@param keyspace(string):
The keyspace to join on.
@param table(string):
The CQL table to join on.
@param selected_columns(string):
The columns to select from the Cassandra table.
@param join_columns(string):
The columns used to join on from the Cassandra table.
"""
<|fim▁hole|> selected_columns = as_java_array(gw, "String", selected_columns) if selected_columns else None
join_columns = as_java_array(gw, "String", join_columns) if join_columns else None
h = helper(ctx)
dstream = h.joinWithCassandraTable(dstream._jdstream, keyspace, table, selected_columns,
join_columns)
dstream = h.pickleRows(dstream)
dstream = h.javaDStream(dstream)
return DStream(dstream, ssc, AutoBatchedSerializer(PickleSerializer()))
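# Editor's sketch (illustrative, not from the original module): once the monkey
# patching below is applied, a DStream can be saved or joined directly. Keyspace,
# table and column names here are placeholders.
#
#     dstream.saveToCassandra("my_keyspace", "my_table")
#     joined = dstream.joinWithCassandraTable(
#         "my_keyspace", "my_table",
#         selected_columns=["value"], join_columns=["key"])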
# Monkey patch the default python DStream so that data in it can be stored to and joined with
# Cassandra tables
DStream.saveToCassandra = saveToCassandra
DStream.joinWithCassandraTable = joinWithCassandraTable<|fim▁end|>
|
ssc = dstream._ssc
ctx = ssc._sc
gw = ctx._gateway
|
<|file_name|>mutatingwebhookconfiguration.go<|end_file_name|><|fim▁begin|>/*
Copyright 2021 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by injection-gen. DO NOT EDIT.
package mutatingwebhookconfiguration
import (
context "context"
apiadmissionregistrationv1beta1 "k8s.io/api/admissionregistration/v1beta1"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
labels "k8s.io/apimachinery/pkg/labels"
v1beta1 "k8s.io/client-go/informers/admissionregistration/v1beta1"
kubernetes "k8s.io/client-go/kubernetes"
admissionregistrationv1beta1 "k8s.io/client-go/listers/admissionregistration/v1beta1"
cache "k8s.io/client-go/tools/cache"
client "knative.dev/pkg/client/injection/kube/client"
factory "knative.dev/pkg/client/injection/kube/informers/factory"
controller "knative.dev/pkg/controller"
injection "knative.dev/pkg/injection"
logging "knative.dev/pkg/logging"
)
func init() {
injection.Default.RegisterInformer(withInformer)
injection.Dynamic.RegisterDynamicInformer(withDynamicInformer)
}
// Key is used for associating the Informer inside the context.Context.
type Key struct{}
func withInformer(ctx context.Context) (context.Context, controller.Informer) {
f := factory.Get(ctx)
inf := f.Admissionregistration().V1beta1().MutatingWebhookConfigurations()
return context.WithValue(ctx, Key{}, inf), inf.Informer()
}
func withDynamicInformer(ctx context.Context) context.Context {
inf := &wrapper{client: client.Get(ctx), resourceVersion: injection.GetResourceVersion(ctx)}
return context.WithValue(ctx, Key{}, inf)
}
// Get extracts the typed informer from the context.
func Get(ctx context.Context) v1beta1.MutatingWebhookConfigurationInformer {
untyped := ctx.Value(Key{})
if untyped == nil {
logging.FromContext(ctx).Panic(
"Unable to fetch k8s.io/client-go/informers/admissionregistration/v1beta1.MutatingWebhookConfigurationInformer from context.")
}
return untyped.(v1beta1.MutatingWebhookConfigurationInformer)
}
type wrapper struct {
client kubernetes.Interface
resourceVersion string
}
var _ v1beta1.MutatingWebhookConfigurationInformer = (*wrapper)(nil)
var _ admissionregistrationv1beta1.MutatingWebhookConfigurationLister = (*wrapper)(nil)
func (w *wrapper) Informer() cache.SharedIndexInformer {
return cache.NewSharedIndexInformer(nil, &apiadmissionregistrationv1beta1.MutatingWebhookConfiguration{}, 0, nil)
}
func (w *wrapper) Lister() admissionregistrationv1beta1.MutatingWebhookConfigurationLister {
return w
}
// SetResourceVersion allows consumers to adjust the minimum resourceVersion
// used by the underlying client. It is not accessible via the standard
// lister interface, but can be accessed through a user-defined interface and
// an implementation check e.g. rvs, ok := foo.(ResourceVersionSetter)
func (w *wrapper) SetResourceVersion(resourceVersion string) {
w.resourceVersion = resourceVersion
}
func (w *wrapper) List(selector labels.Selector) (ret []*apiadmissionregistrationv1beta1.MutatingWebhookConfiguration, err error) {
lo, err := w.client.AdmissionregistrationV1beta1().MutatingWebhookConfigurations().List(context.TODO(), v1.ListOptions{
LabelSelector: selector.String(),
ResourceVersion: w.resourceVersion,<|fim▁hole|> })
if err != nil {
return nil, err
}
for idx := range lo.Items {
ret = append(ret, &lo.Items[idx])
}
return ret, nil
}
func (w *wrapper) Get(name string) (*apiadmissionregistrationv1beta1.MutatingWebhookConfiguration, error) {
return w.client.AdmissionregistrationV1beta1().MutatingWebhookConfigurations().Get(context.TODO(), name, v1.GetOptions{
ResourceVersion: w.resourceVersion,
})
}<|fim▁end|>
| |
<|file_name|>test_contact_compare.py<|end_file_name|><|fim▁begin|>__author__ = 'Keiran'
from model.contact import Contact
import pytest
def test_contact_compare(app, orm):
with pytest.allure.step('Given a sorted contact list from DB'):
contacts_from_db = orm.get_contact_list()
sorted_contacts_from_db = list(sorted(contacts_from_db, key=Contact.id_or_max))
with pytest.allure.step('Given a sorted contact list from home page'):
contacts_from_home_page = app.contact.get_contact_list()
sorted_contacts_from_home_page = list(sorted(contacts_from_home_page, key=Contact.id_or_max))<|fim▁hole|> assert sorted_contacts_from_db[index].join_mails() == sorted_contacts_from_home_page[index].all_mails
assert sorted_contacts_from_db[index].join_phones() == sorted_contacts_from_home_page[index].all_phones<|fim▁end|>
|
with pytest.allure.step('Then I compare this lists'):
for index in range(len(sorted_contacts_from_db)):
assert sorted_contacts_from_db[index] == sorted_contacts_from_home_page[index]
|
<|file_name|>SensorConstants.java<|end_file_name|><|fim▁begin|>// This code is part of the CPCC-NG project.
//
// Copyright (c) 2009-2016 Clemens Krainer <[email protected]>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software Foundation,
// Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
package cpcc.demo.setup.builder;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.tuple.Pair;
import cpcc.core.entities.SensorDefinition;
import cpcc.core.entities.SensorType;
import cpcc.core.entities.SensorVisibility;
import cpcc.core.entities.TopicCategory;
/**
* Sensor Constants implementation.
*/
public final class SensorConstants
{
private static final String SENSOR_MSGS_NAV_SAT_FIX = "sensor_msgs/NavSatFix";
private static final String SENSOR_MSGS_IMAGE = "sensor_msgs/Image";
private static final String STD_MSGS_FLOAT32 = "std_msgs/Float32";
private static final Date now = new Date();
private static final SensorDefinition[] SENSOR_DEFINITIONS = {
new SensorDefinitionBuilder()
.setId(1)
.setDescription("Altimeter")
.setLastUpdate(now)
.setMessageType(STD_MSGS_FLOAT32)
.setParameters(null)
.setType(SensorType.ALTIMETER)
.setVisibility(SensorVisibility.ALL_VV)
.setDeleted(false).build(),
new SensorDefinitionBuilder()
.setId(2)
.setDescription("Area of Operations")
.setLastUpdate(now)
.setMessageType(STD_MSGS_FLOAT32)
.setParameters(null)
.setType(SensorType.AREA_OF_OPERATIONS)
.setVisibility(SensorVisibility.PRIVILEGED_VV)
.setDeleted(false).build(),
new SensorDefinitionBuilder()
.setId(3)
.setDescription("Barometer")
.setLastUpdate(now)
.setMessageType(STD_MSGS_FLOAT32)
.setParameters(null)
.setType(SensorType.BAROMETER)
.setVisibility(SensorVisibility.ALL_VV)
.setDeleted(false).build(),
new SensorDefinitionBuilder()
.setId(4)
.setDescription("Battery")
.setLastUpdate(now)
.setMessageType(STD_MSGS_FLOAT32)
.setParameters(null)
.setType(SensorType.BATTERY)
.setVisibility(SensorVisibility.PRIVILEGED_VV)
.setDeleted(false).build(),
new SensorDefinitionBuilder()
.setId(5)
.setDescription("Belly Mounted Camera 640x480")
.setLastUpdate(now)
.setMessageType(SENSOR_MSGS_IMAGE)
.setParameters("width=640 height=480 yaw=0 down=1.571 alignment=''north''")
.setType(SensorType.CAMERA)
.setVisibility(SensorVisibility.ALL_VV)
.setDeleted(false).build(),
// new SensorDefinitionBuilder()
// .setId(6)
// .setDescription("FPV Camera 640x480")
// .setLastUpdate(now)
// .setMessageType("sensor_msgs/Image")
// .setParameters("width=640 height=480 yaw=0 down=0 alignment=''heading''")
// .setType(SensorType.CAMERA)
// .setVisibility(SensorVisibility.ALL_VV)
// .setDeleted(false).build(),
new SensorDefinitionBuilder()
.setId(7)
.setDescription("CO2")
.setLastUpdate(now)
.setMessageType(STD_MSGS_FLOAT32)
.setParameters(null)
.setType(SensorType.CO2)
.setVisibility(SensorVisibility.ALL_VV)
.setDeleted(false).build(),
new SensorDefinitionBuilder()
.setId(9)
.setDescription("GPS")
.setLastUpdate(now)
.setMessageType(SENSOR_MSGS_NAV_SAT_FIX)
.setParameters(null)
.setType(SensorType.GPS)
.setVisibility(SensorVisibility.ALL_VV)
.setDeleted(false).build(),
new SensorDefinitionBuilder()
.setId(10)
.setDescription("Hardware")
.setLastUpdate(now)
.setMessageType(STD_MSGS_FLOAT32)
.setParameters(null)
.setType(SensorType.HARDWARE)
.setVisibility(SensorVisibility.PRIVILEGED_VV)
.setDeleted(false).build(),
new SensorDefinitionBuilder()
.setId(11)
.setDescription("NOx")
.setLastUpdate(now)
.setMessageType(STD_MSGS_FLOAT32)
.setParameters(null)
.setType(SensorType.NOX)
.setVisibility(SensorVisibility.ALL_VV)
.setDeleted(false).build(),
new SensorDefinitionBuilder()
.setId(12)
.setDescription("Thermometer")
.setLastUpdate(now)
.setMessageType(STD_MSGS_FLOAT32)
.setParameters(null)
.setType(SensorType.THERMOMETER)
.setVisibility(SensorVisibility.ALL_VV)
.setDeleted(false).build()
};
public static final Map<TopicCategory, SensorType> TOPIC_SENSOR_MAP = Collections.unmodifiableMap(Stream
.of(Pair.of(TopicCategory.ALTITUDE_OVER_GROUND, SensorType.ALTIMETER),
Pair.of(TopicCategory.CAMERA, SensorType.CAMERA),
Pair.of(TopicCategory.CAMERA_INFO, SensorType.CAMERA),
Pair.of(TopicCategory.GPS_POSITION_PROVIDER, SensorType.GPS))
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight)));
private SensorConstants()
{
// Intentionally empty.
}
/**
* @param type the required sensor types.
* @return all sensor definitions specified in type.<|fim▁hole|> {
Set<SensorType> types = Stream.of(type).collect(Collectors.toSet());
return Stream.of(SENSOR_DEFINITIONS).filter(x -> types.contains(x.getType())).collect(Collectors.toList());
}
/**
* @return all sensor definitions.
*/
public static List<SensorDefinition> all()
{
return Arrays.asList(SENSOR_DEFINITIONS);
}
}<|fim▁end|>
|
*/
public static List<SensorDefinition> byType(SensorType... type)
|
<|file_name|>test_marshal.py<|end_file_name|><|fim▁begin|>from ..base import BaseTopazTest
class TestMarshal(BaseTopazTest):
def test_version_constants(self, space):
w_res = space.execute("return Marshal::MAJOR_VERSION")
assert space.int_w(w_res) == 4
w_res = space.execute("return Marshal::MINOR_VERSION")
assert space.int_w(w_res) == 8
w_res = space.execute("return Marshal.dump('test')[0].ord")
assert space.int_w(w_res) == 4
w_res = space.execute("return Marshal.dump('test')[1].ord")
assert space.int_w(w_res) == 8
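    # In the dumps below, the leading "\x04\b" bytes are this same version
    # header: MAJOR_VERSION (4) followed by MINOR_VERSION (8).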
def test_dump_constants(self, space):
w_res = space.execute("return Marshal.dump(nil)")
assert space.str_w(w_res) == "\x04\b0"
w_res = space.execute("return Marshal.dump(true)")
assert space.str_w(w_res) == "\x04\bT"
w_res = space.execute("return Marshal.dump(false)")
assert space.str_w(w_res) == "\x04\bF"
def test_load_constants(self, space):
w_res = space.execute("return Marshal.load('\x04\b0')")
assert w_res == space.w_nil
w_res = space.execute("return Marshal.load('\x04\bT')")
assert w_res == space.w_true
w_res = space.execute("return Marshal.load('\x04\bF')")
assert w_res == space.w_false
def test_constants(self, space):
w_res = space.execute("return Marshal.load(Marshal.dump(nil))")
assert w_res == space.w_nil
w_res = space.execute("return Marshal.load(Marshal.dump(true))")
assert w_res == space.w_true
w_res = space.execute("return Marshal.load(Marshal.dump(false))")
assert w_res == space.w_false
def test_dump_tiny_integer(self, space):
w_res = space.execute("return Marshal.dump(5)")
assert space.str_w(w_res) == "\x04\bi\n"
w_res = space.execute("return Marshal.dump(100)")
assert space.str_w(w_res) == "\x04\bii"
w_res = space.execute("return Marshal.dump(0)")
assert space.str_w(w_res) == "\x04\bi\x00"
w_res = space.execute("return Marshal.dump(-1)")
assert space.str_w(w_res) == "\x04\bi\xFA"
w_res = space.execute("return Marshal.dump(-123)")
assert space.str_w(w_res) == "\x04\bi\x80"
w_res = space.execute("return Marshal.dump(122)")
assert space.str_w(w_res) == "\x04\bi\x7F"
def test_load_tiny_integer(self, space):
w_res = space.execute("return Marshal.load('\x04\bi\n')")
assert space.int_w(w_res) == 5
w_res = space.execute("return Marshal.load('\x04\bii')")
assert space.int_w(w_res) == 100
#w_res = space.execute('return Marshal.load("\x04\bi\x00")')
w_res = space.execute('return Marshal.load(Marshal.dump(0))')
assert space.int_w(w_res) == 0
w_res = space.execute("return Marshal.load('\x04\bi\xFA')")
assert space.int_w(w_res) == -1
w_res = space.execute("return Marshal.load('\x04\bi\x80')")
assert space.int_w(w_res) == -123
w_res = space.execute("return Marshal.load('\x04\bi\x7F')")
assert space.int_w(w_res) == 122
def test_dump_array(self, space):
w_res = space.execute("return Marshal.dump([])")
assert space.str_w(w_res) == "\x04\b[\x00"
w_res = space.execute("return Marshal.dump([nil])")
assert space.str_w(w_res) == "\x04\b[\x060"
w_res = space.execute("return Marshal.dump([nil, true, false])")
assert space.str_w(w_res) == "\x04\b[\b0TF"
w_res = space.execute("return Marshal.dump([1, 2, 3])")
assert space.str_w(w_res) == "\x04\b[\x08i\x06i\x07i\x08"
w_res = space.execute("return Marshal.dump([1, [2, 3], 4])")
assert space.str_w(w_res) == "\x04\b[\bi\x06[\ai\ai\bi\t"
w_res = space.execute("return Marshal.dump([:foo, :bar])")
assert space.str_w(w_res) == "\x04\b[\a:\bfoo:\bbar"
def test_load_array(self, space):
#w_res = space.execute("return Marshal.load('\x04\b[\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump([]))")
assert self.unwrap(space, w_res) == []
w_res = space.execute("return Marshal.load('\x04\b[\x060')")
assert self.unwrap(space, w_res) == [None]
w_res = space.execute("return Marshal.load('\x04\b[\b0TF')")
assert self.unwrap(space, w_res) == [None, True, False]
w_res = space.execute("return Marshal.load('\x04\b[\x08i\x06i\x07i\x08')")
assert self.unwrap(space, w_res) == [1, 2, 3]
w_res = space.execute("return Marshal.load('\x04\b[\bi\x06[\ai\ai\bi\t')")
assert self.unwrap(space, w_res) == [1, [2, 3], 4]
w_res = space.execute("return Marshal.load('\x04\b[\a:\bfoo:\bbar')")
assert self.unwrap(space, w_res) == ["foo", "bar"]
def test_dump_symbol(self, space):
w_res = space.execute("return Marshal.dump(:abc)")
assert space.str_w(w_res) == "\x04\b:\babc"
w_res = space.execute("return Marshal.dump(('hello' * 25).to_sym)")
assert space.str_w(w_res) == "\x04\b:\x01}" + "hello" * 25
w_res = space.execute("return Marshal.dump(('hello' * 100).to_sym)")
assert space.str_w(w_res) == "\x04\b:\x02\xF4\x01" + "hello" * 100
def test_load_symbol(self, space):
w_res = space.execute("return Marshal.load('\x04\b:\babc')")
assert space.symbol_w(w_res) == "abc"
w_res = space.execute("return Marshal.load('\x04\b:\x01}' + 'hello' * 25)")
assert space.symbol_w(w_res) == "hello" * 25
def test_dump_hash(self, space):
w_res = space.execute("return Marshal.dump({})")
assert space.str_w(w_res) == "\x04\b{\x00"
w_res = space.execute("return Marshal.dump({1 => 2, 3 => 4})")
assert self.unwrap(space, w_res) == "\x04\b{\ai\x06i\ai\bi\t"
w_res = space.execute("return Marshal.dump({1 => {2 => 3}, 4 => 5})")
assert self.unwrap(space, w_res) == "\x04\b{\ai\x06{\x06i\ai\bi\ti\n"
w_res = space.execute("return Marshal.dump({1234 => {23456 => 3456789}, 4 => 5})")
assert self.unwrap(space, w_res) == "\x04\b{\ai\x02\xD2\x04{\x06i\x02\xA0[i\x03\x15\xBF4i\ti\n"
def test_load_hash(self, space):
#w_res = space.execute("return Marshal.load('\x04\b{\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump({}))")
assert self.unwrap(space, w_res) == {}
w_res = space.execute("return Marshal.load('\x04\b{\ai\x06i\ai\bi\t')")
assert self.unwrap(space, w_res) == {1: 2, 3: 4}
w_res = space.execute("return Marshal.load('\x04\b{\ai\x06{\x06i\ai\bi\ti\n')")
assert self.unwrap(space, w_res) == {1: {2: 3}, 4: 5}
w_res = space.execute("return Marshal.load('\x04\b{\ai\x02\xD2\x04{\x06i\x02\xA0[i\x03\x15\xBF4i\ti\n')")
assert self.unwrap(space, w_res) == {1234: {23456: 3456789}, 4: 5}
def test_dump_integer(self, space):
w_res = space.execute("return Marshal.dump(123)")
assert space.str_w(w_res) == "\x04\bi\x01{"
w_res = space.execute("return Marshal.dump(255)")
assert space.str_w(w_res) == "\x04\bi\x01\xFF"
w_res = space.execute("return Marshal.dump(256)")
assert space.str_w(w_res) == "\x04\bi\x02\x00\x01"
w_res = space.execute("return Marshal.dump(2 ** 16 - 2)")
assert space.str_w(w_res) == "\x04\bi\x02\xFE\xFF"
w_res = space.execute("return Marshal.dump(2 ** 16 - 1)")
assert space.str_w(w_res) == "\x04\bi\x02\xFF\xFF"
w_res = space.execute("return Marshal.dump(2 ** 16)")
assert space.str_w(w_res) == "\x04\bi\x03\x00\x00\x01"
w_res = space.execute("return Marshal.dump(2 ** 16 + 1)")
assert space.str_w(w_res) == "\x04\bi\x03\x01\x00\x01"
w_res = space.execute("return Marshal.dump(2 ** 30 - 1)")
assert space.str_w(w_res) == "\x04\bi\x04\xFF\xFF\xFF?"
        # TODO: test too big numbers (they give a warning and inf)
def test_load_integer(self, space):
w_res = space.execute("return Marshal.load('\x04\bi\x01{')")
assert space.int_w(w_res) == 123
w_res = space.execute("return Marshal.load('\x04\bi\x01\xFF')")
assert space.int_w(w_res) == 255
#w_res = space.execute("return Marshal.load('\x04\bi\x02\x00\x01')")
w_res = space.execute("return Marshal.load(Marshal.dump(256))")
assert space.int_w(w_res) == 256
w_res = space.execute("return Marshal.load('\x04\bi\x02\xFE\xFF')")
assert space.int_w(w_res) == 2 ** 16 - 2
w_res = space.execute("return Marshal.load('\x04\bi\x02\xFF\xFF')")
assert space.int_w(w_res) == 2 ** 16 - 1
#w_res = space.execute("return Marshal.load('\x04\bi\x03\x00\x00\x01')")
w_res = space.execute("return Marshal.load(Marshal.dump(2 ** 16))")
assert space.int_w(w_res) == 2 ** 16
<|fim▁hole|> #w_res = space.execute("return Marshal.load('\x04\bi\x03\x01\x00\x01')")
w_res = space.execute("return Marshal.load(Marshal.dump(2 ** 16 + 1))")
assert space.int_w(w_res) == 2 ** 16 + 1
w_res = space.execute("return Marshal.load('\x04\bi\x04\xFF\xFF\xFF?')")
assert space.int_w(w_res) == 2 ** 30 - 1
def test_dump_negative_integer(self, space):
w_res = space.execute("return Marshal.dump(-1)")
assert space.str_w(w_res) == "\x04\bi\xFA"
w_res = space.execute("return Marshal.dump(-123)")
assert space.str_w(w_res) == "\x04\bi\x80"
w_res = space.execute("return Marshal.dump(-124)")
assert space.str_w(w_res) == "\x04\bi\xFF\x84"
w_res = space.execute("return Marshal.dump(-256)")
assert space.str_w(w_res) == "\x04\bi\xFF\x00"
w_res = space.execute("return Marshal.dump(-257)")
assert space.str_w(w_res) == "\x04\bi\xFE\xFF\xFE"
w_res = space.execute("return Marshal.dump(-(2 ** 30))")
assert space.str_w(w_res) == "\x04\bi\xFC\x00\x00\x00\xC0"
def test_load_negative_integer(self, space):
w_res = space.execute("return Marshal.load('\x04\bi\xFA')")
assert space.int_w(w_res) == -1
w_res = space.execute("return Marshal.load('\x04\bi\x80')")
assert space.int_w(w_res) == -123
w_res = space.execute("return Marshal.load('\x04\bi\xFF\x84')")
assert space.int_w(w_res) == -124
#w_res = space.execute("return Marshal.load('\x04\bi\xFF\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump(-256))")
assert space.int_w(w_res) == -256
w_res = space.execute("return Marshal.load('\x04\bi\xFE\xFF\xFE')")
assert space.int_w(w_res) == -257
#w_res = space.execute("return Marshal.load('\x04\bi\xFE\x00\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump(-(2 ** 16)))")
assert space.int_w(w_res) == -(2 ** 16)
w_res = space.execute("return Marshal.load('\x04\bi\xFD\xFF\xFF\xFE')")
assert space.int_w(w_res) == -(2 ** 16 + 1)
#w_res = space.execute("return Marshal.load('\x04\bi\xFC\x00\x00\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump(-(2 ** 24)))")
assert space.int_w(w_res) == -(2 ** 24)
w_res = space.execute("return Marshal.load('\x04\bi\xFC\xFF\xFF\xFF\xFE')")
assert space.int_w(w_res) == -(2 ** 24 + 1)
#w_res = space.execute("return Marshal.load('\x04\bi\xFC\x00\x00\x00\xC0')")
w_res = space.execute("return Marshal.load(Marshal.dump(-(2 ** 30)))")
assert space.int_w(w_res) == -(2 ** 30)
def test_dump_float(self, space):
w_res = space.execute("return Marshal.dump(0.0)")
assert space.str_w(w_res) == "\x04\bf\x060"
w_res = space.execute("return Marshal.dump(0.1)")
assert space.str_w(w_res) == "\x04\bf\b0.1"
w_res = space.execute("return Marshal.dump(1.0)")
assert space.str_w(w_res) == "\x04\bf\x061"
w_res = space.execute("return Marshal.dump(1.1)")
assert space.str_w(w_res) == "\x04\bf\b1.1"
w_res = space.execute("return Marshal.dump(1.001)")
assert space.str_w(w_res) == "\x04\bf\n1.001"
#w_res = space.execute("return Marshal.dump(123456789.123456789)")
#assert space.str_w(w_res) == "\x04\bf\x17123456789.12345679"
#w_res = space.execute("return Marshal.dump(-123456789.123456789)")
#assert space.str_w(w_res) == "\x04\bf\x18-123456789.12345679"
#w_res = space.execute("return Marshal.dump(-0.0)")
#assert space.str_w(w_res) == "\x04\bf\a-0"
def test_load_float(self, space):
w_res = space.execute("return Marshal.load('\x04\bf\x060')")
assert space.float_w(w_res) == 0.0
w_res = space.execute("return Marshal.load('\x04\bf\b0.1')")
assert space.float_w(w_res) == 0.1
w_res = space.execute("return Marshal.load('\x04\bf\x061')")
assert space.float_w(w_res) == 1.0
w_res = space.execute("return Marshal.load('\x04\bf\b1.1')")
assert space.float_w(w_res) == 1.1
w_res = space.execute("return Marshal.load('\x04\bf\n1.001')")
assert space.float_w(w_res) == 1.001
#w_res = space.execute("return Marshal.load('\x04\bf\x17123456789.12345679')")
#assert space.float_w(w_res) == 123456789.123456789
#w_res = space.execute("return Marshal.load('\x04\bf\x18-123456789.12345679')")
#assert space.float_w(w_res) == -123456789.123456789
#w_res = space.execute("return Marshal.load('\x04\bf\a-0')")
#assert repr(space.float_w(w_res)) == repr(-0.0)
def test_dump_string(self, space):
w_res = space.execute("return Marshal.dump('')")
assert space.str_w(w_res) == "\x04\bI\"\x00\x06:\x06ET"
w_res = space.execute("return Marshal.dump('abc')")
assert space.str_w(w_res) == "\x04\bI\"\babc\x06:\x06ET"
w_res = space.execute("return Marshal.dump('i am a longer string')")
assert space.str_w(w_res) == "\x04\bI\"\x19i am a longer string\x06:\x06ET"
def test_load_string(self, space):
#w_res = space.execute("return Marshal.load('\x04\bI\"\x00\x06:\x06ET')")
w_res = space.execute("return Marshal.load(Marshal.dump(''))")
assert space.str_w(w_res) == ""
w_res = space.execute("return Marshal.load('\x04\bI\"\babc\x06:\x06ET')")
assert space.str_w(w_res) == "abc"
w_res = space.execute("return Marshal.load('\x04\bI\"\x19i am a longer string\x06:\x06ET')")
assert space.str_w(w_res) == "i am a longer string"
def test_array(self, space):
w_res = space.execute("return Marshal.load(Marshal.dump([1, 2, 3]))")
assert self.unwrap(space, w_res) == [1, 2, 3]
w_res = space.execute("return Marshal.load(Marshal.dump([1, [2, 3], 4]))")
assert self.unwrap(space, w_res) == [1, [2, 3], 4]
w_res = space.execute("return Marshal.load(Marshal.dump([130, [2, 3], 4]))")
assert self.unwrap(space, w_res) == [130, [2, 3], 4]
w_res = space.execute("return Marshal.load(Marshal.dump([-10000, [2, 123456], -9000]))")
assert self.unwrap(space, w_res) == [-10000, [2, 123456], -9000]
w_res = space.execute("return Marshal.load(Marshal.dump([:foo, :bar]))")
assert self.unwrap(space, w_res) == ["foo", "bar"]
w_res = space.execute("return Marshal.load(Marshal.dump(['foo', 'bar']))")
assert self.unwrap(space, w_res) == ["foo", "bar"]
def test_incompatible_format(self, space):
with self.raises(
space,
"TypeError",
"incompatible marshal file format (can't be read)\n"
"format version 4.8 required; 97.115 given"
):
space.execute("Marshal.load('asd')")
def test_short_data(self, space):
with self.raises(space, "ArgumentError", "marshal data too short"):
space.execute("Marshal.load('')")
def test_parameters(self, space):
with self.raises(space, "TypeError", "instance of IO needed"):
space.execute("Marshal.load(4)")
def test_io(self, space, tmpdir):
f = tmpdir.join("testfile")
w_res = space.execute("""
Marshal.dump('hallo', File.new('%s', 'wb'))
file = File.open('%s', 'rb')
return Marshal.load(file.read)
""" % (f, f))
assert space.str_w(w_res) == "hallo"
w_res = space.execute("""
Marshal.dump('hallo', File.new('%s', 'wb'))
file = File.open('%s', 'rb')
return Marshal.load(file)
""" % (f, f))
assert space.str_w(w_res) == "hallo"<|fim▁end|>
| |
<|file_name|>types.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Thomas Scholtes.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
from __future__ import (division, absolute_import, print_function,
unicode_literals)<|fim▁hole|>from beets.plugins import BeetsPlugin
from beets.dbcore import types
from beets.util.confit import ConfigValueError
from beets import library
class TypesPlugin(BeetsPlugin):
@property
def item_types(self):
return self._types()
@property
def album_types(self):
return self._types()
def _types(self):
if not self.config.exists():
return {}
mytypes = {}
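        # For example, a config section like
        #   types: {rating: int, gps: float, live: bool, last_played: date}
        # yields the corresponding beets field types below.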
for key, value in self.config.items():
if value.get() == 'int':
mytypes[key] = types.INTEGER
elif value.get() == 'float':
mytypes[key] = types.FLOAT
elif value.get() == 'bool':
mytypes[key] = types.BOOLEAN
elif value.get() == 'date':
mytypes[key] = library.DateType()
else:
raise ConfigValueError(
u"unknown type '{0}' for the '{1}' field"
.format(value, key))
return mytypes<|fim▁end|>
| |
<|file_name|>systemhost_mock.py<|end_file_name|><|fim▁begin|># Copyright (c) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from StringIO import StringIO
from webkitpy.common.system.environment import Environment
from webkitpy.common.system.executive_mock import MockExecutive
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.system.platforminfo_mock import MockPlatformInfo
from webkitpy.common.system.user_mock import MockUser
from webkitpy.common.system.workspace_mock import MockWorkspace
class MockSystemHost(object):
def __init__(self, log_executive=False, executive_throws_when_run=None, os_name=None, os_version=None, executive=None, filesystem=None):
self.executable = 'python'
self.executive = executive or MockExecutive(should_log=log_executive, should_throw_when_run=executive_throws_when_run)
self.filesystem = filesystem or MockFileSystem()
self.user = MockUser()
self.platform = MockPlatformInfo()
if os_name:
self.platform.os_name = os_name
if os_version:
self.platform.os_version = os_version
# FIXME: Should this take pointers to the filesystem and the executive?
self.workspace = MockWorkspace()<|fim▁hole|> self.stderr = StringIO()
def copy_current_environment(self):
return Environment({"MOCK_ENVIRON_COPY": '1'})
def print_(self, *args, **kwargs):
sep = kwargs.get('sep', ' ')
end = kwargs.get('end', '\n')
stream = kwargs.get('stream', self.stdout)
stream.write(sep.join([str(arg) for arg in args]) + end)<|fim▁end|>
|
self.stdin = StringIO()
self.stdout = StringIO()
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>export * from './home.component';
export * from './overview/overview.component';
export * from './profile/profile.component';<|fim▁hole|><|fim▁end|>
|
export * from './accounts/accounts.component';
export * from './categorization/categorization.component';
|
<|file_name|>rosteritemexchange.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="de" version="2.0">
<context>
<name>ExchangeApproveDialog</name>
<message>
<location filename="../../plugins/rosteritemexchange/exchangeapprovedialog.cpp" line="24"/>
<source>Roster Modification - %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/exchangeapprovedialog.cpp" line="27"/>
<source>Contact '%1' offers you to make the following changes in your contact list:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/exchangeapprovedialog.cpp" line="32"/>
<source>Modification</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/exchangeapprovedialog.cpp" line="106"/>
<source>Add new contact '%1 <%2>'</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/exchangeapprovedialog.cpp" line="108"/>
<source>Add new contact '%1 <%2>' to the group: %3</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/exchangeapprovedialog.cpp" line="113"/>
<location filename="../../plugins/rosteritemexchange/exchangeapprovedialog.cpp" line="137"/>
<source>Copy contact '%1' to the group: %2</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/exchangeapprovedialog.cpp" line="121"/>
<source>Remove contact '%1' from contact list</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/exchangeapprovedialog.cpp" line="123"/>
<location filename="../../plugins/rosteritemexchange/exchangeapprovedialog.cpp" line="143"/>
<source>Remove contact '%1' from the group: %2</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/exchangeapprovedialog.cpp" line="131"/>
<source>Rename contact '%1' to '%2'</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ExchangeApproveDialogClass</name>
<message>
<location filename="../../plugins/rosteritemexchange/exchangeapprovedialog.ui" line="46"/>
<source>Send authorization request to new contacts</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RosterItemExchange</name>
<message>
<location filename="../../plugins/rosteritemexchange/rosteritemexchange.cpp" line="65"/>
<location filename="../../plugins/rosteritemexchange/rosteritemexchange.cpp" line="153"/>
<source>Roster Item Exchange</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/rosteritemexchange.cpp" line="66"/>
<source>Allows to exchange contact list items</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/rosteritemexchange.cpp" line="154"/>
<source>Supports the exchanging of contact list items</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/rosteritemexchange.cpp" line="179"/>
<source>When receiving roster modification request</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/rosteritemexchange.cpp" line="290"/>
<source>Contacts list management</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/rosteritemexchange.cpp" line="291"/>
<source>Allow gateways and group services manage your contacts list</source>
<translation type="unfinished"/><|fim▁hole|> </message>
<message numerus="yes">
<location filename="../../plugins/rosteritemexchange/rosteritemexchange.cpp" line="528"/>
<source>Send %n Contact(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/rosteritemexchange.cpp" line="650"/>
<source>Roster modification request from %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/rosteritemexchange.cpp" line="656"/>
<source>Roster modification</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/rosteritemexchange/rosteritemexchange.cpp" line="659"/>
<source>%1 offers you to make some changes in your contact list.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location filename="../../plugins/rosteritemexchange/rosteritemexchange.cpp" line="810"/>
<source>%n contact(s) sent</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location filename="../../plugins/rosteritemexchange/rosteritemexchange.cpp" line="812"/>
<source>Failed to send %n contact(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
</context>
</TS><|fim▁end|>
| |
<|file_name|>generate-graphics.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
import re
import subprocess
from jinja2 import Template
import replacements
TEMPLATENAME = "base.svg.template"
# modifiers for layers in order as in keymap
MODIFIERS = [
[],
["SHIFT"],
["MOD3"],
["MOD3", "SHIFT"],
["MOD4"],
["MOD4", "SHIFT"],
["MOD3", "MOD4"],
[]
]
LAYERNAMES = ["1", "2", "3", "5", "4", "Pseudoebene", "6", ""]
# 1E9E = Latin Capital Letter Sharp S
upper_chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZÄÖÜ\u1e9e'
lower_chars = 'abcdefghijklmnopqrstuvwxyzäöüß'
CAPS_MAP = str.maketrans(dict(zip(upper_chars + lower_chars,
lower_chars + upper_chars)))
assert len(lower_chars) == len(upper_chars) == 30
assert len(CAPS_MAP) == len(lower_chars) + len(upper_chars)
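# CAPS_MAP swaps the case of the 30 letter pairs above, e.g.
# "Abc".translate(CAPS_MAP) == "aBC"; make_caps_lock() below applies it
# only to single-character symbols.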
def keymap_to_keys(text):
    # simple and dumb parser for xkb keymap files
#
# It simply searches all "key { … };" parts and splits them.
    # A more advanced version would parse "xkb_symbols { … }" first
# and only search in this part.
assert text.startswith("xkb_keymap")
KEY_PATTERN = r'\s key \s .+? \s { [^}]+? };'
SYMBOLS_PATTERN = r'\[ (.+?) \]'
text = text.split('xkb_symbols', 1)[1]
# FIXME: assumes the next section (if there is one) is
# xkb_geometry
text = text.split('xkb_geometry', 1)[0]
for k in re.findall(KEY_PATTERN, text, re.M+re.X):
_, name, text = k.split(None, 2)
name = name.strip('<').rstrip('>')
text = text.replace('symbols[Group1]', '')
symbols = re.findall(SYMBOLS_PATTERN, text, re.M+re.X)
if not symbols:
raise SystemExit(f"{name} did not match: {text!r}")
if len(symbols) != 1:
print("currious key:", name, symbols)
symbols = [s.strip() for s in symbols[0].split(',')]
# replace keynames with the symbol they produce<|fim▁hole|> symbols = [replacements.f(s) for s in symbols]
# Some keys aren't layered, hence the list is too short.
# pad them with the first entry.
symbols = (symbols + symbols[:1]*9)[:9]
yield name, symbols
# --- argument handling ---
if len(sys.argv) not in (2, 3):
raise SystemExit('Usage: ./<this script> variantname [numpad]')
layout = sys.argv[1]
numpad = (len(sys.argv) == 3 and sys.argv[2] == "numpad")
swap_m3r_ä = (layout == "vou" or layout == "mine")
vou = (layout == "vou")
mine = (layout == "mine")
version = "numpad" if numpad else "tkl"
# - read data and template
keymap = subprocess.check_output(
["xkbcli", "compile-keymap", "--layout", "de", "--variant", layout],
text=True)
keymap = dict(keymap_to_keys(keymap))
with open(TEMPLATENAME) as templatefile:
template = Template(templatefile.read())
# --- generate files ---
def write_image(layername, layerdict):
layerdict["numpad"] = numpad
layerdict["swap_m3r_ä"] = swap_m3r_ä
layerdict["vou"] = vou
layerdict["mine"] = mine
with open(f'{layout}-{layername}-{version}.svg', 'w') as out:
out.write(template.render(layerdict))
def make_caps_lock(text):
if len(text) == 1:
return text.translate(CAPS_MAP)
else:
return text
# - main layers
for layer in range(7): # 7 because the last layer is empty
# create a dict with the replacements from replacements.py
layerdict = {a: b[layer] for a, b in keymap.items()}
# color modifiers accordingly
for x in MODIFIERS[layer]:
layerdict[x] = " pressed"
write_image(LAYERNAMES[layer], layerdict)
filename = f'{layout}-{LAYERNAMES[layer]}-{version}.svg'
with open(filename, 'w') as out:
out.write(template.render(layerdict))
# - caps-lock images
for layer in 0, 1:
# create a dict with the replacements from replacements.py
layerdict = {a: make_caps_lock(b[layer]) for a, b in keymap.items()}
# color modifiers accordingly
for x in MODIFIERS[layer]:
layerdict[x] = " pressed"
write_image(LAYERNAMES[layer] + 'caps', layerdict)
# - "leer" image
write_image('leer', {})<|fim▁end|>
| |
<|file_name|>gist_trees.py<|end_file_name|><|fim▁begin|>"""
Derivation and Elementary Trees live here.
"""
from __future__ import print_function
from baal.structures import Entry, ConstituencyTree, consts
from baal.semantics import Predicate, Expression
from collections import deque
from copy import copy, deepcopy
from math import floor, ceil
try:
input = raw_input
except:
pass
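# prn_pairs (below) reports whether two head words form a matching
# punctuation pair (brackets, braces, dashes, commas), case-insensitively;
# it is used to sanity-check extra lexical items in an elementary tree.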
def prn_pairs(phead, thead):
pairs = [("-LRB-", "-RRB-"), ("-RSB-", "-RSB-"), ("-LCB-", "-RCB-"),
("--", "--"), (",", ",")]
return any([left.lower()==phead.lower() and right.lower()==thead.lower() for left,right in pairs])
class AttachmentPoint(object):
def __init__(self, free, pos_symbol, gorn, type, seq_index):
self.free = free
self.pos_symbol = pos_symbol
self.gorn = gorn
self.type = type
self.seq_index = seq_index
self.hlf_symbol = None
self.frontier_increment = 0.01
self.frontier = (-1,0)
def __repr__(self):
return "{}@{}".format(self.pos_symbol,self.gorn)
def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
setattr(result, k, deepcopy(v, memo))
return result
@classmethod
def from_tree(cls, tree, address, seq_index, tree_type):
new_point = cls(True, tree.symbol, address, tree_type, seq_index)
if tree.spine_index >= 0:
new_point.frontier = (tree.spine_index, tree.spine_index)
return new_point
@property
def left_frontier(self):
l, r = self.frontier
self.frontier = (l-self.frontier_increment, r)
assert self.frontier[0] > floor(self.frontier[0])
return self.frontier[0]
@property
def right_frontier(self):
l, r = self.frontier
self.frontier = (l, r+self.frontier_increment)
assert self.frontier[1] < ceil(self.frontier[1])
return self.frontier[1]
def sibling_increment(self, left=True):
l, r = self.frontier
if left:
self.frontier = (ceil(l) - 1.0, r)
else:
self.frontier = (l, floor(r) + 1.0)
def match(self, op):
pos_match = self.pos_symbol == op.target['pos_symbol']
gorn_match = ((self.gorn == op.target['target_gorn'])
or op.target['target_gorn'] is None)
hlf_match = self.hlf_symbol == op.target['target_hlf']
type_match = self.type == op.type
fail = []
if not pos_match:
f = "failure because pos:"
f += "self: {}; op: {}".format(str(self.pos_symbol),
str(op.target['pos_symbol']))
fail.append(f)
if not gorn_match:
f = "failure because gorn:"
f += "self: {}; op: {}".format(str(self.gorn),
str(op.target['target_gorn']))
fail.append(f)
if not hlf_match:
f = "failure because hlf:"
f += "self: {}; op: {}".format(str(self.hlf_symbol),
str(op.target['target_hlf']))
fail.append(f)
#if len(fail) > 0:
# print(" & \n".join(fail))
#else:
# print("Success!")
return self.free and pos_match and gorn_match and hlf_match and type_match
def set_path_features(self, hlf_symbol):
self.hlf_symbol = hlf_symbol
def clone(self):
ret = AttachmentPoint(self.free, self.pos_symbol, self.gorn,
self.type, self.seq_index)
ret.hlf_symbol = self.hlf_symbol
ret.frontier = self.frontier
return ret
class AttachmentOperation(object):
"""Represents an elementary tree operation
Used by DerivationTrees when trying to find where an elementary tree should attach
There are two modes to the operation:
1. Use it as a general attachment. In this case it needs to know
the permissable attachments via the pos_symbol (and direction if insertion)
2. Use it in specific attachment. In this case it needs to know
identifying information about the tree it should be attaching to.
Current ideas: hlf_symbol, tree_id, argument_number, gorn_address
Thoughts: gorn_address won't work (for obvious reasons as the tree grows)
tree_id won't work because there might be duplicates
hlf_symbol could work, as long as this semantic form remains
argument_number requires planning, which CSG and others might handle
"""
def __init__(self, target, type):
"""Pass in the already made parameters to make the operation.
Args:
target: dict with keys 'pos_symbol' and 'parameter'
'pos_symbol' is the part of speech this operation looks for
'parameter' is direction for insertions, and argument number
for substitutions
type: the type of operation this is: consts.INSERTION or consts.SUBSTITUTION
Notes:
insertion direction: left means it inserts on the left side
e.g. (NP* (DT a)) inserts left.
the asterisk denotes the attachment point
right means it inserts on the right side
e.g. (*S (. .)) inserts right
the asterisk denotes the attachment point
"""
self.target = target
self.type = type
@property
def is_insertion(self):
return self.type == consts.INSERTION
@property
def direction(self):
if not self.is_insertion:
raise Exception("Not an insertion tree")
else:
return self.target['attach_direction']
def clone(self):
return AttachmentOperation(self.target, self.type)
def set_path_features(self, target_gorn, target_hlf):
if target_hlf is not None:
self.target['target_hlf'] = target_hlf
if target_gorn is not None:
self.target['target_gorn'] = tuple(target_gorn)
@classmethod
def from_tree(cls, tree):
"""Calculate the parameters for the operation from a parse tree
Args:
tree: A ConstituencyParse instance
"""
if tree.adjunct:
target = {'pos_symbol': tree.symbol, 'attach_direction': tree.direction,
'target_gorn': None, 'target_hlf': None}
type = consts.INSERTION
else:
target = {'pos_symbol': tree.symbol, 'attach_direction': "up",
'target_gorn': None, 'target_hlf': None}
type = consts.SUBSTITUTION
return cls(target, type)
return cls(root_op, "", (0,), None, "(ROOT)",
[root_subpoint], [], hlf_symbol="g-1")
class ElementaryTree(object):
"""represent a tree fragment, its operations, and its internal addresses
"""
def __init__(self, op, head, head_address, head_symbol, bracketed_string,
substitution_points, insertion_points,
hlf_symbol=None, tree_id=None, last_type=None, last_index=-1):
self.tree_operation = op
self.head = head
self.head_address = head_address
self.substitution_points = substitution_points
self.insertion_points = insertion_points
self.address = (0,)
self.last_type = last_type
self.last_index = last_index
self.hlf_symbol = hlf_symbol
self.bracketed_string = bracketed_string
self.tree_id = tree_id
self.head_symbol = head_symbol
@classmethod
def from_full_parse_tree(cls, parse_tree):
if parse_tree.symbol == "" and len(parse_tree.children) == 1:
parse_tree.symbol = "ROOT"
_, addressbook = parse_tree.clone()
@classmethod
def from_single_parse_tree(cls, parse_tree):
if parse_tree.save_str().upper() == "(ROOT ROOT)":
return cls.root_tree()
_, addressbook = parse_tree.clone()
head = None
head_address = None
substitution_points = list()
insertion_points = list()
sorted_book = sorted(addressbook.items())
_, root = sorted_book[0]
root_sym = root.symbol
for address, tree in sorted_book:
#if tree.symbol == "ROOT":
# head = "ROOT"
# new_point = AttachmentPoint.from_tree(tree, address, 0, consts.SUBSTITUTION)
# substitution_points.append(new_point)
if tree.lexical:
if head is None:
head = tree.symbol
head_address = address
head_parent = tree.parent
else:
assert prn_pairs(head, tree.symbol)
elif tree.complement:
new_point = AttachmentPoint.from_tree(tree,
address,
len(substitution_points),
consts.SUBSTITUTION)
substitution_points.append(new_point)
elif tree.spine_index >= 0:
new_point = AttachmentPoint.from_tree(tree,
address,
len(insertion_points),
consts.INSERTION)
insertion_points.append(new_point)
else:
print(address, tree)
print("Then what is it?")
op = AttachmentOperation.from_tree(parse_tree)
        assert (head is not None and head_address is not None) or head == "ROOT"
return cls(op, head, head_address, head_parent, parse_tree.save_str(),
substitution_points, insertion_points)
@classmethod
def from_bracketed_string(cls, bracketed_string):
parse_tree, _ = ConstituencyTree.make(bracketed_string=bracketed_string)
return cls.from_single_parse_tree(parse_tree)
@classmethod
def root_tree(cls):
root_op = AttachmentOperation({'pos_symbol': 'ROOT', 'attach_direction': None,
'target_gorn': None, 'target_hlf':None},
consts.SUBSTITUTION)
root_subpoint = AttachmentPoint(True, 'ROOT', (0,), consts.SUBSTITUTION, 0)
root_subpoint.hlf_symbol = "g-1"
return cls(root_op, "", (0,), None, "(ROOT)",
[root_subpoint], [], hlf_symbol="g-1")
def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
setattr(result, k, deepcopy(v, memo))
return result
################### INSERTION OPERATION
########################################
def insert(self, op_tree):
new_tree = deepcopy(self)#.clone()
address = new_tree.mark_insertion(op_tree.tree_operation)
op_tree = deepcopy(op_tree)#.clone()
op_tree.address = address
return new_tree, op_tree
def mark_insertion(self, op):
assert self.last_match is not None
assert self.last_match.match(op)
if op.target['attach_direction'] == "left":
op_index = self.last_match.left_frontier
else:
op_index = self.last_match.right_frontier
return self.last_match.gorn + (op_index,)
def matches_inspoint(self, op):
self.last_type = None
self.last_index = -1
for index, point in enumerate(self.insertion_points):
if point.match(op):
self.last_index = index
self.last_type = consts.INSERTION
return True
return False
################### SUBSTITUTION OPERATION
###########################################
def substitute(self, op_tree):
"""update open substitution spots.
Args:
op_tree: an ElementaryTree instance
Notes:
accepts an op_tree that needs to substitute here.
raises an Exception if it can't
"""
new_tree = deepcopy(self)#self.clone()
address = new_tree.mark_substituted(op_tree.tree_operation)
op_tree = deepcopy(op_tree)#.clone()
op_tree.address = address
return new_tree, op_tree
def mark_substituted(self, op):
assert self.last_match is not None
assert self.last_match.match(op)
self.last_match.free = False
match_gorn = self.last_match.gorn
if self.hlf_symbol == 'g-1':
return match_gorn
is_left = match_gorn < self.head_address
for point in self.insertion_points:
if point.gorn == match_gorn[:-1]:
point.sibling_increment(is_left)
return match_gorn
def matches_subpoint(self, op):
"""check to see if operation matches anything on this tree
Args:
op: AttachmentOperation instance
Returns:
True, False
"""
self.last_type = None
self.last_index = -1
for index, point in enumerate(self.substitution_points):
if point.match(op):
self.last_type = consts.SUBSTITUTION
self.last_index = index
return True
return False
##################### UTILITY METHODS
#####################################
def point_iterator(self, ignore_taken=False):
for pt_type, points in zip(['SUB', 'INS'], [self.sub_points, self.ins_points]):
for point in points:
if ignore_taken and not point.free:
continue
yield pt_type, point
@property
def ins_points(self):
return self.insertion_points
@property
def sub_points(self):
return self.substitution_points
@property
def root_pos(self):
return self.tree_operation.target['pos_symbol']
@property
def last_match(self):
if self.last_index < 0:
return None
elif self.last_type == consts.SUBSTITUTION:
return self.substitution_points[self.last_index]
else:
return self.insertion_points[self.last_index]
@property
def is_insertion(self):
return self.tree_operation.is_insertion
@property
def pos_symbol(self):
return self.tree_operation.target['pos_symbol']
def set_path_features(self, target_gorn=None, target_hlf=None,
self_hlf=None, tree_id=None):
"""Set the variables needed to reconstruct paths.
Args
target_gorn: the gorn address of the target operation node
target_hlf: the target hlf symbol of the target operation tree
self_hlf: this tree's hlf symbol
Notes:
The gorn address will identify where in the target tree
The target_hlf will identify which tree; especially important for duplicates
"""
if self_hlf:
for point in self.substitution_points + self.insertion_points:
point.set_path_features(self_hlf)
self.hlf_symbol = self_hlf
if target_gorn or target_hlf:
self.tree_operation.set_path_features(target_gorn, target_hlf)
if tree_id:
self.tree_id = tree_id
def expand_address(self, incoming):
self.expanded_address = incoming
for _, point in self.point_iterator():
point.expanded_address = incoming + point.gorn[1:]
""" a soft deletion to see if i can get rid of this code
def refresh_points(self):
self.tree_operation = self.tree_operation.clone()
self.substitution_points = [sub.clone() for sub in self.substitution_points]
self.insertion_points = [ins.clone() for ins in self.insertion_points]
def clone(self):
new_tree = ElementaryTree(self.tree_operation, self.head,
self.head_address, self.bracketed_string,
self.substitution_points,
self.insertion_points)
new_tree.refresh_points()
if self.last_match:
new_tree.last_type = self.last_type
new_tree.last_index = self.last_index
if self.hlf_symbol:
new_tree.hlf_symbol = self.hlf_symbol
new_tree.address = self.address
new_tree.tree_id = self.tree_id
return new_tree
"""
def __str__(self):
return self.bracketed_string
def __repr__(self):
substr = ", ".join("{}{}@{}".format(sub.pos_symbol,
"-FREE" if sub.free else "-FILLED",
sub.gorn)
for sub in sorted(self.substitution_points,
key=lambda x: x.gorn))
instr = ", ".join("{}@{}".format(ins.pos_symbol, ins.gorn)
for ins in sorted(self.insertion_points,
key=lambda x: x.gorn))
if self.tree_operation.is_insertion:
typestr = "{}*" if self.tree_operation.direction == "left" else "*{}"
else:
typestr = "^{}^"
typestr = typestr.format(self.head)
return "<{}; sub=[{}], ins=[{}]>".format(typestr, substr, instr)
class DerivationTree(object):
"""represent a tree of ElementaryTrees and their attachment addresses.
"""
def __init__(self, elem_tree, children, predicate=None, suppress_predicate=False):
self.elem_tree = elem_tree
self.children = children
self.predicate = predicate
if not suppress_predicate and predicate is None:
self.predicate = self.instantiate_semantics()
@classmethod
def root_tree(cls):
E = ElementaryTree.root_tree()
P = Predicate(name='ROOT', valence=1, hlf_symbol='g-1')
return cls(E, [], P)
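    # A minimal usage sketch (hypothetical bracketing, names as defined in this file):
    #   root = DerivationTree.root_tree()
    #   np = DerivationTree.from_bracketed("(NP (DT the) (NN dog))")
    # Attachment bookkeeping then happens through the ElementaryTree operations
    # (substitute / insert) on the wrapped elem_tree objects.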
@classmethod
def from_single_parse_tree(cls, tree):
elem_tree = ElementaryTree.from_single_parse_tree(tree)
return cls(elem_tree, [])
@classmethod
def from_bracketed(cls, bracketed_string, **kwargs):
elem_tree = ElementaryTree.from_bracketed_string(bracketed_string)
#parse_tree, _ = ConstituencyTree.make(bracketed_string=bracketed_string)
return cls(elem_tree, [], **kwargs)
@property
def E(self):
""" shortcut alias for shorter lines """
return self.elem_tree
@property
def is_insertion(self):
return self.elem_tree.is_insertion
@property
def direction(self):
if self.is_insertion:
return self.E.tree_operation.target['attach_direction']
else:
return "up"
@property
def tree_op(self):
return self.E.tree_operation
@property
def bracketed(self):
return self.E.bracketed_string
@property
def head(self):
return self.E.head
@property
def supertag(self):
return (self.E.root_pos, self.E.head_symbol, self.direction)
@property
def superindex(self):
return (self.head, self.supertag)
@property
def is_root(self):
return "ROOT" in self.E.bracketed_string
@property
def num_children(self):
return sum([child.num_children+1 for child in self.children])
@property
def lexical(self):
out = [self.E.head]
for child in self.children:
out.extend(child.lexical)
return out
def target_gorn(self, adjust_insertion=True):
gorn = self.tree_op.target['target_gorn']
direction = self.tree_op.target['attach_direction']
if self.is_insertion and adjust_insertion:
gorn += ((-100 if direction == "left" else 100), )
return gorn
def accepts_op(self, other_tree):
other_target = other_tree.E.tree_operation.target['pos_symbol']
if other_tree.is_insertion:
points = self.E.insertion_points
else:
points = self.E.substitution_points
for point in points:
if point.pos_symbol == other_target:
return True
return False
def expand_address(self, incoming=None):
incoming = incoming or (0,)
self.E.expand_address(incoming)
self.expanded_address = incoming
for child in self.children:
child_address = incoming + child.E.address[1:]
child.expand_address(child_address)
def all_points(self):
points = list(self.E.point_iterator())
for child in self.children:
points.extend(child.all_points)
return points
def get_spine(self):
tree, _ = ConstituencyTree.make(bracketed_string=self.bracketed)
annotate = lambda t: (t.symbol, ("SUB" if t.complement
else ("INS" if t.adjunct
else "SPINE")))
not_lex = lambda t: not tree.lexical
spine = [[(tree.symbol, self.direction)]]
while not_lex(tree):
if len(tree.children) == 1 and tree.children[0].lexical:
break
spine.append([annotate(c) for c in tree.children if not_lex(c)])
tree = tree.children[tree.spine_index]
return spine
def roll_features(self, parent_head="ROOT"):
"""assumes 1 head.. more thought needed for other forms"""
spine = self.get_spine()
out_ch = [child.head for child in self.children]
out = [(self.head, parent_head, self.bracketed, spine, out_ch)]
for child in self.children:
out.extend(child.roll_features(self.head))
return out
def modo_roll_features(self, parent_head="ROOT", parent_spine=None):
"""v2. mother-daughter roll features
roll up the tree; get the mother-daughter quadruples
"""
parent_spine = parent_spine or ((("ROOT", "SUB"),),)
tree, _ = ConstituencyTree.make(bracketed_string=self.bracketed)
safety = 0
annotate = lambda t: (t.symbol, ("SUB" if t.complement
else ("INS" if t.adjunct
else "SPINE")))
filter_ch = lambda c: c.E.head_symbol in [",", ":", ".", "``","''", "--"]
not_lex = lambda t: not tree.lexical
spine = [[(tree.symbol, self.direction)]]
while not_lex(tree):
if len(tree.children) == 1 and tree.children[0].lexical:
break
spine.append([annotate(c) for c in tree.children if not_lex(c)])
tree = tree.children[tree.spine_index]
safety += 1
if safety == 100:
raise Exception("loop issue")
out = [(self.head, parent_head, self.bracketed, spine, parent_spine)]
for child in self.children:
out.extend(child.modo_roll_features(self.head, spine))
return out
def dcontext_roll_features(self):
"""v3. mother-daughter roll features
roll up the trees; get the node+daughter head context
"""
tree, _ = ConstituencyTree.make(bracketed_string=self.bracketed)
annotate = lambda t: (t.symbol, ("SUB" if t.complement
else ("INS" if t.adjunct
else "SPINE")))
filter_ch = lambda c: c.E.head_symbol in [",", ":", ".", "``","''", "--"]
not_lex = lambda t: not tree.lexical
spine = [[(tree.symbol, self.direction)]]
while not_lex(tree):
if len(tree.children) == 1 and tree.children[0].lexical:
break
spine.append([annotate(c) for c in tree.children if not_lex(c)])
tree = tree.children[tree.spine_index]
hlf_info = (self.E.hlf_symbol, self.E.tree_operation.target['target_hlf'])
child_heads = [child.head for child in self.children]
out = [(self.head, spine, child_heads, self.bracketed, hlf_info)]
for child in self.children:
out.extend(child.dcontext_roll_features())
return out
def learning_features_july2016(self):
'''sequential choice model with a horizon and RTTN
'''
tree, _ = ConstituencyTree.make(bracketed_string=self.bracketed)
annotate = lambda t: (t.symbol, ("SUB" if t.complement
else ("INS" if t.adjunct
else "SPINE")))
not_lex = lambda t: not tree.lexical
spine = [[(tree.symbol, self.direction)]]
while not_lex(tree):
if len(tree.children) == 1 and tree.children[0].lexical:
break
spine.append([annotate(c) for c in tree.children if not_lex(c)])
tree = tree.children[tree.spine_index]
return self.head, spine
def to_constituency(self):
raise Exception("dont use this yet")
import pdb
#pdb.set_trace()
tree, _ = ConstituencyTree.make(bracketed_string=self.bracketed)
for child in sorted(self.children, key=lambda c: c.E.address):
print("*******\n**********")
print("starting child {}".format(child.supertag))
ct = child.to_constituency()
print("----------------------------")
print("finished to constituency for ct")
print("tree is currently {}".format(tree))
print("child's ct: {}".format(ct))
print("-------------------")
print(self.bracketed)
print(child.E.address)
print(str(child))
print("attaching {} to {}".format(child.bracketed, self.bracketed))
self.attach_at(tree, ct, list(child.E.address)[1:])
return tree
def attach_at(self, node, op, address):
raise Exception("dont use this yet")
while len(address) > 1:
node = node.children[address.pop(0)]
if not hasattr(node, "bookkeeper"):
node.bookkeeper = {}
opid = address.pop(0)
assert len(address) == 0
if isinstance(opid, int):
node.children[opid].__dict__.update(op.__dict__)
elif isinstance(opid, float):
if opid > 0:
node.children.extend(op.children)
else:
node.children = op.children + node.children
node.spine_index += len(op.children)
else:
raise Exception("sanity check")
def __str__(self):
if self.E.bracketed_string == "(ROOT)" and len(self.children) == 0:
return "<empty root>"
lexical = self.in_order_lexical()
return " ".join(lexical)
def __repr__(self):
if self.E.bracketed_string == "(ROOT)" and len(self.children) == 0:
return "<empty root>"
descs = self.in_order_descriptive()
return " ".join(descs)
def _check_heads(self, child_prep, next_word, stk_idx, sf_stk, avail_pos):
for (head,hlf), child in child_prep.items():
if head == next_word:
import pdb
#pdb.set_trace()
w_size = child.num_children + 1
low,high = stk_idx, stk_idx+w_size
while high >= stk_idx and low >= 0:
possible = sf_stk[low:high]
if sorted(possible) == sorted(child.lexical):
child_prep.pop((head, hlf))
pos = avail_pos.pop()
return child, pos, low
else:
low -= 1
high -= 1
return None, None, None
def _sort_by_surface_form(self, sf_list, children, positions, left=True):
"""assign spine-out indices that agrees with surface form list (sf_list)
positions start from 0 and go negative when left, positive when right
we want to associate things closer to 0 with words closer to head
"""
#my_possible_positions = [i for i,x in enumerate(sf_list) if x==self.E.head]
#if "down" in [c.E.head for c in children]:
# import pdb
# pdb.set_trace()
#for possible_position in my_possible_positions:
#print("===")
child_prep = {(child.E.head,child.E.hlf_symbol):child for child in children}
pairing = []
avail_pos = sorted(positions)
sf_stk = sf_list[:]
if not left:
avail_pos = avail_pos[::-1]
sf_stk = sf_stk[::-1]
# if the position is so bad that it cuts off the words, just skip it
if not all([(word in sf_stk) for c in children for word in c.lexical]):
raise Exception()
stk_idx = len(sf_stk) - 1
#print("xxx")
domain = set([w for child in children for w in child.lexical])
import pdb
#pdb.set_trace()
while len(avail_pos) > 0 and stk_idx >= 0:
#while len(sf_stk) > 0 and len(pairing)<len(children):
#print("---", possible_position, child_prep.keys(), sf_stk, stk_idx)
next_word = sf_stk[stk_idx]
if next_word not in domain:
#print("trashpop", next_word)
sf_stk.pop()
else:
child, pos, low = self._check_heads(child_prep, next_word, stk_idx, sf_stk, avail_pos)
if child is not None:
stk_idx = low
sf_stk = sf_stk[:low]
pairing.append((child,pos))
stk_idx -= 1
try:
assert len(avail_pos) == 0
yield pairing
except:
raise Exception()
#try:
# assert len(my_possible_positions) > 1
#except:
print("available positions weren't exausted. why?")
print("I thought i had it figured out; multiple of this head word")
print("it partitions string too much.. but i was wrong?")
print("debugging. inspect now.")
import pdb
pdb.set_trace()
def sort_by_surface_form(self, sf_list, children, positions, left=True):
#import pdb
#pdb.set_trace()
#try:
#if self.E.head == "iii":
# import pdb
# pdb.set_trace()
all_pairings = list(self._sort_by_surface_form(sf_list, children, positions, left))
#except IndexError as e:
# print("tried to pop from an empty list... what should I do")
# import pdb
# pdb.set_trace()
if len(all_pairings) == 1:
return all_pairings[0]
else:
#try:
key = lambda item: (item[1], (item[0].E.head, item[0].E.hlf_symbol))
same = lambda p1, p2: tuple(map(key,p1))==tuple(map(key,p2))
if all([same(p1,p2) for p1 in all_pairings for p2 in all_pairings]):
#print("all same anyway, returning")
return all_pairings[0]
else:
dt_check = lambda diffs: any([item[0].E.head_symbol == "DT" for pair in diffs for item in pair])
dt_key = lambda pairing: sum([abs(p) for c,p in pairing if c.E.head_symbol=="DT"])
differences = [(p1,p2) for i,p1 in enumerate(all_pairings)
for j,p2 in enumerate(all_pairings)
if not same(p1,p2) and i<j]
differences = [(x,y) for diff_item in differences for x,y in zip(*diff_item) if x!=y]
if len(differences) == 2 and dt_check(differences):
#print("shortcutting")
out_pairing = max(all_pairings, key=dt_key)
#print("hopefully works: ", out_pairing)
return out_pairing
#return all_pairings[0]
print("Not sure what to do. not all pairings are the same. inspect please")
import pdb
pdb.set_trace()
#except Exception as e:
# print("not exactly sure what is breaking")
# import pdb
# pdb.set_trace()
def surface_index(self, sf_list, num_left):
for i,w in enumerate(sf_list):
if w == self.E.head and i >= num_left:
return i
return -1
def align_gorn_to_surface(self, surface_form):
if len(self.children) == 0:
return
sf_list = surface_form.split(" ")
if self.E.head == "as" and "much" in sf_list:
import pdb
#pdb.set_trace()
left_of = lambda x,me: x.elem_tree.address < me.elem_tree.head_address
left_children = [child for child in self.children if left_of(child, self)]
organizer = {}
num_left = sum([child.num_children+1 for child in left_children])
boundary = max(num_left, self.surface_index(sf_list, num_left))
left_form = " ".join(sf_list[:boundary])
right_form = " ".join(sf_list[boundary+1:])
#### LEFT CHILDREN
for child in left_children:
addr = child.elem_tree.address
level, position = addr[:-1], addr[-1]
organizer.setdefault(level, []).append((child, position))
for level, items in organizer.items():
if len(items) == 1:
continue
children, positions = [x[0] for x in items], [x[1] for x in items]
pairing = self.sort_by_surface_form(sf_list[:boundary], children, positions, True)
for child,position in pairing:
assert child.E.address[:-1] == level
child.E.address = child.E.address[:-1] + (position,)
#### RIGHT CHILDREN
organizer = {}
right_children = [child for child in self.children if not left_of(child, self)]
for child in right_children:
addr = child.elem_tree.address
level, position = addr[:-1], addr[-1]
organizer.setdefault(level, []).append((child, position))
for level, items in organizer.items():
if len(items) == 1:
continue
children, positions = [x[0] for x in items], [x[1] for x in items]
pairing = self.sort_by_surface_form(sf_list[boundary+1:], children, positions, False)
for child,position in pairing:
assert child.E.address[:-1] == level
child.E.address = child.E.address[:-1] + (position,)
for child in left_children:
child.align_gorn_to_surface(left_form)
for child in right_children:
child.align_gorn_to_surface(right_form)
def align_gorn_to_surface_deprecated_march30(self, surface_form):
left_of = lambda x,me: x.elem_tree.address < me.elem_tree.head_address
surface_index = lambda child: surface_form.find(child.elem_tree.head)
left_children = [child for child in self.children if left_of(child, self)]
organizer = {}
#### LEFT CHILDREN
for child in left_children:
addr = child.elem_tree.address
level, position = addr[:-1], addr[-1]
organizer.setdefault(level, []).append((child, position))
for level, items in organizer.items():
if len(items) == 1:
continue
child_list = sorted([c for c,p in items], key=surface_index)
pop_q = deque(sorted([p for c,p in items]))
assert [x!=y for x in pop_q for y in pop_q]
for child in child_list:
addr = child.elem_tree.address
child.elem_tree.address = addr[:-1] + (pop_q.popleft(), )
#### RIGHT CHILDREN
organizer = {}
right_children = [child for child in self.children if not left_of(child, self)]
for child in right_children:
addr = child.elem_tree.address
level, position = addr[:-1], addr[-1]
organizer.setdefault(level, []).append((child, position))
for level, items in organizer.items():
if len(items) == 1:
continue
child_list = sorted([c for c,p in items], key=surface_index)
pop_q = deque(sorted([p for c,p in items]))
for child in child_list:
addr = child.elem_tree.address
child.elem_tree.address = addr[:-1] + (pop_q.popleft(), )
for child in self.children:
child.align_gorn_to_surface(surface_form)
def align_gorn_to_surface_old(self, surface_form):
ins_children = [child for child in self.children if child.is_insertion]
sub_children = [child for child in self.children if not child.is_insertion]
surface_index = lambda child: surface_form.find(child.elem_tree.head)
organizer = {}
for child in ins_children:
addr = child.elem_tree.address
new_addr = addr[:-1] + ((1,) if addr[-1] > 0 else (-1,))
organizer.setdefault(addr, []).append(child)
for proxy_addr, child_list in organizer.items():
if len(child_list) == 1:
continue
offset = min([c.elem_tree.address[-1] for c in child_list])
for i, child in enumerate(sorted(child_list, key=surface_index),0):
last_bit = i+offset
child.elem_tree.address = proxy_addr[:-1] +(last_bit,)
for child in self.children:
child.align_gorn_to_surface(surface_form)
#left_ins = [child for child in ins_children if child.elem_tree.address[-1]<0]
#right_ins = [child for child in ins_children if child.elem_tree.address[-1]>0]
#surface_index = lambda child: surface_form.find(child.elem_tree.head)
#sort_key = lambda ch: ch.elem_tree.address[:-1]+()
def gorn_in_order(self, include_empty=False):
items = [(child.elem_tree.address, child) for child in self.children]
if len(self.E.head) > 0:
items.append((self.elem_tree.head_address, self))
if include_empty:
for point in self.elem_tree.substitution_points:
if all([addr!=point.gorn for addr, _ in items]):
items.append((point.gorn, None))
sorted_items = sorted(items)
return sorted_items
def gorn_pre_order(self, merged=True):
"""Return children sorted by gorn. Use for pre-order walks.
Will also return from inside out.
"""
left_of = lambda x,me: x.elem_tree.address < me.elem_tree.head_address
left_children = [child for child in self.children if left_of(child, self)]
right_children = [child for child in self.children if not left_of(child, self)]
sorted_left = sorted(left_children, key=lambda x: x.elem_tree.address, reverse=True)
#for i,left in enumerate(sorted_left):
# print(i,left.elem_tree.bracketed_string)
# print(i,left.elem_tree.address)
sorted_right = sorted(right_children, key=lambda x: x.elem_tree.address)
#for i,right in enumerate(sorted_right):
# print(i,right.elem_tree.bracketed_string)
# print(i,right.elem_tree.address)
#sorted_children = sorted(self.children, key=lambda x: x.elem_tree.address)
if merged:
return sorted_left + sorted_right
else:
return sorted_left, sorted_right
def learning_features(self, *args):
"""make learning features. currently for dual attender model.
output: features and annotations for pairs (parent, child)
"""
feature_output = []
f1 = "head={}".format(self.E.head)
f2 = "template={}".format(self.E.bracketed_string.replace(self.E.head, ""))
if self.is_root:
my_feats = (f2,)
else:
my_feats = (f1, f2)
for child_type, side in zip(self.gorn_pre_order(False), ("left", "right")):
for i, child in enumerate(child_type):
anno = []
anno.append("dist-from-spine: {}".format(i))
anno.append("dist-from-frontier: {}".format(len(child_type)-i-1))
anno.append("spine-side: {}".format(side))
if child.is_insertion:
anno.append("type=ins")
else:
anno.append("type=sub")
for j, pt in enumerate(self.E.substitution_points):
if pt.gorn == child.E.address:
anno.append("argument-{}".format(j))
child_feats, pairs_below = child.learning_features()
feature_output.extend(pairs_below)
feature_output.append((my_feats, child_feats, tuple(anno)))
return my_feats, feature_output
def _old_learning_features(self, flat=False):
raise Exception("don't use this function anymore")
f1 = "head={}".format(self.elem_tree.head)
f2 = "template={}".format(self.elem_tree.bracketed_string.replace(self.elem_tree.head, ""))
#f4 = "surface=[{}]".format(str(self))
#fulllex = self.in_order_lexical(True)
#f5 = "surface_with_empties=[{}]".format(fulllex)
myfeats = {"f1":f1,"f2":f2,"f3": []}
#"f4":f4,"f5":f5}
allfeats = [myfeats]
first_ins = lambda child: (child.E.address < self.E.head_address and
all([child.E.address < other_child.E.address
for other_child in self.children
if other_child.E.address != child.E.address]))
last_ins = lambda child: (child.E.address > self.E.head_address and
all([child.E.address > other_child.E.address
for other_child in self.children
if other_child.E.address != child.E.address]))
for child in self.children:
# if child is insertion, find out whether it's furthest left or furthest right
# if child is substitution, find out which of the substitution poitns it corresponds to
if first_ins(child):
pass
arrow = "<-" if child.is_insertion else "->"
f3 = "{}{}{}".format(self.elem_tree.head, arrow, child.elem_tree.head)
myfeats['f3'].append(f3)
allfeats.extend(child.learning_features())
if flat:
final_list = []
for featset in allfeats:
for featval in featset.values():
if isinstance(featval, list):
final_list.extend(featval)
else:
final_list.append(featval)
return final_list
return allfeats
def path_reconstruction_features(self):
return (self.E.bracketed_string, self.E.hlf_symbol,
self.E.tree_operation.target['target_hlf'],
self.E.tree_operation.target['target_gorn'])
#return (self.elem_tree.tree_id, self.elem_tree.head)
def pre_order_features(self):
feat_list = [self.path_reconstruction_features()]# for now, just id
for child in self.gorn_pre_order():
feat_list.extend(child.pre_order_features())
return tuple(feat_list)
def pre_order_descriptive(self):
descs = [str(self.elem_tree)]
sorted_children = sorted(self.children, key=lambda x: x.elem_tree.address)
for tree in sorted_children:
descs.extend(tree.pre_order_descriptive())
return descs
def in_order_descriptive(self):
descs = []
for address, tree in self.gorn_in_order():
if tree == self:
descs.append(str(self.elem_tree))
else:
descs.extend(tree.in_order_descriptive())
return descs
def in_order_treeids(self):
treeids = []
for address, tree in self.gorn_in_order():
if tree == self:
treeids.append(tree.elem_tree.tree_id)
else:
treeids.extend(tree.in_order_treeids())
return treeids
def pre_order_lexical(self):
pass
def in_order_lexical(self, include_empties=False):
lexical = []
for address, tree in self.gorn_in_order(include_empties):
if include_empties and tree is None:
lexical.append("<open-sub-point>")
elif tree.elem_tree.head is None:
continue
elif tree == self:
lexical.append(self.elem_tree.head)
else:
lexical.extend(tree.in_order_lexical())
return lexical
def expanded_by_hlf(self, book=None):
if book is None:
self.expand_address()
book = {}
book[self.E.hlf_symbol] = self.expanded_address
for child in self.children:
book = child.expanded_by_hlf(book)
return book
def make_expression(self, top=True):
expr = []
for i, (address, tree) in enumerate(self.gorn_in_order()):
if tree == self:
expr.append(self.predicate)
else:
expr.extend(tree.make_expression(False))
if top:
return Expression.from_iter(expr)
return expr
def lookup_insert(self, index):
return self.elem_tree.insertion_points[index].gorn
def lookup_sub(self, index):
return self.elem_tree.substitution_points[index].gorn
def set_path_features(self, instantiate_semantics=True, *args, **kwargs):
self.elem_tree.set_path_features(*args, **kwargs)
if instantiate_semantics:
self.predicate = self.instantiate_semantics()
def set_insertion_argument(self, arg):
if not self.is_insertion:
raise Exception("Don't call this if it's not insertion..")
self.predicate.substitute(arg, 0)
def instantiate_semantics(self):
num_arguments = len(self.elem_tree.substitution_points)
if self.is_insertion:
num_arguments += 1
predicate = Predicate(self.elem_tree.head,
num_arguments,
self.elem_tree.hlf_symbol)
if self.elem_tree.hlf_symbol is None:
self.elem_tree.set_path_features(self_hlf=predicate.hlf_symbol)
return predicate
def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
setattr(result, k, deepcopy(v, memo))
return result
def clone(self):
children = [child.clone() for child in self.children]
pred = self.predicate.clone()
return self.__class__(self.elem_tree.clone(), children)
def handle_insertion(self, operative, in_place):
"""Check if my elementary tree is the insertion point; if not, recurse
Args:
            operative: DerivationTree instance whose elementary tree is to be inserted
            in_place: if True, mutate this tree in place; otherwise return a new copy
"""
ThisClass = self.__class__
op_tree = operative.elem_tree
op = op_tree.tree_operation
if self.elem_tree.matches_inspoint(op):
# do the insertting; making new elem tree copies; updating addresses
new_elem_tree, new_op_tree = self.elem_tree.insert(op_tree)
# start making the new composed tree
# create a new clone of the op dtree
if in_place:
new_operative = operative
new_operative.elem_tree = new_op_tree
new_children = self.children
else:
#new_children = [child.clone() for child in self.children]
new_children = deepcopy(self.children)
new_operative = ThisClass.replicate(operative, new_op_tree)
# since it's an insertion, this pred is an argument to the op
new_pred = deepcopy(self.predicate)
# put the predicate into the op
new_operative.set_insertion_argument(new_pred)
# finish off the children
new_children.append(new_operative)
else:
new_elem_tree = deepcopy(self.elem_tree)
new_children = [child.operate(operative, in_place) for child in self.children]
new_pred = deepcopy(self.predicate)
if in_place:
self.elem_tree = new_elem_tree
self.children = new_children
self.predicate = new_pred
return self
else:
return ThisClass(new_elem_tree, new_children)
def handle_substitution(self, operative, in_place=False):
"""Check if my elementary tree is the subpoint; if not, recurse on children
Args:
op_tree: ElementaryTree instance<|fim▁hole|> if self.elem_tree.matches_subpoint(op):
# the purpose of the substitute is to give the op_tree an address
# that adddress is the location of its substituion
# this is important for when we want to order our derived children via gorn
new_elem_tree, new_op_tree = self.elem_tree.substitute(op_tree)
##### HANDLE IN-PLACE-TYPE VS FACTORY-TYPE OPERATION
# the thing coming in is copied
if in_place:
new_operative = operative
new_operative.elem_tree = new_op_tree
new_children = self.children
else:
new_children = deepcopy(self.children)#[child.clone() for child in self.children]
new_operative = ThisClass.replicate(operative, new_op_tree)
new_children.append(new_operative)
##### HANDLE LOGIC STUFF
new_pred = deepcopy(self.predicate)#.clone()
# we put it into its correct spot
if self.is_insertion:
pred_arg_index = new_elem_tree.last_index + 1
else:
pred_arg_index = new_elem_tree.last_index
            # Note: "substitute" here is a logic substitution (filling one of the
            # predicate's argument slots), not a tree substitution.
new_pred.substitute(new_operative.predicate, pred_arg_index)
else:
new_elem_tree = deepcopy(self.elem_tree)#.clone()
new_pred = deepcopy(self.predicate)#.clone()
new_children = [child.operate(operative, in_place) for child in self.children]
if in_place:
self.elem_tree = new_elem_tree
self.children = new_children
self.predicate = new_pred
return self
else:
return ThisClass(new_elem_tree, new_children)
def operate(self, operative, in_place=False):
"""handle the possible operations incoming to this derived tree.
Args:
operative: a DerivationTree instance
Returns:
a new DerivationTree that results from operation
Notes:
An intended operation would know what tree it wants to operate on
and where it wants to do it.
E.G:
(NP* (DT a)) knows it wants to attach to the tree (NP (NN dog))
which is substituted into (S (NP) (VP finds) (NP))
The DerivationTree should know that (NP (NN dog)) was substituted into
the first substitution spot.
Temp QUD:
what is the best way to represent this intended operation?
we could have the DT tree know it wants to attach to tree id X
but that tree id X could be in the tree twice (either NP)
it could know the predicate then?
"""
if operative.elem_tree.tree_operation.type == consts.INSERTION:
return self.handle_insertion(operative, in_place)
elif operative.elem_tree.tree_operation.type == consts.SUBSTITUTION:
return self.handle_substitution(operative, in_place)
@classmethod
def replicate(cls, old_inst, new_elem_tree=None, new_children=None, new_pred=None):
""" this is basically clone but allows etrees, childre, and preds rather than just straight cloning """
new_elem_tree = new_elem_tree or deepcopy(old_inst.elem_tree)#.clone()
new_children = new_children or deepcopy(old_inst.children) #[child.clone() for child in old_inst.children]
new_pred = new_pred or deepcopy(old_inst.predicate)#.clone()
return cls(new_elem_tree, new_children)
def test():
parse = """(ROOT(S(NP(NP (DT The) (NN boy))(VP (VBG laying)(S(VP (VB face)(PRT (RP down))(PP (IN on)(NP (DT a) (NN skateboard)))))))(VP (VBZ is)(VP (VBG being)(VP (VBN pushed)(PP (IN along)(NP (DT the) (NN ground)))(PP (IN by)(NP (DT another) (NN boy))))))(. .)))"""
tree_cuts = tree_enrichment.string2cuts(parse)
tree_strings = [cut.save_str() for cut in tree_cuts]
derived_trees = [DerivationTree.from_bracketed(tree_string) for tree_string in tree_strings]
derived_trees[2].elem_tree.insertion_points[0].hlf_symbol = 'g0'
derived_trees[1].elem_tree.tree_operation.target['target_hlf'] = 'g0'
derived_trees[1].elem_tree.tree_operation.target['target_gorn'] = (0,)
#derived_two = [DerivationTree.from_parse_tree(tree) for tree in tree_cuts]
return derived_trees
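    # A follow-on step one might try with the pieces above (illustrative only; it
    # assumes the targeted insertion point accepts the operation):
    #   combined = derived_trees[2].operate(derived_trees[1])
    #   print(combined.in_order_descriptive())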
if __name__ == "__main__":
test()<|fim▁end|>
|
"""
ThisClass = self.__class__
op_tree = operative.elem_tree
op = op_tree.tree_operation
|
<|file_name|>dissimilaroriginwindow.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::DissimilarOriginWindowBinding;
use dom::bindings::codegen::Bindings::DissimilarOriginWindowBinding::DissimilarOriginWindowMethods;
use dom::bindings::error::{Error, ErrorResult};
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{JS, MutNullableJS, Root};
use dom::bindings::str::DOMString;
use dom::bindings::structuredclone::StructuredCloneData;
use dom::dissimilaroriginlocation::DissimilarOriginLocation;
use dom::globalscope::GlobalScope;
use dom::windowproxy::WindowProxy;
use dom_struct::dom_struct;
use ipc_channel::ipc;
use js::jsapi::{JSContext, HandleValue};
use js::jsval::{JSVal, UndefinedValue};
use msg::constellation_msg::PipelineId;
use script_traits::ScriptMsg;
use servo_url::ImmutableOrigin;
use servo_url::MutableOrigin;
use servo_url::ServoUrl;
/// Represents a dissimilar-origin `Window` that exists in another script thread.
///
/// Since the `Window` is in a different script thread, we cannot access it
/// directly, but some of its accessors (for example `window.parent`)
/// still need to function.
///
/// In `windowproxy.rs`, we create a custom window proxy for these windows,
/// that throws security exceptions for most accessors. This is not a replacement
/// for XOWs, but provides belt-and-braces security.
#[dom_struct]
pub struct DissimilarOriginWindow {
/// The global for this window.
globalscope: GlobalScope,
/// The window proxy for this window.
window_proxy: JS<WindowProxy>,
/// The location of this window, initialized lazily.
location: MutNullableJS<DissimilarOriginLocation>,
}
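// Behaviour sketch (illustrative; not tied to a particular test): for a page on
// https://a.example embedding an iframe from https://b.example, script such as
//   let w = frame.contentWindow;
//   w.parent;                                  // allowed, served via this proxy
//   w.postMessage("hi", "https://a.example");  // allowed
//   w.document;                                // throws a SecurityError
// exercises the cross-origin accessors this struct backs.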
impl DissimilarOriginWindow {
#[allow(unsafe_code)]
pub fn new(
global_to_clone_from: &GlobalScope,
window_proxy: &WindowProxy,
) -> Root<Self> {
let cx = global_to_clone_from.get_cx();
// Any timer events fired on this window are ignored.
let (timer_event_chan, _) = ipc::channel().unwrap();
let win = box Self {
globalscope: GlobalScope::new_inherited(
PipelineId::new(),
global_to_clone_from.devtools_chan().cloned(),
global_to_clone_from.mem_profiler_chan().clone(),
global_to_clone_from.time_profiler_chan().clone(),
global_to_clone_from.script_to_constellation_chan().clone(),
global_to_clone_from.scheduler_chan().clone(),<|fim▁hole|> timer_event_chan,
global_to_clone_from.origin().clone(),
// FIXME(nox): The microtask queue is probably not important
// here, but this whole DOM interface is a hack anyway.
global_to_clone_from.microtask_queue().clone(),
),
window_proxy: JS::from_ref(window_proxy),
location: Default::default(),
};
unsafe { DissimilarOriginWindowBinding::Wrap(cx, win) }
}
pub fn origin(&self) -> &MutableOrigin {
self.upcast::<GlobalScope>().origin()
}
}
impl DissimilarOriginWindowMethods for DissimilarOriginWindow {
// https://html.spec.whatwg.org/multipage/#dom-window
fn Window(&self) -> Root<WindowProxy> {
Root::from_ref(&*self.window_proxy)
}
// https://html.spec.whatwg.org/multipage/#dom-self
fn Self_(&self) -> Root<WindowProxy> {
Root::from_ref(&*self.window_proxy)
}
// https://html.spec.whatwg.org/multipage/#dom-frames
fn Frames(&self) -> Root<WindowProxy> {
Root::from_ref(&*self.window_proxy)
}
// https://html.spec.whatwg.org/multipage/#dom-parent
fn GetParent(&self) -> Option<Root<WindowProxy>> {
// Steps 1-3.
if self.window_proxy.is_browsing_context_discarded() {
return None;
}
// Step 4.
if let Some(parent) = self.window_proxy.parent() {
return Some(Root::from_ref(parent));
}
// Step 5.
Some(Root::from_ref(&*self.window_proxy))
}
// https://html.spec.whatwg.org/multipage/#dom-top
fn GetTop(&self) -> Option<Root<WindowProxy>> {
// Steps 1-3.
if self.window_proxy.is_browsing_context_discarded() {
return None;
}
// Steps 4-5.
Some(Root::from_ref(self.window_proxy.top()))
}
// https://html.spec.whatwg.org/multipage/#dom-length
fn Length(&self) -> u32 {
// TODO: Implement x-origin length
0
}
// https://html.spec.whatwg.org/multipage/#dom-window-close
fn Close(&self) {
// TODO: Implement x-origin close
}
// https://html.spec.whatwg.org/multipage/#dom-window-closed
fn Closed(&self) -> bool {
// TODO: Implement x-origin close
false
}
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-window-postmessage
unsafe fn PostMessage(&self, cx: *mut JSContext, message: HandleValue, origin: DOMString) -> ErrorResult {
// Step 3-5.
let origin = match &origin[..] {
"*" => None,
"/" => {
// TODO: Should be the origin of the incumbent settings object.
None
},
url => match ServoUrl::parse(&url) {
Ok(url) => Some(url.origin()),
Err(_) => return Err(Error::Syntax),
}
};
// Step 1-2, 6-8.
// TODO(#12717): Should implement the `transfer` argument.
let data = StructuredCloneData::write(cx, message)?;
// Step 9.
self.post_message(origin, data);
Ok(())
}
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-opener
unsafe fn Opener(&self, _: *mut JSContext) -> JSVal {
// TODO: Implement x-origin opener
UndefinedValue()
}
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-opener
unsafe fn SetOpener(&self, _: *mut JSContext, _: HandleValue) {
// TODO: Implement x-origin opener
}
// https://html.spec.whatwg.org/multipage/#dom-window-blur
fn Blur(&self) {
// TODO: Implement x-origin blur
}
// https://html.spec.whatwg.org/multipage/#dom-focus
fn Focus(&self) {
// TODO: Implement x-origin focus
}
// https://html.spec.whatwg.org/multipage/#dom-location
fn Location(&self) -> Root<DissimilarOriginLocation> {
self.location.or_init(|| DissimilarOriginLocation::new(self))
}
}
impl DissimilarOriginWindow {
pub fn post_message(&self, origin: Option<ImmutableOrigin>, data: StructuredCloneData) {
let msg = ScriptMsg::PostMessage(self.window_proxy.browsing_context_id(),
origin,
data.move_to_arraybuffer());
let _ = self.upcast::<GlobalScope>().script_to_constellation_chan().send(msg);
}
}<|fim▁end|>
|
global_to_clone_from.resource_threads().clone(),
|
<|file_name|>BenchmarkTest08513.java<|end_file_name|><|fim▁begin|>/**
* OWASP Benchmark Project v1.1
*
* This file is part of the Open Web Application Security Project (OWASP)
* Benchmark Project. For details, please see
* <a href="https://www.owasp.org/index.php/Benchmark">https://www.owasp.org/index.php/Benchmark</a>.
*
* The Benchmark is free software: you can redistribute it and/or modify it under the terms
* of the GNU General Public License as published by the Free Software Foundation, version 2.
*
* The Benchmark is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details
*
* @author Dave Wichers <a href="https://www.aspectsecurity.com">Aspect Security</a>
* @created 2015
*/
package org.owasp.benchmark.testcode;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@WebServlet("/BenchmarkTest08513")
public class BenchmarkTest08513 extends HttpServlet {
private static final long serialVersionUID = 1L;
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doPost(request, response);
}
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
String param = "";
java.util.Enumeration<String> headerNames = request.getHeaderNames();
if (headerNames.hasMoreElements()) {
param = headerNames.nextElement(); // just grab first element
}
String bar = new Test().doSomething(param);
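		// Note: 'bar' still carries the unvalidated header name read from the request
		// above, so the directory search below runs with attacker-influenced input --
		// the data flow this benchmark case is exercising.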
try {
javax.naming.directory.DirContext dc = org.owasp.benchmark.helpers.Utils.getDirContext();
dc.search("name", bar, new javax.naming.directory.SearchControls());
} catch (javax.naming.NamingException e) {
throw new ServletException(e);<|fim▁hole|>
private class Test {
public String doSomething(String param) throws ServletException, IOException {
String bar = param;
return bar;
}
} // end innerclass Test
} // end DataflowThruInnerClass<|fim▁end|>
|
}
} // end doPost
|
<|file_name|>postupgrade.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package upgrade
import (
"fmt"
"os"
"path/filepath"
"time"
"github.com/pkg/errors"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
errorsutil "k8s.io/apimachinery/pkg/util/errors"
"k8s.io/apimachinery/pkg/util/version"
clientset "k8s.io/client-go/kubernetes"
certutil "k8s.io/client-go/util/cert"
kubeadmapi "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm"
kubeadmapiv1beta1 "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm/v1beta1"
kubeadmconstants "k8s.io/kubernetes/cmd/kubeadm/app/constants"
"k8s.io/kubernetes/cmd/kubeadm/app/phases/addons/dns"
"k8s.io/kubernetes/cmd/kubeadm/app/phases/addons/proxy"
"k8s.io/kubernetes/cmd/kubeadm/app/phases/bootstraptoken/clusterinfo"
nodebootstraptoken "k8s.io/kubernetes/cmd/kubeadm/app/phases/bootstraptoken/node"
certsphase "k8s.io/kubernetes/cmd/kubeadm/app/phases/certs"
kubeletphase "k8s.io/kubernetes/cmd/kubeadm/app/phases/kubelet"
patchnodephase "k8s.io/kubernetes/cmd/kubeadm/app/phases/patchnode"
"k8s.io/kubernetes/cmd/kubeadm/app/phases/uploadconfig"
"k8s.io/kubernetes/cmd/kubeadm/app/util/apiclient"
dryrunutil "k8s.io/kubernetes/cmd/kubeadm/app/util/dryrun"
)
var expiry = 180 * 24 * time.Hour
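// expiry is the certificate age threshold used by shouldBackupAPIServerCertAndKey
// below: an apiserver cert issued more than 180 days ago gets backed up and re-created.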
// PerformPostUpgradeTasks runs nearly the same functions as 'kubeadm init' would do
// Note that the mark-control-plane phase is left out, not needed, and no token is created as that doesn't belong to the upgrade
func PerformPostUpgradeTasks(client clientset.Interface, cfg *kubeadmapi.InitConfiguration, newK8sVer *version.Version, dryRun bool) error {
errs := []error{}
// Upload currently used configuration to the cluster
// Note: This is done right in the beginning of cluster initialization; as we might want to make other phases
// depend on centralized information from this source in the future
if err := uploadconfig.UploadConfiguration(cfg, client); err != nil {
errs = append(errs, err)
}
// Create the new, version-branched kubelet ComponentConfig ConfigMap
if err := kubeletphase.CreateConfigMap(cfg.ClusterConfiguration.ComponentConfigs.Kubelet, cfg.KubernetesVersion, client); err != nil {
errs = append(errs, errors.Wrap(err, "error creating kubelet configuration ConfigMap"))
}
// Write the new kubelet config down to disk and the env file if needed
if err := writeKubeletConfigFiles(client, cfg, newK8sVer, dryRun); err != nil {
errs = append(errs, err)
}
// Annotate the node with the crisocket information, sourced either from the InitConfiguration struct or
// --cri-socket.
// TODO: In the future we want to use something more official like NodeStatus or similar for detecting this properly
if err := patchnodephase.AnnotateCRISocket(client, cfg.NodeRegistration.Name, cfg.NodeRegistration.CRISocket); err != nil {
errs = append(errs, errors.Wrap(err, "error uploading crisocket"))
}
// Create/update RBAC rules that makes the bootstrap tokens able to post CSRs
if err := nodebootstraptoken.AllowBootstrapTokensToPostCSRs(client); err != nil {
errs = append(errs, err)
}
// Create/update RBAC rules that makes the bootstrap tokens able to get their CSRs approved automatically
if err := nodebootstraptoken.AutoApproveNodeBootstrapTokens(client); err != nil {
errs = append(errs, err)
}
// Create/update RBAC rules that makes the nodes to rotate certificates and get their CSRs approved automatically
if err := nodebootstraptoken.AutoApproveNodeCertificateRotation(client); err != nil {
errs = append(errs, err)
}
	// TODO: Does this need to happen here? Updating cluster info should probably be separate from a normal upgrade
// Create the cluster-info ConfigMap with the associated RBAC rules
// if err := clusterinfo.CreateBootstrapConfigMapIfNotExists(client, kubeadmconstants.GetAdminKubeConfigPath()); err != nil {
// return err
//}
// Create/update RBAC rules that makes the cluster-info ConfigMap reachable
if err := clusterinfo.CreateClusterInfoRBACRules(client); err != nil {
errs = append(errs, err)
}
// Rotate the kube-apiserver cert and key if needed
if err := BackupAPIServerCertIfNeeded(cfg, dryRun); err != nil {
errs = append(errs, err)
}
// Upgrade kube-dns/CoreDNS and kube-proxy
if err := dns.EnsureDNSAddon(&cfg.ClusterConfiguration, client); err != nil {
errs = append(errs, err)
}
// Remove the old DNS deployment if a new DNS service is now used (kube-dns to CoreDNS or vice versa)
if err := removeOldDNSDeploymentIfAnotherDNSIsUsed(&cfg.ClusterConfiguration, client, dryRun); err != nil {
errs = append(errs, err)
}
if err := proxy.EnsureProxyAddon(&cfg.ClusterConfiguration, &cfg.LocalAPIEndpoint, client); err != nil {
errs = append(errs, err)
}
return errorsutil.NewAggregate(errs)
}
func removeOldDNSDeploymentIfAnotherDNSIsUsed(cfg *kubeadmapi.ClusterConfiguration, client clientset.Interface, dryRun bool) error {
return apiclient.TryRunCommand(func() error {
installedDeploymentName := kubeadmconstants.KubeDNSDeploymentName
deploymentToDelete := kubeadmconstants.CoreDNSDeploymentName
if cfg.DNS.Type == kubeadmapi.CoreDNS {
installedDeploymentName = kubeadmconstants.CoreDNSDeploymentName
deploymentToDelete = kubeadmconstants.KubeDNSDeploymentName
}
// If we're dry-running, we don't need to wait for the new DNS addon to become ready
if !dryRun {
dnsDeployment, err := client.AppsV1().Deployments(metav1.NamespaceSystem).Get(installedDeploymentName, metav1.GetOptions{})
if err != nil {
return err
}
if dnsDeployment.Status.ReadyReplicas == 0 {
return errors.New("the DNS deployment isn't ready yet")
}
}
// We don't want to wait for the DNS deployment above to become ready when dryrunning (as it never will)
// but here we should execute the DELETE command against the dryrun clientset, as it will only be logged
err := apiclient.DeleteDeploymentForeground(client, metav1.NamespaceSystem, deploymentToDelete)
if err != nil && !apierrors.IsNotFound(err) {
return err
}
return nil
}, 10)
}<|fim▁hole|> shouldBackup, err := shouldBackupAPIServerCertAndKey(certAndKeyDir)
if err != nil {
// Don't fail the upgrade phase if failing to determine to backup kube-apiserver cert and key.
return errors.Wrap(err, "[postupgrade] WARNING: failed to determine to backup kube-apiserver cert and key")
}
if !shouldBackup {
return nil
}
// If dry-running, just say that this would happen to the user and exit
if dryRun {
fmt.Println("[postupgrade] Would rotate the API server certificate and key.")
return nil
}
// Don't fail the upgrade phase if failing to backup kube-apiserver cert and key, just continue rotating the cert
// TODO: We might want to reconsider this choice.
if err := backupAPIServerCertAndKey(certAndKeyDir); err != nil {
fmt.Printf("[postupgrade] WARNING: failed to backup kube-apiserver cert and key: %v", err)
}
return certsphase.CreateCertAndKeyFilesWithCA(
&certsphase.KubeadmCertAPIServer,
&certsphase.KubeadmCertRootCA,
cfg,
)
}
func writeKubeletConfigFiles(client clientset.Interface, cfg *kubeadmapi.InitConfiguration, newK8sVer *version.Version, dryRun bool) error {
kubeletDir, err := GetKubeletDir(dryRun)
if err != nil {
// The error here should never occur in reality, would only be thrown if /tmp doesn't exist on the machine.
return err
}
errs := []error{}
// Write the configuration for the kubelet down to disk so the upgraded kubelet can start with fresh config
if err := kubeletphase.DownloadConfig(client, newK8sVer, kubeletDir); err != nil {
// Tolerate the error being NotFound when dryrunning, as there is a pretty common scenario: the dryrun process
// *would* post the new kubelet-config-1.X configmap that doesn't exist now when we're trying to download it
// again.
if !(apierrors.IsNotFound(err) && dryRun) {
errs = append(errs, errors.Wrap(err, "error downloading kubelet configuration from the ConfigMap"))
}
}
if dryRun { // Print what contents would be written
dryrunutil.PrintDryRunFile(kubeadmconstants.KubeletConfigurationFileName, kubeletDir, kubeadmconstants.KubeletRunDirectory, os.Stdout)
}
envFilePath := filepath.Join(kubeadmconstants.KubeletRunDirectory, kubeadmconstants.KubeletEnvFileName)
if _, err := os.Stat(envFilePath); os.IsNotExist(err) {
// Write env file with flags for the kubelet to use. We do not need to write the --register-with-taints for the control-plane,
// as we handle that ourselves in the mark-control-plane phase
// TODO: Maybe we want to do that some time in the future, in order to remove some logic from the mark-control-plane phase?
if err := kubeletphase.WriteKubeletDynamicEnvFile(&cfg.ClusterConfiguration, &cfg.NodeRegistration, false, kubeletDir); err != nil {
errs = append(errs, errors.Wrap(err, "error writing a dynamic environment file for the kubelet"))
}
if dryRun { // Print what contents would be written
dryrunutil.PrintDryRunFile(kubeadmconstants.KubeletEnvFileName, kubeletDir, kubeadmconstants.KubeletRunDirectory, os.Stdout)
}
}
return errorsutil.NewAggregate(errs)
}
// GetKubeletDir gets the kubelet directory based on whether the user is dry-running this command or not.
func GetKubeletDir(dryRun bool) (string, error) {
if dryRun {
return kubeadmconstants.CreateTempDirForKubeadm("kubeadm-upgrade-dryrun")
}
return kubeadmconstants.KubeletRunDirectory, nil
}
// backupAPIServerCertAndKey backups the old cert and key of kube-apiserver to a specified directory.
func backupAPIServerCertAndKey(certAndKeyDir string) error {
subDir := filepath.Join(certAndKeyDir, "expired")
if err := os.Mkdir(subDir, 0766); err != nil {
return errors.Wrapf(err, "failed to created backup directory %s", subDir)
}
filesToMove := map[string]string{
filepath.Join(certAndKeyDir, kubeadmconstants.APIServerCertName): filepath.Join(subDir, kubeadmconstants.APIServerCertName),
filepath.Join(certAndKeyDir, kubeadmconstants.APIServerKeyName): filepath.Join(subDir, kubeadmconstants.APIServerKeyName),
}
return moveFiles(filesToMove)
}
// moveFiles moves files from one directory to another.
func moveFiles(files map[string]string) error {
filesToRecover := map[string]string{}
for from, to := range files {
if err := os.Rename(from, to); err != nil {
return rollbackFiles(filesToRecover, err)
}
filesToRecover[to] = from
}
return nil
}
// rollbackFiles moves the files back to the original directory.
func rollbackFiles(files map[string]string, originalErr error) error {
errs := []error{originalErr}
for from, to := range files {
if err := os.Rename(from, to); err != nil {
errs = append(errs, err)
}
}
return errors.Errorf("couldn't move these files: %v. Got errors: %v", files, errorsutil.NewAggregate(errs))
}
// shouldBackupAPIServerCertAndKey checks if the cert of kube-apiserver will be expired in 180 days.
func shouldBackupAPIServerCertAndKey(certAndKeyDir string) (bool, error) {
apiServerCert := filepath.Join(certAndKeyDir, kubeadmconstants.APIServerCertName)
certs, err := certutil.CertsFromFile(apiServerCert)
if err != nil {
return false, errors.Wrapf(err, "couldn't load the certificate file %s", apiServerCert)
}
if len(certs) == 0 {
return false, errors.New("no certificate data found")
}
if time.Since(certs[0].NotBefore) > expiry {
return true, nil
}
return false, nil
}<|fim▁end|>
|
// BackupAPIServerCertIfNeeded rotates the kube-apiserver certificate if older than 180 days
func BackupAPIServerCertIfNeeded(cfg *kubeadmapi.InitConfiguration, dryRun bool) error {
certAndKeyDir := kubeadmapiv1beta1.DefaultCertificatesDir
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*-
from . import project<|fim▁end|>
| |
<|file_name|>pypi_cache_server.py<|end_file_name|><|fim▁begin|>'''
Pypi cache server
Original author: Victor-mortal
'''
import os
import httplib
import urlparse
import logging
import locale
import json
import hashlib
import webob
import gevent
from gevent import wsgi as wsgi_fast, pywsgi as wsgi, monkey
CACHE_DIR = '.cache'
wsgi = wsgi_fast # comment to use pywsgi
host = '0.0.0.0'
port = 8080
class Proxy(object):
"""A WSGI based web proxy application
"""<|fim▁hole|>
def __init__(self, chunkSize=4096, timeout=60, dropHeaders=['transfer-encoding'], pypiHost=None, log=None):
"""
@param log: logger of logging library
"""
self.log = log
if self.log is None:
self.log = logging.getLogger('proxy')
self.chunkSize = chunkSize
self.timeout = timeout
self.dropHeaders = dropHeaders
self.pypiHost = pypiHost
def yieldData(self, response, cache_file=None):
while True:
data = response.read(self.chunkSize)
yield data
if cache_file:
cache_file.write(data)
if len(data) < self.chunkSize:
break
if cache_file:
cache_file.close()
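    # Streaming sketch: yieldData serves the upstream response in chunkSize pieces
    # and, when a cache_file handle is supplied, mirrors every chunk into it so a
    # later request for the same URL can be answered from disk.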
def _rewrite(self, req, start_response):
path = req.path_info
if req.query_string:
path += '?' + req.query_string
parts = urlparse.urlparse(path)
headers = req.headers
md = hashlib.md5()
md.update(' '.join('%s:%s'%v for v in headers.iteritems()))
md.update(path)
cache_file = os.path.join(CACHE_DIR, md.hexdigest())
if os.path.exists(cache_file):
o = json.load( open(cache_file+'.js', 'rb') )
start_response(o['response'], o['headers'])
return self.yieldData( open(cache_file) )
self.log.debug('Request from %s to %s', req.remote_addr, path)
url = path
conn = httplib.HTTPConnection(self.pypiHost, timeout=self.timeout)
#headers['X-Forwarded-For'] = req.remote_addr
#headers['X-Real-IP'] = req.remote_addr
try:
conn.request(req.method, url, headers=headers, body=req.body)
response = conn.getresponse()
except Exception, e:
msg = str(e)
if os.name == 'nt':
_, encoding = locale.getdefaultlocale()
msg = msg.decode(encoding)
self.log.warn('Bad gateway with reason: %s', msg, exc_info=True)
start_response('502 Bad gateway', [])
return ['Bad gateway']
headers = [(k, v) for (k, v) in response.getheaders()\
if k not in self.dropHeaders]
start_response('%s %s' % (response.status, response.reason),
headers)
json.dump( {'headers': headers, 'response': '%s %s' % (response.status, response.reason)}, open(cache_file+'.js', 'wb'))
return self.yieldData(response, cache_file=open(cache_file, 'wb'))
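    # Cache layout sketch: each URL maps to two files under CACHE_DIR, keyed by the
    # md5 of the request -- '<digest>' holds the response body streamed above and
    # '<digest>.js' holds the status line and headers replayed on a cache hit.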
def __call__(self, env, start_response):
req = webob.Request(env)
return self._rewrite(req, start_response)
if __name__ == '__main__':
if not os.path.isdir(CACHE_DIR):
os.mkdir(CACHE_DIR)
monkey.patch_all()
handler = Proxy(pypiHost='pypi.python.org:80')
wsgi.WSGIServer((host, port), handler).serve_forever()
run()<|fim▁end|>
|