prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>test_e_invoice_request_log.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestEInvoiceRequestLog(unittest.TestCase):<|fim▁hole|><|fim▁end|>
|
pass
|
<|file_name|>ini_doc.cpp<|end_file_name|><|fim▁begin|>#include <algorithm>
#include <utility>
#include "ini_doc.hpp"
<|fim▁hole|>namespace Utility {
namespace {
std::string& trim(std::string& str) {
auto pos = str.find_first_not_of(" \t");
str.erase(0, pos);
pos = str.find_last_not_of(" \t");
if (pos != str.npos) {
str.erase(pos + 1);
}
return str;
}
IniDoc::Section::value_type parseParameter(std::string& line) {
auto pos = line.find('=');
if (pos == line.npos) {
throw InvalidIniDoc(std::string("Invalid parameter: \"").append(line).append("\""));
}
std::string parameterName = line.substr(0, pos);
IniDoc::Property parameterValue{line.substr(pos + 1)};
return std::make_pair(std::move(parameterName), std::move(parameterValue));
}
void writeSection(std::ostream& os, const IniDoc::Section& section) {
for (const auto& propertyPair : section) {
os << propertyPair.first << "=" << propertyPair.second.string() << std::endl;
}
os << std::endl;
}
} //
void IniDoc::Property::syncStr() const {
if (strSync_) return;
str_.clear();
if (!items_.empty()) {
auto itbeg = items_.cbegin();
str_.append(getValue<std::string>(*itbeg++));
std::for_each(itbeg, items_.cend(), [this](const value_type& item){
str_.append(1, ',').append(item);
});
}
strSync_ = true;
}
void IniDoc::Property::syncItems() const {
if (itemsSync_) return;
Cont_ value;
auto itbeg = str_.cbegin();
auto itend = str_.cend();
auto it0 = itbeg;
decltype(it0) it1;
items_.clear();
do {
it1 = std::find(it0, itend, ',');
items_.emplace_back(it0, it1);
it0 = it1;
} while (it1 != itend && ++it0 != itbeg);
itemsSync_ = true;
}
void IniDoc::merge(const IniDoc &toMerge, DuplicateAction daction) {
const auto& rdoc = toMerge.doc();
for (const auto& rs: rdoc) {
if (!doc_.count(rs.first) || daction == DuplicateAction::overwriteSection) {
doc_[rs.first] = rs.second;
} else {
auto& s = doc_[rs.first];
for (const auto& rp: rs.second) {
if (!s.count(rp.first) || daction == DuplicateAction::overwriteProperty) {
s[rp.first] = rp.second;
} else if (daction == DuplicateAction::combineProperty) {
auto& is = s[rp.first].items();
for (const auto& ri: rp.second.items()) {
is.push_back(ri);
}
} else if (daction == DuplicateAction::fail) {
throw std::runtime_error(std::string{
"duplicated property: \""}.append(rp.first).append("\" in section \"").append(rp.first));
}
}
}
}
}
std::istream& read(std::istream& is, IniDoc& inidoc) {
auto& doc = inidoc.doc();
IniDoc::SectionName sname{};
doc[sname];
for (std::string line; std::getline(is, line);) {
// trim line
trim(line);
// skip empty and comments
if (line.empty() || line[0] == ';') {
continue;
}
if (*line.cbegin() == '[') {
if (*line.crbegin() != ']') {
throw InvalidIniDoc(std::string("Invalid line: \"").append(line).append("\""));
}
doc[sname = line.substr(1, line.size() - 2)];
continue;
}
doc[sname].insert(parseParameter(line));
}
return is;
}
std::ostream& write(std::ostream& os, const IniDoc& inidoc) {
const auto& doc = inidoc.doc();
if (doc.empty()) {
return os;
}
auto it = doc.cbegin();
if (it->first.empty()) {
// default section
writeSection(os, it->second);
++it;
}
std::for_each(it, doc.cend(), [&os](const IniDoc::Doc::value_type& sectionPair) {
os << "[" << sectionPair.first << "]" << std::endl;
writeSection(os, sectionPair.second);
});
return os;
}
} // Utility<|fim▁end|>
| |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>//!
//! If you're using the service, you're probably looking for [DirectoryServiceClient](struct.DirectoryServiceClient.html) and [DirectoryService](trait.DirectoryService.html).
extern crate hyper;
extern crate rusoto_core;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
mod generated;
mod custom;
pub use generated::*;
pub use custom::*;<|fim▁end|>
|
//! AWS Directory Service
|
<|file_name|>test_sampleParser.py<|end_file_name|><|fim▁begin|># test_sampleParser.py
import os
from ..sampleParser import SampleParser
class TestSampleParser:
def setup(self):
self.folderName = os.path.join('.', 'tests', 'Export')
self.parser = SampleParser(self.folderName)
def test_getDirectoryFiles(self):
files = self._obtainDirectory()
assert len(files) > 0
def test_storeFileNamesByPatternInDictionary(self):
files = self._obtainDirectory()
assert len(files) > 0
for _file in files:
self.parser.storeFileNamesByPatternInDictionary(_file)
sampleDictionary = self.parser.getSampleDictionary()<|fim▁hole|> assert len(sample) == 2
def test_readFileIntoArray(self):
files = self._obtainDirectory()
assert len(files) > 0
assert len(self.parser.readFileIntoArray(files[0])) > 0
def _obtainDirectory(self):
return self.parser.getDirectoryFiles()<|fim▁end|>
|
assert len(sampleDictionary) == 4
print ("SampleParser: ", sampleDictionary)
# each item in the dictionary should have two samples for each sample type
for sample in sampleDictionary.items():
|
<|file_name|>MeasureReportBuilder.java<|end_file_name|><|fim▁begin|>package ca.uhn.fhir.cql.dstu3.builder;
/*-
* #%L
* HAPI FHIR JPA Server - Clinical Quality Language
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.cql.common.builder.BaseBuilder;
import org.hl7.fhir.dstu3.model.MeasureReport;
import org.hl7.fhir.dstu3.model.Period;
import org.hl7.fhir.dstu3.model.Reference;
import org.hl7.fhir.exceptions.FHIRException;
import org.opencds.cqf.cql.engine.runtime.Interval;
import java.util.Date;
public class MeasureReportBuilder extends BaseBuilder<MeasureReport> {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(MeasureReportBuilder.class);
public MeasureReportBuilder() {
super(new MeasureReport());
}
public MeasureReportBuilder buildStatus(String status) {
try {
this.complexProperty.setStatus(MeasureReport.MeasureReportStatus.fromCode(status));
} catch (FHIRException e) {
ourLog.warn("Exception caught while attempting to set Status to '" + status + "', assuming status COMPLETE!"
+ System.lineSeparator() + e.getMessage());
this.complexProperty.setStatus(MeasureReport.MeasureReportStatus.COMPLETE);
}
return this;
}
public MeasureReportBuilder buildType(MeasureReport.MeasureReportType type) {
this.complexProperty.setType(type);<|fim▁hole|>
public MeasureReportBuilder buildMeasureReference(String measureRef) {
this.complexProperty.setMeasure(new Reference(measureRef));
return this;
}
public MeasureReportBuilder buildPatientReference(String patientRef) {
this.complexProperty.setPatient(new Reference(patientRef));
return this;
}
public MeasureReportBuilder buildPeriod(Interval period) {
this.complexProperty.setPeriod(new Period().setStart((Date) period.getStart()).setEnd((Date) period.getEnd()));
return this;
}
}<|fim▁end|>
|
return this;
}
|
<|file_name|>demo.js<|end_file_name|><|fim▁begin|>'use strict';
// There's an example D script here to showcase a "slow" handler where it's
// wildcard'd by the route name. In "real life" you'd probably start with a
// d script that breaks down the route -start and -done, and then you'd want
// to see which handler is taking longest from there.
//
// $ node demo.js
// $ curl localhost:9080/foo/bar
// $ sudo ./handler-timing.d
// ^C
//
// handler-6
// value ------------- Distribution ------------- count
// -1 | 0
// 0 |@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 10
// 1 | 0
//
// parseAccept
// value ------------- Distribution ------------- count
// -1 | 0
// 0 |@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 10
// 1 | 0
//
// parseAuthorization
// value ------------- Distribution ------------- count
// -1 | 0
// 0 |@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 10
// 1 | 0
//
// parseDate
// value ------------- Distribution ------------- count
// -1 | 0
// 0 |@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 10
// 1 | 0
//
// parseQueryString
// value ------------- Distribution ------------- count
// -1 | 0
// 0 |@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 10
// 1 | 0
//
// parseUrlEncodedBody
// value ------------- Distribution ------------- count
// -1 | 0
// 0 |@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 10
// 1 | 0
//
// sendResult
// value ------------- Distribution ------------- count
// 1 | 0
// 2 |@@@@ 1
// 4 | 0
// 8 |@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 9
// 16 | 0
//
// slowHandler
// value ------------- Distribution ------------- count
// 64 | 0
// 128 |@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 9
// 256 |@@@@ 1
// 512 | 0
//
// getfoo
// value ------------- Distribution ------------- count
// 64 | 0
// 128 |@@@@ 1<|fim▁hole|>// 512 | 0
var restify = require('../../lib');
var Logger = require('bunyan');
///--- Globals
var NAME = 'exampleapp';
///--- Mainline
var log = new Logger({
name: NAME,
level: 'trace',
service: NAME,
serializers: restify.bunyan.serializers
});
var server = restify.createServer({
name: NAME,
Logger: log,
formatters: {
'application/foo': function (req, res, body, cb) {
if (body instanceof Error) {
body = body.stack;
} else if (Buffer.isBuffer(body)) {
body = body.toString('base64');
} else {
switch (typeof body) {
case 'boolean':
case 'number':
case 'string':
body = body.toString();
break;
case 'undefined':
body = '';
break;
default:
body = body === null ? '' :
'Demoing application/foo formatter; ' +
JSON.stringify(body);
break;
}
}
return cb(null, body);
}
}
});
server.use(restify.acceptParser(server.acceptable));
server.use(restify.authorizationParser());
server.use(restify.dateParser());
server.use(restify.queryParser());
server.use(restify.urlEncodedBodyParser());
server.use(function slowHandler(req, res, next) {
setTimeout(function () {
next();
}, 250);
});
server.get({url: '/foo/:id', name: 'GetFoo'}, function (req, res, next) {
next();
}, function sendResult(req, res, next) {
res.contentType = 'application/foo';
res.send({
hello: req.params.id
});
next();
});
server.head('/foo/:id', function (req, res, next) {
res.send({
hello: req.params.id
});
next();
});
server.put('/foo/:id', function (req, res, next) {
res.send({
hello: req.params.id
});
next();
});
server.post('/foo/:id', function (req, res, next) {
res.json(201, req.params);
next();
});
server.del('/foo/:id', function (req, res, next) {
res.send(204);
next();
});
server.on('after', function (req, res, name) {
req.log.info('%s just finished: %d.', name, res.code);
});
server.on('NotFound', function (req, res) {
res.send(404, req.url + ' was not found');
});
server.listen(9080, function () {
log.info('listening: %s', server.url);
});<|fim▁end|>
|
// 256 |@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 9
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>/**
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|> *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export * from './lib/zipalign';
export * from './lib/zipfile';
// import {zipalign} from './lib/zipalign'
// import {ZipFile} from './lib/zipfile'
// zipalign('/Users/andreban/Downloads/vl805_update_0137a8.zip', 'app-release-aligned-apk.zip')
// .then(() => ZipFile.open('app-release-aligned-apk.zip'))
// .then(() => console.log('done'))
// .catch((e) => console.error('Failed', e));<|fim▁end|>
| |
<|file_name|>PatientInfoRepository.java<|end_file_name|><|fim▁begin|>package org.cardiacatlas.xpacs.repository;
import org.cardiacatlas.xpacs.domain.PatientInfo;
<|fim▁hole|>/**
* Spring Data JPA repository for the PatientInfo entity.
*/
@SuppressWarnings("unused")
public interface PatientInfoRepository extends JpaRepository<PatientInfo,Long> {
}<|fim▁end|>
|
import org.springframework.data.jpa.repository.*;
import java.util.List;
|
<|file_name|>hw5_task1.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
u"""
Задание 1: классный Человек.
УСЛОВИЕ:
Реализовать класс Person, который отображает запись в книге контактов.
Класс имеет 4 атрибута:
- surname - строка - фамилия контакта (обязательный)
- first_name - строка - имя контакта (обязательный)
- nickname - строка - псевдоним (опциональный)
- birth_date - объект datetime.date (обязательный)
Каждый вызов класса должен создавать экземпляр (инстанс) класса с указанными
атрибутами.
Также класс имеет 2 метода:
- get_age() - считает возраст контакта в полных годах на дату вызова и
возвращает строку вида: "27";
- get_fullname() - возвращает строку, отражающую полное имя (фамилия + имя)
контакта;
"""
__author__ = "Sergei Shybkoi"
__copyright__ = "Copyright 2014, The Homework Project"
__email__ = "[email protected]"
__status__ = "Production"
__date__ = "2014-11-18"
import datetime
class Person(object):
u"""Класс Person"""
def __init__(self, surname, first_name, birth_date, nickname=None):
u"""Инишн класса"""
try:
var_date = datetime.datetime.strptime(birth_date, "%Y-%m-%d")
res_date = datetime.date(var_date.year,
var_date.month, var_date.day)
except TypeError:
print "Incorrect type of birthday date!"
res_date = None
except ValueError:
print "Wrong value of birthday date!"
res_date = None
self.surname = surname
self.first_name = first_name
self.birth_date = res_date
if nickname is not None:
<|fim▁hole|>
def get_age(self):
u"""Метод класса подсчитывает и выводит количество полных лет"""
if self.birth_date is not None:
today_date = datetime.date.today()
delta = today_date.year - self.birth_date.year
if today_date.month <= self.birth_date.month \
and today_date.day < self.birth_date.day:
delta -= 1
print "Age:", delta
return str(delta)
else:
print "No correct data about person's birthday."
return "0"
def get_fullname(self):
u"""Метод выводит и возвращаем полное имя экземпляра класса Person"""
print self.surname, self.first_name
return self.surname + " " + self.first_name<|fim▁end|>
|
self.nickname = nickname
|
<|file_name|>precision_recall.py<|end_file_name|><|fim▁begin|>import matplotlib.pyplot as plt
import numpy as np
import sklearn
from sklearn import svm, datasets
from sklearn.metrics import precision_recall_curve
from sklearn.metrics import average_precision_score
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import label_binarize
from sklearn.multiclass import OneVsRestClassifier
import pdb
from sklearn import ensemble
from sklearn import neighbors
from sklearn import tree
#import data
data_path = "joined_matrix_split.txt"
mat = np.loadtxt(data_path)
features = mat[50000:60000, 0:40]
features = sklearn.preprocessing.scale(features, axis=1)
output_raw = mat[50000:60000, -1]
output = sklearn.preprocessing.binarize(output_raw)
# Split into training and test
random_state = np.random.RandomState(0)
X_train, X_test, y_train, y_test = train_test_split(features, output, test_size=.5,
random_state=random_state)
n_classes = 1
#run classifier
classifier = OneVsRestClassifier(svm.SVC(kernel='linear', probability=True,
random_state=random_state))
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision = dict()
recall = dict()
average_precision = dict()
precision[0], recall[0], _ = precision_recall_curve(y_test, y_score)
average_precision[0] = average_precision_score(y_test, y_score)
# now do rbf kernel
classifier = OneVsRestClassifier(svm.SVC(kernel='rbf', probability=True, random_state=random_state))
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[1], recall[1], _ = precision_recall_curve(y_test, y_score)
average_precision[1] = average_precision_score(y_test, y_score)
# now do adaboost
model = ensemble.AdaBoostClassifier()
classifier = OneVsRestClassifier(model)
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[2], recall[2], _ = precision_recall_curve(y_test, y_score)
average_precision[2] = average_precision_score(y_test, y_score)
"""
pdb.set_trace()
# now do kNN classifier
model = neighbors.KNeighborsClassifier()
classifier = OneVsRestClassifier(model)
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[3], recall[3], _ = precision_recall_curve(y_test, y_score)
average_precision[3] = average_precision_score(y_test, y_score)
# now do random forrest
model = ensemble.RandomForestClassifier()
classifier = OneVsRestClassifier(model)
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[4], recall[4], _ = precision_recall_curve(y_test, y_score)
average_precision[4] = average_precision_score(y_test, y_score)
# now do decision trees
model = tree.DecisionTreeClassifier()
classifier = OneVsRestClassifier(model)
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[5], recall[5], _ = precision_recall_curve(y_test, y_score)
average_precision[5] = average_precision_score(y_test, y_score)
# Plot Precision-Recall curve
#plt.clf()
#plt.plot(recall[0], precision[0], label='Precision-Recall curve')
#plt.xlabel('Recall')
#plt.ylabel('Precision')
#plt.ylim([0.0, 1.05])
#plt.xlim([0.0, 1.0])
#plt.title('Linear SVC Precision vs. Recall: AUC={0:0.2f}'.format(average_precision[0]))
#plt.legend(loc="lower left")
#plt.show()<|fim▁hole|>kernel = {}
kernel[0] = "linear SVC"
kernel[1] = "rbf SVC"
kernel[2] = "AdaBoost classifier"
#kernel[3] = "k-nearest-neighbors classifier"
#kernel[4] = "random forest classifier"
#kernel[5] = "decision tree classifier"
# Plot Precision-Recall curve for each class
plt.clf()
for i in range(3):
plt.plot(recall[i], precision[i],
label='Precision-recall curve of {0} (area = {1:0.2f})'
''.format(kernel[i], average_precision[i]))
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.title('Classification on aggregate crime; precision vs. recall')
plt.legend(loc="lower right")
plt.show()<|fim▁end|>
|
"""
|
<|file_name|>issue-57271-lib.rs<|end_file_name|><|fim▁begin|>#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum BaseType {
Byte,
Char,
Double,
Float,
Int,
Long,<|fim▁hole|> Short,
Boolean,
}<|fim▁end|>
| |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""<|fim▁hole|>
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bball_intel.settings.base")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = Cling(get_wsgi_application())<|fim▁end|>
|
WSGI config for bball_intel project.
|
<|file_name|>JSONToken.java<|end_file_name|><|fim▁begin|>/*
* Copyright 1999-2101 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");<|fim▁hole|> * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.fastjson.parser;
/**
* @author wenshao[[email protected]]
*/
public class JSONToken {
//
public final static int ERROR = 1;
//
public final static int LITERAL_INT = 2;
//
public final static int LITERAL_FLOAT = 3;
//
public final static int LITERAL_STRING = 4;
//
public final static int LITERAL_ISO8601_DATE = 5;
public final static int TRUE = 6;
//
public final static int FALSE = 7;
//
public final static int NULL = 8;
//
public final static int NEW = 9;
//
public final static int LPAREN = 10; // ("("),
//
public final static int RPAREN = 11; // (")"),
//
public final static int LBRACE = 12; // ("{"),
//
public final static int RBRACE = 13; // ("}"),
//
public final static int LBRACKET = 14; // ("["),
//
public final static int RBRACKET = 15; // ("]"),
//
public final static int COMMA = 16; // (","),
//
public final static int COLON = 17; // (":"),
//
public final static int IDENTIFIER = 18;
//
public final static int FIELD_NAME = 19;
public final static int EOF = 20;
public final static int SET = 21;
public final static int TREE_SET = 22;
public final static int UNDEFINED = 23; // undefined
public static String name(int value) {
switch (value) {
case ERROR:
return "error";
case LITERAL_INT:
return "int";
case LITERAL_FLOAT:
return "float";
case LITERAL_STRING:
return "string";
case LITERAL_ISO8601_DATE:
return "iso8601";
case TRUE:
return "true";
case FALSE:
return "false";
case NULL:
return "null";
case NEW:
return "new";
case LPAREN:
return "(";
case RPAREN:
return ")";
case LBRACE:
return "{";
case RBRACE:
return "}";
case LBRACKET:
return "[";
case RBRACKET:
return "]";
case COMMA:
return ",";
case COLON:
return ":";
case IDENTIFIER:
return "ident";
case FIELD_NAME:
return "fieldName";
case EOF:
return "EOF";
case SET:
return "Set";
case TREE_SET:
return "TreeSet";
case UNDEFINED:
return "undefined";
default:
return "Unkown";
}
}
}<|fim▁end|>
|
* you may not use this file except in compliance with the License.
|
<|file_name|>server.js<|end_file_name|><|fim▁begin|>var express = require('express');
var app = express();
var path = require('path');
var session = require('express-session');
var bodyParser = require('body-parser')
var fs = require('fs');
app.use(express.static(path.join(__dirname, 'public')));
app.use(bodyParser.urlencoded({extended: true}));
app.use(bodyParser.json());
app.use(session({
secret: 'angular_tutorial',
resave: true,
saveUninitialized: true,
}));
var Db = require('mongodb').Db;
var Connection = require('mongodb').Connection;
var Server = require('mongodb').Server;
var ObjectID = require('mongodb').ObjectID;
var db = new Db('tutor', new Server("localhost", 27017, {safe: true}, {auto_reconnect: true}, {}));
db.open(function(){
console.log("mongo db is opened!");
db.collection('notes', function(error, notes) {
db.notes = notes;
});
db.collection('sections', function(error, sections) {
db.sections = sections;
});
});
app.get("/notes", function(req,res) {
db.notes.find(req.query).toArray(function(err, items) {
res.send(items);
});
});
app.post("/notes", function(req,res) {
db.notes.insert(req.body);
res.end();
});
app.get("/sections", function(req,res) {<|fim▁hole|> db.sections.find(req.query).toArray(function(err, items) {
res.send(items);
});
});
app.get("/checkUser", function(req,res) {
if (req.query.user.length>2) {
res.send(true);
} else {
res.send(false);
}
});
app.post("/sections/replace", function(req,resp) {
// do not clear the list
if (req.body.length==0) {
resp.end();
}
// this should be used only for reordering
db.sections.remove({}, function(err, res) {
if (err) console.log(err);
db.sections.insert(req.body, function(err, res) {
if (err) console.log("err after insert",err);
resp.end();
});
});
});
app.listen(3000);<|fim▁end|>
| |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create a Python package of the Linux guest environment."""
import glob
import sys
<|fim▁hole|> install_requires += ['boto']
if sys.version_info >= (3, 7):
install_requires += ['distro']
setuptools.setup(
author='Google Compute Engine Team',
author_email='[email protected]',
description='Google Compute Engine',
include_package_data=True,
install_requires=install_requires,
license='Apache Software License',
long_description='Google Compute Engine guest environment.',
name='google-compute-engine',
packages=setuptools.find_packages(),
url='https://github.com/GoogleCloudPlatform/compute-image-packages',
version='20191112.0',
# Entry points create scripts in /usr/bin that call a function.
entry_points={
'console_scripts': [
'google_accounts_daemon=google_compute_engine.accounts.accounts_daemon:main',
'google_clock_skew_daemon=google_compute_engine.clock_skew.clock_skew_daemon:main',
'google_instance_setup=google_compute_engine.instance_setup.instance_setup:main',
'google_network_daemon=google_compute_engine.networking.network_daemon:main',
'google_metadata_script_runner=google_compute_engine.metadata_scripts.script_manager:main',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Installation/Setup',
'Topic :: System :: Systems Administration',
],
)<|fim▁end|>
|
import setuptools
install_requires = ['setuptools']
if sys.version_info < (3, 0):
|
<|file_name|>validation_utils.py<|end_file_name|><|fim▁begin|>import gzip
import logging
import os
import shutil
import uuid
from PIL import Image
from zipfile import ZipFile
import pandas as pd
import numpy as np
import pyresample as pr
from trollvalidation.validations import configuration as cfg
LOG = logging.getLogger(__name__)
# logging.basicConfig(level=logging.DEBUG,
# format='[%(levelname)s: %(asctime)s: %(name)s] %(message)s',
# datefmt='%Y-%m-%d %H:%M:%S')
class TmpFiles(object):
"""docstring for TmpFiles"""
def __init__(self, files=[]):
super(TmpFiles, self).__init__()
if isinstance(files, list):
self.tmpfiles = files
else:
self.tmpfiles = [files]
def append(self, files):
if isinstance(files, list):
self.tmpfiles += files
else:
self.tmpfiles.append(files)
def cleanup(self):
map(os.remove, self.files)
def cleanup(_, tmp_files):
# Delete files first and the remove directories
for tmp_file in tmp_files:
if os.path.isfile(tmp_file):
LOG.info("Cleaning up... {0}".format(tmp_file))
os.remove(tmp_file)
for tmp_folder in tmp_files:
if os.path.exists(tmp_folder):
LOG.info("Cleaning up... {0}".format(tmp_folder))
shutil.rmtree(tmp_folder)
def write_to_csv(results, description_str=''):
# prevent empty results "None" blocking the writing of CSV files
results = filter(lambda l: l, results)
if results:
if cfg.CSV_HEADER:
df = pd.DataFrame(results, index=zip(*results)[0],
columns=cfg.CSV_HEADER)
else:
df = pd.DataFrame(results, index=zip(*results)[0])
df.to_csv(os.path.join(cfg.OUTPUT_DIR, '{0}_results.csv'.format(<|fim▁hole|> description_str)))
def get_area_def(file_handle):
"""
This function is a utility function to read the area definition
of corresponding to an ice concentration product.
:param file_handle: str
Path to an ice concentration product in NetCDF product.
:return: AreaDefinition
The parsed area definition corresponding to the projection
and area extent of the product.
"""
file_name = os.path.basename(file_handle)
if 'NH25kmEASE2' in file_name:
cfg_id = 'EASE2_NH'
elif 'SH25kmEASE2' in file_name:
cfg_id = 'EASE2_SH'
elif 'nh_ease-125' in file_name:
cfg_id = 'EASE_NH'
elif 'sh_ease-125' in file_name:
cfg_id = 'EASE_SH'
elif 'nh_ease2-250' in file_name:
cfg_id = 'EASE2_NH'
elif 'sh_ease2-250' in file_name:
cfg_id = 'EASE2_SH'
elif 'nic_weekly_' in file_name:
cfg_id = 'NIC_EASE_NH'
elif 'nh_polstere-100' in file_name:
cfg_id = 'OSISAF_NH'
elif 'sh_polstere-100' in file_name:
cfg_id = 'OSISAF_SH'
# TODO: Add this case as soon as I have access to the dataset!
# elif 'nic_weekly_' in file_name:
# cfg_id = 'NIC_EASE_SH'
else:
raise ValueError('No matching region for file {0}'.format(
file_handle))
return pr.utils.parse_area_file('etc/areas.cfg', cfg_id)[0]
def uncompress(compressed_file, target=cfg.TMP_DIR):
"""
This function is a utility function to uncompress NetCDF files in
case they are given that way.
The gzipped original is removed after decompression.
:param product_file: str
Path to a zipped ice concentration product in NetCDF product.
:return: str
The path of an uncompressed NetCDF file.
"""
unpacked_filename, extension = os.path.splitext(compressed_file)
if extension == '.gz':
LOG.info('Unpacking {0}'.format(compressed_file))
if not os.path.isfile(unpacked_filename):
with gzip.open(compressed_file, 'rb') as packed_file:
with open(unpacked_filename, 'wb') as unpacked_file:
unpacked_file.write(packed_file.read())
# os.remove(compressed_file)
return unpacked_filename, []
elif extension == '.zip':
LOG.info('Unpacking {0}'.format(compressed_file))
tmp_id = str(uuid.uuid4())
temporary_files_folder = os.path.join(target, tmp_id)
with open(compressed_file, 'rb') as packed_file:
with ZipFile(packed_file) as z:
for name in z.namelist():
if name.endswith('.shp'):
unpacked_shapefile = os.path.join(
temporary_files_folder, name)
try:
z.extract(name, temporary_files_folder)
except Exception, e:
LOG.exception(e)
LOG.error('Could not uncompress {0}'.format(name))
return unpacked_shapefile, [temporary_files_folder]
else:
return compressed_file, []
def dump_data(ref_time, eval_data, orig_data, orig_file):
hemisphere = 'NH'
if '_sh_' in os.path.basename(orig_file) or \
'_SH_' in os.path.basename(orig_file):
hemisphere = 'SH'
out_path = os.path.join(cfg.OUTPUT_DIR, ref_time)
if not os.path.exists(out_path):
os.makedirs(out_path)
eval_data_img = Image.fromarray(eval_data.astype(np.uint8))
fname = os.path.join(out_path, '{0}_{1}_eval_data.bmp'.format(
cfg.VALIDATION_ID, hemisphere))
eval_data_img.save(fname)
eval_data.dump(fname.replace('.bmp', '.pkl'))
orig_data_img = Image.fromarray(orig_data.astype(np.uint8))
fname = os.path.join(out_path, '{0}_{1}_orig_data.bmp'.format(
cfg.VALIDATION_ID, hemisphere))
orig_data_img.save(fname)
orig_data.dump(fname.replace('.bmp', '.pkl'))<|fim▁end|>
| |
<|file_name|>Six_zebra_models8148.py<|end_file_name|><|fim▁begin|>import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "particle_0 geometry" not in marker_sets:
s=new_marker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geometry"]
mark=s.place_marker((4135.63, -1070.6, 11660.5), (0.7, 0.7, 0.7), 507.685)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((4398.66, -1885.45, 11556), (0.7, 0.7, 0.7), 479.978)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((4346, -175.265, 10569.5), (0.7, 0.7, 0.7), 681.834)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((4223.5, 1861.87, 9387.15), (0.7, 0.7, 0.7), 522.532)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((4224.1, 2470.58, 8995.04), (0, 1, 0), 751.925)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((2596.13, 1379.53, 8172.18), (0.7, 0.7, 0.7), 437.001)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((3316.53, 2240.06, 6607.33), (0.7, 0.7, 0.7), 710.767)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((2583.82, 1751.55, 5136.33), (0.7, 0.7, 0.7), 762.077)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((3387, 1724.66, 3770.93), (0.7, 0.7, 0.7), 726.799)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((4317.71, 1934.83, 2179.92), (0.7, 0.7, 0.7), 885.508)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((5963.21, 1383.66, 2065.49), (0.7, 0.7, 0.7), 778.489)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((6342.36, -373.552, 922.922), (0.7, 0.7, 0.7), 790.333)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((6711.54, -2098.47, -184.939), (0.7, 0.7, 0.7), 707.721)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((6382.88, -2315.65, 1390.69), (0.7, 0.7, 0.7), 651.166)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((6780.75, -1179.03, 265.654), (0.7, 0.7, 0.7), 708.61)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((6674.81, 354.788, 232.452), (0.7, 0.7, 0.7), 490.595)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((6063.62, 1175.64, 1159.87), (0.7, 0.7, 0.7), 591.565)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((5400.78, 2218.88, 2007.65), (0.7, 0.7, 0.7), 581.287)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((3951.99, 1285, 2398.29), (0.7, 0.7, 0.7), 789.529)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')
marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((3014.12, 2460.53, 2339.28), (0.7, 0.7, 0.7), 623.587)
if "particle_20 geometry" not in marker_sets:
s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((2097.93, 3863.9, 1690.04), (0.7, 0.7, 0.7), 1083.56)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((882.166, 4466.2, 643.004), (0.7, 0.7, 0.7), 504.258)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((1847.75, 4807.14, 1639.62), (0.7, 0.7, 0.7), 805.519)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((3845.84, 5275.07, 2103.55), (0.7, 0.7, 0.7), 631.708)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((5900.88, 5746.76, 1743.52), (0.7, 0.7, 0.7), 805.942)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((6891.95, 5990.09, 1469.8), (1, 0.7, 0), 672.697)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((7156.82, 7390.62, 3791.02), (0.7, 0.7, 0.7), 797.863)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((7863.39, 8865, 4672.27), (1, 0.7, 0), 735.682)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((6883.27, 9508.46, 5162.61), (0.7, 0.7, 0.7), 602.14)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((5306.04, 11132.3, 5692.61), (0.7, 0.7, 0.7), 954.796)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((5604.24, 10591.3, 5499.9), (0.7, 0.7, 0.7), 1021.88)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((6435.75, 11209.8, 6255.67), (0.7, 0.7, 0.7), 909.323)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
s= marker_sets["particle_32 geometry"]
mark=s.place_marker((6546.22, 12724.7, 7940.81), (0.7, 0.7, 0.7), 621.049)
if "particle_33 geometry" not in marker_sets:
s=new_marker_set('particle_33 geometry')<|fim▁hole|>if "particle_34 geometry" not in marker_sets:
s=new_marker_set('particle_34 geometry')
marker_sets["particle_34 geometry"]=s
s= marker_sets["particle_34 geometry"]
mark=s.place_marker((6500.01, 11137.7, 10309.1), (0.7, 0.7, 0.7), 890.246)
if "particle_35 geometry" not in marker_sets:
s=new_marker_set('particle_35 geometry')
marker_sets["particle_35 geometry"]=s
s= marker_sets["particle_35 geometry"]
mark=s.place_marker((5691.31, 11026.7, 11880), (0.7, 0.7, 0.7), 671.216)
if "particle_36 geometry" not in marker_sets:
s=new_marker_set('particle_36 geometry')
marker_sets["particle_36 geometry"]=s
s= marker_sets["particle_36 geometry"]
mark=s.place_marker((4142.55, 10951.5, 12569.6), (0.7, 0.7, 0.7), 662.672)
if "particle_37 geometry" not in marker_sets:
s=new_marker_set('particle_37 geometry')
marker_sets["particle_37 geometry"]=s
s= marker_sets["particle_37 geometry"]
mark=s.place_marker((3567.94, 11207.3, 11077.2), (0.7, 0.7, 0.7), 646.682)
if "particle_38 geometry" not in marker_sets:
s=new_marker_set('particle_38 geometry')
marker_sets["particle_38 geometry"]=s
s= marker_sets["particle_38 geometry"]
mark=s.place_marker((4422.45, 12412.7, 10636.6), (0.7, 0.7, 0.7), 769.945)
if "particle_39 geometry" not in marker_sets:
s=new_marker_set('particle_39 geometry')
marker_sets["particle_39 geometry"]=s
s= marker_sets["particle_39 geometry"]
mark=s.place_marker((5975.48, 11714.3, 9585.63), (0.7, 0.7, 0.7), 606.92)
if "particle_40 geometry" not in marker_sets:
s=new_marker_set('particle_40 geometry')
marker_sets["particle_40 geometry"]=s
s= marker_sets["particle_40 geometry"]
mark=s.place_marker((6803.89, 12013.8, 10353.8), (0.7, 0.7, 0.7), 622.571)
if "particle_41 geometry" not in marker_sets:
s=new_marker_set('particle_41 geometry')
marker_sets["particle_41 geometry"]=s
s= marker_sets["particle_41 geometry"]
mark=s.place_marker((6510.18, 11249.9, 9360.04), (0.7, 0.7, 0.7), 466.865)
if "particle_42 geometry" not in marker_sets:
s=new_marker_set('particle_42 geometry')
marker_sets["particle_42 geometry"]=s
s= marker_sets["particle_42 geometry"]
mark=s.place_marker((6262.14, 10457.4, 10043.2), (0.7, 0.7, 0.7), 682.933)
if "particle_43 geometry" not in marker_sets:
s=new_marker_set('particle_43 geometry')
marker_sets["particle_43 geometry"]=s
s= marker_sets["particle_43 geometry"]
mark=s.place_marker((6527.49, 11190.3, 9628), (0.7, 0.7, 0.7), 809.326)
if "particle_44 geometry" not in marker_sets:
s=new_marker_set('particle_44 geometry')
marker_sets["particle_44 geometry"]=s
s= marker_sets["particle_44 geometry"]
mark=s.place_marker((6882.44, 11606.9, 7879.59), (0.7, 0.7, 0.7), 796.72)
if "particle_45 geometry" not in marker_sets:
s=new_marker_set('particle_45 geometry')
marker_sets["particle_45 geometry"]=s
s= marker_sets["particle_45 geometry"]
mark=s.place_marker((8233.03, 9644.28, 6270.22), (0.7, 0.7, 0.7), 870.026)
if "particle_46 geometry" not in marker_sets:
s=new_marker_set('particle_46 geometry')
marker_sets["particle_46 geometry"]=s
s= marker_sets["particle_46 geometry"]
mark=s.place_marker((9834.19, 8665.84, 6262.73), (0.7, 0.7, 0.7), 909.577)
if "particle_47 geometry" not in marker_sets:
s=new_marker_set('particle_47 geometry')
marker_sets["particle_47 geometry"]=s
s= marker_sets["particle_47 geometry"]
mark=s.place_marker((10542.8, 7972.79, 6841.18), (0, 1, 0), 500.536)
if "particle_48 geometry" not in marker_sets:
s=new_marker_set('particle_48 geometry')
marker_sets["particle_48 geometry"]=s
s= marker_sets["particle_48 geometry"]
mark=s.place_marker((11998.3, 8255.99, 8163.14), (0.7, 0.7, 0.7), 725.276)
if "particle_49 geometry" not in marker_sets:
s=new_marker_set('particle_49 geometry')
marker_sets["particle_49 geometry"]=s
s= marker_sets["particle_49 geometry"]
mark=s.place_marker((14091.2, 9372.32, 9368.35), (0.7, 0.7, 0.7), 570.331)
if "particle_50 geometry" not in marker_sets:
s=new_marker_set('particle_50 geometry')
marker_sets["particle_50 geometry"]=s
s= marker_sets["particle_50 geometry"]
mark=s.place_marker((13051.9, 10596.5, 10037), (0.7, 0.7, 0.7), 492.203)
if "particle_51 geometry" not in marker_sets:
s=new_marker_set('particle_51 geometry')
marker_sets["particle_51 geometry"]=s
s= marker_sets["particle_51 geometry"]
mark=s.place_marker((11660.7, 11231.3, 7572.41), (0, 1, 0), 547.7)
if "particle_52 geometry" not in marker_sets:
s=new_marker_set('particle_52 geometry')
marker_sets["particle_52 geometry"]=s
s= marker_sets["particle_52 geometry"]
mark=s.place_marker((11147.2, 10890.1, 8058.48), (0.7, 0.7, 0.7), 581.921)
if "particle_53 geometry" not in marker_sets:
s=new_marker_set('particle_53 geometry')
marker_sets["particle_53 geometry"]=s
s= marker_sets["particle_53 geometry"]
mark=s.place_marker((10302, 11401.5, 9681.69), (0.7, 0.7, 0.7), 555.314)
if "particle_54 geometry" not in marker_sets:
s=new_marker_set('particle_54 geometry')
marker_sets["particle_54 geometry"]=s
s= marker_sets["particle_54 geometry"]
mark=s.place_marker((9427.83, 11028.2, 10913.8), (0.7, 0.7, 0.7), 404.219)
if "particle_55 geometry" not in marker_sets:
s=new_marker_set('particle_55 geometry')
marker_sets["particle_55 geometry"]=s
s= marker_sets["particle_55 geometry"]
mark=s.place_marker((8414.68, 9534.64, 10649.3), (0.7, 0.7, 0.7), 764.234)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])<|fim▁end|>
|
marker_sets["particle_33 geometry"]=s
s= marker_sets["particle_33 geometry"]
mark=s.place_marker((6903.16, 12091.3, 9204.94), (0.7, 0.7, 0.7), 525.154)
|
<|file_name|>PlaylistsNav.tsx<|end_file_name|><|fim▁begin|>/* eslint-disable jsx-a11y/no-autofocus */
import * as electron from 'electron';
import * as React from 'react';
import * as Icon from 'react-fontawesome';
import * as PlaylistsActions from '../../actions/PlaylistsActions';
import PlaylistsNavLink from '../PlaylistsNavLink/PlaylistsNavLink';
import { PlaylistModel } from '../../../shared/types/interfaces';
import * as styles from './PlaylistsNav.css';
const { Menu } = electron.remote;
interface Props {
playlists: PlaylistModel[];
}
interface State {
renamed: string | null;
}
class PlaylistsNav extends React.Component<Props, State> {
constructor(props: Props) {
super(props);
this.state = {
renamed: null // the playlist being renamed if there's one
};
this.blur = this.blur.bind(this);
this.focus = this.focus.bind(this);
this.keyDown = this.keyDown.bind(this);
this.showContextMenu = this.showContextMenu.bind(this);
this.createPlaylist = this.createPlaylist.bind(this);
}
showContextMenu(playlistId: string) {
const template: electron.MenuItemConstructorOptions[] = [
{
label: 'Rename',
click: () => {
this.setState({ renamed: playlistId });
}
},
{
label: 'Delete',
click: async () => {
await PlaylistsActions.remove(playlistId);
}
},
{
type: 'separator'
},
{
label: 'Duplicate',
click: async () => {
await PlaylistsActions.duplicate(playlistId);
}
},
{
type: 'separator'
},
{
label: 'Export',
click: async () => {
await PlaylistsActions.exportToM3u(playlistId);
}
}
];<|fim▁hole|> }
async createPlaylist() {
// Todo 'new playlist 1', 'new playlist 2' ...
await PlaylistsActions.create('New playlist', [], false, true);
}
async rename(_id: string, name: string) {
await PlaylistsActions.rename(_id, name);
}
async keyDown(e: React.KeyboardEvent<HTMLInputElement>) {
e.persist();
switch (e.nativeEvent.code) {
case 'Enter': {
// Enter
if (this.state.renamed && e.currentTarget) {
await this.rename(this.state.renamed, e.currentTarget.value);
this.setState({ renamed: null });
}
break;
}
case 'Escape': {
// Escape
this.setState({ renamed: null });
break;
}
default: {
break;
}
}
}
async blur(e: React.FocusEvent<HTMLInputElement>) {
if (this.state.renamed) {
await this.rename(this.state.renamed, e.currentTarget.value);
}
this.setState({ renamed: null });
}
focus(e: React.FocusEvent<HTMLInputElement>) {
e.currentTarget.select();
}
render() {
const { playlists } = this.props;
// TODO (y.solovyov): extract into separate method that returns items
const nav = playlists.map((elem) => {
let navItemContent;
if (elem._id === this.state.renamed) {
navItemContent = (
<input
className={styles.item__input}
type='text'
defaultValue={elem.name}
onKeyDown={this.keyDown}
onBlur={this.blur}
onFocus={this.focus}
autoFocus
/>
);
} else {
navItemContent = (
<PlaylistsNavLink className={styles.item__link} playlistId={elem._id} onContextMenu={this.showContextMenu}>
{elem.name}
</PlaylistsNavLink>
);
}
return <div key={`playlist-${elem._id}`}>{navItemContent}</div>;
});
return (
<div className={styles.playlistsNav}>
<div className={styles.playlistsNav__header}>
<h4 className={styles.playlistsNav__title}>Playlists</h4>
<div className={styles.actions}>
<button className={styles.action} onClick={this.createPlaylist} title='New playlist'>
<Icon name='plus' />
</button>
</div>
</div>
<div className={styles.playlistsNav__body}>{nav}</div>
</div>
);
}
}
export default PlaylistsNav;<|fim▁end|>
|
const context = Menu.buildFromTemplate(template);
context.popup({}); // Let it appear
|
<|file_name|>Shendrones.py<|end_file_name|><|fim▁begin|>import scrapy
from scrapy import log
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from rcbi.items import Part
import copy
import json
import re
VARIANT_JSON_REGEX = re.compile("product: ({.*}),")
class ShendronesSpider(CrawlSpider):
name = "shendrones"
allowed_domains = ["shendrones.myshopify.com"]
start_urls = ["http://shendrones.myshopify.com/collections/all"]
rules = (
Rule(LinkExtractor(restrict_css=[".grid-item"]), callback='parse_item'),
)
def parse_item(self, response):
item = Part()
item["site"] = self.name
variant = {}
item["variants"] = [variant]
base_url = response.url
item["manufacturer"] = "Shendrones"
# Find the json info for variants.
body = response.body_as_unicode()
m = VARIANT_JSON_REGEX.search(body)
if m:
shopify_info = json.loads(m.group(1))
global_title = shopify_info["title"]
preorder = False
if global_title.endswith("Pre Order"):
global_title = global_title[:-len("Pre Order")].strip()
variant["stock_state"] = "backordered"
preorder = True
for v in shopify_info["variants"]:
if v["title"] != "Default Title":
item["name"] = global_title + " " + v["title"]
variant["url"] = base_url + "?variant=" + str(v["id"])
else:
item["name"] = global_title
variant["url"] = base_url<|fim▁hole|> if v["inventory_quantity"] <= 0:
if v["inventory_policy"] == "deny":
variant["stock_state"] = "out_of_stock"
else:
variant["stock_state"] = "backordered"
elif v["inventory_quantity"] < 3:
variant["stock_state"] = "low_stock"
variant["stock_text"] = "Only " + str(v["inventory_quantity"]) + " left!"
else:
variant["stock_state"] = "in_stock"
yield item
item = copy.deepcopy(item)
variant = item["variants"][0]<|fim▁end|>
|
variant["price"] = "${:.2f}".format(v["price"] / 100)
if not preorder:
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from .Cov_Mat import *
from .SaveLoadModel import *<|fim▁end|>
|
from .Confusion_MI import*
|
<|file_name|>TaskEvent.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2005-2006 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*/
package com.sun.source.util;
import com.sun.source.tree.CompilationUnitTree;
import javax.lang.model.element.TypeElement;
import javax.tools.JavaFileObject;
/**
* Provides details about work that has been done by the Sun Java Compiler, javac.
*
* @author Jonathan Gibbons
* @since 1.6
*/
public final class TaskEvent
{
/**
* Kind of task event.
* @since 1.6
*/
public enum Kind {
/**
* For events related to the parsing of a file.
*/
PARSE,
/**
* For events relating to elements being entered.
**/
ENTER,
/**
* For events relating to elements being analyzed for errors.
**/
ANALYZE,
/**
* For events relating to class files being generated.
**/
GENERATE,
/**
* For events relating to overall annotaion processing.
**/
ANNOTATION_PROCESSING,
/**
* For events relating to an individual annotation processing round.
**/
ANNOTATION_PROCESSING_ROUND<|fim▁hole|> };
public TaskEvent(Kind kind) {
this(kind, null, null, null);
}
public TaskEvent(Kind kind, JavaFileObject sourceFile) {
this(kind, sourceFile, null, null);
}
public TaskEvent(Kind kind, CompilationUnitTree unit) {
this(kind, unit.getSourceFile(), unit, null);
}
public TaskEvent(Kind kind, CompilationUnitTree unit, TypeElement clazz) {
this(kind, unit.getSourceFile(), unit, clazz);
}
private TaskEvent(Kind kind, JavaFileObject file, CompilationUnitTree unit, TypeElement clazz) {
this.kind = kind;
this.file = file;
this.unit = unit;
this.clazz = clazz;
}
public Kind getKind() {
return kind;
}
public JavaFileObject getSourceFile() {
return file;
}
public CompilationUnitTree getCompilationUnit() {
return unit;
}
public TypeElement getTypeElement() {
return clazz;
}
public String toString() {
return "TaskEvent["
+ kind + ","
+ file + ","
// the compilation unit is identified by the file
+ clazz + "]";
}
private Kind kind;
private JavaFileObject file;
private CompilationUnitTree unit;
private TypeElement clazz;
}<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# --!-- coding: utf8 --!--
"""
The converters package provide functions to quickly convert on the fly from
one format to another. It is responsible to check what external library are
present, and do the job as best as possible with what we have in hand.
"""
from manuskript.converters.abstractConverter import abstractConverter
from manuskript.converters.pandocConverter import pandocConverter
#from manuskript.converters.markdownConverter import markdownConverter
def HTML2MD(html):
# Convert using pandoc
if pandocConverter.isValid():
return pandocConverter.convert(html, _from="html", to="markdown")
# Convert to plain text using QTextEdit
return HTML2PlainText(html)
def HTML2PlainText(html):
"""
Convert from HTML to plain text.
"""
if pandocConverter.isValid():
return pandocConverter.convert(html, _from="html", to="plain")
# Last resort: probably resource inefficient
e = QTextEdit()
e.setHtml(html)<|fim▁hole|><|fim▁end|>
|
return e.toPlainText()
|
<|file_name|>SimplexNoise.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.utilities.procedural;
import org.terasology.math.TeraMath;
import org.terasology.utilities.random.FastRandom;
/**
* A speed-improved simplex noise algorithm for Simplex noise in 2D, 3D and 4D.
* <br><br>
* Based on example code by Stefan Gustavson ([email protected]).
* Optimisations by Peter Eastman ([email protected]).
* Better rank ordering method by Stefan Gustavson in 2012.
* <br><br>
* This could be speeded up even further, but it's useful as it is.
* <br><br>
* Version 2012-03-09
* <br><br>
* This code was placed in the public domain by its original author,
* Stefan Gustavson. You may use it as you see fit, but
* attribution is appreciated.
* <br><br>
* See http://staffwww.itn.liu.se/~stegu/
* <br><br>
* msteiger: Introduced seed value
*/
public class SimplexNoise extends AbstractNoise implements Noise2D, Noise3D {
/**
* Multiply this with the gridDim provided and noise(x,x) will give tileable 1D noise which will tile
* when x crosses a multiple of (this * gridDim)
*/
public static final float TILEABLE1DMAGICNUMBER = 0.5773502691896258f;
private static Grad[] grad3 = {
new Grad(1, 1, 0), new Grad(-1, 1, 0), new Grad(1, -1, 0), new Grad(-1, -1, 0),
new Grad(1, 0, 1), new Grad(-1, 0, 1), new Grad(1, 0, -1), new Grad(-1, 0, -1),
new Grad(0, 1, 1), new Grad(0, -1, 1), new Grad(0, 1, -1), new Grad(0, -1, -1)};
private static Grad[] grad4 = {
new Grad(0, 1, 1, 1), new Grad(0, 1, 1, -1), new Grad(0, 1, -1, 1), new Grad(0, 1, -1, -1),
new Grad(0, -1, 1, 1), new Grad(0, -1, 1, -1), new Grad(0, -1, -1, 1), new Grad(0, -1, -1, -1),
new Grad(1, 0, 1, 1), new Grad(1, 0, 1, -1), new Grad(1, 0, -1, 1), new Grad(1, 0, -1, -1),
new Grad(-1, 0, 1, 1), new Grad(-1, 0, 1, -1), new Grad(-1, 0, -1, 1), new Grad(-1, 0, -1, -1),
new Grad(1, 1, 0, 1), new Grad(1, 1, 0, -1), new Grad(1, -1, 0, 1), new Grad(1, -1, 0, -1),
new Grad(-1, 1, 0, 1), new Grad(-1, 1, 0, -1), new Grad(-1, -1, 0, 1), new Grad(-1, -1, 0, -1),
new Grad(1, 1, 1, 0), new Grad(1, 1, -1, 0), new Grad(1, -1, 1, 0), new Grad(1, -1, -1, 0),
new Grad(-1, 1, 1, 0), new Grad(-1, 1, -1, 0), new Grad(-1, -1, 1, 0), new Grad(-1, -1, -1, 0)};
// Skewing and unskewing factors for 2, 3, and 4 dimensions
private static final float F2 = 0.5f * (float) (Math.sqrt(3.0f) - 1.0f);
private static final float G2 = (3.0f - (float) Math.sqrt(3.0f)) / 6.0f;
private static final float F3 = 1.0f / 3.0f;
private static final float G3 = 1.0f / 6.0f;
private static final float F4 = ((float) Math.sqrt(5.0f) - 1.0f) / 4.0f;
private static final float G4 = (5.0f - (float) Math.sqrt(5.0f)) / 20.0f;
private final short[] perm;
private final short[] permMod12;
private final int permCount;
/**
* Initialize permutations with a given seed and grid dimension.
*
* @param seed a seed value used for permutation shuffling
*/
public SimplexNoise(long seed) {
this(seed, 256);
}
/**
* Initialize permutations with a given seed and grid dimension.
* Supports 1D tileable noise
* @see SimplexNoise#tileable1DMagicNumber
*
* @param seed a seed value used for permutation shuffling
* @param gridDim gridDim x gridDim will be the number of squares in the square grid formed after skewing the simplices belonging to once "tile"
*/
public SimplexNoise(long seed, int gridDim) {
FastRandom rand = new FastRandom(seed);
permCount = gridDim;
perm = new short[permCount * 2];
permMod12 = new short[permCount * 2];
short[] p = new short[permCount];
// Initialize with all values [0..(permCount-1)]
for (short i = 0; i < permCount; i++) {
p[i] = i;
}
// Shuffle the array
for (int i = 0; i < permCount; i++) {
int j = rand.nextInt(permCount);
short swap = p[i];
p[i] = p[j];
p[j] = swap;
}
for (int i = 0; i < permCount * 2; i++) {
perm[i] = p[i % permCount];
permMod12[i] = (short) (perm[i] % 12);
}
}
private static float dot(Grad g, float x, float y) {
return g.x * x + g.y * y;
}
private static float dot(Grad g, float x, float y, float z) {
return g.x * x + g.y * y + g.z * z;
}
private static float dot(Grad g, float x, float y, float z, float w) {
return g.x * x + g.y * y + g.z * z + g.w * w;
}
/**
* 2D simplex noise
*
* @param xin the x input coordinate
* @param yin the y input coordinate
* @return a noise value in the interval [-1,1]
*/
@Override
public float noise(float xin, float yin) {
float n0;
float n1;
float n2; // Noise contributions from the three corners
// Skew the input space to determine which simplex cell we're in
float s = (xin + yin) * F2; // Hairy factor for 2D
int i = TeraMath.floorToInt(xin + s);
int j = TeraMath.floorToInt(yin + s);
float t = (i + j) * G2;
float xo0 = i - t; // Unskew the cell origin back to (x,y) space
float yo0 = j - t;
float x0 = xin - xo0; // The x,y distances from the cell origin
float y0 = yin - yo0;
// For the 2D case, the simplex shape is an equilateral triangle.
// Determine which simplex we are in.
int i1; // Offsets for second (middle) corner of simplex in (i,j) coords
int j1;
if (x0 > y0) { // lower triangle, XY order: (0,0)->(1,0)->(1,1)
i1 = 1;
j1 = 0;
} else { // upper triangle, YX order: (0,0)->(0,1)->(1,1)
i1 = 0;
j1 = 1;
}
// A step of (1,0) in (i,j) means a step of (1-c,-c) in (x,y), and
// a step of (0,1) in (i,j) means a step of (-c,1-c) in (x,y), where
// c = (3-sqrt(3))/6
float x1 = x0 - i1 + G2; // Offsets for middle corner in (x,y) unskewed coords
float y1 = y0 - j1 + G2;
float x2 = x0 - 1.0f + 2.0f * G2; // Offsets for last corner in (x,y) unskewed coords
float y2 = y0 - 1.0f + 2.0f * G2;
// Work out the hashed gradient indices of the three simplex corners
int ii = Math.floorMod(i, permCount);
int jj = Math.floorMod(j, permCount);
int gi0 = permMod12[ii + perm[jj]];
int gi1 = permMod12[ii + i1 + perm[jj + j1]];
int gi2 = permMod12[ii + 1 + perm[jj + 1]];
// Calculate the contribution from the three corners
float t0 = 0.5f - x0 * x0 - y0 * y0;
if (t0 < 0) {
n0 = 0.0f;
} else {
t0 *= t0;
n0 = t0 * t0 * dot(grad3[gi0], x0, y0); // (x,y) of grad3 used for 2D gradient
}
float t1 = 0.5f - x1 * x1 - y1 * y1;
if (t1 < 0) {
n1 = 0.0f;
} else {
t1 *= t1;
n1 = t1 * t1 * dot(grad3[gi1], x1, y1);
}
float t2 = 0.5f - x2 * x2 - y2 * y2;
if (t2 < 0) {
n2 = 0.0f;
} else {
t2 *= t2;
n2 = t2 * t2 * dot(grad3[gi2], x2, y2);
}
// Add contributions from each corner to get the final noise value.
// The result is scaled to return values in the interval [-1,1].
return 70.0f * (n0 + n1 + n2);
}
/**
* 3D simplex noise
*
* @param xin the x input coordinate
* @param yin the y input coordinate
* @param zin the z input coordinate
* @return a noise value in the interval [-1,1]
*/
@Override
public float noise(float xin, float yin, float zin) {
float n0;
float n1;
float n2;
float n3; // Noise contributions from the four corners
// Skew the input space to determine which simplex cell we're in
float s = (xin + yin + zin) * F3; // Very nice and simple skew factor for 3D
int i = TeraMath.floorToInt(xin + s);
int j = TeraMath.floorToInt(yin + s);
int k = TeraMath.floorToInt(zin + s);
float t = (i + j + k) * G3;
float xo0 = i - t; // Unskew the cell origin back to (x,y,z) space
float yo0 = j - t;
float zo0 = k - t;
float x0 = xin - xo0; // The x,y,z distances from the cell origin
float y0 = yin - yo0;
float z0 = zin - zo0;
// For the 3D case, the simplex shape is a slightly irregular tetrahedron.
// Determine which simplex we are in.
int i1;
int j1;
int k1; // Offsets for second corner of simplex in (i,j,k) coords
int i2;
int j2;
int k2; // Offsets for third corner of simplex in (i,j,k) coords
if (x0 >= y0) {
if (y0 >= z0) { // X Y Z order
i1 = 1;
j1 = 0;
k1 = 0;
i2 = 1;
j2 = 1;
k2 = 0;
} else if (x0 >= z0) { // X Z Y order
i1 = 1;
j1 = 0;
k1 = 0;
i2 = 1;
j2 = 0;
k2 = 1;
} else { // Z X Y order
i1 = 0;
j1 = 0;
k1 = 1;
i2 = 1;
j2 = 0;
k2 = 1;
}
} else { // x0<y0
if (y0 < z0) { // Z Y X order
i1 = 0;
j1 = 0;
k1 = 1;
i2 = 0;
j2 = 1;
k2 = 1;
} else if (x0 < z0) { // Y Z X order
i1 = 0;
j1 = 1;
k1 = 0;
i2 = 0;
j2 = 1;
k2 = 1;
} else { // Y X Z order
i1 = 0;
j1 = 1;
k1 = 0;
i2 = 1;
j2 = 1;
k2 = 0;
}
}
// A step of (1,0,0) in (i,j,k) means a step of (1-c,-c,-c) in (x,y,z),
// a step of (0,1,0) in (i,j,k) means a step of (-c,1-c,-c) in (x,y,z), and
// a step of (0,0,1) in (i,j,k) means a step of (-c,-c,1-c) in (x,y,z), where
// c = 1/6.
float x1 = x0 - i1 + G3; // Offsets for second corner in (x,y,z) coords
float y1 = y0 - j1 + G3;
float z1 = z0 - k1 + G3;
float x2 = x0 - i2 + 2.0f * G3; // Offsets for third corner in (x,y,z) coords
float y2 = y0 - j2 + 2.0f * G3;
float z2 = z0 - k2 + 2.0f * G3;
float x3 = x0 - 1.0f + 3.0f * G3; // Offsets for last corner in (x,y,z) coords
float y3 = y0 - 1.0f + 3.0f * G3;
float z3 = z0 - 1.0f + 3.0f * G3;
// Work out the hashed gradient indices of the four simplex corners
int ii = Math.floorMod(i, permCount);
int jj = Math.floorMod(j, permCount);
int kk = Math.floorMod(k, permCount);
int gi0 = permMod12[ii + perm[jj + perm[kk]]];
int gi1 = permMod12[ii + i1 + perm[jj + j1 + perm[kk + k1]]];
int gi2 = permMod12[ii + i2 + perm[jj + j2 + perm[kk + k2]]];
int gi3 = permMod12[ii + 1 + perm[jj + 1 + perm[kk + 1]]];
// Calculate the contribution from the four corners
float t0 = 0.6f - x0 * x0 - y0 * y0 - z0 * z0;
if (t0 < 0) {
n0 = 0.0f;
} else {
t0 *= t0;
n0 = t0 * t0 * dot(grad3[gi0], x0, y0, z0);
}
float t1 = 0.6f - x1 * x1 - y1 * y1 - z1 * z1;
if (t1 < 0) {
n1 = 0.0f;
} else {
t1 *= t1;
n1 = t1 * t1 * dot(grad3[gi1], x1, y1, z1);
}
float t2 = 0.6f - x2 * x2 - y2 * y2 - z2 * z2;
if (t2 < 0) {
n2 = 0.0f;
} else {
t2 *= t2;
n2 = t2 * t2 * dot(grad3[gi2], x2, y2, z2);
}
float t3 = 0.6f - x3 * x3 - y3 * y3 - z3 * z3;
if (t3 < 0) {
n3 = 0.0f;
} else {
t3 *= t3;
n3 = t3 * t3 * dot(grad3[gi3], x3, y3, z3);
}
// Add contributions from each corner to get the final noise value.
// The result is scaled to stay just inside [-1,1]
return 32.0f * (n0 + n1 + n2 + n3);
}
/**
* 4D simplex noise, better simplex rank ordering method 2012-03-09
*
* @param xin the x input coordinate
* @param yin the y input coordinate
* @param zin the z input coordinate
* @return a noise value in the interval [-1,1]
*/
public float noise(float xin, float yin, float zin, float win) {
float n0;
float n1;
float n2;
float n3;
float n4; // Noise contributions from the five corners
// Skew the (x,y,z,w) space to determine which cell of 24 simplices we're in
float s = (xin + yin + zin + win) * F4; // Factor for 4D skewing
int i = TeraMath.floorToInt(xin + s);
int j = TeraMath.floorToInt(yin + s);
int k = TeraMath.floorToInt(zin + s);
int l = TeraMath.floorToInt(win + s);
float t = (i + j + k + l) * G4; // Factor for 4D unskewing
float xo0 = i - t; // Unskew the cell origin back to (x,y,z,w) space
float yo0 = j - t;
float zo0 = k - t;
float wo0 = l - t;
float x0 = xin - xo0; // The x,y,z,w distances from the cell origin
float y0 = yin - yo0;
float z0 = zin - zo0;
float w0 = win - wo0;
// For the 4D case, the simplex is a 4D shape I won't even try to describe.
// To find out which of the 24 possible simplices we're in, we need to
// determine the magnitude ordering of x0, y0, z0 and w0.
// Six pair-wise comparisons are performed between each possible pair
// of the four coordinates, and the results are used to rank the numbers.
int rankx = 0;
int ranky = 0;
int rankz = 0;
int rankw = 0;
if (x0 > y0) {
rankx++;
} else {
ranky++;
}
if (x0 > z0) {
rankx++;
} else {
rankz++;
}
if (x0 > w0) {
rankx++;
} else {
rankw++;
}
if (y0 > z0) {
ranky++;
} else {
rankz++;
}
if (y0 > w0) {
ranky++;
} else {
rankw++;
}
if (z0 > w0) {
rankz++;
} else {
rankw++;
}
int i1;
int j1;
int k1;
int l1; // The integer offsets for the second simplex corner
int i2;
int j2;
int k2;
int l2; // The integer offsets for the third simplex corner
int i3;
int j3;
int k3;
int l3; // The integer offsets for the fourth simplex corner
// simplex[c] is a 4-vector with the numbers 0, 1, 2 and 3 in some order.
// Many values of c will never occur, since e.g. x>y>z>w makes x<z, y<w and x<w
// impossible. Only the 24 indices which have non-zero entries make any sense.
// We use a thresholding to set the coordinates in turn from the largest magnitude.
// Rank 3 denotes the largest coordinate.
i1 = rankx >= 3 ? 1 : 0;
j1 = ranky >= 3 ? 1 : 0;
k1 = rankz >= 3 ? 1 : 0;
l1 = rankw >= 3 ? 1 : 0;
// Rank 2 denotes the second largest coordinate.
i2 = rankx >= 2 ? 1 : 0;
j2 = ranky >= 2 ? 1 : 0;
k2 = rankz >= 2 ? 1 : 0;
l2 = rankw >= 2 ? 1 : 0;
// Rank 1 denotes the second smallest coordinate.
i3 = rankx >= 1 ? 1 : 0;
j3 = ranky >= 1 ? 1 : 0;
k3 = rankz >= 1 ? 1 : 0;
l3 = rankw >= 1 ? 1 : 0;
// The fifth corner has all coordinate offsets = 1, so no need to compute that.
float x1 = x0 - i1 + G4; // Offsets for second corner in (x,y,z,w) coords
float y1 = y0 - j1 + G4;
float z1 = z0 - k1 + G4;
float w1 = w0 - l1 + G4;
float x2 = x0 - i2 + 2.0f * G4; // Offsets for third corner in (x,y,z,w) coords
float y2 = y0 - j2 + 2.0f * G4;
float z2 = z0 - k2 + 2.0f * G4;
float w2 = w0 - l2 + 2.0f * G4;
float x3 = x0 - i3 + 3.0f * G4; // Offsets for fourth corner in (x,y,z,w) coords
float y3 = y0 - j3 + 3.0f * G4;
float z3 = z0 - k3 + 3.0f * G4;
float w3 = w0 - l3 + 3.0f * G4;
float x4 = x0 - 1.0f + 4.0f * G4; // Offsets for last corner in (x,y,z,w) coords
float y4 = y0 - 1.0f + 4.0f * G4;
float z4 = z0 - 1.0f + 4.0f * G4;
float w4 = w0 - 1.0f + 4.0f * G4;
// Work out the hashed gradient indices of the five simplex corners
int ii = Math.floorMod(i, permCount);
int jj = Math.floorMod(j, permCount);
int kk = Math.floorMod(k, permCount);
int ll = Math.floorMod(l, permCount);
int gi0 = perm[ii + perm[jj + perm[kk + perm[ll]]]] % 32;
int gi1 = perm[ii + i1 + perm[jj + j1 + perm[kk + k1 + perm[ll + l1]]]] % 32;
int gi2 = perm[ii + i2 + perm[jj + j2 + perm[kk + k2 + perm[ll + l2]]]] % 32;
int gi3 = perm[ii + i3 + perm[jj + j3 + perm[kk + k3 + perm[ll + l3]]]] % 32;
int gi4 = perm[ii + 1 + perm[jj + 1 + perm[kk + 1 + perm[ll + 1]]]] % 32;
// Calculate the contribution from the five corners
float t0 = 0.6f - x0 * x0 - y0 * y0 - z0 * z0 - w0 * w0;
if (t0 < 0) {
n0 = 0.0f;
} else {
t0 *= t0;
n0 = t0 * t0 * dot(grad4[gi0], x0, y0, z0, w0);
}
float t1 = 0.6f - x1 * x1 - y1 * y1 - z1 * z1 - w1 * w1;
if (t1 < 0) {
n1 = 0.0f;
} else {
t1 *= t1;
n1 = t1 * t1 * dot(grad4[gi1], x1, y1, z1, w1);
}
float t2 = 0.6f - x2 * x2 - y2 * y2 - z2 * z2 - w2 * w2;
if (t2 < 0) {
n2 = 0.f;
} else {
t2 *= t2;
n2 = t2 * t2 * dot(grad4[gi2], x2, y2, z2, w2);
}
float t3 = 0.6f - x3 * x3 - y3 * y3 - z3 * z3 - w3 * w3;
if (t3 < 0) {
n3 = 0.0f;<|fim▁hole|> n3 = t3 * t3 * dot(grad4[gi3], x3, y3, z3, w3);
}
float t4 = 0.6f - x4 * x4 - y4 * y4 - z4 * z4 - w4 * w4;
if (t4 < 0) {
n4 = 0.0f;
} else {
t4 *= t4;
n4 = t4 * t4 * dot(grad4[gi4], x4, y4, z4, w4);
}
// Sum up and scale the result to cover the range [-1,1]
return 27.0f * (n0 + n1 + n2 + n3 + n4);
}
// Inner class to speed up gradient computations
// (array access is a lot slower than member access)
private static class Grad {
float x;
float y;
float z;
float w;
Grad(float x, float y, float z) {
this.x = x;
this.y = y;
this.z = z;
}
Grad(float x, float y, float z, float w) {
this.x = x;
this.y = y;
this.z = z;
this.w = w;
}
}
}<|fim▁end|>
|
} else {
t3 *= t3;
|
<|file_name|>MaterialPropertyValue.cpp<|end_file_name|><|fim▁begin|>/*********************************************************\
* Copyright (c) 2012-2021 The Unrimp Team
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software
* and associated documentation files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
* BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\*********************************************************/
//[-------------------------------------------------------]
//[ Includes ]
//[-------------------------------------------------------]
#include "Renderer/Public/Resource/Material/MaterialPropertyValue.h"
//[-------------------------------------------------------]
//[ Namespace ]
//[-------------------------------------------------------]
namespace Renderer
{
//[-------------------------------------------------------]
//[ Public static methods ]
//[-------------------------------------------------------]
uint32_t MaterialPropertyValue::getValueTypeNumberOfBytes(ValueType valueType)
{
switch (valueType)
{
case ValueType::UNKNOWN:
return 0;
case ValueType::BOOLEAN:
return sizeof(bool);
case ValueType::INTEGER:
return sizeof(int);
case ValueType::INTEGER_2:
return sizeof(int) * 2;
case ValueType::INTEGER_3:
return sizeof(int) * 3;
case ValueType::INTEGER_4:
return sizeof(int) * 4;
case ValueType::FLOAT:
return sizeof(float);
case ValueType::FLOAT_2:
return sizeof(float) * 2;
case ValueType::FLOAT_3:
return sizeof(float) * 3;
case ValueType::FLOAT_4:
return sizeof(float) * 4;
case ValueType::FLOAT_3_3:
return sizeof(float) * 3 * 3;
case ValueType::FLOAT_4_4:
return sizeof(float) * 4 * 4;
case ValueType::FILL_MODE:
return sizeof(Rhi::FillMode);
case ValueType::CULL_MODE:
return sizeof(Rhi::CullMode);
case ValueType::CONSERVATIVE_RASTERIZATION_MODE:
return sizeof(Rhi::ConservativeRasterizationMode);
case ValueType::DEPTH_WRITE_MASK:
return sizeof(Rhi::DepthWriteMask);
case ValueType::STENCIL_OP:
return sizeof(Rhi::StencilOp);
case ValueType::COMPARISON_FUNC:
return sizeof(Rhi::ComparisonFunc);
case ValueType::BLEND:
return sizeof(Rhi::Blend);
case ValueType::BLEND_OP:
return sizeof(Rhi::BlendOp);
case ValueType::FILTER_MODE:
return sizeof(Rhi::FilterMode);
case ValueType::TEXTURE_ADDRESS_MODE:
return sizeof(Rhi::TextureAddressMode);
case ValueType::TEXTURE_ASSET_ID:
return sizeof(AssetId);
case ValueType::GLOBAL_MATERIAL_PROPERTY_ID:
return sizeof(MaterialPropertyId);
}
// Error, we should never ever end up in here
return 0;
}
//[-------------------------------------------------------]
//[ Public methods ]
//[-------------------------------------------------------]
bool MaterialPropertyValue::operator ==(const MaterialPropertyValue& materialPropertyValue) const
{
// Check value type
if (mValueType != materialPropertyValue.getValueType())
{
// Not identical due to value type mismatch
return false;
}
// Check value type
switch (mValueType)
{
case ValueType::UNKNOWN:
return true;
case ValueType::BOOLEAN:
return (mValue.Boolean == materialPropertyValue.mValue.Boolean);
case ValueType::INTEGER:
return (mValue.Integer == materialPropertyValue.mValue.Integer);
case ValueType::INTEGER_2:
return (mValue.Integer2[0] == materialPropertyValue.mValue.Integer2[0] &&
mValue.Integer2[1] == materialPropertyValue.mValue.Integer2[1]);<|fim▁hole|>
case ValueType::INTEGER_3:
return (mValue.Integer3[0] == materialPropertyValue.mValue.Integer3[0] &&
mValue.Integer3[1] == materialPropertyValue.mValue.Integer3[1] &&
mValue.Integer3[2] == materialPropertyValue.mValue.Integer3[2]);
case ValueType::INTEGER_4:
return (mValue.Integer4[0] == materialPropertyValue.mValue.Integer4[0] &&
mValue.Integer4[1] == materialPropertyValue.mValue.Integer4[1] &&
mValue.Integer4[2] == materialPropertyValue.mValue.Integer4[2] &&
mValue.Integer4[3] == materialPropertyValue.mValue.Integer4[3]);
case ValueType::FLOAT:
return (mValue.Float == materialPropertyValue.mValue.Float);
case ValueType::FLOAT_2:
return (mValue.Float2[0] == materialPropertyValue.mValue.Float2[0] &&
mValue.Float2[1] == materialPropertyValue.mValue.Float2[1]);
case ValueType::FLOAT_3:
return (mValue.Float3[0] == materialPropertyValue.mValue.Float3[0] &&
mValue.Float3[1] == materialPropertyValue.mValue.Float3[1] &&
mValue.Float3[2] == materialPropertyValue.mValue.Float3[2]);
case ValueType::FLOAT_4:
return (mValue.Float4[0] == materialPropertyValue.mValue.Float4[0] &&
mValue.Float4[1] == materialPropertyValue.mValue.Float4[1] &&
mValue.Float4[2] == materialPropertyValue.mValue.Float4[2] &&
mValue.Float4[3] == materialPropertyValue.mValue.Float4[3]);
case ValueType::FLOAT_3_3:
// Declaration property only
return true;
case ValueType::FLOAT_4_4:
// Declaration property only
return true;
case ValueType::FILL_MODE:
return (mValue.FillMode == materialPropertyValue.mValue.FillMode);
case ValueType::CULL_MODE:
return (mValue.CullMode == materialPropertyValue.mValue.CullMode);
case ValueType::CONSERVATIVE_RASTERIZATION_MODE:
return (mValue.ConservativeRasterizationMode == materialPropertyValue.mValue.ConservativeRasterizationMode);
case ValueType::DEPTH_WRITE_MASK:
return (mValue.DepthWriteMask == materialPropertyValue.mValue.DepthWriteMask);
case ValueType::STENCIL_OP:
return (mValue.StencilOp == materialPropertyValue.mValue.StencilOp);
case ValueType::COMPARISON_FUNC:
return (mValue.ComparisonFunc == materialPropertyValue.mValue.ComparisonFunc);
case ValueType::BLEND:
return (mValue.Blend == materialPropertyValue.mValue.Blend);
case ValueType::BLEND_OP:
return (mValue.BlendOp == materialPropertyValue.mValue.BlendOp);
case ValueType::FILTER_MODE:
return (mValue.FilterMode == materialPropertyValue.mValue.FilterMode);
case ValueType::TEXTURE_ADDRESS_MODE:
return (mValue.TextureAddressMode == materialPropertyValue.mValue.TextureAddressMode);
case ValueType::TEXTURE_ASSET_ID:
return (mValue.TextureAssetId == materialPropertyValue.mValue.TextureAssetId);
case ValueType::GLOBAL_MATERIAL_PROPERTY_ID:
return (mValue.GlobalMaterialPropertyId == materialPropertyValue.mValue.GlobalMaterialPropertyId);
}
// Not identical
ASSERT(false, "Invalid value type")
return false;
}
//[-------------------------------------------------------]
//[ Namespace ]
//[-------------------------------------------------------]
} // Renderer<|fim▁end|>
| |
<|file_name|>nexttowardl.cpp<|end_file_name|><|fim▁begin|>// Copyright John Maddock 2008.
// Use, modification and distribution are subject to the
// Boost Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
# include <pch.hpp>
#ifndef BOOST_MATH_TR1_SOURCE
# define BOOST_MATH_TR1_SOURCE
#endif
#include <boost/math/tr1.hpp>
#include <boost/math/special_functions/next.hpp>
#include "c_policy.hpp"
namespace boost{ namespace math{ namespace tr1{
<|fim▁hole|>{
return c_policies::nextafter BOOST_PREVENT_MACRO_SUBSTITUTION(x, y);
}
}}}<|fim▁end|>
|
extern "C" long double BOOST_MATH_TR1_DECL boost_nexttowardl BOOST_PREVENT_MACRO_SUBSTITUTION(long double x, long double y) BOOST_MATH_C99_THROW_SPEC
|
<|file_name|>rkt_image_dependencies_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The rkt Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"fmt"
"io/ioutil"
"os"
"strings"
"testing"
"github.com/coreos/rkt/tests/testutils"
taas "github.com/coreos/rkt/tests/testutils/aci-server"
)
const (
manifestDepsTemplate = `
{
"acKind" : "ImageManifest",
"acVersion" : "0.7.4",
"dependencies" : [
DEPENDENCIES
],
"labels" : [
{
"name" : "version",
"value" : "VERSION"
},
{
"name" : "arch",
"value" : "amd64"
},
{
"value" : "linux",
"name" : "os"
}
],
"app" : {
"user" : "0",
"exec" : [
"/inspect", "--print-msg=HelloDependencies"
],
"workingDirectory" : "/",
"group" : "0",
"environment" : [
]
},
"name" : "IMG_NAME"
}
`
)
// TestImageDependencies generates ACIs with a complex dependency tree and
// fetches them via the discovery mechanism. Some dependencies are already<|fim▁hole|>// mechanism. This is to reproduce the scenario in explained in:
// https://github.com/coreos/rkt/issues/1752#issue-117121841
func TestImageDependencies(t *testing.T) {
tmpDir := createTempDirOrPanic("rkt-TestImageDeps-")
defer os.RemoveAll(tmpDir)
ctx := testutils.NewRktRunCtx()
defer ctx.Cleanup()
server := runDiscoveryServer(t, taas.ServerOrdinary, taas.AuthNone)
defer server.Close()
baseImage := getInspectImagePath()
_ = importImageAndFetchHash(t, ctx, "", baseImage)
emptyImage := getEmptyImagePath()
fileSet := make(map[string]string)
// Scenario from https://github.com/coreos/rkt/issues/1752#issue-117121841
//
// A->B
// A->C
// A->D
//
// B: prefetched
//
// C->B
// C->E
//
// D->B
// D->E
topImage := "localhost/image-a"
imageList := []struct {
shortName string
imageName string
deps string
version string
prefetch bool
manifest string
fileName string
}{
{
shortName: "a",
imageName: topImage,
deps: `{"imageName":"localhost/image-b"}, {"imageName":"localhost/image-c"}, {"imageName":"localhost/image-d"}`,
version: "1",
},
{
shortName: "b",
imageName: "localhost/image-b",
deps: ``,
version: "1",
prefetch: true,
},
{
shortName: "c",
imageName: "localhost/image-c",
deps: `{"imageName":"localhost/image-b"}, {"imageName":"localhost/image-e", "labels": [{"name": "version", "value": "1"}]}`,
version: "1",
},
{
shortName: "d",
imageName: "localhost/image-d",
deps: `{"imageName":"localhost/image-b"}, {"imageName":"localhost/image-e", "labels": [{"name": "version", "value": "1"}]}`,
version: "1",
},
{
shortName: "e",
imageName: "localhost/image-e",
deps: `{"imageName":"coreos.com/rkt-inspect"}`,
version: "1",
},
}
for i, _ := range imageList {
// We need a reference rather than a new copy from "range"
// because we modify the content
img := &imageList[i]
img.manifest = manifestDepsTemplate
img.manifest = strings.Replace(img.manifest, "IMG_NAME", img.imageName, -1)
img.manifest = strings.Replace(img.manifest, "DEPENDENCIES", img.deps, -1)
img.manifest = strings.Replace(img.manifest, "VERSION", img.version, -1)
tmpManifest, err := ioutil.TempFile(tmpDir, "manifest-"+img.shortName+"-")
if err != nil {
panic(fmt.Sprintf("Cannot create temp manifest: %v", err))
}
defer os.Remove(tmpManifest.Name())
if err := ioutil.WriteFile(tmpManifest.Name(), []byte(img.manifest), 0600); err != nil {
panic(fmt.Sprintf("Cannot write to temp manifest: %v", err))
}
baseName := "image-" + img.shortName + ".aci"
img.fileName = patchACI(emptyImage, baseName, "--manifest", tmpManifest.Name())
defer os.Remove(img.fileName)
fileSet[baseName] = img.fileName
}
server.UpdateFileSet(fileSet)
for i := len(imageList) - 1; i >= 0; i-- {
img := imageList[i]
if img.prefetch {
t.Logf("Importing image %q: %q", img.imageName, img.fileName)
testImageShortHash := importImageAndFetchHash(t, ctx, "", img.fileName)
t.Logf("Imported image %q: %s", img.imageName, testImageShortHash)
}
}
runCmd := fmt.Sprintf("%s --debug --insecure-options=image,tls run %s", ctx.Cmd(), topImage)
child := spawnOrFail(t, runCmd)
expectedList := []string{
"image: fetching image from https://localhost/localhost/image-a.aci",
"image: using image from local store for image name localhost/image-b",
"image: fetching image from https://localhost/localhost/image-c.aci",
"image: fetching image from https://localhost/localhost/image-d.aci",
"image: using image from local store for image name coreos.com/rkt-inspect",
"HelloDependencies",
}
for _, expected := range expectedList {
if err := expectWithOutput(child, expected); err != nil {
t.Fatalf("Expected %q but not found: %v", expected, err)
}
}
waitOrFail(t, child, true)
}<|fim▁end|>
|
// cached in the CAS, and some dependencies are fetched via the discovery
|
<|file_name|>0bc0a93ac867_.py<|end_file_name|><|fim▁begin|>"""Add user table to support doorman and oauth authentication.
Revision ID: 0bc0a93ac867
Revises: fd28e46e46a6
Create Date: 2016-05-11 11:01:40.472139
"""
# revision identifiers, used by Alembic.
revision = '0bc0a93ac867'<|fim▁hole|>import doorman.database
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=80), nullable=False),
sa.Column('email', sa.String(), nullable=True),
sa.Column('password', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('social_id', sa.String(), nullable=True),
sa.Column('first_name', sa.String(), nullable=True),
sa.Column('last_name', sa.String(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('user')
### end Alembic commands ###<|fim▁end|>
|
down_revision = 'fd28e46e46a6'
from alembic import op
import sqlalchemy as sa
|
<|file_name|>test_cobalt.py<|end_file_name|><|fim▁begin|># Copyright 2016 Presslabs SRL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class TestCobalt:<|fim▁hole|><|fim▁end|>
|
def test_stop_signals(self, mocker):
pass
|
<|file_name|>debug.interceptor.ts<|end_file_name|><|fim▁begin|>import { module, IRequestConfig, IHttpInterceptor, IHttpProvider } from 'angular';
import { $log, $location } from 'ngimport';
import { JsonUtils } from 'core/utils';
export class DebugInterceptor implements IHttpInterceptor {
public request = (config: IRequestConfig): IRequestConfig => {
try {
// This is a great opportunity to break Deck, so be careful.
this.logMutatingRequest(config);
} catch (e) {
$log.warn('Debug interceptor bug: ', e.message);
}<|fim▁hole|>
private logMutatingRequest(config: IRequestConfig): void {
if (
$location.url() &&
$location.url().includes('debug=true') &&
['POST', 'PUT', 'DELETE'].includes(config.method)
) {
$log.log(`${config.method}: ${config.url} \n`, JsonUtils.makeSortedStringFromObject(config.data));
}
}
}
export const DEBUG_INTERCEPTOR = 'spinnaker.core.debug.interceptor';
module(DEBUG_INTERCEPTOR, [])
.service('debugInterceptor', DebugInterceptor)
.config(['$httpProvider', ($httpProvider: IHttpProvider) => $httpProvider.interceptors.push('debugInterceptor')]);<|fim▁end|>
|
return config;
};
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import datetime
import decimal
import hashlib
import logging
from time import time
from django.conf import settings
from django.utils.encoding import force_bytes
from django.utils.timezone import utc
logger = logging.getLogger('django.db.backends')
class CursorWrapper:
def __init__(self, cursor, db):
self.cursor = cursor
self.db = db
WRAP_ERROR_ATTRS = frozenset(['fetchone', 'fetchmany', 'fetchall', 'nextset'])
def __getattr__(self, attr):
cursor_attr = getattr(self.cursor, attr)
if attr in CursorWrapper.WRAP_ERROR_ATTRS:
return self.db.wrap_database_errors(cursor_attr)
else:
return cursor_attr
def __iter__(self):
with self.db.wrap_database_errors:
for item in self.cursor:
yield item
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
# Close instead of passing through to avoid backend-specific behavior
# (#17671). Catch errors liberally because errors in cleanup code
# aren't useful.
try:
self.close()
except self.db.Database.Error:
pass
# The following methods cannot be implemented in __getattr__, because the
# code must run when the method is invoked, not just when it is accessed.
def callproc(self, procname, params=None):
self.db.validate_no_broken_transaction()
with self.db.wrap_database_errors:
if params is None:
return self.cursor.callproc(procname)
else:
return self.cursor.callproc(procname, params)
def execute(self, sql, params=None):
self.db.validate_no_broken_transaction()
with self.db.wrap_database_errors:
if params is None:
return self.cursor.execute(sql)
else:
return self.cursor.execute(sql, params)
def executemany(self, sql, param_list):
self.db.validate_no_broken_transaction()
with self.db.wrap_database_errors:
return self.cursor.executemany(sql, param_list)
class CursorDebugWrapper(CursorWrapper):
# XXX callproc isn't instrumented at this time.
def execute(self, sql, params=None):
start = time()
try:
return super(CursorDebugWrapper, self).execute(sql, params)
finally:
stop = time()
duration = stop - start
sql = self.db.ops.last_executed_query(self.cursor, sql, params)
self.db.queries_log.append({
'sql': sql,
'time': "%.3f" % duration,
})
logger.debug(
'(%.3f) %s; args=%s', duration, sql, params,
extra={'duration': duration, 'sql': sql, 'params': params}
)
def executemany(self, sql, param_list):
start = time()
try:
return super(CursorDebugWrapper, self).executemany(sql, param_list)
finally:
stop = time()
duration = stop - start
try:
times = len(param_list)
except TypeError: # param_list could be an iterator
times = '?'
self.db.queries_log.append({
'sql': '%s times: %s' % (times, sql),
'time': "%.3f" % duration,
})
logger.debug(
'(%.3f) %s; args=%s', duration, sql, param_list,
extra={'duration': duration, 'sql': sql, 'params': param_list}
)
###############################################
# Converters from database (string) to Python #
###############################################
def typecast_date(s):
return datetime.date(*map(int, s.split('-'))) if s else None # returns None if s is null
def typecast_time(s): # does NOT store time zone information
if not s:
return None
hour, minutes, seconds = s.split(':')
if '.' in seconds: # check whether seconds have a fractional part
seconds, microseconds = seconds.split('.')
else:<|fim▁hole|> return datetime.time(int(hour), int(minutes), int(seconds), int((microseconds + '000000')[:6]))
def typecast_timestamp(s): # does NOT store time zone information
# "2005-07-29 15:48:00.590358-05"
# "2005-07-29 09:56:00-05"
if not s:
return None
if ' ' not in s:
return typecast_date(s)
d, t = s.split()
# Extract timezone information, if it exists. Currently we just throw
# it away, but in the future we may make use of it.
if '-' in t:
t, tz = t.split('-', 1)
tz = '-' + tz
elif '+' in t:
t, tz = t.split('+', 1)
tz = '+' + tz
else:
tz = ''
dates = d.split('-')
times = t.split(':')
seconds = times[2]
if '.' in seconds: # check whether seconds have a fractional part
seconds, microseconds = seconds.split('.')
else:
microseconds = '0'
tzinfo = utc if settings.USE_TZ else None
return datetime.datetime(
int(dates[0]), int(dates[1]), int(dates[2]),
int(times[0]), int(times[1]), int(seconds),
int((microseconds + '000000')[:6]), tzinfo
)
def typecast_decimal(s):
if s is None or s == '':
return None
return decimal.Decimal(s)
###############################################
# Converters from Python to database (string) #
###############################################
def rev_typecast_decimal(d):
if d is None:
return None
return str(d)
def truncate_name(name, length=None, hash_len=4):
"""Shortens a string to a repeatable mangled version with the given length.
"""
if length is None or len(name) <= length:
return name
hsh = hashlib.md5(force_bytes(name)).hexdigest()[:hash_len]
return '%s%s' % (name[:length - hash_len], hsh)
def format_number(value, max_digits, decimal_places):
"""
Formats a number into a string with the requisite number of digits and
decimal places.
"""
if value is None:
return None
if isinstance(value, decimal.Decimal):
context = decimal.getcontext().copy()
if max_digits is not None:
context.prec = max_digits
if decimal_places is not None:
value = value.quantize(decimal.Decimal(".1") ** decimal_places, context=context)
else:
context.traps[decimal.Rounded] = 1
value = context.create_decimal(value)
return "{:f}".format(value)
if decimal_places is not None:
return "%.*f" % (decimal_places, value)
return "{:f}".format(value)
def strip_quotes(table_name):
"""
Strip quotes off of quoted table names to make them safe for use in index
names, sequence names, etc. For example '"USER"."TABLE"' (an Oracle naming
scheme) becomes 'USER"."TABLE'.
"""
has_quotes = table_name.startswith('"') and table_name.endswith('"')
return table_name[1:-1] if has_quotes else table_name<|fim▁end|>
|
microseconds = '0'
|
<|file_name|>test_youtube_signature.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import io
import re
import string
from youtube_dl.extractor import YoutubeIE
from youtube_dl.utils import compat_str, compat_urlretrieve
_TESTS = [
(
'https://s.ytimg.com/yts/jsbin/html5player-vflHOr_nV.js',
'js',
86,
'>=<;:/.-[+*)(\'&%$#"!ZYX0VUTSRQPONMLKJIHGFEDCBA\\yxwvutsrqponmlkjihgfedcba987654321',
),
(
'https://s.ytimg.com/yts/jsbin/html5player-vfldJ8xgI.js',
'js',
85,
'3456789a0cdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRS[UVWXYZ!"#$%&\'()*+,-./:;<=>?@',
),
(
'https://s.ytimg.com/yts/jsbin/html5player-vfle-mVwz.js',
'js',
90,
']\\[@?>=<;:/.-,+*)(\'&%$#"hZYXWVUTSRQPONMLKJIHGFEDCBAzyxwvutsrqponmlkjiagfedcb39876',
),
(
'https://s.ytimg.com/yts/jsbin/html5player-en_US-vfl0Cbn9e.js',
'js',
84,
'O1I3456789abcde0ghijklmnopqrstuvwxyzABCDEFGHfJKLMN2PQRSTUVW@YZ!"#$%&\'()*+,-./:;<=',
),
(
'https://s.ytimg.com/yts/jsbin/html5player-en_US-vflXGBaUN.js',
'js',
'2ACFC7A61CA478CD21425E5A57EBD73DDC78E22A.2094302436B2D377D14A3BBA23022D023B8BC25AA',
'A52CB8B320D22032ABB3A41D773D2B6342034902.A22E87CDD37DBE75A5E52412DC874AC16A7CFCA2',
),
(
'http://s.ytimg.com/yts/swfbin/player-vfl5vIhK2/watch_as3.swf',
'swf',
86,
'O1I3456789abcde0ghijklmnopqrstuvwxyzABCDEFGHfJKLMN2PQRSTUVWXY\\!"#$%&\'()*+,-./:;<=>?'
),
(
'http://s.ytimg.com/yts/swfbin/player-vflmDyk47/watch_as3.swf',
'swf',
'F375F75BF2AFDAAF2666E43868D46816F83F13E81C46.3725A8218E446A0DECD33F79DC282994D6AA92C92C9',
'9C29AA6D499282CD97F33DCED0A644E8128A5273.64C18E31F38361864D86834E6662FAADFA2FB57F'
),
(
'https://s.ytimg.com/yts/jsbin/html5player-en_US-vflBb0OQx.js',
'js',
84,
'123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQ0STUVWXYZ!"#$%&\'()*+,@./:;<=>'
),
(
'https://s.ytimg.com/yts/jsbin/html5player-en_US-vfl9FYC6l.js',
'js',
83,
'123456789abcdefghijklmnopqr0tuvwxyzABCDETGHIJKLMNOPQRS>UVWXYZ!"#$%&\'()*+,-./:;<=F'
),
(
'https://s.ytimg.com/yts/jsbin/html5player-en_US-vflCGk6yw/html5player.js',
'js',
'4646B5181C6C3020DF1D9C7FCFEA.AD80ABF70C39BD369CCCAE780AFBB98FA6B6CB42766249D9488C288',
'82C8849D94266724DC6B6AF89BBFA087EACCD963.B93C07FBA084ACAEFCF7C9D1FD0203C6C1815B6B'
)
]
class TestSignature(unittest.TestCase):
def setUp(self):
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
self.TESTDATA_DIR = os.path.join(TEST_DIR, 'testdata')
if not os.path.exists(self.TESTDATA_DIR):
os.mkdir(self.TESTDATA_DIR)
def make_tfunc(url, stype, sig_input, expected_sig):
m = re.match(r'.*-([a-zA-Z0-9_-]+)(?:/watch_as3|/html5player)?\.[a-z]+$', url)
assert m, '%r should follow URL format' % url
test_id = m.group(1)
def test_func(self):
basename = 'player-%s.%s' % (test_id, stype)
fn = os.path.join(self.TESTDATA_DIR, basename)<|fim▁hole|> ie = YoutubeIE()
if stype == 'js':
with io.open(fn, encoding='utf-8') as testf:
jscode = testf.read()
func = ie._parse_sig_js(jscode)
else:
assert stype == 'swf'
with open(fn, 'rb') as testf:
swfcode = testf.read()
func = ie._parse_sig_swf(swfcode)
src_sig = (
compat_str(string.printable[:sig_input])
if isinstance(sig_input, int) else sig_input)
got_sig = func(src_sig)
self.assertEqual(got_sig, expected_sig)
test_func.__name__ = str('test_signature_' + stype + '_' + test_id)
setattr(TestSignature, test_func.__name__, test_func)
for test_spec in _TESTS:
make_tfunc(*test_spec)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
if not os.path.exists(fn):
compat_urlretrieve(url, fn)
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Ethereum protocol module.
//!
//! Contains all Ethereum network specific stuff, such as denominations and
//! consensus specifications.
/// Export the ethash module.
pub mod ethash;
/// Export the denominations module.
pub mod denominations;
pub use self::ethash::Ethash;
pub use self::denominations::*;
use super::spec::*;
/// Create a new Olympic chain spec.
///
/// The spec JSON is embedded into the binary at compile time via
/// `include_bytes!`; no filesystem access happens at runtime.
pub fn new_olympic() -> Spec {
	Spec::load(include_bytes!("../../res/ethereum/olympic.json"))
}
/// Create a new Frontier mainnet chain spec.
///
/// The spec JSON is embedded at compile time via `include_bytes!`.
pub fn new_frontier() -> Spec {
	Spec::load(include_bytes!("../../res/ethereum/frontier.json"))
}
/// Create a new Frontier chain spec as though it never changes to Homestead.
///
/// Intended for tests that need pre-Homestead consensus rules only; the
/// spec JSON is embedded at compile time via `include_bytes!`.
pub fn new_frontier_test() -> Spec {
	Spec::load(include_bytes!("../../res/ethereum/frontier_test.json"))
}
/// Create a new Homestead chain spec as though it never changed from Frontier.
///
/// Intended for tests that need Homestead rules from genesis; the spec
/// JSON is embedded at compile time via `include_bytes!`.
pub fn new_homestead_test() -> Spec {
	Spec::load(include_bytes!("../../res/ethereum/homestead_test.json"))
}
/// Create a new Frontier main net chain spec without genesis accounts.
///
/// Useful for tests that want mainnet consensus parameters but no
/// pre-funded state; embedded at compile time via `include_bytes!`.
pub fn new_mainnet_like() -> Spec {
	Spec::load(include_bytes!("../../res/ethereum/frontier_like_test.json"))
}
/// Create a new Morden chain spec.
///
/// Morden is the public test network spec bundled with this crate; the
/// JSON is embedded at compile time via `include_bytes!`.
pub fn new_morden() -> Spec {
	Spec::load(include_bytes!("../../res/ethereum/morden.json"))
}
#[cfg(test)]
mod tests {
use common::*;
use state::*;
use engine::*;
use super::*;
use tests::helpers::*;
#[test]
fn ensure_db_good() {
let spec = new_morden();
let engine = &spec.engine;
let genesis_header = spec.genesis_header();
let mut db_result = get_temp_journal_db();
let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut());
let s = State::from_existing(db, genesis_header.state_root.clone(), engine.account_start_nonce());
assert_eq!(s.balance(&address_from_hex("0000000000000000000000000000000000000001")), U256::from(1u64));
assert_eq!(s.balance(&address_from_hex("0000000000000000000000000000000000000002")), U256::from(1u64));
assert_eq!(s.balance(&address_from_hex("0000000000000000000000000000000000000003")), U256::from(1u64));
assert_eq!(s.balance(&address_from_hex("0000000000000000000000000000000000000004")), U256::from(1u64));
assert_eq!(s.balance(&address_from_hex("102e61f5d8f9bc71d0ad4a084df4e65e05ce0e1c")), U256::from(1u64) << 200);
assert_eq!(s.balance(&address_from_hex("0000000000000000000000000000000000000000")), U256::from(0u64));
}
#[test]
fn morden() {
let morden = new_morden();
assert_eq!(morden.state_root(), H256::from_str("f3f4696bbf3b3b07775128eb7a3763279a394e382130f27c21e70233e04946a9").unwrap());
let genesis = morden.genesis_block();
assert_eq!(BlockView::new(&genesis).header_view().sha3(), H256::from_str("0cd786a2425d16f152c658316c423e6ce1181e15c3295826d7c9904cba9ce303").unwrap());
let _ = morden.engine;<|fim▁hole|> fn frontier() {
let frontier = new_frontier();
assert_eq!(frontier.state_root(), H256::from_str("d7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544").unwrap());
let genesis = frontier.genesis_block();
assert_eq!(BlockView::new(&genesis).header_view().sha3(), H256::from_str("d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3").unwrap());
let _ = frontier.engine;
}
}<|fim▁end|>
|
}
#[test]
|
<|file_name|>PostgreServerHome.java<|end_file_name|><|fim▁begin|>/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");<|fim▁hole|> *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.postgresql;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.connection.LocalNativeClientLocation;
/**
* PostgreServerHome
*/
public class PostgreServerHome extends LocalNativeClientLocation {
private static final Log log = Log.getLog(PostgreServerHome.class);
private String name;
private String version;
private String branding;
private String dataDirectory;
protected PostgreServerHome(String id, String path, String version, String branding, String dataDirectory) {
super(id, path);
this.name = branding == null ? id : branding;
this.version = version;
this.branding = branding;
this.dataDirectory = dataDirectory;
}
@Override
public String getDisplayName() {
return name;
}
public String getProductName() {
return branding;
}
public String getProductVersion() {
return version;
}
public String getBranding() {
return branding;
}
public String getDataDirectory() {
return dataDirectory;
}
}<|fim▁end|>
|
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
|
<|file_name|>udp.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use fmt;
use io::{self, Error, ErrorKind};
use net::{ToSocketAddrs, SocketAddr};
use sys_common::net as net_imp;
use sys_common::{AsInner, FromInner, IntoInner};
use time::Duration;
/// A User Datagram Protocol socket.
///
/// This is an implementation of a bound UDP socket. This supports both IPv4 and
/// IPv6 addresses, and there is no corresponding notion of a server because UDP
/// is a datagram protocol.
///
/// # Examples
///
/// ```no_run
/// use std::net::UdpSocket;
///
/// # fn foo() -> std::io::Result<()> {
/// let mut socket = try!(UdpSocket::bind("127.0.0.1:34254"));
///
/// let mut buf = [0; 10];
/// let (amt, src) = try!(socket.recv_from(&mut buf));
///
/// // Send a reply to the socket we received data from
/// let buf = &mut buf[..amt];
/// buf.reverse();
/// try!(socket.send_to(buf, &src));
///
/// drop(socket); // close the socket
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub struct UdpSocket(net_imp::UdpSocket);
impl UdpSocket {
    /// Creates a UDP socket from the given address.
    ///
    /// The address type can be any implementor of `ToSocketAddr` trait. See
    /// its documentation for concrete examples.
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn bind<A: ToSocketAddrs>(addr: A) -> io::Result<UdpSocket> {
        // Attempts each resolved address in turn, returning the first
        // successful bind.
        super::each_addr(addr, net_imp::UdpSocket::bind).map(UdpSocket)
    }
    /// Receives data from the socket. On success, returns the number of bytes
    /// read and the address from whence the data came.
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn recv_from(&self, buf: &mut [u8]) -> io::Result<(usize, SocketAddr)> {
        self.0.recv_from(buf)
    }
    /// Sends data on the socket to the given address. On success, returns the
    /// number of bytes written.
    ///
    /// Address type can be any implementor of `ToSocketAddrs` trait. See its
    /// documentation for concrete examples.
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn send_to<A: ToSocketAddrs>(&self, buf: &[u8], addr: A)
                   -> io::Result<usize> {
        // Only the first resolved address is used; an empty resolution is
        // reported as `InvalidInput`.
        match try!(addr.to_socket_addrs()).next() {
            Some(addr) => self.0.send_to(buf, &addr),
            None => Err(Error::new(ErrorKind::InvalidInput,
                                   "no addresses to send data to")),
        }
    }
    /// Returns the socket address that this socket was created from.
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn local_addr(&self) -> io::Result<SocketAddr> {
        self.0.socket_addr()
    }
    /// Creates a new independently owned handle to the underlying socket.
    ///
    /// The returned `UdpSocket` is a reference to the same socket that this
    /// object references. Both handles will read and write the same port, and
    /// options set on one socket will be propagated to the other.
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn try_clone(&self) -> io::Result<UdpSocket> {
        self.0.duplicate().map(UdpSocket)
    }
    /// Sets the read timeout to the timeout specified.
    ///
    /// If the value specified is `None`, then `read` calls will block
    /// indefinitely. It is an error to pass the zero `Duration` to this
    /// method.
    #[unstable(feature = "socket_timeout", reason = "RFC 1047 - recently added",
               issue = "27773")]
    pub fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
        self.0.set_read_timeout(dur)
    }
    /// Sets the write timeout to the timeout specified.
    ///
    /// If the value specified is `None`, then `write` calls will block
    /// indefinitely. It is an error to pass the zero `Duration` to this
    /// method.
    #[unstable(feature = "socket_timeout", reason = "RFC 1047 - recently added",
               issue = "27773")]
    pub fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
        self.0.set_write_timeout(dur)
    }
    /// Returns the read timeout of this socket.
    ///
    /// If the timeout is `None`, then `read` calls will block indefinitely.
    #[unstable(feature = "socket_timeout", reason = "RFC 1047 - recently added",
               issue = "27773")]
    pub fn read_timeout(&self) -> io::Result<Option<Duration>> {
        self.0.read_timeout()
    }
    /// Returns the write timeout of this socket.
    ///
    /// If the timeout is `None`, then `write` calls will block indefinitely.
    #[unstable(feature = "socket_timeout", reason = "RFC 1047 - recently added",
               issue = "27773")]
    pub fn write_timeout(&self) -> io::Result<Option<Duration>> {
        self.0.write_timeout()
    }
}
// Expose a borrowed view of the platform-specific socket to sibling modules.
impl AsInner<net_imp::UdpSocket> for UdpSocket {
    fn as_inner(&self) -> &net_imp::UdpSocket { &self.0 }
}
// Wrap a platform-specific socket into the public `UdpSocket` type.
impl FromInner<net_imp::UdpSocket> for UdpSocket {
    fn from_inner(inner: net_imp::UdpSocket) -> UdpSocket { UdpSocket(inner) }
}
// Unwrap the public `UdpSocket`, transferring ownership of the inner socket.
impl IntoInner<net_imp::UdpSocket> for UdpSocket {
    fn into_inner(self) -> net_imp::UdpSocket { self.0 }
}
impl fmt::Debug for UdpSocket {
    // Delegate to the inner implementation, which renders the bound address
    // and the raw handle (see the `debug` test for the exact format).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}
#[cfg(test)]
mod tests {
use prelude::v1::*;
use io::ErrorKind;
use net::*;
use net::test::{next_test_ip4, next_test_ip6};
use sync::mpsc::channel;
use sys_common::AsInner;
use time::Duration;
use thread;
    // Invokes `f` once with a pair of fresh IPv4 test addresses and once with
    // a pair of fresh IPv6 test addresses, so each test covers both families.
    fn each_ip(f: &mut FnMut(SocketAddr, SocketAddr)) {
        f(next_test_ip4(), next_test_ip4());
        f(next_test_ip6(), next_test_ip6());
    }
macro_rules! t {
($e:expr) => {
match $e {
Ok(t) => t,
Err(e) => panic!("received error for `{}`: {}", stringify!($e), e),
}
}
}
// FIXME #11530 this fails on android because tests are run as root
#[cfg_attr(any(windows, target_os = "android"), ignore)]
#[test]
fn bind_error() {
let addr = SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), 1);
match UdpSocket::bind(&addr) {
Ok(..) => panic!(),
Err(e) => assert_eq!(e.kind(), ErrorKind::PermissionDenied),
}
}
#[test]
fn socket_smoke_test_ip4() {
each_ip(&mut |server_ip, client_ip| {
let (tx1, rx1) = channel();
let (tx2, rx2) = channel();
let _t = thread::spawn(move|| {
let client = t!(UdpSocket::bind(&client_ip));
rx1.recv().unwrap();
t!(client.send_to(&[99], &server_ip));
tx2.send(()).unwrap();
});
let server = t!(UdpSocket::bind(&server_ip));
tx1.send(()).unwrap();
let mut buf = [0];
let (nread, src) = t!(server.recv_from(&mut buf));
assert_eq!(nread, 1);
assert_eq!(buf[0], 99);
assert_eq!(src, client_ip);
rx2.recv().unwrap();
})
}
#[test]
fn socket_name_ip4() {
each_ip(&mut |addr, _| {
let server = t!(UdpSocket::bind(&addr));
assert_eq!(addr, t!(server.local_addr()));
})
}
#[test]
fn udp_clone_smoke() {
each_ip(&mut |addr1, addr2| {
let sock1 = t!(UdpSocket::bind(&addr1));
let sock2 = t!(UdpSocket::bind(&addr2));
let _t = thread::spawn(move|| {
let mut buf = [0, 0];
assert_eq!(sock2.recv_from(&mut buf).unwrap(), (1, addr1));
assert_eq!(buf[0], 1);
t!(sock2.send_to(&[2], &addr1));
});
let sock3 = t!(sock1.try_clone());
let (tx1, rx1) = channel();
let (tx2, rx2) = channel();
let _t = thread::spawn(move|| {
rx1.recv().unwrap();
t!(sock3.send_to(&[1], &addr2));
tx2.send(()).unwrap();
});
tx1.send(()).unwrap();
let mut buf = [0, 0];
assert_eq!(sock1.recv_from(&mut buf).unwrap(), (1, addr2));
rx2.recv().unwrap();
})
}
#[test]
fn udp_clone_two_read() {
each_ip(&mut |addr1, addr2| {
let sock1 = t!(UdpSocket::bind(&addr1));
let sock2 = t!(UdpSocket::bind(&addr2));
let (tx1, rx) = channel();
let tx2 = tx1.clone();
let _t = thread::spawn(move|| {
t!(sock2.send_to(&[1], &addr1));
rx.recv().unwrap();
t!(sock2.send_to(&[2], &addr1));
rx.recv().unwrap();
});
let sock3 = t!(sock1.try_clone());
let (done, rx) = channel();
let _t = thread::spawn(move|| {
let mut buf = [0, 0];
t!(sock3.recv_from(&mut buf));
tx2.send(()).unwrap();
done.send(()).unwrap();
});
let mut buf = [0, 0];
t!(sock1.recv_from(&mut buf));
tx1.send(()).unwrap();
rx.recv().unwrap();
})
}
#[test]
fn udp_clone_two_write() {
each_ip(&mut |addr1, addr2| {
let sock1 = t!(UdpSocket::bind(&addr1));
let sock2 = t!(UdpSocket::bind(&addr2));
let (tx, rx) = channel();
let (serv_tx, serv_rx) = channel();
let _t = thread::spawn(move|| {
let mut buf = [0, 1];
rx.recv().unwrap();
t!(sock2.recv_from(&mut buf));
serv_tx.send(()).unwrap();
});
let sock3 = t!(sock1.try_clone());<|fim▁hole|> match sock3.send_to(&[1], &addr2) {
Ok(..) => { let _ = tx2.send(()); }
Err(..) => {}
}
done.send(()).unwrap();
});
match sock1.send_to(&[2], &addr2) {
Ok(..) => { let _ = tx.send(()); }
Err(..) => {}
}
drop(tx);
rx.recv().unwrap();
serv_rx.recv().unwrap();
})
}
#[test]
fn debug() {
let name = if cfg!(windows) {"socket"} else {"fd"};
let socket_addr = next_test_ip4();
let udpsock = t!(UdpSocket::bind(&socket_addr));
let udpsock_inner = udpsock.0.socket().as_inner();
let compare = format!("UdpSocket {{ addr: {:?}, {}: {:?} }}",
socket_addr, name, udpsock_inner);
assert_eq!(format!("{:?}", udpsock), compare);
}
// FIXME: re-enabled bitrig/openbsd/netbsd tests once their socket timeout code
// no longer has rounding errors.
#[cfg_attr(any(target_os = "bitrig", target_os = "netbsd", target_os = "openbsd"), ignore)]
#[test]
fn timeouts() {
let addr = next_test_ip4();
let stream = t!(UdpSocket::bind(&addr));
let dur = Duration::new(15410, 0);
assert_eq!(None, t!(stream.read_timeout()));
t!(stream.set_read_timeout(Some(dur)));
assert_eq!(Some(dur), t!(stream.read_timeout()));
assert_eq!(None, t!(stream.write_timeout()));
t!(stream.set_write_timeout(Some(dur)));
assert_eq!(Some(dur), t!(stream.write_timeout()));
t!(stream.set_read_timeout(None));
assert_eq!(None, t!(stream.read_timeout()));
t!(stream.set_write_timeout(None));
assert_eq!(None, t!(stream.write_timeout()));
}
#[test]
fn test_read_timeout() {
let addr = next_test_ip4();
let mut stream = t!(UdpSocket::bind(&addr));
t!(stream.set_read_timeout(Some(Duration::from_millis(1000))));
let mut buf = [0; 10];
let wait = Duration::span(|| {
let kind = stream.recv_from(&mut buf).err().expect("expected error").kind();
assert!(kind == ErrorKind::WouldBlock || kind == ErrorKind::TimedOut);
});
assert!(wait > Duration::from_millis(400));
assert!(wait < Duration::from_millis(1600));
}
#[test]
fn test_read_with_timeout() {
let addr = next_test_ip4();
let mut stream = t!(UdpSocket::bind(&addr));
t!(stream.set_read_timeout(Some(Duration::from_millis(1000))));
t!(stream.send_to(b"hello world", &addr));
let mut buf = [0; 11];
t!(stream.recv_from(&mut buf));
assert_eq!(b"hello world", &buf[..]);
let wait = Duration::span(|| {
let kind = stream.recv_from(&mut buf).err().expect("expected error").kind();
assert!(kind == ErrorKind::WouldBlock || kind == ErrorKind::TimedOut);
});
assert!(wait > Duration::from_millis(400));
assert!(wait < Duration::from_millis(1600));
}
}<|fim▁end|>
|
let (done, rx) = channel();
let tx2 = tx.clone();
let _t = thread::spawn(move|| {
|
<|file_name|>schema.go<|end_file_name|><|fim▁begin|>// The MIT License (MIT)
//
// Copyright © 2017 CyberAgent, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"encoding/json"
"os"
"strconv"
"github.com/olekukonko/tablewriter"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/cyberagent/typebook/client/go/model"
)
// schemaCmd is the parent `schema` sub-command; concrete schema operations
// are registered beneath it. Before any child command runs, the persistent
// --subject flag is bound into viper so subcommands can read it uniformly.
var schemaCmd = &cobra.Command{
	Use: "schema",
	Short: "manage and look up schemas under a subject",
	Long: "Manage and look up schemas under a subject.",
	PersistentPreRun: func(cmd *cobra.Command, args []string) {
		viper.BindPFlag("subject", cmd.Flags().Lookup("subject"))
	},
}
// init attaches the schema command to the root command and declares the
// persistent --subject flag shared by every schema subcommand.
func init() {
	RootCmd.AddCommand(schemaCmd)
	schemaCmd.PersistentFlags().String("subject", "", "name of subject")
}
// showSchemaMetas renders the metadata (id, subject, version) of the given
// schemas as an ASCII table on stdout; the definition body is not printed.
func showSchemaMetas(schemas ...model.Schema) {
	table := tablewriter.NewWriter(os.Stdout)
	table.SetHeader([]string{"ID", "SUBJECT", "VERSION"})
	for _, schema := range schemas {
		table.Append([]string{strconv.FormatInt(schema.Id, 10), schema.Subject, schema.Version.String()})
	}
	table.Render()
}
// showSchemaVersions renders the given semantic versions as a single-column
// ASCII table on stdout.
func showSchemaVersions(versions ...model.SemVer) {
	table := tablewriter.NewWriter(os.Stdout)
	table.SetHeader([]string{"VERSION"})
	for _, version := range versions {
		table.Append([]string{version.String()})
	}
	table.Render()
}
func getPrettySchemaDef(schema *model.Schema) (string, error) {
intermediate := make(map[string]interface{})
if err := json.Unmarshal([]byte(schema.Definition), &intermediate); err != nil {
return "", err
}
js, err := prettyJSON(intermediate, 2)
if err != nil {
return "", err
}
return string(js), nil<|fim▁hole|>}<|fim▁end|>
| |
<|file_name|>DBcon.py<|end_file_name|><|fim▁begin|># coding:utf-8
'''
Created on 19/1/2015
@author: PC30
'''
from flaskext.mysql import MySQL#importar mysql
from flask import Flask#importar flask
class DBcon():
'''
classdocs
'''
pass<|fim▁hole|> '''
Constructor
'''
pass
    def conexion(self):
        """Build a Flask app configured for the 'sisventas' MySQL database
        and return the initialized MySQL connector.

        NOTE(review): a new Flask app is created on every call and the
        credentials are hard-coded -- consider moving them to configuration.
        """
        mysql = MySQL()  # MySQL connector instance
        app = Flask(__name__)  # Flask application used only to carry config
        app.config['MYSQL_DATABASE_USER'] = 'python'  # database user name
        app.config['MYSQL_DATABASE_PASSWORD'] = '123456'  # user password
        app.config['MYSQL_DATABASE_DB'] = 'sisventas'  # database name
        app.config['MYSQL_DATABASE_HOST'] = 'localhost'  # database server host
        mysql.init_app(app)
        return mysql
|
def __init__(self):
|
<|file_name|>NextState.hpp<|end_file_name|><|fim▁begin|>// -*- mode: c++; c-basic-style: "bsd"; c-basic-offset: 4; -*-
/*
* kdm/event/NextState.hpp
* Copyright (C) Cátedra SAES-UMU 2010 <[email protected]>
* Copyright (C) INCHRON GmbH 2016 <[email protected]>
*
* EMF4CPP is free software: you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* EMF4CPP is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef KDM_EVENT_NEXTSTATE_HPP
#define KDM_EVENT_NEXTSTATE_HPP
#include <ecorecpp/mapping_forward.hpp>
#include <kdm/dllKdm.hpp>
#include <kdm/event_forward.hpp>
#include <kdm/kdm_forward.hpp>
#include <kdm/event/AbstractEventRelationship.hpp>
#include "EventPackage.hpp"
/*PROTECTED REGION ID(NextState_pre) START*/
// Please, enable the protected region if you add manually written code.
// To do this, add the keyword ENABLED before START.
/*PROTECTED REGION END*/
namespace kdm
{
namespace event
{
class EXPORT_KDM_DLL NextState : public virtual ::kdm::event::AbstractEventRelationship
{
public:
NextState();
virtual ~NextState();
virtual void _initialize();
// Operations
// Attributes
// References
virtual ::kdm::event::State_ptr getTo () const;
virtual void setTo (::kdm::event::State_ptr _to);
virtual ::kdm::event::Transition_ptr getFrom () const;
virtual void setFrom (::kdm::event::Transition_ptr _from);
<|fim▁hole|>
/*PROTECTED REGION ID(NextState) START*/
// Please, enable the protected region if you add manually written code.
// To do this, add the keyword ENABLED before START.
/*PROTECTED REGION END*/
// EObjectImpl
virtual ::ecore::EJavaObject eGet ( ::ecore::EInt _featureID, ::ecore::EBoolean _resolve);
virtual void eSet ( ::ecore::EInt _featureID, ::ecore::EJavaObject const& _newValue);
virtual ::ecore::EBoolean eIsSet ( ::ecore::EInt _featureID);
virtual void eUnset ( ::ecore::EInt _featureID);
virtual ::ecore::EClass_ptr _eClass ();
virtual void _inverseAdd ( ::ecore::EInt _featureID, ::ecore::EJavaObject const& _newValue);
virtual void _inverseRemove ( ::ecore::EInt _featureID, ::ecore::EJavaObject const& _oldValue);
/*PROTECTED REGION ID(NextStateImpl) START*/
// Please, enable the protected region if you add manually written code.
// To do this, add the keyword ENABLED before START.
/*PROTECTED REGION END*/
protected:
NextState_ptr _this()
{ return NextState_ptr(this);}
// Attributes
// References
::kdm::event::State_ptr m_to;
::kdm::event::Transition_ptr m_from;
};
}
// event
}// kdm
#endif // KDM_EVENT_NEXTSTATE_HPP<|fim▁end|>
|
/* This is the same value as getClassifierId() returns, but as a static
* value it can be used in template expansions. */
static const int classifierId = EventPackage::NEXTSTATE;
|
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from os.path import abspath, dirname, join
#
# Bokeh documentation build configuration file, created by
# sphinx-quickstart on Sat Oct 12 23:43:03 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.7'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.ifconfig',
'sphinx.ext.napoleon',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'bokeh.sphinxext.bokeh_autodoc',
'bokeh.sphinxext.bokeh_color',
'bokeh.sphinxext.bokeh_enum',
'bokeh.sphinxext.bokeh_gallery',
'bokeh.sphinxext.bokeh_github',
'bokeh.sphinxext.bokeh_jinja',
'bokeh.sphinxext.bokeh_model',
'bokeh.sphinxext.bokeh_options',
'bokeh.sphinxext.bokeh_palette',
'bokeh.sphinxext.bokeh_palette_group',
'bokeh.sphinxext.bokeh_plot',
'bokeh.sphinxext.bokeh_prop',
'bokeh.sphinxext.bokeh_releases',
'bokeh.sphinxext.bokeh_sitemap',
'bokeh.sphinxext.collapsible_code_block',
]
napoleon_include_init_with_doc = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Bokeh'
copyright = '© Copyright 2015-2018, Anaconda and Bokeh Contributors.'
# Get the standard computed Bokeh version string to use for |version|
# and |release|
from bokeh import __version__
# The short X.Y version.
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# Check for version override (e.g. when re-deploying a previously released
# docs, or when pushing test docs that do not have a corresponding BokehJS
# available on CDN)
from bokeh.settings import settings
if settings.docs_version():
version = release = settings.docs_version()
# get all the versions that will appear in the version dropdown
f = open(join(dirname(abspath(__file__)), "all_versions.txt"))
all_versions = [x.strip() for x in reversed(f.readlines())]
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
#
# NOTE: in these docs all .py script are assumed to be bokeh plot scripts!
# with bokeh_plot_pyfile_include_dirs set desired folder to look for .py files
bokeh_plot_pyfile_include_dirs = ['docs']
# Whether to allow builds to succeed if a Google API key is not defined and plots
# containing "GOOGLE_API_KEY" are processed
bokeh_missing_google_api_key_ok = False
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# Sort members by type
autodoc_member_order = 'groupwise'
# patterns to exclude
exclude_patterns = ['docs/releases/*']
# This would more properly be done with rst_epilog but something about
# the combination of this with the bokeh-gallery directive breaks the build
rst_prolog = """
.. |Color| replace:: :py:class:`~bokeh.core.properties.Color`
.. |DataSpec| replace:: :py:class:`~bokeh.core.properties.DataSpec`
.. |Document| replace:: :py:class:`~bokeh.document.Document`
.. |HasProps| replace:: :py:class:`~bokeh.core.has_props.HasProps`
.. |Model| replace:: :py:class:`~bokeh.model.Model`
.. |Property| replace:: :py:class:`~bokeh.core.property.bases.Property`
.. |PropertyDescriptor| replace:: :py:class:`~bokeh.core.property.descriptor.PropertyDescriptor`
.. |PropertyContainer| replace:: :py:class:`~bokeh.core.property.wrappers.PropertyContainer`
.. |UnitsSpec| replace:: :py:class:`~bokeh.core.properties.UnitsSpec`
.. |field| replace:: :py:func:`~bokeh.core.properties.field`
.. |value| replace:: :py:func:`~bokeh.core.properties.value`
"""
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'bokeh_theme'
html_theme_path = ['.']
html_context = {
'SITEMAP_BASE_URL': 'https://bokeh.pydata.org/en/', # Trailing slash is needed
'DESCRIPTION': 'Bokeh visualization library, documentation site.',
'AUTHOR': 'Bokeh contributors',
'VERSION': version,
'NAV': (
('Github', '//github.com/bokeh/bokeh'),
),
'ABOUT': (
('Vision and Work', 'vision'),
('Team', 'team'),
('Citation', 'citation'),
('Contact', 'contact'),
),
'SOCIAL': (
('Contribute', 'contribute'),
('Mailing list', '//groups.google.com/a/anaconda.com/forum/#!forum/bokeh'),
('Github', '//github.com/bokeh/bokeh'),
('Twitter', '//twitter.com/BokehPlots'),
),
'NAV_DOCS': (
('Installation', 'installation'),
('User Guide', 'user_guide'),
('Gallery', 'gallery'),
('Tutorial', 'https://mybinder.org/v2/gh/bokeh/bokeh-notebooks/master?filepath=tutorial%2F00%20-%20Introduction%20and%20Setup.ipynb'),
('Reference', 'reference'),
('Releases', 'releases'),
('Developer Guide', 'dev_guide'),
),
'ALL_VERSIONS': all_versions,
}
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = True
# Output file base name for HTML help builder.
htmlhelp_basename = 'Bokehdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Bokeh.tex', u'Bokeh Documentation', u'Anaconda', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'bokeh', u'Bokeh Documentation',
[u'Anaconda'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Bokeh', u'Bokeh Documentation', u'Anaconda', 'Bokeh', 'Interactive Web Plotting for Python', 'Graphics'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# intersphinx settings
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),<|fim▁hole|>}<|fim▁end|>
|
'pandas': ('http://pandas.pydata.org/pandas-docs/stable/', None),
'numpy': ('http://docs.scipy.org/doc/numpy/', None)
|
<|file_name|>coupon.go<|end_file_name|><|fim▁begin|>/**
* Copyright 2014 @ to2.net.
* name :
* author : jarryliu
* date : 2013-12-03 21:04
* description :
* history :
*/
package promotion
import (
"go2o/core/domain/interface/member"
)
// 优惠券促销
type ICouponPromotion interface {
GetDomainId() int32
// 获取促销内容
GetDetailsValue() ValueCoupon
// 设置促销内容
SetDetailsValue(*ValueCoupon) error
// 获取绑定
GetBinds() []ValueCouponBind
// 获取占用
GetTakes() []ValueCouponTake
// 保存
Save() (id int32, err error)
// 获取优惠券描述
GetDescribe() string
<|fim▁hole|> // 是否可用,传递会员及订单金额
// error返回不适用的详细信息
CanUse(member.IMember, float32) (bool, error)
// 是否允许占用
CanTake() bool
// 获取占用
GetTake(memberId int64) (*ValueCouponTake, error)
//占用
Take(memberId int64) error
// 应用到订单
ApplyTake(couponTakeId int32) error
// 绑定
Bind(memberId int64) error
//获取绑定
GetBind(memberId int64) (*ValueCouponBind, error)
//绑定
Binds(memberIds []string) error
//使用优惠券
UseCoupon(couponBindId int32) error
}
type (
ValueCoupon struct {
// 优惠券编号
Id int32 `db:"id" pk:"yes"`
//优惠码
Code string `db:"code"`
// 优惠码可用数量
Amount int `db:"amount"`
// 优惠码数量
TotalAmount int `db:"total_amount"`
//优惠金额
Fee int `db:"fee"`
//赠送积分
Integral int `db:"integral"`
//订单折扣(不打折为100)
Discount int `db:"discount"`
//等级限制
MinLevel int `db:"min_level"`
//订单金额限制
MinFee int `db:"min_fee"`
BeginTime int64 `db:"begin_time"`
OverTime int64 `db:"over_time"`
//是否需要绑定。反之可以直接使用
NeedBind int `db:"need_bind"`
CreateTime int64 `db:"create_time"`
}
//优惠券绑定
ValueCouponBind struct {
Id int32 `db:"id" auto:"yes" pk:"yes"`
//会员编号
MemberId int64 `db:"member_id"`
//优惠券编号
CouponId int32 `db:"coupon_id"`
//绑定时间
BindTime int64 `db:"bind_time"`
//是否使用
IsUsed int `db:"is_used"`
//使用时间
UseTime int64 `db:"use_time"`
}
ValueCouponTake struct {
Id int32 `db:"id" auto:"yes" pk:"yes"`
//会员编号
MemberId int64 `db:"member_id"`
//优惠券编号
CouponId int32 `db:"coupon_id"`
//占用时间
TakeTime int64 `db:"take_time"`
//释放时间,超过该时间,优惠券释放
ExtraTime int64 `db:"extra_time"`
//是否应用到订单
IsApply int `db:"is_apply"`
//更新时间
ApplyTime int64 `db:"apply_time"`
}
)<|fim▁end|>
|
// 获取优惠的金额
GetCouponFee(orderFee float32) float32
|
<|file_name|>test_generate_course_overview.py<|end_file_name|><|fim▁begin|># pylint: disable=missing-docstring
from django.core.management.base import CommandError
from mock import patch
from nose.plugins.attrib import attr
from openedx.core.djangoapps.content.course_overviews.management.commands import generate_course_overview
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@attr('shard_2')
class TestGenerateCourseOverview(ModuleStoreTestCase):
"""
Tests course overview management command.
"""
def setUp(self):<|fim▁hole|> """
super(TestGenerateCourseOverview, self).setUp()
self.course_key_1 = CourseFactory.create().id
self.course_key_2 = CourseFactory.create().id
self.command = generate_course_overview.Command()
def _assert_courses_not_in_overview(self, *courses):
"""
Assert that courses doesn't exist in the course overviews.
"""
course_keys = CourseOverview.get_all_course_keys()
for expected_course_key in courses:
self.assertNotIn(expected_course_key, course_keys)
def _assert_courses_in_overview(self, *courses):
"""
Assert courses exists in course overviews.
"""
course_keys = CourseOverview.get_all_course_keys()
for expected_course_key in courses:
self.assertIn(expected_course_key, course_keys)
def test_generate_all(self):
"""
Test that all courses in the modulestore are loaded into course overviews.
"""
# ensure that the newly created courses aren't in course overviews
self._assert_courses_not_in_overview(self.course_key_1, self.course_key_2)
self.command.handle(all=True)
# CourseOverview will be populated with all courses in the modulestore
self._assert_courses_in_overview(self.course_key_1, self.course_key_2)
def test_generate_one(self):
"""
Test that a specified course is loaded into course overviews.
"""
self._assert_courses_not_in_overview(self.course_key_1, self.course_key_2)
self.command.handle(unicode(self.course_key_1), all=False)
self._assert_courses_in_overview(self.course_key_1)
self._assert_courses_not_in_overview(self.course_key_2)
def test_invalid_key(self):
"""
Test that CommandError is raised for invalid key.
"""
with self.assertRaises(CommandError):
self.command.handle('not/found', all=False)
@patch('openedx.core.djangoapps.content.course_overviews.models.log')
def test_not_found_key(self, mock_log):
"""
Test keys not found are logged.
"""
self.command.handle('fake/course/id', all=False)
self.assertTrue(mock_log.exception.called)
def test_no_params(self):
"""
Test exception raised when no parameters are specified.
"""
with self.assertRaises(CommandError):
self.command.handle(all=False)<|fim▁end|>
|
"""
Create courses in modulestore.
|
<|file_name|>test_bilinear.py<|end_file_name|><|fim▁begin|>import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
def _uniform(*shape):
return numpy.random.uniform(-1, 1, shape).astype(numpy.float32)
@testing.parameterize(*testing.product({
'in_shapes': [((2,), (4,)), ((2, 1), (4, 2))],
'out_size': [3],
'batch_size': [2]
}))
class TestBilinearFunction(unittest.TestCase):
def setUp(self):
e1_shape = (self.batch_size,) + self.in_shapes[0]
e2_shape = (self.batch_size,) + self.in_shapes[1]
e1_size = numpy.prod(self.in_shapes[0])
e2_size = numpy.prod(self.in_shapes[1])
self.e1 = _uniform(*e1_shape)
self.e2 = _uniform(*e2_shape)
self.W = _uniform(e1_size, e2_size, self.out_size)
self.V1 = _uniform(e1_size, self.out_size)
self.V2 = _uniform(e2_size, self.out_size)
self.b = _uniform(self.out_size)
self.gy = _uniform(self.batch_size, self.out_size)
self.gge1 = _uniform(*self.e1.shape)
self.gge2 = _uniform(*self.e2.shape)
self.ggW = _uniform(*self.W.shape)
self.ggV1 = _uniform(*self.V1.shape)
self.ggV2 = _uniform(*self.V2.shape)
self.ggb = _uniform(*self.b.shape)
self.check_backward_options = {
'atol': 1e-5, 'rtol': 1e-4, 'dtype': numpy.float64}
self.check_double_backward_options = {
'atol': 1e-4, 'rtol': 1e-3, 'dtype': numpy.float64}
def check_forward(self, e1_data, e2_data, W_data, V1_data, V2_data,
b_data):
e1 = chainer.Variable(e1_data)
e2 = chainer.Variable(e2_data)
W = chainer.Variable(W_data)
e1_data = e1_data.reshape(e1_data.shape[0], -1)
e2_data = e2_data.reshape(e2_data.shape[0], -1)
xp = cuda.get_array_module(e1)
y_expect = xp.einsum('ij,ik,jkl->il', e1_data, e2_data, W_data)
flags = V1_data is None, V2_data is None, b_data is None
if any(flags):
if not all(flags):
raise ValueError(
'Test either all or none of the optional parameters.')
y = functions.bilinear(e1, e2, W)
else:
V1 = chainer.Variable(V1_data)
V2 = chainer.Variable(V2_data)
b = chainer.Variable(b_data)
y = functions.bilinear(e1, e2, W, V1, V2, b)
y_expect = xp.einsum('ij,ik,jkl->il', e1_data, e2_data, W_data)
y_expect += e1_data.dot(V1_data)
y_expect += e2_data.dot(V2_data)
y_expect += b_data
testing.assert_allclose(y_expect, cuda.to_cpu(y.data))
assert y.data.dtype == e1_data.dtype
def test_forward_cpu(self):
self.check_forward(self.e1, self.e2, self.W, self.V1, self.V2, self.b)
@attr.gpu
def test_forward_gpu(self):
self.check_forward(
cuda.to_gpu(self.e1), cuda.to_gpu(self.e2), cuda.to_gpu(self.W),
cuda.to_gpu(self.V1), cuda.to_gpu(self.V2), cuda.to_gpu(self.b))
def test_partial_backward_cpu(self):
gradient_check.check_backward(
functions.bilinear, (self.e1, self.e2, self.W), self.gy,
**self.check_backward_options)
@attr.gpu
def test_partial_backward_gpu(self):
gradient_check.check_backward(
functions.bilinear,
(cuda.to_gpu(self.e1), cuda.to_gpu(self.e2), cuda.to_gpu(self.W)),
cuda.to_gpu(self.gy), **self.check_backward_options)
def test_full_backward_cpu(self):
gradient_check.check_backward(
functions.bilinear,
(self.e1, self.e2, self.W, self.V1, self.V2, self.b), self.gy,
**self.check_backward_options)
@attr.gpu
def test_full_backward_gpu(self):
gradient_check.check_backward(
functions.bilinear,
(cuda.to_gpu(self.e1), cuda.to_gpu(self.e2), cuda.to_gpu(self.W),
cuda.to_gpu(self.V1), cuda.to_gpu(self.V2), cuda.to_gpu(self.b)),
cuda.to_gpu(self.gy), **self.check_backward_options)
def test_partial_double_backward_cpu(self):
gradient_check.check_double_backward(
functions.bilinear, (self.e1, self.e2, self.W), self.gy,
(self.gge1, self.gge2, self.ggW), **self.check_backward_options)
@attr.gpu
def test_partial_double_backward_gpu(self):
gradient_check.check_double_backward(
functions.bilinear,
(cuda.to_gpu(self.e1), cuda.to_gpu(self.e2), cuda.to_gpu(self.W)),
cuda.to_gpu(self.gy),
(cuda.to_gpu(self.gge1), cuda.to_gpu(self.gge2),
cuda.to_gpu(self.ggW)), **self.check_backward_options)
def test_full_double_backward_cpu(self):
def f(*inputs):
y = functions.bilinear(*inputs)
return y * y
gradient_check.check_double_backward(
f, (self.e1, self.e2, self.W, self.V1, self.V2, self.b),
self.gy,
(self.gge1, self.gge2, self.ggW, self.ggV1, self.ggV2, self.ggb),
**self.check_double_backward_options)
@attr.gpu
def test_full_double_backward_gpu(self):
def f(*inputs):
y = functions.bilinear(*inputs)
return y * y
gradient_check.check_double_backward(
f,
(cuda.to_gpu(self.e1), cuda.to_gpu(self.e2), cuda.to_gpu(self.W),
cuda.to_gpu(self.V1), cuda.to_gpu(self.V2), cuda.to_gpu(self.b)),
cuda.to_gpu(self.gy),
(cuda.to_gpu(self.gge1), cuda.to_gpu(self.gge2),
cuda.to_gpu(self.ggW), cuda.to_gpu(self.V1), cuda.to_gpu(self.V2),
cuda.to_gpu(self.ggb)), **self.check_double_backward_options)
@attr.slow
class TestBilinearFunctionLarge(unittest.TestCase):
def setUp(self):
self.e1 = _uniform(256, 256)
self.e2 = _uniform(256, 256)
self.w = _uniform(256, 256, 256)
self.v1 = _uniform(256, 256)
self.v2 = _uniform(256, 256)
self.b = _uniform(256)
def test_cpu(self):<|fim▁hole|> def test_gpu(self):
chainer.functions.bilinear(*map(cuda.to_gpu, (
self.e1, self.e2, self.w, self.v1, self.v2, self.b)))
class TestBilinearFunctionInvalidArgument(unittest.TestCase):
def setUp(self):
e1 = _uniform(3, 2)
e2 = _uniform(3, 4)
W = _uniform(2, 4, 5)
V1 = _uniform(2, 5)
self.e1 = chainer.Variable(e1)
self.e2 = chainer.Variable(e2)
self.W = chainer.Variable(W)
self.V1 = chainer.Variable(V1)
def test_invalid_full_partial_ambiguous(self):
with self.assertRaises(ValueError):
functions.bilinear(self.e1, self.e2, self.W, self.V1)
testing.run_module(__name__, __file__)<|fim▁end|>
|
chainer.functions.bilinear(
self.e1, self.e2, self.w, self.v1, self.v2, self.b)
@attr.gpu
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .tables import (
BatchCreateRowsRequest,<|fim▁hole|> BatchCreateRowsResponse,
BatchDeleteRowsRequest,
BatchUpdateRowsRequest,
BatchUpdateRowsResponse,
ColumnDescription,
CreateRowRequest,
DeleteRowRequest,
GetRowRequest,
GetTableRequest,
GetWorkspaceRequest,
LabeledItem,
ListRowsRequest,
ListRowsResponse,
ListTablesRequest,
ListTablesResponse,
ListWorkspacesRequest,
ListWorkspacesResponse,
LookupDetails,
RelationshipDetails,
Row,
Table,
UpdateRowRequest,
Workspace,
View,
)
__all__ = (
"BatchCreateRowsRequest",
"BatchCreateRowsResponse",
"BatchDeleteRowsRequest",
"BatchUpdateRowsRequest",
"BatchUpdateRowsResponse",
"ColumnDescription",
"CreateRowRequest",
"DeleteRowRequest",
"GetRowRequest",
"GetTableRequest",
"GetWorkspaceRequest",
"LabeledItem",
"ListRowsRequest",
"ListRowsResponse",
"ListTablesRequest",
"ListTablesResponse",
"ListWorkspacesRequest",
"ListWorkspacesResponse",
"LookupDetails",
"RelationshipDetails",
"Row",
"Table",
"UpdateRowRequest",
"Workspace",
"View",
)<|fim▁end|>
| |
<|file_name|>testLoadCourse.js<|end_file_name|><|fim▁begin|>var ERR = require('async-stacktrace');
var assert = require('assert');
var fs = require('fs');<|fim▁hole|>
var courseDB = require('../lib/course-db');
var logger = require('./dummyLogger');
describe('courseDB.loadFullCourse() on exampleCourse', function() {
this.timeout(20000);
var courseDir = 'exampleCourse';
var course;
before('load course from disk', function(callback) {
courseDB.loadFullCourse(courseDir, logger, function(err, c) {
if (ERR(err, callback)) return;
course = c;
callback(null);
});
});
describe('the in-memory "course" object', function() {
it('should contain "courseInfo"', function() {
assert.ok(course.courseInfo);
});
it('should contain "questionDB"', function() {
assert.ok(course.questionDB);
});
it('should contain "courseInstanceDB"', function() {
assert.ok(course.courseInstanceDB);
});
});
});
describe('courseDB.loadFullCourse() on brokenCourse', function() {
var courseDir = 'tests/testLoadCourse/brokenCourse';
var assessmentFilename = `${courseDir}/courseInstances/Fa18/assessments/quiz1/infoAssessment.json`;
var questionFilename = `${courseDir}/questions/basicV3/info.json`;
beforeEach('write correct infoAssessment and question', function(callback) {
var assessmentJson = {
'uuid': 'bee70f4d-4220-47f1-b4ed-59c88ce08657',
'type': 'Exam',
'number': '1',
'title': 'Test quiz 1',
'set': 'Quiz',
'allowAccess': [
{ 'startDate': '2018-01-01T00:00:00',
'endDate': '2019-01-01T00:00:00'},
],
};
var questionJson = {
'uuid': 'ba0b8e5b-6348-43f8-b483-083e0bea6332',
'title': 'Basic V3 question',
'topic': 'basic',
'type': 'v3',
};
fs.writeFile(assessmentFilename, JSON.stringify(assessmentJson), function(err) {
if (ERR(err, callback)) return;
fs.writeFile(questionFilename, JSON.stringify(questionJson), function(err) {
if (ERR(err, callback)) return;
callback(null);
});
});
});
after('removing test files', function(callback) {
fs.unlink(assessmentFilename, function(err) {
if (ERR(err, callback)) return;
fs.unlink(questionFilename, function(err) {
if (ERR(err, callback)) return;
callback(null);
});
});
});
describe('trying to load broken course pieces', function() {
it('assessment: invalid set should fail', function(callback) {
var assessmentJson = {
'uuid': 'bee70f4d-4220-47f1-b4ed-59c88ce08657',
'type': 'Exam',
'number': '1',
'title': 'Test quiz 1',
'set': 'NotARealSet',
'allowAccess': [
{ 'startDate': '2018-01-01T00:00:00',
'endDate': '2019-01-01T00:00:00'},
],
};
var filename = 'courseInstances/Fa18/assessments/quiz1/infoAssessment.json';
loadHelper(courseDir, filename, assessmentJson, /invalid "set":/, callback);
});
it('assessment: access rule: invalid startDate should fail', function(callback) {
var assessmentJson = {
'uuid': 'bee70f4d-4220-47f1-b4ed-59c88ce08657',
'type': 'Exam',
'number': '1',
'title': 'Test quiz 1',
'set': 'Quiz',
'allowAccess': [
{ 'startDate': 'NotADate',
'endDate': '2019-01-01T00:00:00'},
],
};
var filename = 'courseInstances/Fa18/assessments/quiz1/infoAssessment.json';
loadHelper(courseDir, filename, assessmentJson, /invalid allowAccess startDate/, callback);
});
it('assessment: access rule: invalid endDate should fail', function(callback) {
var assessmentJson = {
'uuid': 'bee70f4d-4220-47f1-b4ed-59c88ce08657',
'type': 'Exam',
'number': '1',
'title': 'Test quiz 1',
'set': 'Quiz',
'allowAccess': [
{ 'startDate': '2019-01-01T22:22:22',
'endDate': 'AlsoReallyNotADate'},
],
};
var filename = 'courseInstances/Fa18/assessments/quiz1/infoAssessment.json';
loadHelper(courseDir, filename, assessmentJson, /invalid allowAccess endDate/, callback);
});
it('assessment: access rule: startDate after endDate should fail', function(callback) {
var assessmentJson = {
'uuid': 'bee70f4d-4220-47f1-b4ed-59c88ce08657',
'type': 'Exam',
'number': '1',
'title': 'Test quiz 1',
'set': 'Quiz',
'allowAccess': [
{ 'startDate': '2020-01-01T11:11:11',
'endDate': '2019-01-01T00:00:00'},
],
};
var filename = 'courseInstances/Fa18/assessments/quiz1/infoAssessment.json';
loadHelper(courseDir, filename, assessmentJson, /must not be after/, callback);
});
});
});
var loadHelper = function(courseDir, filename, contents, expectedErrorRE, callback) {
fs.writeFile(courseDir + '/' + filename, JSON.stringify(contents), function(err) {
if (err) { return callback(err); }
courseDB.loadFullCourse(courseDir, logger, function(err, _c) {
if (err) {
if (expectedErrorRE.test(err)) {
callback(null);
} else {
callback(new Error('unexpected error ' + err));
}
} else {
callback(new Error('returned successfully, which should not happen'));
}
});
});
};<|fim▁end|>
| |
<|file_name|>stream.py<|end_file_name|><|fim▁begin|>import io
import json
class Stream(object):
__shortname__ = "stream"
"""
This is a base class that should be inherited when implementing
different stream types. Should only be created by plugins.
"""
def __init__(self, session):
self.session = session
def __repr__(self):
return "<Stream()>"
def __json__(self):
return dict(type=type(self).shortname())
def open(self):
"""
Attempts to open a connection to the stream.
Returns a file-like object that can be used to read the stream data.
Raises :exc:`StreamError` on failure.
"""
raise NotImplementedError
@property
def json(self):
obj = self.__json__()
return json.dumps(obj)
<|fim▁hole|> def shortname(cls):
return cls.__shortname__
class StreamIO(io.IOBase):
pass
__all__ = ["Stream", "StreamIO"]<|fim▁end|>
|
@classmethod
|
<|file_name|>gen_sps.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import click
from copy import copy
from netCDF4 import Dataset
import numpy as np
import numpy.ma as ma
import os
import rasterio
import rasterio.warp as rwarp
import time
import osr
from .. import geotools
from .. import utils
def earth_radius():
srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)
return srs.GetSemiMajor()
def init_nc(dst_ds, transform, lats, lons, years, variables):
# Set attributes
dst_ds.setncattr('Conventions', u'CF-1.5')
dst_ds.setncattr('GDAL', u'GDAL 1.11.3, released 2015/09/16')
# Create dimensions
dst_ds.createDimension('time', None)
dst_ds.createDimension('lat', len(lats))
dst_ds.createDimension('lon', len(lons))
# Create variables
times = dst_ds.createVariable("time", "f8", ("time"), zlib=True,
least_significant_digit=3)
latitudes = dst_ds.createVariable("lat", "f4", ("lat"), zlib=True,
least_significant_digit = 3)
longitudes = dst_ds.createVariable("lon", "f4", ("lon"), zlib=True,
least_significant_digit=3)
crs = dst_ds.createVariable('crs', "S1", ())
# Add metadata
dst_ds.history = "Created at " + time.ctime(time.time())
dst_ds.source = "gen-sps.py"
latitudes.units = "degrees_north"
latitudes.long_name = 'latitude'
longitudes.units = "degrees_east"
longitudes.long_name = "longitude"
times.units = "years since 2010-01-01 00:00:00.0"
times.calendar = "gregorian"
times.standard_name = "time"
times.axis = 'T'
# Assign data to variables
latitudes[:] = lats
longitudes[:] = lons
times[:] = years
srs = osr.SpatialReference()
srs.ImportFromWkt(geotools.WGS84_WKT)
crs.grid_mapping_name = 'latitude_longitude'
crs.spatial_ref = srs.ExportToWkt()
crs.GeoTransform = ' '.join(map(str, transform))
crs.longitude_of_prime_meridian = srs.GetPrimeMeridian()
crs.semi_major_axis = srs.GetSemiMajor()
crs.inverse_flattening = srs.GetInvFlattening()
out = {}
for name, dtype, units, fill in variables:
dst_data = dst_ds.createVariable(name, dtype,
("time", "lat","lon"), zlib = True,
least_significant_digit = 4,
fill_value = fill)
dst_data.units = units
dst_data.grid_mapping = 'crs'
out[name] = dst_data
return out
def get_transform(r1, r2):
# Get the geo transform using r1 resolution but r2 bounds
dst = rasterio.open(r1)
src = rasterio.open(r2)
#src_bounds = np.around(src.bounds, decimals=3)
affine, width, height = rwarp.calculate_default_transform(src.crs,
dst.crs,
src.width,
src.height,
*src.bounds,
resolution=dst.res)
ul = affine * (0.5, 0.5)
lr = affine * (width - 0.5, height - 0.5)
lats = np.linspace(ul[1], lr[1], height)
lons = np.linspace(ul[0], lr[0], width)
cratio = np.prod(dst.res) / np.prod(src.res)
#cratio = 1.0
static = rasterio.open(utils.luh2_static('carea'))
carea = static.read(1, window=static.window(*src.bounds))
rcs = (np.sin(np.radians(lats + dst.res[0] / 2.0)) -
np.sin(np.radians(lats - dst.res[0] / 2.0))) * \
(dst.res[0] * np.pi/180) * earth_radius() ** 2 / 1e6
#carea *= rcs.reshape(carea.shape[0], 1)
return affine, lats, lons, dst.res, cratio# / carea
def mixing(year):
if year % 10 == 0:
return [year]
y0 = year - (year % 10)
return (y0, y0 + 10)
def resample(ds, bidx, resolution, resampling, out):
arr = ds.read(bidx, masked=True)
nodata = ds.nodatavals[bidx - 1]
if nodata is None: #"'nodata' must be set!"
nodata = -9999
if ds.crs.data == {}:
crs = ds.crs.from_string(u'epsg:4326')
else:
crs = ds.crs
newaff, width, height = rwarp.calculate_default_transform(crs, crs, ds.width,
ds.height,
*ds.bounds,
resolution=resolution)
out.mask.fill(False)
rwarp.reproject(arr, out,
src_transform = ds.affine,
dst_transform = newaff,
width = width,
height = height,
src_nodata = nodata,
dst_nodata = nodata,
src_crs = crs,
resampling = resampling)
out.mask = np.where(out == nodata, 1, 0)
def main():
years = range(2010, 2101)
ssps = ['ssp%d' % i for i in range(1, 6)]
variables = [(ssp, 'f4', 'ppl/km^2', -9999) for ssp in ssps]
fname = '%s/luh2/un_codes-full.tif' % utils.outdir()
affine, lats, lons, res, cfudge = get_transform(fname,
utils.sps(ssps[0], 2010))
arr = (ma.empty((len(lats), len(lons)), fill_value=-9999),
ma.empty((len(lats), len(lons)), fill_value=-9999))
oname = '%s/luh2/sps.nc' % utils.outdir()
with Dataset(oname, 'w') as out:
data = init_nc(out, affine.to_gdal(), lats, lons, years, variables)
for ssp in ssps:
print(ssp)
with click.progressbar(enumerate(years), length=len(years)) as bar:
for idx, year in bar:
yy = mixing(year)
files = map(lambda y: utils.sps(ssp, y), yy)
rasters = map(rasterio.open, files)
if len(rasters) == 1:
resample(rasters[0], 1, res,
rwarp.Resampling.average, arr[0])
data[ssp][idx, :, :] = np.clip(arr[0], 0, None) * cfudge
else:
f0 = (year % 10) / 10.0
resample(rasters[0], 1, res,
rwarp.Resampling.average, arr[0])
resample(rasters[1], 1, res,
rwarp.Resampling.average, arr[1])
data[ssp][idx, :, :] = ((1 - f0) * np.clip(arr[0], 0, None) +
f0 * np.clip(arr[1], 0, None)) * cfudge
if __name__ == '__main__':<|fim▁hole|><|fim▁end|>
|
main()
|
<|file_name|>fetcher.js<|end_file_name|><|fim▁begin|>// Generated by CoffeeScript 1.11.1
var Fetcher, ware;
ware = require('ware');<|fim▁hole|> }
Fetcher.prototype.args = function() {
this.args = arguments;
return this;
};
Fetcher.prototype.use = function(operation) {
this.ware.use(operation);
return this;
};
Fetcher.prototype.fetch = function(callback) {
var args;
args = [].slice.call(this.args);
args.push(callback);
return this.ware.run.apply(this.ware, args);
};
Fetcher.prototype.getLayers = function() {
return this.ware.fns;
};
return Fetcher;
})();
module.exports = {
"new": function() {
return new Fetcher;
}
};<|fim▁end|>
|
Fetcher = (function() {
function Fetcher() {
this.ware = ware();
|
<|file_name|>signaltools.py<|end_file_name|><|fim▁begin|># Author: Travis Oliphant
# 1999 -- 2002
import sigtools
from scipy import linalg
from scipy.fftpack import fft, ifft, ifftshift, fft2, ifft2, fftn, \
ifftn, fftfreq
from numpy import polyadd, polymul, polydiv, polysub, roots, \
poly, polyval, polyder, cast, asarray, isscalar, atleast_1d, \
ones, real_if_close, zeros, array, arange, where, rank, \
newaxis, product, ravel, sum, r_, iscomplexobj, take, \
argsort, allclose, expand_dims, unique, prod, sort, reshape, \
transpose, dot, mean, ndarray, atleast_2d
import numpy as np
from scipy.misc import factorial
from windows import get_window
from _arraytools import axis_slice, axis_reverse, odd_ext, even_ext, const_ext
__all__ = ['correlate', 'fftconvolve', 'convolve', 'convolve2d', 'correlate2d',
'order_filter', 'medfilt', 'medfilt2d', 'wiener', 'lfilter',
'lfiltic', 'deconvolve', 'hilbert', 'hilbert2', 'cmplx_sort',
'unique_roots', 'invres', 'invresz', 'residue', 'residuez',
'resample', 'detrend', 'lfilter_zi', 'filtfilt', 'decimate']
_modedict = {'valid': 0, 'same': 1, 'full': 2}
_boundarydict = {'fill': 0, 'pad': 0, 'wrap': 2, 'circular': 2, 'symm': 1,
'symmetric': 1, 'reflect': 4}
def _valfrommode(mode):
try:
val = _modedict[mode]
except KeyError:
if mode not in [0, 1, 2]:
raise ValueError("Acceptable mode flags are 'valid' (0),"
" 'same' (1), or 'full' (2).")
val = mode
return val
def _bvalfromboundary(boundary):
try:
val = _boundarydict[boundary] << 2
except KeyError:
if val not in [0, 1, 2]:
raise ValueError("Acceptable boundary flags are 'fill', 'wrap'"
" (or 'circular'), \n and 'symm' (or 'symmetric').")
val = boundary << 2
return val
def correlate(in1, in2, mode='full'):
"""
Cross-correlate two N-dimensional arrays.
Cross-correlate in1 and in2 with the output size determined by the mode
argument.
Parameters
----------
in1: array
first input.
in2: array
second input. Should have the same number of dimensions as in1.
mode: str {'valid', 'same', 'full'}
a string indicating the size of the output:
- 'valid': the output consists only of those elements that do not
rely on the zero-padding.
- 'same': the output is the same size as the largest input centered
with respect to the 'full' output.
- 'full': the output is the full discrete linear cross-correlation
of the inputs. (Default)
Returns
-------
out: array
an N-dimensional array containing a subset of the discrete linear
cross-correlation of in1 with in2.
Notes
-----
The correlation z of two arrays x and y of rank d is defined as
z[...,k,...] = sum[..., i_l, ...]
x[..., i_l,...] * conj(y[..., i_l + k,...])
"""
val = _valfrommode(mode)
if mode == 'valid':
ps = [i - j + 1 for i, j in zip(in1.shape, in2.shape)]
out = np.empty(ps, in1.dtype)
for i in range(len(ps)):
if ps[i] <= 0:
raise ValueError("Dimension of x(%d) < y(%d) " \
"not compatible with valid mode" % \
(in1.shape[i], in2.shape[i]))
z = sigtools._correlateND(in1, in2, out, val)
else:
ps = [i + j - 1 for i, j in zip(in1.shape, in2.shape)]
# zero pad input
in1zpadded = np.zeros(ps, in1.dtype)
sc = [slice(0, i) for i in in1.shape]
in1zpadded[sc] = in1.copy()
if mode == 'full':
out = np.empty(ps, in1.dtype)
z = sigtools._correlateND(in1zpadded, in2, out, val)
elif mode == 'same':
out = np.empty(in1.shape, in1.dtype)
z = sigtools._correlateND(in1zpadded, in2, out, val)
else:
raise ValueError("Uknown mode %s" % mode)
return z
def _centered(arr, newsize):
# Return the center newsize portion of the array.
newsize = asarray(newsize)
currsize = array(arr.shape)
startind = (currsize - newsize) / 2
endind = startind + newsize
myslice = [slice(startind[k], endind[k]) for k in range(len(endind))]
return arr[tuple(myslice)]
def fftconvolve(in1, in2, mode="full"):
"""Convolve two N-dimensional arrays using FFT. See convolve.
"""
s1 = array(in1.shape)
s2 = array(in2.shape)
complex_result = (np.issubdtype(in1.dtype, np.complex) or
np.issubdtype(in2.dtype, np.complex))
size = s1 + s2 - 1
# Always use 2**n-sized FFT
fsize = 2 ** np.ceil(np.log2(size))
IN1 = fftn(in1, fsize)
IN1 *= fftn(in2, fsize)
fslice = tuple([slice(0, int(sz)) for sz in size])
ret = ifftn(IN1)[fslice].copy()
del IN1
if not complex_result:
ret = ret.real
if mode == "full":
return ret
elif mode == "same":
if product(s1, axis=0) > product(s2, axis=0):
osize = s1
else:
osize = s2
return _centered(ret, osize)
elif mode == "valid":
return _centered(ret, abs(s2 - s1) + 1)
def convolve(in1, in2, mode='full'):
"""
Convolve two N-dimensional arrays.
Convolve in1 and in2 with output size determined by mode.
Parameters
----------
in1: array
first input.
in2: array
second input. Should have the same number of dimensions as in1.
mode: str {'valid', 'same', 'full'}
a string indicating the size of the output:
``valid`` : the output consists only of those elements that do not
rely on the zero-padding.
``same`` : the output is the same size as the largest input centered
with respect to the 'full' output.
``full`` : the output is the full discrete linear cross-correlation
of the inputs. (Default)
Returns
-------
out: array
an N-dimensional array containing a subset of the discrete linear
cross-correlation of in1 with in2.
"""
volume = asarray(in1)
kernel = asarray(in2)
if rank(volume) == rank(kernel) == 0:
return volume * kernel
elif not volume.ndim == kernel.ndim:
raise ValueError("in1 and in2 should have the same rank")
slice_obj = [slice(None, None, -1)] * len(kernel.shape)
if mode == 'valid':
for d1, d2 in zip(volume.shape, kernel.shape):
if not d1 >= d2:
raise ValueError(
"in1 should have at least as many items as in2 in " \
"every dimension for valid mode.")
if np.iscomplexobj(kernel):
return correlate(volume, kernel[slice_obj].conj(), mode)
else:
return correlate(volume, kernel[slice_obj], mode)
def order_filter(a, domain, rank):
"""
Perform an order filter on an N-dimensional array.
Perform an order filter on the array in. The domain argument acts as a
mask centered over each pixel. The non-zero elements of domain are
used to select elements surrounding each input pixel which are placed
in a list. The list is sorted, and the output for that pixel is the
element corresponding to rank in the sorted list.
Parameters
----------
a : ndarray
The N-dimensional input array.
domain : array_like
A mask array with the same number of dimensions as `in`.
Each dimension should have an odd number of elements.
rank : int
A non-negative integer which selects the element from the
sorted list (0 corresponds to the smallest element, 1 is the
next smallest element, etc.).
Returns
-------
out : ndarray
The results of the order filter in an array with the same
shape as `in`.
Examples
--------
>>> import scipy.signal
>>> x = np.arange(25).reshape(5, 5)
>>> domain = np.identity(3)
>>> x
array([[ 0, 1, 2, 3, 4],
[ 5, 6, 7, 8, 9],
[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19],
[20, 21, 22, 23, 24]])
>>> sp.signal.order_filter(x, domain, 0)
array([[ 0., 0., 0., 0., 0.],
[ 0., 0., 1., 2., 0.],
[ 0., 5., 6., 7., 0.],
[ 0., 10., 11., 12., 0.],
[ 0., 0., 0., 0., 0.]])
>>> sp.signal.order_filter(x, domain, 2)
array([[ 6., 7., 8., 9., 4.],
[ 11., 12., 13., 14., 9.],
[ 16., 17., 18., 19., 14.],
[ 21., 22., 23., 24., 19.],
[ 20., 21., 22., 23., 24.]])
"""
domain = asarray(domain)
size = domain.shape
for k in range(len(size)):
if (size[k] % 2) != 1:
raise ValueError("Each dimension of domain argument "
" should have an odd number of elements.")
return sigtools._order_filterND(a, domain, rank)
def medfilt(volume, kernel_size=None):
"""
Perform a median filter on an N-dimensional array.
Apply a median filter to the input array using a local window-size
given by kernel_size.
Parameters
----------
volume : array_like
An N-dimensional input array.
kernel_size : array_like, optional
A scalar or an N-length list giving the size of the median filter
window in each dimension. Elements of `kernel_size` should be odd.
If `kernel_size` is a scalar, then this scalar is used as the size in
each dimension. Default size is 3 for each dimension.
Returns
-------
out : ndarray
An array the same size as input containing the median filtered
result.
"""
volume = atleast_1d(volume)
if kernel_size is None:
kernel_size = [3] * len(volume.shape)
kernel_size = asarray(kernel_size)
if len(kernel_size.shape) == 0:
kernel_size = [kernel_size.item()] * len(volume.shape)
kernel_size = asarray(kernel_size)
for k in range(len(volume.shape)):
if (kernel_size[k] % 2) != 1:
raise ValueError("Each element of kernel_size should be odd.")
domain = ones(kernel_size)
numels = product(kernel_size, axis=0)
order = int(numels / 2)
return sigtools._order_filterND(volume, domain, order)
def wiener(im, mysize=None, noise=None):
"""
Perform a Wiener filter on an N-dimensional array.
Apply a Wiener filter to the N-dimensional array `im`.
Parameters
----------
im : ndarray
An N-dimensional array.
mysize : int or arraylike, optional
A scalar or an N-length list giving the size of the Wiener filter
window in each dimension. Elements of mysize should be odd.
If mysize is a scalar, then this scalar is used as the size
in each dimension.
noise : float, optional
The noise-power to use. If None, then noise is estimated as the
average of the local variance of the input.
Returns
-------
out : ndarray
Wiener filtered result with the same shape as `im`.
"""
im = asarray(im)
if mysize is None:
mysize = [3] * len(im.shape)
mysize = asarray(mysize)
# Estimate the local mean
lMean = correlate(im, ones(mysize), 'same') / product(mysize, axis=0)
# Estimate the local variance
lVar = (correlate(im ** 2, ones(mysize), 'same') / product(mysize, axis=0)
- lMean ** 2)
# Estimate the noise power if needed.
if noise == None:
noise = mean(ravel(lVar), axis=0)
res = (im - lMean)
res *= (1 - noise / lVar)
res += lMean
out = where(lVar < noise, lMean, res)
return out
def convolve2d(in1, in2, mode='full', boundary='fill', fillvalue=0):
    """Convolve two 2-dimensional arrays.

    Convolve `in1` and `in2` with output size determined by `mode` and
    boundary conditions determined by `boundary` and `fillvalue`.

    Parameters
    ----------
    in1, in2 : ndarray
        Two-dimensional input arrays to be convolved.
    mode : str, optional
        Size of the output: ``'valid'`` (only elements that do not rely on
        zero-padding), ``'same'`` (same size as the largest input, centered
        with respect to the 'full' output), or ``'full'`` (the full discrete
        linear cross-correlation; default).
    boundary : str, optional
        How to handle boundaries: ``'fill'`` pads with `fillvalue`
        (default), ``'wrap'`` uses circular boundary conditions, ``'symm'``
        uses symmetrical boundary conditions.
    fillvalue : scalar, optional
        Value to fill pad input arrays with. Default is 0.

    Returns
    -------
    out : ndarray
        A 2-dimensional array containing a subset of the discrete linear
        convolution of `in1` with `in2`.
    """
    if mode == 'valid':
        # 'valid' output only exists when in1 is at least as large as in2
        # along every axis.
        for d1, d2 in zip(np.shape(in1), np.shape(in2)):
            if d1 < d2:
                raise ValueError(
                    "in1 should have at least as many items as in2 in "
                    "every dimension for valid mode.")
    val = _valfrommode(mode)
    bval = _bvalfromboundary(boundary)
    # The flag 1 selects convolution (in2 is flipped) in the shared kernel.
    return sigtools._convolve2d(in1, in2, 1, val, bval, fillvalue)
def correlate2d(in1, in2, mode='full', boundary='fill', fillvalue=0):
    """Cross-correlate two 2-dimensional arrays.

    Cross correlate `in1` and `in2` with output size determined by `mode`
    and boundary conditions determined by `boundary` and `fillvalue`.

    Parameters
    ----------
    in1, in2 : ndarray
        Two-dimensional input arrays to be convolved.
    mode : str, optional
        Size of the output: ``'valid'`` (only elements that do not rely on
        zero-padding), ``'same'`` (same size as the largest input, centered
        with respect to the 'full' output), or ``'full'`` (the full discrete
        linear cross-correlation; default).
    boundary : str, optional
        How to handle boundaries: ``'fill'`` pads with `fillvalue`
        (default), ``'wrap'`` uses circular boundary conditions, ``'symm'``
        uses symmetrical boundary conditions.
    fillvalue : scalar, optional
        Value to fill pad input arrays with. Default is 0.

    Returns
    -------
    out : ndarray
        A 2-dimensional array containing a subset of the discrete linear
        cross-correlation of `in1` with `in2`.
    """
    # The flag 0 selects correlation (no flip of in2) in the shared kernel.
    return sigtools._convolve2d(in1, in2, 0, _valfrommode(mode),
                                _bvalfromboundary(boundary), fillvalue)
def medfilt2d(input, kernel_size=3):
    """
    Median filter a 2-dimensional array.

    Apply a median filter to the input array using a local window of size
    `kernel_size` (each extent must be odd).

    Parameters
    ----------
    input : array_like
        A 2-dimensional input array.
    kernel_size : array_like, optional
        A scalar or a list of length 2, giving the size of the
        median filter window in each dimension.  Elements of
        `kernel_size` should be odd.  If `kernel_size` is a scalar,
        then this scalar is used as the size in each dimension.
        Default is a kernel of size (3, 3).

    Returns
    -------
    out : ndarray
        An array the same size as input containing the median filtered
        result.
    """
    image = asarray(input)

    # Normalise kernel_size to a length-2 array of window extents.
    if kernel_size is None:
        kernel_size = [3] * 2
    kernel_size = asarray(kernel_size)
    if len(kernel_size.shape) == 0:
        kernel_size = asarray([kernel_size.item()] * 2)

    if any((size % 2) != 1 for size in kernel_size):
        raise ValueError("Each element of kernel_size should be odd.")

    return sigtools._medfilt2d(image, kernel_size)
def lfilter(b, a, x, axis=-1, zi=None):
    """
    Filter data along one-dimension with an IIR or FIR filter.

    Filter a data sequence, `x`, using a digital filter.  This works for
    many fundamental data types (including Object type).  The filter is a
    direct form II transposed implementation of the standard difference
    equation (see Notes).

    Parameters
    ----------
    b : array_like
        The numerator coefficient vector in a 1-D sequence.
    a : array_like
        The denominator coefficient vector in a 1-D sequence.  If ``a[0]``
        is not 1, then both `a` and `b` are normalized by ``a[0]``.
    x : array_like
        An N-dimensional input array.
    axis : int
        The axis of the input data array along which to apply the linear
        filter.  The filter is applied to each subarray along this axis.
        Default is -1.
    zi : array_like, optional
        Initial conditions for the filter delays.  It is a vector (or
        array of vectors for an N-dimensional input) of length
        ``max(len(a), len(b)) - 1``.  If `zi` is None or is not given,
        initial rest is assumed.  See `lfiltic` for more information.

    Returns
    -------
    y : array
        The output of the digital filter.
    zf : array, optional
        If `zi` is None this is not returned, otherwise `zf` holds the
        final filter delay values.

    Notes
    -----
    The filter is realised as a transposed direct form II structure::

        a[0]*y[n] = b[0]*x[n] + b[1]*x[n-1] + ... + b[nb]*x[n-nb]
                              - a[1]*y[n-1] - ... - a[na]*y[n-na]

    i.e. the rational transfer function in the z-transform domain is::

                    -1              -nb
        b[0] + b[1]z  + ... + b[nb]z
        -------------------------------- X(z) = Y(z)
                    -1              -na
        a[0] + a[1]z  + ... + a[na]z
    """
    # A scalar denominator describes a pure FIR filter.
    args = [b, [a] if isscalar(a) else a, x, axis]
    if zi is not None:
        args.append(zi)
    return sigtools._linear_filter(*args)
def lfiltic(b, a, y, x=None):
    """
    Construct initial conditions for `lfilter`.

    Given a linear filter ``(b, a)`` and initial conditions on the output
    `y` and the input `x`, return the initial conditions on the state
    vector ``zi`` which is used by `lfilter` to generate the output given
    the input.

    With ``M = len(b) - 1`` and ``N = len(a) - 1``, the initial conditions
    are given in the vectors `x` and `y` as::

        x = {x[-1], x[-2], ..., x[-M]}
        y = {y[-1], y[-2], ..., y[-N]}

    If `x` is not given, its initial conditions are assumed zero.  If
    either vector is too short, it is padded with zeros to the proper
    length.

    Returns
    -------
    zi : ndarray
        The state vector ``{z_0[-1], z_1[-1], ..., z_K-1[-1]}`` where
        ``K = max(M, N)``.
    """
    N = np.size(a) - 1
    M = np.size(b) - 1
    K = max(M, N)
    y = asarray(y)
    zi = zeros(K, y.dtype.char)

    # Pad (or create) the input history so it holds exactly M samples.
    if x is None:
        x = zeros(M, y.dtype.char)
    else:
        x = asarray(x)
        deficit = M - np.size(x)
        if deficit > 0:
            x = r_[x, zeros(deficit)]

    # Pad the output history so it holds exactly N samples.
    deficit = N - np.size(y)
    if deficit > 0:
        y = r_[y, zeros(deficit)]

    # Accumulate the transposed direct form II state contributions.
    for m in range(M):
        zi[m] = sum(b[m + 1:] * x[:M - m], axis=0)
    for m in range(N):
        zi[m] -= sum(a[m + 1:] * y[:N - m], axis=0)

    return zi
def deconvolve(signal, divisor):
    """Deconvolves divisor out of signal.

    Returns the quotient and remainder such that
    ``signal = convolve(divisor, quotient) + remainder``.
    """
    num = atleast_1d(signal)
    den = atleast_1d(divisor)
    if len(den) > len(num):
        # Divisor longer than the signal: empty quotient, the whole
        # signal is the remainder.
        return [], num
    # Filtering a unit impulse by num/den performs the polynomial division.
    impulse = zeros(len(num) - len(den) + 1, float)
    impulse[0] = 1
    quot = lfilter(num, den, impulse)
    rem = num - convolve(den, quot, mode='full')
    return quot, rem
def hilbert(x, N=None, axis=-1):
    """
    Compute the analytic signal.

    The transformation is done along the last axis by default.

    Parameters
    ----------
    x : array_like
        Signal data.
    N : int, optional
        Number of Fourier components.  Default: ``x.shape[axis]``
    axis : int, optional
        Axis along which to do the transformation.  Default: -1.

    Returns
    -------
    xa : ndarray
        Analytic signal of `x`, of each 1-D array along `axis`.

    Notes
    -----
    The analytic signal `x_a(t)` of `x(t)` is::

        x_a = F^{-1}(F(x) 2U) = x + i y

    where ``F`` is the Fourier transform, ``U`` the unit step function,
    and ``y`` the Hilbert transform of ``x``. [1]_

    References
    ----------
    .. [1] Wikipedia, "Analytic signal".
           http://en.wikipedia.org/wiki/Analytic_signal
    """
    x = asarray(x)
    if iscomplexobj(x):
        raise ValueError("x must be real.")
    if N is None:
        N = x.shape[axis]
    if N <= 0:
        raise ValueError("N must be positive.")

    Xf = fft(x, N, axis=axis)
    # Build the one-sided spectral weighting: keep DC (and Nyquist for
    # even N) at unit weight, double the positive frequencies, zero the
    # negative ones.  Use floor division so the indices stay integers
    # under Python 3 semantics.
    h = zeros(N)
    if N % 2 == 0:
        h[0] = h[N // 2] = 1
        h[1:N // 2] = 2
    else:
        h[0] = 1
        h[1:(N + 1) // 2] = 2

    if x.ndim > 1:
        # Broadcast h along the transform axis only; index with a tuple
        # (list-of-slices indexing is deprecated in numpy).
        ind = [newaxis] * x.ndim
        ind[axis] = slice(None)
        h = h[tuple(ind)]
    x = ifft(Xf * h, axis=axis)
    return x
def hilbert2(x, N=None):
    """
    Compute the '2-D' analytic signal of `x`

    Parameters
    ----------
    x : array_like
        2-D signal data.
    N : int or tuple of two ints, optional
        Number of Fourier components.  Default is ``x.shape``

    Returns
    -------
    xa : ndarray
        Analytic signal of `x` taken along axes (0,1).

    References
    ----------
    .. [1] Wikipedia, "Analytic signal",
        http://en.wikipedia.org/wiki/Analytic_signal
    """
    x = atleast_2d(x)
    if x.ndim > 2:
        raise ValueError("x must be rank 2.")
    if iscomplexobj(x):
        raise ValueError("x must be real.")
    if N is None:
        N = x.shape
    elif isinstance(N, int):
        if N <= 0:
            raise ValueError("N must be positive.")
        N = (N, N)
    elif len(N) != 2 or any(n <= 0 for n in N):
        raise ValueError("When given as a tuple, N must hold exactly "
                         "two positive integers")

    Xf = fft2(x, N, axes=(0, 1))

    def _step_filter(length):
        # 1-D spectral weighting: unit weight at DC (and Nyquist for even
        # lengths), double positive frequencies, zero negative ones.
        # Floor division keeps the indices integral on Python 3.
        h = zeros(length, 'd')
        if length % 2 == 0:
            h[0] = h[length // 2] = 1
            h[1:length // 2] = 2
        else:
            h[0] = 1
            h[1:(length + 1) // 2] = 2
        return h

    # Outer product of the two 1-D filters replaces the previous
    # eval/exec-based construction.  x is guaranteed rank 2 here, so no
    # further axis expansion of h is needed.
    h = _step_filter(N[0])[:, newaxis] * _step_filter(N[1])[newaxis, :]
    x = ifft2(Xf * h, axes=(0, 1))
    return x
def cmplx_sort(p):
    """Sort roots into ascending order (by magnitude when complex).

    Returns the sorted roots and the index permutation that sorts them.
    """
    p = asarray(p)
    key = abs(p) if iscomplexobj(p) else p
    order = argsort(key)
    return take(p, order, 0), order
def unique_roots(p, tol=1e-3, rtype='min'):
    """
    Determine unique roots and their multiplicities from a list of roots.

    Parameters
    ----------
    p : array_like
        The list of roots.
    tol : float, optional
        The tolerance for two roots to be considered equal. Default is 1e-3.
    rtype : {'max', 'min, 'avg'}, optional
        How to determine the returned root if multiple roots are within
        `tol` of each other.
        - 'max': pick the maximum of those roots.
        - 'min': pick the minimum of those roots.
        - 'avg': take the average of those roots.

    Returns
    -------
    pout : ndarray
        The list of unique roots, sorted from low to high.
    mult : ndarray
        The multiplicity of each root.

    Notes
    -----
    This utility function is not specific to roots but can be used for any
    sequence of values for which uniqueness and multiplicity has to be
    determined. For a more general routine, see `numpy.unique`.

    Examples
    --------
    >>> vals = [0, 1.3, 1.31, 2.8, 1.25, 2.2, 10.3]
    >>> uniq, mult = sp.signal.unique_roots(vals, tol=2e-2, rtype='avg')

    Check which roots have multiplicity larger than 1:

    >>> uniq[mult > 1]
    array([ 1.305])
    """
    if rtype in ['max', 'maximum']:
        comproot = np.maximum
    elif rtype in ['min', 'minimum']:
        comproot = np.minimum
    elif rtype in ['avg', 'mean']:
        comproot = np.mean
    else:
        # Previously an unrecognised rtype fell through and triggered a
        # confusing NameError on `comproot` below; fail fast instead.
        raise ValueError("`rtype` must be one of "
                         "{'max', 'maximum', 'min', 'minimum', 'avg', 'mean'}")
    p = asarray(p) * 1.0
    tol = abs(tol)
    p, indx = cmplx_sort(p)
    pout = []
    mult = []
    indx = -1
    # Seed curp so the first root always starts a new group.
    curp = p[0] + 5 * tol
    sameroots = []
    for k in range(len(p)):
        tr = p[k]
        if abs(tr - curp) < tol:
            # Within tolerance of the current group: merge and update the
            # representative root with the chosen reduction.
            sameroots.append(tr)
            curp = comproot(sameroots)
            pout[indx] = curp
            mult[indx] += 1
        else:
            # Start a new group.
            pout.append(tr)
            curp = tr
            sameroots = [tr]
            indx += 1
            mult.append(1)
    return array(pout), array(mult)
def invres(r, p, k, tol=1e-3, rtype='avg'):
    """Compute b(s) and a(s) from partial fraction expansion: r,p,k

    If M = len(b) and N = len(a)

            b(s)     b[0] x**(M-1) + b[1] x**(M-2) + ... + b[M-1]
    H(s) = ------ = ----------------------------------------------
            a(s)     a[0] x**(N-1) + a[1] x**(N-2) + ... + a[N-1]

             r[0]       r[1]             r[-1]
         = -------- + -------- + ... + --------- + k(s)
           (s-p[0])   (s-p[1])         (s-p[-1])

    If there are any repeated roots (closer than tol), then the partial
    fraction expansion has terms like

          r[i]      r[i+1]        r[i+n-1]
        -------- + ----------- + ... + -----------
        (s-p[i])  (s-p[i])**2   (s-p[i])**n

    See Also
    --------
    residue, poly, polyval, unique_roots
    """
    # Keep the direct term aside; the name `k` is reused as a loop index.
    extra = k
    p, indx = cmplx_sort(p)
    # Reorder residues so they stay aligned with the sorted poles.
    r = take(r, indx, 0)
    pout, mult = unique_roots(p, tol=tol, rtype=rtype)
    # Rebuild the full pole list (with repetitions) so poly() yields a(s).
    p = []
    for k in range(len(pout)):
        p.extend([pout[k]] * mult[k])
    a = atleast_1d(poly(p))
    # Seed b(s) with the direct term times a(s) (zero if there is none).
    if len(extra) > 0:
        b = polymul(extra, a)
    else:
        b = [0]
    indx = 0
    for k in range(len(pout)):
        # Polynomial formed from all poles except pout[k].
        temp = []
        for l in range(len(pout)):
            if l != k:
                temp.extend([pout[l]] * mult[l])
        for m in range(mult[k]):
            # For multiplicity Nn, the term r/(s-p)**(m+1) contributes
            # r * (s-p)**(Nn-m-1) * (product of the other pole factors).
            t2 = temp[:]
            t2.extend([pout[k]] * (mult[k] - m - 1))
            b = polyadd(b, r[indx] * poly(t2))
            indx += 1
    b = real_if_close(b)
    # Strip numerically-zero leading coefficients from b.
    while allclose(b[0], 0, rtol=1e-14) and (b.shape[-1] > 1):
        b = b[1:]
    return b, a
def residue(b, a, tol=1e-3, rtype='avg'):
    """
    Compute partial-fraction expansion of b(s) / a(s).

    If ``M = len(b)`` and ``N = len(a)``, then the partial-fraction
    expansion H(s) is defined as::

              b(s)     b[0] s**(M-1) + b[1] s**(M-2) + ... + b[M-1]
      H(s) = ------ = ----------------------------------------------
              a(s)     a[0] s**(N-1) + a[1] s**(N-2) + ... + a[N-1]

               r[0]       r[1]             r[-1]
           = -------- + -------- + ... + --------- + k(s)
             (s-p[0])   (s-p[1])         (s-p[-1])

    If there are any repeated roots (closer together than `tol`), then H(s)
    has terms like::

            r[i]      r[i+1]              r[i+n-1]
          -------- + ----------- + ... + -----------
          (s-p[i])  (s-p[i])**2          (s-p[i])**n

    Returns
    -------
    r : ndarray
        Residues.
    p : ndarray
        Poles.
    k : ndarray
        Coefficients of the direct polynomial term.

    See Also
    --------
    invres, numpy.poly, unique_roots
    """
    b, a = map(asarray, (b, a))
    # Denominator gain; residues are divided by it before returning.
    rscale = a[0]
    # Polynomial division splits off the direct term k(s), leaving a
    # strictly proper remainder b(s).
    k, b = polydiv(b, a)
    p = roots(a)
    # Residue accumulator with the same (complex) dtype as the poles.
    r = p * 0.0
    pout, mult = unique_roots(p, tol=tol, rtype=rtype)
    # Rebuild the full pole list with repetitions, in sorted order.
    p = []
    for n in range(len(pout)):
        p.extend([pout[n]] * mult[n])
    p = asarray(p)
    # Compute the residue from the general formula
    indx = 0
    for n in range(len(pout)):
        bn = b.copy()
        # Product of the pole factors excluding pout[n].
        pn = []
        for l in range(len(pout)):
            if l != n:
                pn.extend([pout[l]] * mult[l])
        an = atleast_1d(poly(pn))
        # bn(s) / an(s) is (s-po[n])**Nn * b(s) / a(s) where Nn is
        # multiplicity of pole at po[n]
        sig = mult[n]
        for m in range(sig, 0, -1):
            if sig > m:
                # compute next derivative of bn(s) / an(s) via the
                # quotient rule (numerator only; an is squared to match).
                term1 = polymul(polyder(bn, 1), an)
                term2 = polymul(bn, polyder(an, 1))
                bn = polysub(term1, term2)
                an = polymul(an, an)
            r[indx + m - 1] = polyval(bn, pout[n]) / polyval(an, pout[n]) \
                              / factorial(sig - m)
        indx += sig
    return r / rscale, p, k
def residuez(b, a, tol=1e-3, rtype='avg'):
    """Compute partial-fraction expansion of b(z) / a(z).

    If M = len(b) and N = len(a)

            b(z)     b[0] + b[1] z**(-1) + ... + b[M-1] z**(-M+1)
    H(z) = ------ = ----------------------------------------------
            a(z)     a[0] + a[1] z**(-1) + ... + a[N-1] z**(-N+1)

                 r[0]                   r[-1]
         = --------------- + ... + ---------------- + k[0] + k[1]z**(-1) ...
           (1-p[0]z**(-1))         (1-p[-1]z**(-1))

    If there are any repeated roots (closer than tol), then the partial
    fraction expansion has terms like

             r[i]              r[i+1]                    r[i+n-1]
        -------------- + ------------------ + ... + ------------------
        (1-p[i]z**(-1))  (1-p[i]z**(-1))**2         (1-p[i]z**(-1))**n

    See also
    --------
    invresz, poly, polyval, unique_roots
    """
    b, a = map(asarray, (b, a))
    gain = a[0]
    # Work with coefficient order reversed so polydiv operates on the
    # polynomials in z**(-1).
    brev, arev = b[::-1], a[::-1]
    krev, brev = polydiv(brev, arev)
    # NOTE(review): `krev == []` compares an ndarray against a list; on
    # modern numpy this is an elementwise comparison, not an emptiness
    # test — `len(krev) == 0` is probably what was meant.  Verify before
    # relying on the empty-quotient branch.
    if krev == []:
        k = []
    else:
        k = krev[::-1]
    b = brev[::-1]
    p = roots(a)
    # Residue accumulator with the same (complex) dtype as the poles.
    r = p * 0.0
    pout, mult = unique_roots(p, tol=tol, rtype=rtype)
    # Rebuild the full pole list with repetitions, in sorted order.
    p = []
    for n in range(len(pout)):
        p.extend([pout[n]] * mult[n])
    p = asarray(p)
    # Compute the residue from the general formula (for discrete-time)
    # the polynomial is in z**(-1) and the multiplication is by terms
    # like this (1-p[i] z**(-1))**mult[i]. After differentiation,
    # we must divide by (-p[i])**(m-k) as well as (m-k)!
    indx = 0
    for n in range(len(pout)):
        bn = brev.copy()
        # Product of the pole factors excluding pout[n], in z**(-1) order.
        pn = []
        for l in range(len(pout)):
            if l != n:
                pn.extend([pout[l]] * mult[l])
        an = atleast_1d(poly(pn))[::-1]
        # bn(z) / an(z) is (1-po[n] z**(-1))**Nn * b(z) / a(z) where Nn is
        # multiplicity of pole at po[n] and b(z) and a(z) are polynomials.
        sig = mult[n]
        for m in range(sig, 0, -1):
            if sig > m:
                # compute next derivative of bn(s) / an(s) via the
                # quotient rule (numerator only; an is squared to match).
                term1 = polymul(polyder(bn, 1), an)
                term2 = polymul(bn, polyder(an, 1))
                bn = polysub(term1, term2)
                an = polymul(an, an)
            r[indx + m - 1] = (polyval(bn, 1.0 / pout[n]) /
                               polyval(an, 1.0 / pout[n]) /
                               factorial(sig - m) / (-pout[n]) ** (sig - m))
        indx += sig
    return r / gain, p, k
def invresz(r, p, k, tol=1e-3, rtype='avg'):
    """Compute b(z) and a(z) from partial fraction expansion: r,p,k

    If M = len(b) and N = len(a)

            b(z)     b[0] + b[1] z**(-1) + ... + b[M-1] z**(-M+1)
    H(z) = ------ = ----------------------------------------------
            a(z)     a[0] + a[1] z**(-1) + ... + a[N-1] z**(-N+1)

                 r[0]                   r[-1]
         = --------------- + ... + ---------------- + k[0] + k[1]z**(-1) ...
           (1-p[0]z**(-1))         (1-p[-1]z**(-1))

    If there are any repeated roots (closer than tol), then the partial
    fraction expansion has terms like

             r[i]              r[i+1]                    r[i+n-1]
        -------------- + ------------------ + ... + ------------------
        (1-p[i]z**(-1))  (1-p[i]z**(-1))**2         (1-p[i]z**(-1))**n

    See also
    --------
    residuez, poly, polyval, unique_roots
    """
    # Direct polynomial term; the name `k` is reused as a loop index below.
    extra = asarray(k)
    p, indx = cmplx_sort(p)
    # Keep residues aligned with the sorted poles.
    r = take(r, indx, 0)
    pout, mult = unique_roots(p, tol=tol, rtype=rtype)
    # Rebuild the full pole list (with repetitions) so poly() yields a(z).
    p = []
    for k in range(len(pout)):
        p.extend([pout[k]] * mult[k])
    a = atleast_1d(poly(p))
    # Seed b with the direct term times a (zero if there is none).
    if len(extra) > 0:
        b = polymul(extra, a)
    else:
        b = [0]
    indx = 0
    # Accumulate in reversed coefficient order (polynomials in z**(-1)).
    brev = asarray(b)[::-1]
    for k in range(len(pout)):
        temp = []
        # Construct polynomial which does not include any of this root
        for l in range(len(pout)):
            if l != k:
                temp.extend([pout[l]] * mult[l])
        for m in range(mult[k]):
            t2 = temp[:]
            t2.extend([pout[k]] * (mult[k] - m - 1))
            brev = polyadd(brev, (r[indx] * poly(t2))[::-1])
            indx += 1
    b = real_if_close(brev[::-1])
    return b, a
def resample(x, num, t=None, axis=0, window=None):
    """
    Resample `x` to `num` samples using Fourier method along the given axis.

    The resampled signal starts at the same value as `x` but is sampled
    with a spacing of ``len(x) / num * (spacing of x)``.  Because a
    Fourier method is used, the signal is assumed to be periodic.

    Parameters
    ----------
    x : array_like
        The data to be resampled.
    num : int
        The number of samples in the resampled signal.
    t : array_like, optional
        If `t` is given, it is assumed to be the sample positions
        associated with the signal data in `x`.
    axis : int, optional
        The axis of `x` that is resampled.  Default is 0.
    window : array_like, callable, string, float, or tuple, optional
        Specifies the window applied to the signal in the Fourier
        domain.  See below for details.

    Returns
    -------
    resampled_x or (resampled_x, resampled_t)
        Either the resampled array, or, if `t` was given, a tuple
        containing the resampled array and the corresponding resampled
        positions.

    Notes
    -----
    The argument `window` controls a Fourier-domain window that tapers
    the Fourier spectrum before zero-padding to alleviate ringing in
    the resampled values for sampled signals you didn't intend to be
    interpreted as band-limited.

    If `window` is a function, then it is called with a vector of inputs
    indicating the frequency bins (i.e. fftfreq(x.shape[axis]) ).

    If `window` is an array of the same length as `x.shape[axis]` it is
    assumed to be the window to be applied directly in the Fourier
    domain (with dc and low-frequency first).

    For any other type of `window`, the function `scipy.signal.get_window`
    is called to generate the window.

    The first sample of the returned vector is the same as the first
    sample of the input vector.  The spacing between samples is changed
    from dx to ``dx * len(x) / num``.

    If `t` is not None, then it represents the old sample positions,
    and the new sample positions will be returned as well as the new
    samples.
    """
    x = asarray(x)
    X = fft(x, axis=axis)
    Nx = x.shape[axis]
    if window is not None:
        if callable(window):
            W = window(fftfreq(Nx))
        elif isinstance(window, ndarray) and window.shape == (Nx,):
            W = window
        else:
            W = ifftshift(get_window(window, Nx))
        # Reshape W so it broadcasts along the transform axis only.
        # (Previously this used ones(...), producing a float shape.)
        newshape = [1] * x.ndim
        newshape[axis] = len(W)
        W.shape = newshape
        X = X * W
    sl = [slice(None)] * x.ndim
    newshape = list(x.shape)
    newshape[axis] = num
    N = int(np.minimum(num, Nx))
    Y = zeros(newshape, 'D')
    # Copy the N lowest frequencies (positive then negative halves).
    # Floor division keeps the slice bounds integral on Python 3; the
    # negative bound uses -(N - 1) // 2 to reproduce the floor semantics
    # of the original Python 2 integer division.
    sl[axis] = slice(0, (N + 1) // 2)
    Y[tuple(sl)] = X[tuple(sl)]
    sl[axis] = slice(-(N - 1) // 2, None)
    Y[tuple(sl)] = X[tuple(sl)]
    y = ifft(Y, axis=axis) * (float(num) / float(Nx))

    if x.dtype.char not in ['F', 'D']:
        # Real input: discard the (numerically zero) imaginary part.
        y = y.real

    if t is None:
        return y
    else:
        new_t = arange(0, num) * (t[1] - t[0]) * Nx / float(num) + t[0]
        return y, new_t
def detrend(data, axis=-1, type='linear', bp=0):
    """
    Remove linear trend along axis from data.

    Parameters
    ----------
    data : array_like
        The input data.
    axis : int, optional
        The axis along which to detrend the data.  By default this is the
        last axis (-1).
    type : {'linear', 'constant'}, optional
        The type of detrending.  If ``type == 'linear'`` (default),
        the result of a linear least-squares fit to `data` is subtracted
        from `data`.  If ``type == 'constant'``, only the mean of `data`
        is subtracted.
    bp : array_like of ints, optional
        A sequence of break points.  If given, an individual linear fit is
        performed for each part of `data` between two break points.
        Break points are specified as indices into `data`.

    Returns
    -------
    ret : ndarray
        The detrended input data.

    Examples
    --------
    >>> randgen = np.random.RandomState(9)
    >>> npoints = 1e3
    >>> noise = randgen.randn(npoints)
    >>> x = 3 + 2*np.linspace(0, 1, npoints) + noise
    >>> (sp.signal.detrend(x) - noise).max() < 0.01
    True
    """
    if type not in ['linear', 'l', 'constant', 'c']:
        raise ValueError("Trend type must be 'linear' or 'constant'.")
    data = asarray(data)
    dtype = data.dtype.char
    if dtype not in 'dfDF':
        dtype = 'd'
    if type in ['constant', 'c']:
        ret = data - expand_dims(mean(data, axis), axis)
        return ret
    else:
        dshape = data.shape
        N = dshape[axis]
        # Break points always include both ends of the axis.
        bp = sort(unique(r_[0, bp, N]))
        if np.any(bp > N):
            raise ValueError("Breakpoints must be less than length "
                             "of data along given axis.")
        Nreg = len(bp) - 1
        # Restructure data so that axis is along first dimension and
        # all other dimensions are collapsed into second dimension
        rnk = len(dshape)
        if axis < 0:
            axis = axis + rnk
        newdims = r_[axis, 0:axis, axis + 1:rnk]
        # Floor division keeps the reshape target integral on Python 3.
        newdata = reshape(transpose(data, tuple(newdims)),
                          (N, prod(dshape, axis=0) // N))
        newdata = newdata.copy()  # make sure we have a copy
        if newdata.dtype.char not in 'dfDF':
            newdata = newdata.astype(dtype)
        # Find leastsq fit and remove it for each piece
        for m in range(Nreg):
            Npts = bp[m + 1] - bp[m]
            A = ones((Npts, 2), dtype)
            # First column ramps over (0, 1]; replaces the deprecated
            # numpy `cast[dtype]` lookup with an explicit astype.
            A[:, 0] = (arange(1, Npts + 1) * 1.0 / Npts).astype(dtype)
            sl = slice(bp[m], bp[m + 1])
            coef, resids, rank, s = linalg.lstsq(A, newdata[sl])
            newdata[sl] = newdata[sl] - dot(A, coef)
        # Put data back in original shape.
        tdshape = take(dshape, newdims, 0)
        ret = reshape(newdata, tuple(tdshape))
        # list(...) is required: Python 3 ranges do not concatenate.
        vals = list(range(1, rnk))
        olddims = vals[:axis] + [0] + vals[axis:]
        ret = transpose(ret, tuple(olddims))
        return ret
def lfilter_zi(b, a):
    """
    Compute an initial state `zi` for the lfilter function that corresponds
    to the steady state of the step response.

    A typical use of this function is to set the initial state so that the
    output of the filter starts at the same value as the first element of
    the signal to be filtered.

    Parameters
    ----------
    b, a : array_like (1-D)
        The IIR filter coefficients.  See `scipy.signal.lfilter` for more
        information.

    Returns
    -------
    zi : 1-D ndarray
        The initial state for the filter.

    Notes
    -----
    A linear filter with order m has a state space representation
    (A, B, C, D), for which the output y of the filter can be expressed
    as::

        z(n+1) = A*z(n) + B*x(n)
        y(n)   = C*z(n) + D*x(n)

    where z(n) is a vector of length m, A has shape (m, m), B has shape
    (m, 1), C has shape (1, m) and D has shape (1, 1) (assuming x(n) is
    a scalar).  lfilter_zi solves::

        zi = A*zi + B

    In other words, it finds the initial condition for which the response
    to an input of all ones is a constant.

    Given the filter coefficients `a` and `b`, the state space matrices
    for the transposed direct form II implementation of the linear filter,
    which is the implementation used by scipy.signal.lfilter, are::

        A = scipy.linalg.companion(a).T
        B = b[1:] - a[1:]*b[0]

    assuming ``a[0]`` is 1.0; if ``a[0]`` is not 1, `a` and `b` are first
    divided by ``a[0]``.

    Examples
    --------
    >>> from numpy import array, ones
    >>> from scipy.signal import lfilter, lfilter_zi, butter
    >>> b, a = butter(5, 0.25)
    >>> zi = lfilter_zi(b, a)
    >>> y, zo = lfilter(b, a, ones(10), zi=zi)
    >>> y
    array([1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])
    """
    # We could use scipy.signal.normalize, but it uses warnings in
    # cases where a ValueError is more appropriate, and it allows
    # b to be 2D.
    b = np.atleast_1d(b)
    if b.ndim != 1:
        raise ValueError("Numerator b must be rank 1.")
    a = np.atleast_1d(a)
    if a.ndim != 1:
        raise ValueError("Denominator a must be rank 1.")

    # Strip leading zeros from the denominator.
    while len(a) > 1 and a[0] == 0.0:
        a = a[1:]
    if a.size < 1:
        raise ValueError("There must be at least one nonzero `a` coefficient.")

    if a[0] != 1.0:
        # Normalize the coefficients so a[0] == 1.  b must be scaled by
        # the *original* a[0], so divide it first: the previous code did
        # `a = a / a[0]` before `b = b / a[0]`, dividing b by 1.0 and
        # leaving it unscaled.
        b = b / a[0]
        a = a / a[0]

    n = max(len(a), len(b))

    # Pad a or b with zeros so they are the same length.
    if len(a) < n:
        a = np.r_[a, np.zeros(n - len(a))]
    elif len(b) < n:
        b = np.r_[b, np.zeros(n - len(b))]

    # Solve zi = A*zi + B, i.e. (I - A) zi = B, with A = companion(a).T.
    IminusA = np.eye(n - 1) - linalg.companion(a).T
    B = b[1:] - a[1:] * b[0]
    zi = np.linalg.solve(IminusA, B)

    # For future reference: we could also use the following
    # explicit formulas to solve the linear system:
    #
    #  zi = np.zeros(n - 1)
    #  zi[0] = B.sum() / IminusA[:,0].sum()
    #  asum = 1.0
    #  csum = 0.0
    #  for k in range(1,n-1):
    #      asum += a[k]
    #      csum += b[k] - a[k]*b[0]
    #      zi[k] = asum*zi[0] - csum

    return zi
def filtfilt(b, a, x, axis=-1, padtype='odd', padlen=None):
    """A forward-backward filter.

    This function applies a linear filter twice, once forward
    and once backwards.  The combined filter has linear phase.

    Before applying the filter, the function can pad the data along the
    given axis in one of three ways: odd, even or constant.  The odd
    and even extensions have the corresponding symmetry about the end point
    of the data.  The constant extension extends the data with the values
    at end points.  On both the forward and backwards passes, the
    initial condition of the filter is found by using lfilter_zi and
    scaling it by the end point of the extended data.

    Parameters
    ----------
    b : array_like, 1-D
        The numerator coefficient vector of the filter.
    a : array_like, 1-D
        The denominator coefficient vector of the filter.  If a[0]
        is not 1, then both a and b are normalized by a[0].
    x : array_like
        The array of data to be filtered.
    axis : int, optional
        The axis of `x` to which the filter is applied.
        Default is -1.
    padtype : str or None, optional
        Must be 'odd', 'even', 'constant', or None.  This determines the
        type of extension to use for the padded signal to which the filter
        is applied.  If `padtype` is None, no padding is used.  The default
        is 'odd'.
    padlen : int or None, optional
        The number of elements by which to extend `x` at both ends of
        `axis` before applying the filter.  This value must be less than
        `x.shape[axis]-1`.  `padlen=0` implies no padding.
        The default value is 3*max(len(a),len(b)).

    Returns
    -------
    y : ndarray
        The filtered output, an array of type numpy.float64 with the same
        shape as `x`.

    See Also
    --------
    lfilter_zi
    lfilter
    """
    if padtype not in ['even', 'odd', 'constant', None]:
        raise ValueError(("Unknown value '%s' given to padtype.  padtype must "
                          "be 'even', 'odd', 'constant', or None.") %
                         padtype)

    b = np.asarray(b)
    a = np.asarray(a)
    x = np.asarray(x)

    ntaps = max(len(a), len(b))

    if padtype is None:
        padlen = 0

    if padlen is None:
        # Original padding; preserved for backwards compatibility.
        edge = ntaps * 3
    else:
        edge = padlen

    # x's 'axis' dimension must be bigger than edge.
    if x.shape[axis] <= edge:
        raise ValueError("The length of the input vector x must be at least "
                         "padlen, which is %d." % edge)

    if padtype is not None and edge > 0:
        # Make an extension of length `edge` at each end of the input
        # array, mirroring (or holding) the end values so the filter
        # transients die out before the real data begins.
        if padtype == 'even':
            ext = even_ext(x, edge, axis=axis)
        elif padtype == 'odd':
            ext = odd_ext(x, edge, axis=axis)
        else:
            ext = const_ext(x, edge, axis=axis)
    else:
        ext = x

    # Get the steady state of the filter's step response.
    zi = lfilter_zi(b, a)

    # Reshape zi and create x0 so that zi*x0 broadcasts
    # to the correct value for the 'zi' keyword argument
    # to lfilter.
    zi_shape = [1] * x.ndim
    zi_shape[axis] = zi.size
    zi = np.reshape(zi, zi_shape)
    x0 = axis_slice(ext, stop=1, axis=axis)

    # Forward filter.
    (y, zf) = lfilter(b, a, ext, zi=zi * x0)

    # Backward filter.
    # Create y0 so zi*y0 broadcasts appropriately.
    y0 = axis_slice(y, start=-1, axis=axis)
    (y, zf) = lfilter(b, a, axis_reverse(y, axis=axis), zi=zi * y0)

    # Reverse y.
    y = axis_reverse(y, axis=axis)

    if edge > 0:
        # Slice the actual signal from the extended signal.
        y = axis_slice(y, start=edge, stop=-edge, axis=axis)

    return y
from scipy.signal.filter_design import cheby1
from scipy.signal.fir_filter_design import firwin
def decimate(x, q, n=None, ftype='iir', axis=-1):
    """Downsample the signal x by an integer factor q, using an order n filter.

    By default an order 8 Chebyshev type I filter is used.  A 30 point FIR
    filter with hamming window is used if `ftype` is 'fir'.

    Parameters
    ----------
    x : N-d array
        the signal to be downsampled
    q : int
        the downsampling factor
    n : int or None
        the order of the filter (1 less than the length for 'fir')
    ftype : {'iir' or 'fir'}
        the type of the lowpass filter
    axis : int
        the axis along which to decimate

    Returns
    -------
    y : N-d array
        the down-sampled signal

    See also
    --------
    resample
    """
    if not isinstance(q, int):
        raise TypeError("q must be an integer")

    if n is None:
        n = 30 if ftype == 'fir' else 8

    # Design an anti-aliasing lowpass filter with cutoff at the new
    # Nyquist rate (slightly below it for the IIR design).
    if ftype == 'fir':
        b = firwin(n + 1, 1. / q, window='hamming')
        a = 1.
    else:
        b, a = cheby1(n, 0.05, 0.8 / q)

    y = lfilter(b, a, x, axis=axis)

    # Keep every q-th sample along `axis`; index with a tuple, since
    # indexing with a list of slices is deprecated/removed in numpy.
    sl = [slice(None)] * y.ndim
    sl[axis] = slice(None, None, q)
    return y[tuple(sl)]
|
ext = const_ext(x, edge, axis=axis)
else:
|
<|file_name|>tigerchefspider.py<|end_file_name|><|fim▁begin|>from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request, HtmlResponse
from scrapy.utils.response import get_base_url
from scrapy.utils.url import urljoin_rfc
from product_spiders.items import Product, ProductLoader
class TigerChefSpider(BaseSpider):
name = 'tigerchef.com'
allowed_domains = ['tigerchef.com']
start_urls = ('http://www.tigerchef.com',)
    def parse(self, response):
        """Crawl a listing page: follow category links and pagination,
        yield the products found on the page.

        If a page yields neither category links nor products, the same URL
        is re-queued (up to 3 retries) to work around intermittently empty
        responses.
        """
        hxs = HtmlXPathSelector(response)

        #categories = hxs.select('//div[@class="sidebar_nav"]//li/a/@href').extract()
        categories = hxs.select('//div[@class="navigation"]/ul/li/a/@href').extract()
        categories += hxs.select('//ul[@class="cl_subs"]//a/@href').extract()
        # `loaded` records whether this response produced any follow-up work.
        loaded = False
        for category in categories:
            loaded = True
            yield Request(category)

        next_page = hxs.select('//a[@rel="next"]/@href').extract()
        if next_page:
            base_url = get_base_url(response)
            loaded = True
            yield Request(urljoin_rfc(base_url, next_page[0]))

        products = [product for product in self.parse_products(hxs)]
        for product in products:
            yield product

        # Retry an apparently-empty page; dont_filter bypasses the dupe filter.
        if (not products or not loaded) and response.meta.get('retries', 0) < 3:
            yield Request(response.url, dont_filter=True,
                          meta={'retries': response.meta.get('retries', 0) + 1})
def parse_products(self, hxs):
products = hxs.select('//div[starts-with(@id, "product_")]')
for product in products:
product_loader = ProductLoader(Product(), product)
product_loader.add_xpath('url', './/span[@class="description"]/a/@href')
product_loader.add_xpath('name', './/span[@class="description"]/a/b/text()')
#product_loader.add_xpath('price', './/label/text()')
product_loader.add_xpath('price', './/div[@class="our_price"]/text()')
product_loader.add_xpath('sku', './/span[@class="description"]', re='Model #:[\s(]*([\S^)]*)')<|fim▁hole|><|fim▁end|>
|
yield product_loader.load_item()
|
<|file_name|>api.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from abc import ABCMeta
from abc import abstractmethod
import six
@six.add_metaclass(ABCMeta)
class CryptographicEngine(object):
    """
    The abstract base class of the cryptographic engine hierarchy.

    A cryptographic engine is responsible for generating all cryptographic
    objects and conducting all cryptographic operations for a KMIP server
    instance.
    """

    @abstractmethod
    def create_symmetric_key(self, algorithm, length):
        """
        Create a symmetric key.

        Args:
            algorithm(CryptographicAlgorithm): An enumeration specifying the
                algorithm for which the created key will be compliant.
            length(int): The length of the key to be created. This value must
                be compliant with the constraints of the provided algorithm.

        Returns:
            dict: A dictionary containing the key data, with the following
                key/value fields:
                * value - the bytes of the key
                * format - a KeyFormatType enumeration for the bytes format
        """

    @abstractmethod
    def create_asymmetric_key_pair(self, algorithm, length):
        """
        Create an asymmetric key pair.

        Args:
            algorithm(CryptographicAlgorithm): An enumeration specifying the
                algorithm for which the created keys will be compliant.
            length(int): The length of the keys to be created. This value must
                be compliant with the constraints of the provided algorithm.

        Returns:
            dict: A dictionary containing the public key data, with the
                following key/value fields:
                * value - the bytes of the key
                * format - a KeyFormatType enumeration for the bytes format
            dict: A dictionary containing the private key data, identical in
                structure to the public key dictionary.
        """
| |
<|file_name|>dms.js<|end_file_name|><|fim▁begin|>/* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
/* Geodesy representation conversion functions (c) Chris Veness 2002-2015 */
/* - www.movable-type.co.uk/scripts/latlong.html MIT Licence */
/* */
/* Sample usage: */
/* var lat = Dms.parseDMS('51° 28′ 40.12″ N'); */
/* var lon = Dms.parseDMS('000° 00′ 05.31″ W'); */
/* var p1 = new LatLon(lat, lon); */
/* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
/* jshint node:true *//* global define */
'use strict';
/**
* Tools for converting between numeric degrees and degrees / minutes / seconds.
*
* @namespace
*/
var Dms = {};
// note Unicode Degree = U+00B0. Prime = U+2032, Double prime = U+2033
/**
* Parses string representing degrees/minutes/seconds into numeric degrees.
*
* This is very flexible on formats, allowing signed decimal degrees, or deg-min-sec optionally
* suffixed by compass direction (NSEW). A variety of separators are accepted (eg 3° 37′ 09″W).
* Seconds and minutes may be omitted.
*
* @param {string|number} dmsStr - Degrees or deg/min/sec in variety of formats.
* @returns {number} Degrees as decimal number.
*/
Dms.parseDMS = function(dmsStr) {
    // check for signed decimal degrees without NSEW, if so return it directly
    if (typeof dmsStr == 'number' && isFinite(dmsStr)) return Number(dmsStr);

    // strip off any sign or compass dir'n & split out separate d/m/s
    var dms = String(dmsStr).trim().replace(/^-/,'').replace(/[NSEW]$/i,'').split(/[^0-9.,]+/);
    if (dms[dms.length-1]=='') dms.splice(dms.length-1);  // from trailing symbol

    // note: an array == '' comparison is true only for an empty array
    // (it coerces to the empty string)
    if (dms == '') return NaN;

    // and convert to decimal degrees... (the /1 divisions coerce the string
    // parts to numbers)
    var deg;
    switch (dms.length) {
        case 3:  // interpret 3-part result as d/m/s
            deg = dms[0]/1 + dms[1]/60 + dms[2]/3600;
            break;
        case 2:  // interpret 2-part result as d/m
            deg = dms[0]/1 + dms[1]/60;
            break;
        case 1:  // just d (possibly decimal) or non-separated dddmmss
            deg = dms[0];
            // check for fixed-width unseparated format eg 0033709W
            //if (/[NS]/i.test(dmsStr)) deg = '0' + deg; // - normalise N/S to 3-digit degrees
            //if (/[0-9]{7}/.test(deg)) deg = deg.slice(0,3)/1 + deg.slice(3,5)/60 + deg.slice(5)/3600;
            break;
        default:
            return NaN;
    }
    if (/^-|[WS]$/i.test(dmsStr.trim())) deg = -deg; // take '-', west and south as -ve
    return Number(deg);
};
/**
* Converts decimal degrees to deg/min/sec format
* - degree, prime, double-prime symbols are added, but sign is discarded, though no compass
* direction is added.
*
* @private
* @param {number} deg - Degrees to be formatted as specified.
* @param {string} [format=dms] - Return value as 'd', 'dm', 'dms' for deg, deg+min, deg+min+sec.
* @param {number} [dp=0|2|4] - Number of decimal places to use – default 0 for dms, 2 for dm, 4 for d.
* @returns {string} Degrees formatted as deg/min/secs according to specified format.
*/
Dms.toDMS = function(deg, format, dp) {
    if (isNaN(deg)) return null;  // give up here if we can't make a number from deg

    // default values
    if (format === undefined) format = 'dms';
    if (dp === undefined) {
        switch (format) {
            case 'd': case 'deg': dp = 4; break;
            case 'dm': case 'deg+min': dp = 2; break;
            case 'dms': case 'deg+min+sec': dp = 0; break;
            default: format = 'dms'; dp = 0;  // be forgiving on invalid format
        }
    }

    deg = Math.abs(deg);  // (unsigned result ready for appending compass dir'n)

    var dms, d, m, s;
    switch (format) {
        default: // invalid format spec!
        case 'd': case 'deg':
            d = deg.toFixed(dp);     // round degrees (d is a string from here on)
            // note: the d<100 / d<10 comparisons coerce the string back to a
            // number, so padding works even after '0' has been prepended
            if (d<100) d = '0' + d;  // pad with leading zeros
            if (d<10) d = '0' + d;
            dms = d + '°';
            break;
        case 'dm': case 'deg+min':
            var min = (deg*60).toFixed(dp);  // convert degrees to minutes & round
            d = Math.floor(min / 60);        // get component deg/min
            m = (min % 60).toFixed(dp);      // pad with trailing zeros
            if (d<100) d = '0' + d;          // pad with leading zeros
            if (d<10) d = '0' + d;
            if (m<10) m = '0' + m;
            dms = d + '°' + m + '′';
            break;
        case 'dms': case 'deg+min+sec':
            var sec = (deg*3600).toFixed(dp);  // convert degrees to seconds & round
            d = Math.floor(sec / 3600);        // get component deg/min/sec
            m = Math.floor(sec/60) % 60;
            s = (sec % 60).toFixed(dp);        // pad with trailing zeros
            if (d<100) d = '0' + d;            // pad with leading zeros
            if (d<10) d = '0' + d;
            if (m<10) m = '0' + m;
            if (s<10) s = '0' + s;
            dms = d + '°' + m + '′' + s + '″';
            break;
    }

    return dms;
};
/**
* Converts numeric degrees to deg/min/sec latitude (2-digit degrees, suffixed with N/S).
*
* @param {number} deg - Degrees to be formatted as specified.
* @param {string} [format=dms] - Return value as 'd', 'dm', 'dms' for deg, deg+min, deg+min+sec.
* @param {number} [dp=0|2|4] - Number of decimal places to use – default 0 for dms, 2 for dm, 4 for d.
* @returns {string} Degrees formatted as deg/min/secs according to specified format.
*/
Dms.toLat = function(deg, format, dp) {
    // Format as deg/min/sec, then trade the leading zero of the 3-digit
    // degrees for a trailing N/S hemisphere suffix (latitude is 2-digit).
    var formatted = Dms.toDMS(deg, format, dp);
    if (formatted === null) return '–';
    return formatted.slice(1) + (deg < 0 ? 'S' : 'N');
};
/**
* Convert numeric degrees to deg/min/sec longitude (3-digit degrees, suffixed with E/W)
*
* @param {number} deg - Degrees to be formatted as specified.
* @param {string} [format=dms] - Return value as 'd', 'dm', 'dms' for deg, deg+min, deg+min+sec.
* @param {number} [dp=0|2|4] - Number of decimal places to use – default 0 for dms, 2 for dm, 4 for d.
* @returns {string} Degrees formatted as deg/min/secs according to specified format.
*/
Dms.toLon = function(deg, format, dp) {
    // Format as deg/min/sec and append the E/W hemisphere suffix.
    var formatted = Dms.toDMS(deg, format, dp);
    if (formatted === null) return '–';
    return formatted + (deg < 0 ? 'W' : 'E');
};
/**
* Converts numeric degrees to deg/min/sec as a bearing (0°..360°)
*
* @param {number} deg - Degrees to be formatted as specified.
* @param {string} [format=dms] - Return value as 'd', 'dm', 'dms' for deg, deg+min, deg+min+sec.
* @param {number} [dp=0|2|4] - Number of decimal places to use – default 0 for dms, 2 for dm, 4 for d.
* @returns {string} Degrees formatted as deg/min/secs according to specified format.
*/
Dms.toBrng = function(deg, format, dp) {
deg = (Number(deg)+360) % 360; // normalise -ve values to 180°..360°
var brng = Dms.toDMS(deg, format, dp);
return brng===null ? '–' : brng.replace('360', '0'); // just in case rounding took us up to 360°!
};<|fim▁hole|> * Returns compass point (to given precision) for supplied bearing.
*
* @param {number} bearing - Bearing in degrees from north.
* @param {number} [precision=3] - Precision (cardinal / intercardinal / secondary-intercardinal).
* @returns {string} Compass point for supplied bearing.
*
* @example
* var point = Dms.compassPoint(24); // point = 'NNE'
* var point = Dms.compassPoint(24, 1); // point = 'N'
*/
Dms.compassPoint = function(bearing, precision) {
    if (precision === undefined) precision = 3;
    // note precision = max length of compass point; it could be extended to 4
    // for quarter-winds (eg NEbN), but they are little used

    bearing = ((bearing%360)+360)%360; // normalise to 0..360

    // Full 16-wind rose; coarser precisions index every 4th / 2nd entry.
    var rose = ['N','NNE','NE','ENE','E','ESE','SE','SSE',
                'S','SSW','SW','WSW','W','WNW','NW','NNW'];

    switch (precision) {
        case 1: // 4 compass points (N/E/S/W)
            return rose[(Math.round(bearing*4/360)%4) * 4];
        case 2: // 8 compass points
            return rose[(Math.round(bearing*8/360)%8) * 2];
        case 3: // 16 compass points
            return rose[Math.round(bearing*16/360)%16];
        default:
            throw new RangeError('Precision must be between 1 and 3');
    }
};
/* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
/** Polyfill String.trim for old browsers
* (q.v. blog.stevenlevithan.com/archives/faster-trim-javascript) */
if (String.prototype.trim === undefined) {
    String.prototype.trim = function() {
        // two-pass regex: strip leading whitespace, then trailing whitespace
        return String(this).replace(/^\s\s*/, '').replace(/\s\s*$/, '');
    };
}
/* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
if (typeof module != 'undefined' && module.exports) module.exports = Dms; // CommonJS (Node)
if (typeof define == 'function' && define.amd) define([], function() { return Dms; }); // AMD<|fim▁end|>
|
/**
|
<|file_name|>injection_rwx.py<|end_file_name|><|fim▁begin|># Copyright (C) 2014 Optiv, Inc. ([email protected])
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
from lib.cuckoo.common.abstracts import Signature
class InjectionRWX(Signature):
    """Cuckoo signature: flags processes that create RWX
    (read-write-execute) memory regions, a common precursor to
    code injection."""
    name = "injection_rwx"
    description = "Creates RWX memory"
    severity = 2
    confidence = 50
    categories = ["injection"]
    authors = ["Optiv"]
    minimum = "1.2"
    evented = True

    def __init__(self, *args, **kwargs):
        Signature.__init__(self, *args, **kwargs)
<|fim▁hole|> if call["api"] == "NtAllocateVirtualMemory" or call["api"] == "VirtualProtectEx":
protection = self.get_argument(call, "Protection")
# PAGE_EXECUTE_READWRITE
if protection == "0x00000040":
return True
elif call["api"] == "NtProtectVirtualMemory":
protection = self.get_argument(call, "NewAccessProtection")
# PAGE_EXECUTE_READWRITE
if protection == "0x00000040":
return True<|fim▁end|>
|
filter_apinames = set(["NtAllocateVirtualMemory","NtProtectVirtualMemory","VirtualProtectEx"])
filter_analysistypes = set(["file"])
def on_call(self, call, process):
|
<|file_name|>gui.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
""" gui -- The main wicd GUI module.
Module containing the code for the main wicd GUI.
"""
#
# Copyright (C) 2007-2009 Adam Blackburn
# Copyright (C) 2007-2009 Dan O'Reilly
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License Version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import time
import gobject
import pango
import gtk
from itertools import chain
from dbus import DBusException
from wicd import misc
from wicd import wpath
from wicd import dbusmanager
from wicd.misc import noneToString
import prefs
from prefs import PreferencesDialog
import netentry
from netentry import WiredNetworkEntry, WirelessNetworkEntry
from guiutil import error, LabelEntry
from wicd.translations import language
if __name__ == '__main__':
wpath.chdir(__file__)
proxy_obj = daemon = wireless = wired = bus = None
DBUS_AVAIL = False
def setup_dbus(force=True):
    """ Connect to the wicd daemon over D-Bus and cache its interfaces.

    Sets the module-level bus/daemon/wireless/wired globals. If the daemon
    is not reachable and force is True, tries to start it (via a graphical
    sudo prompt) before giving up. Returns True on success, False otherwise.
    """
    global bus, daemon, wireless, wired, DBUS_AVAIL
    try:
        dbusmanager.connect_to_dbus()
    except DBusException:
        if force:
            print "Can't connect to the daemon, trying to start it automatically..."
            if not misc.PromptToStartDaemon():
                print "Failed to find a graphical sudo program, cannot continue."
                return False
            try:
                dbusmanager.connect_to_dbus()
            except DBusException:
                error(None, "Could not connect to wicd's D-Bus interface. " +
                      "Check the wicd log for error messages.")
                return False
        else:
            return False
    # Propagate the fresh connection to the helper modules that cache it.
    prefs.setup_dbus()
    netentry.setup_dbus()
    bus = dbusmanager.get_bus()
    dbus_ifaces = dbusmanager.get_dbus_ifaces()
    daemon = dbus_ifaces['daemon']
    wireless = dbus_ifaces['wireless']
    wired = dbus_ifaces['wired']
    DBUS_AVAIL = True
    return True
def handle_no_dbus(from_tray=False):
    """ Mark the daemon as unavailable and warn the user.

    Called when the daemon announces it is closing. Returns False so it can
    also be used directly as a one-shot glib callback.
    """
    global DBUS_AVAIL
    DBUS_AVAIL = False
    if from_tray: return False
    print "Wicd daemon is shutting down!"
    # Non-blocking dialog: the GUI stays up without a daemon.
    error(None, language['lost_dbus'], block=False)
    return False
class WiredProfileChooser:
    """ Class for displaying the wired profile chooser. """
    def __init__(self):
        """ Initializes and runs the wired profile chooser.

        Shows a modal dialog listing saved wired profiles; on Connect it
        loads the chosen profile and connects. The checkbox lets the user
        suppress the chooser in the future (forced-disconnect mode).
        """
        # Import and init WiredNetworkEntry to steal some of the
        # functions and widgets it uses.
        wired_net_entry = WiredNetworkEntry()

        dialog = gtk.Dialog(title = language['wired_network_found'],
                            flags = gtk.DIALOG_MODAL,
                            buttons = (gtk.STOCK_CONNECT, 1,
                                       gtk.STOCK_CANCEL, 2))
        dialog.set_has_separator(False)
        dialog.set_size_request(400, 150)
        instruct_label = gtk.Label(language['choose_wired_profile'] + ':\n')
        stoppopcheckbox = gtk.CheckButton(language['stop_showing_chooser'])

        # This chooser is a cut-down pop-up, not the full GUI.
        wired_net_entry.is_full_gui = False
        instruct_label.set_alignment(0, 0)
        stoppopcheckbox.set_active(False)

        # Remove widgets that were added to the normal WiredNetworkEntry
        # so that they can be added to the pop-up wizard.
        wired_net_entry.vbox_top.remove(wired_net_entry.hbox_temp)
        wired_net_entry.vbox_top.remove(wired_net_entry.profile_help)

        dialog.vbox.pack_start(instruct_label, fill=False, expand=False)
        dialog.vbox.pack_start(wired_net_entry.profile_help, False, False)
        dialog.vbox.pack_start(wired_net_entry.hbox_temp, False, False)
        dialog.vbox.pack_start(stoppopcheckbox, False, False)

        dialog.show_all()
        wired_profiles = wired_net_entry.combo_profile_names
        wired_net_entry.profile_help.hide()
        if wired_net_entry.profile_list != None:
            wired_profiles.set_active(0)
            print "wired profiles found"
        else:
            print "no wired profiles found"
            # No saved profiles: show the help text instead.
            wired_net_entry.profile_help.show()

        response = dialog.run()
        if response == 1:
            print 'reading profile ', wired_profiles.get_active_text()
            wired.ReadWiredNetworkProfile(wired_profiles.get_active_text())
            wired.ConnectWired()
        else:
            if stoppopcheckbox.get_active():
                # User asked not to be prompted for wired networks again.
                daemon.SetForcedDisconnect(True)
        dialog.destroy()
class appGui(object):
""" The main wicd GUI class. """
def __init__(self, standalone=False, tray=None):
""" Initializes everything needed for the GUI. """
setup_dbus()
self.tray = tray
gladefile = os.path.join(wpath.gtk, "wicd.ui")
self.wTree = gtk.Builder()
self.wTree.add_from_file(gladefile)
self.window = self.wTree.get_object("window1")
width = int(gtk.gdk.screen_width() / 2)
if width > 530:
width = 530
self.window.resize(width, int(gtk.gdk.screen_height() / 1.7))
dic = { "refresh_clicked" : self.refresh_clicked,
"quit_clicked" : self.exit,
"rfkill_clicked" : self.switch_rfkill,
"disconnect_clicked" : self.disconnect_all,
"main_exit" : self.exit,
"cancel_clicked" : self.cancel_connect,
"hidden_clicked" : self.connect_hidden,
"preferences_clicked" : self.settings_dialog,
"about_clicked" : self.about_dialog,
"create_adhoc_clicked" : self.create_adhoc_network,
}
self.wTree.connect_signals(dic)
# Set some strings in the GUI - they may be translated
label_instruct = self.wTree.get_object("label_instructions")
label_instruct.set_label(language['select_a_network'])
probar = self.wTree.get_object("progressbar")
probar.set_text(language['connecting'])
self.rfkill_button = self.wTree.get_object("rfkill_button")
self.all_network_list = self.wTree.get_object("network_list_vbox")<|fim▁hole|> self.wired_network_box = gtk.VBox(False, 0)
self.wired_network_box.show_all()
self.network_list = gtk.VBox(False, 0)
self.all_network_list.pack_start(self.wired_network_box, False, False)
self.all_network_list.pack_start(self.network_list, True, True)
self.network_list.show_all()
self.status_area = self.wTree.get_object("connecting_hbox")
self.status_bar = self.wTree.get_object("statusbar")
menu = self.wTree.get_object("menu1")
self.status_area.hide_all()
if os.path.exists(os.path.join(wpath.images, "wicd.png")):
self.window.set_icon_from_file(os.path.join(wpath.images, "wicd.png"))
self.statusID = None
self.first_dialog_load = True
self.is_visible = True
self.pulse_active = False
self.pref = None
self.standalone = standalone
self.wpadrivercombo = None
self.connecting = False
self.refreshing = False
self.prev_state = None
self.update_cb = None
self.network_list.set_sensitive(False)
label = gtk.Label("%s..." % language['scanning'])
self.network_list.pack_start(label)
label.show()
self.wait_for_events(0.2)
self.window.connect('delete_event', self.exit)
self.window.connect('key-release-event', self.key_event)
daemon.SetGUIOpen(True)
bus.add_signal_receiver(self.dbus_scan_finished, 'SendEndScanSignal',
'org.wicd.daemon.wireless')
bus.add_signal_receiver(self.dbus_scan_started, 'SendStartScanSignal',
'org.wicd.daemon.wireless')
bus.add_signal_receiver(self.update_connect_buttons, 'StatusChanged',
'org.wicd.daemon')
bus.add_signal_receiver(self.handle_connection_results,
'ConnectResultsSent', 'org.wicd.daemon')
bus.add_signal_receiver(lambda: setup_dbus(force=False),
"DaemonStarting", "org.wicd.daemon")
bus.add_signal_receiver(self._do_statusbar_update, 'StatusChanged',
'org.wicd.daemon')
if standalone:
bus.add_signal_receiver(handle_no_dbus, "DaemonClosing",
"org.wicd.daemon")
self._do_statusbar_update(*daemon.GetConnectionStatus())
self.wait_for_events(0.1)
self.update_cb = misc.timeout_add(2, self.update_statusbar)
self.refresh_clicked()
    def handle_connection_results(self, results):
        """ Show an error dialog when a connection attempt failed.

        'Success' and 'aborted' are the two non-error outcomes.
        """
        if results not in ['Success', 'aborted'] and self.is_visible:
            error(self.window, language[results], block=False)
    def create_adhoc_network(self, widget=None):
        """ Shows a dialog that creates a new adhoc network.

        Collects ESSID, IP, channel and an optional WEP key, then asks the
        daemon to create the ad-hoc network on OK.
        """
        print "Starting the Ad-Hoc Network Creation Process..."
        dialog = gtk.Dialog(title = language['create_adhoc_network'],
                            flags = gtk.DIALOG_MODAL,
                            buttons=(gtk.STOCK_CANCEL, 2, gtk.STOCK_OK, 1))
        dialog.set_has_separator(False)
        dialog.set_size_request(400, -1)
        self.chkbox_use_encryption = gtk.CheckButton(language['use_wep_encryption'])
        self.chkbox_use_encryption.set_active(False)
        ip_entry = LabelEntry(language['ip'] + ':')
        essid_entry = LabelEntry(language['essid'] + ':')
        channel_entry = LabelEntry(language['channel'] + ':')
        self.key_entry = LabelEntry(language['key'] + ':')
        self.key_entry.set_auto_hidden(True)
        self.key_entry.set_sensitive(False)

        chkbox_use_ics = gtk.CheckButton(language['use_ics'])

        # Key field is only editable when encryption is enabled.
        self.chkbox_use_encryption.connect("toggled",
                                           self.toggle_encrypt_check)
        channel_entry.entry.set_text('3')
        essid_entry.entry.set_text('My_Adhoc_Network')
        ip_entry.entry.set_text('169.254.12.10')  # Just a random IP

        vbox_ah = gtk.VBox(False, 0)
        # NOTE(review): this clobbers the main window's wired_network_box
        # attribute and the new VBox is never used here -- looks like
        # leftover code; confirm before removing.
        self.wired_network_box = gtk.VBox(False, 0)
        vbox_ah.pack_start(self.chkbox_use_encryption, False, False)
        vbox_ah.pack_start(self.key_entry, False, False)
        vbox_ah.show()
        dialog.vbox.pack_start(essid_entry)
        dialog.vbox.pack_start(ip_entry)
        dialog.vbox.pack_start(channel_entry)
        dialog.vbox.pack_start(chkbox_use_ics)
        dialog.vbox.pack_start(vbox_ah)
        dialog.vbox.set_spacing(5)
        dialog.show_all()
        response = dialog.run()
        if response == 1:
            wireless.CreateAdHocNetwork(essid_entry.entry.get_text(),
                                        channel_entry.entry.get_text(),
                                        ip_entry.entry.get_text().strip(),
                                        "WEP",
                                        self.key_entry.entry.get_text(),
                                        self.chkbox_use_encryption.get_active(),
                                        False) #chkbox_use_ics.get_active())
        dialog.destroy()
    def toggle_encrypt_check(self, widget=None):
        """ Toggles the encryption key entry box for the ad-hoc dialog. """
        # Key entry is only meaningful while encryption is checked.
        self.key_entry.set_sensitive(self.chkbox_use_encryption.get_active())
    def switch_rfkill(self, widget=None):
        """ Switches wifi card on/off. """
        wireless.SwitchRfKill()
        # Relabel the button to offer the opposite action.
        if wireless.GetRfKillEnabled():
            self.rfkill_button.set_stock_id(gtk.STOCK_MEDIA_PLAY)
            self.rfkill_button.set_label(language['switch_on_wifi'])
        else:
            self.rfkill_button.set_stock_id(gtk.STOCK_MEDIA_STOP)
            self.rfkill_button.set_label(language['switch_off_wifi'])
    def disconnect_all(self, widget=None):
        """ Disconnects from any active network. """
        def handler(*args):
            # Re-enable the network list once the async call returns.
            gobject.idle_add(self.all_network_list.set_sensitive, True)

        # Grey out the list while the disconnect is in flight.
        self.all_network_list.set_sensitive(False)
        daemon.Disconnect(reply_handler=handler, error_handler=handler)
    def about_dialog(self, widget, event=None):
        """ Displays an about dialog. """
        dialog = gtk.AboutDialog()
        dialog.set_name("Wicd")
        # The daemon's Hello() reply is used as the version string.
        dialog.set_version(daemon.Hello())
        dialog.set_authors([ "Adam Blackburn", "Dan O'Reilly", "Andrew Psaltis" ])
        dialog.set_website("http://wicd.sourceforge.net")
        dialog.run()
        dialog.destroy()
    def key_event (self, widget, event=None):
        """ Handle key-release-events. """
        # Ctrl+W / Ctrl+Q close the window.
        if event.state & gtk.gdk.CONTROL_MASK and \
           gtk.gdk.keyval_name(event.keyval) in ["w", "q"]:
            self.exit()
    def settings_dialog(self, widget, event=None):
        """ Displays a general settings dialog. """
        if not self.pref:
            # Lazily build the preferences dialog the first time.
            self.pref = PreferencesDialog(self, self.wTree)
        else:
            # Reuse the dialog but reload the current values into it.
            self.pref.load_preferences_diag()
        if self.pref.run() == 1:
            self.pref.save_results()
        self.pref.hide()
    def connect_hidden(self, widget):
        """ Prompts the user for a hidden network, then scans for it. """
        dialog = gtk.Dialog(title=language['hidden_network'],
                            flags=gtk.DIALOG_MODAL,
                            buttons=(gtk.STOCK_CONNECT, 1, gtk.STOCK_CANCEL, 2))
        dialog.set_has_separator(False)
        lbl = gtk.Label(language['hidden_network_essid'])
        textbox = gtk.Entry()
        dialog.vbox.pack_start(lbl)
        dialog.vbox.pack_start(textbox)
        dialog.show_all()
        button = dialog.run()
        if button == 1:
            answer = textbox.get_text()
            dialog.destroy()
            # Fresh scan with the entered ESSID set as the hidden network.
            self.refresh_networks(None, True, answer)
        else:
            dialog.destroy()
    def cancel_connect(self, widget):
        """ Alerts the daemon to cancel the connection process. """
        # Should cancel a connection if there is one in progress.
        # Disable the button so it can't be pressed twice.
        cancel_button = self.wTree.get_object("cancel_button")
        cancel_button.set_sensitive(False)
        daemon.CancelConnect()
        # Prevents automatic reconnecting if that option is enabled
        daemon.SetForcedDisconnect(True)
    def pulse_progress_bar(self):
        """ Pulses the progress bar while connecting to a network.

        Returning False cancels the glib timeout; True keeps it running.
        """
        if not self.pulse_active:
            return False
        if not self.is_visible:
            return True
        try:
            gobject.idle_add(self.wTree.get_object("progressbar").pulse)
        except:
            # Best-effort: ignore failures if the widget is going away.
            pass
        return True
    def update_statusbar(self):
        """ Triggers a status update in wicd-monitor.

        Returns True so the periodic glib timeout stays scheduled.
        """
        if not self.is_visible:
            return True

        daemon.UpdateState()
        if self.connecting:
            # If we're connecting, don't wait for the monitor to send
            # us a signal, since it won't until the connection is made.
            self._do_statusbar_update(*daemon.GetConnectionStatus())
        return True
    def _do_statusbar_update(self, state, info):
        """ Dispatch a connection-state change to its statusbar handler. """
        if not self.is_visible:
            return True
        if state == misc.WIRED:
            return self.set_wired_state(info)
        elif state == misc.WIRELESS:
            return self.set_wireless_state(info)
        elif state == misc.CONNECTING:
            return self.set_connecting_state(info)
        elif state in (misc.SUSPENDED, misc.NOT_CONNECTED):
            return self.set_not_connected_state(info)
        return True
    def set_wired_state(self, info):
        """ Update the statusbar for an active wired connection. """
        if self.connecting:
            # Adjust our state from connecting->connected.
            self._set_not_connecting_state()
        self.set_status(language['connected_to_wired'].replace('$A', info[0]))
        return True
    def set_wireless_state(self, info):
        """ Update the statusbar for an active wireless connection. """
        if self.connecting:
            # Adjust our state from connecting->connected.
            self._set_not_connecting_state()
        self.set_status(language['connected_to_wireless'].replace
                        ('$A', info[1]).replace
                        ('$B', daemon.FormatSignalForPrinting(info[2])).replace
                        ('$C', info[0]))
        return True
    def set_not_connected_state(self, info):
        """ Update the statusbar when no connection is active. """
        if self.connecting:
            # Adjust our state from connecting->not-connected.
            self._set_not_connecting_state()
        self.set_status(language['not_connected'])
        return True
    def _set_not_connecting_state(self):
        """ Transition the GUI out of the 'connecting' state. """
        if self.connecting:
            # Slow statusbar updates back down to every 2 seconds.
            if self.update_cb:
                gobject.source_remove(self.update_cb)
            self.update_cb = misc.timeout_add(2, self.update_statusbar)
            self.connecting = False
        if self.pulse_active:
            self.pulse_active = False
            gobject.idle_add(self.all_network_list.set_sensitive, True)
            gobject.idle_add(self.status_area.hide_all)
        if self.statusID:
            gobject.idle_add(self.status_bar.remove_message, 1, self.statusID)
    def set_connecting_state(self, info):
        """ Put the GUI into the 'connecting' state.

        info[0] is 'wireless' or 'wired'; the statusbar shows the
        connection-progress message reported by the daemon.
        """
        if not self.connecting:
            # Speed up statusbar updates while a connection is in progress.
            if self.update_cb:
                gobject.source_remove(self.update_cb)
            self.update_cb = misc.timeout_add(500, self.update_statusbar,
                                              milli=True)
            self.connecting = True
        if not self.pulse_active:
            self.pulse_active = True
            misc.timeout_add(100, self.pulse_progress_bar, milli=True)
            gobject.idle_add(self.all_network_list.set_sensitive, False)
            gobject.idle_add(self.status_area.show_all)
        if self.statusID:
            gobject.idle_add(self.status_bar.remove_message, 1, self.statusID)
        if info[0] == "wireless":
            essid, stat = wireless.CheckWirelessConnectingMessage()
            gobject.idle_add(self.set_status, "%s: %s" % (essid,
                                                    language[str(stat)]))
        elif info[0] == "wired":
            gobject.idle_add(self.set_status, language['wired_network'] + ': ' +
                             language[str(wired.CheckWiredConnectingMessage())])
        return True
    def update_connect_buttons(self, state=None, x=None, force_check=False):
        """ Updates the connect/disconnect buttons for each network entry.

        If force_check is given, update the buttons even if the
        current network state is the same as the previous.
        """
        if not DBUS_AVAIL: return
        if not state:
            state, x = daemon.GetConnectionStatus()

        # Only walk every entry when the state actually changed (or forced).
        if self.prev_state != state or force_check:
            apbssid = wireless.GetApBssid()
            for entry in chain(self.network_list, self.wired_network_box):
                if hasattr(entry, "update_connect_button"):
                    entry.update_connect_button(state, apbssid)
        self.prev_state = state
    def set_status(self, msg):
        """ Sets the status bar message for the GUI. """
        # Keep the message id so the message can be removed later.
        self.statusID = self.status_bar.push(1, msg)
    def dbus_scan_finished(self):
        """ Calls for a non-fresh update of the gui window.

        This method is called after a wireless scan is completed.
        """
        if not DBUS_AVAIL: return
        gobject.idle_add(self.refresh_networks, None, False, None)
    def dbus_scan_started(self):
        """ Called when a wireless scan starts. """
        if not DBUS_AVAIL: return
        # Grey out the list while the scan runs.
        self.network_list.set_sensitive(False)
    def _remove_items_from_vbox(self, vbox):
        """ Remove and destroy every child widget of the given VBox. """
        for z in vbox:
            vbox.remove(z)
            z.destroy()
            del z
    def refresh_clicked(self, widget=None):
        """ Kick off an asynchronous wireless scan.

        Clears the current entries, shows a 'scanning' placeholder, adds a
        wired entry when appropriate, then asks the daemon to scan; results
        arrive via the SendEndScanSignal handler.
        """
        if not DBUS_AVAIL or self.connecting: return
        self.refreshing = True

        # Remove stuff already in there.
        self._remove_items_from_vbox(self.wired_network_box)
        self._remove_items_from_vbox(self.network_list)
        label = gtk.Label("%s..." % language['scanning'])
        self.network_list.pack_start(label)
        self.network_list.show_all()
        if wired.CheckPluggedIn() or daemon.GetAlwaysShowWiredInterface():
            printLine = True  # In this case we print a separator.
            wirednet = WiredNetworkEntry()
            self.wired_network_box.pack_start(wirednet, False, False)
            wirednet.connect_button.connect("clicked", self.connect,
                                            "wired", 0, wirednet)
            wirednet.disconnect_button.connect("clicked", self.disconnect,
                                               "wired", 0, wirednet)
            wirednet.advanced_button.connect("clicked",
                                             self.edit_advanced, "wired", 0,
                                             wirednet)
            state, x = daemon.GetConnectionStatus()
            wirednet.update_connect_button(state)

            self._wired_showing = True
        else:
            self._wired_showing = False
        wireless.Scan(False)
    def refresh_networks(self, widget=None, fresh=True, hidden=None):
        """ Refreshes the network list.

        If fresh=True, scans for wireless networks and displays the results.
        If a ethernet connection is available, or the user has chosen to,
        displays a Wired Network entry as well.
        If hidden isn't None, will scan for networks after running
        iwconfig <wireless interface> essid <hidden>.
        """
        if fresh:
            if hidden:
                wireless.SetHiddenNetworkESSID(noneToString(hidden))
            # Delegate to the async scan; results come back via D-Bus.
            self.refresh_clicked()
            return
        print "refreshing..."
        self.network_list.set_sensitive(False)
        self._remove_items_from_vbox(self.network_list)
        self.wait_for_events()
        printLine = False  # We don't print a separator by default.
        if self._wired_showing:
            printLine = True
        num_networks = wireless.GetNumberOfNetworks()
        instruct_label = self.wTree.get_object("label_instructions")
        if num_networks > 0:
            instruct_label.show()
            for x in range(0, num_networks):
                if printLine:
                    sep = gtk.HSeparator()
                    self.network_list.pack_start(sep, padding=10, fill=False,
                                                 expand=False)
                    sep.show()
                else:
                    printLine = True
                tempnet = WirelessNetworkEntry(x)
                self.network_list.pack_start(tempnet, False, False)
                tempnet.connect_button.connect("clicked",
                                               self.connect, "wireless", x,
                                               tempnet)
                tempnet.disconnect_button.connect("clicked",
                                                  self.disconnect, "wireless",
                                                  x, tempnet)
                tempnet.advanced_button.connect("clicked",
                                                self.edit_advanced, "wireless",
                                                x, tempnet)
        else:
            instruct_label.hide()
            # Explain the empty list: rf-kill switch vs. nothing found.
            if wireless.GetKillSwitchEnabled():
                label = gtk.Label(language['killswitch_enabled'] + ".")
            else:
                label = gtk.Label(language['no_wireless_networks_found'])
            self.network_list.pack_start(label)
            label.show()
        self.update_connect_buttons(force_check=True)
        self.network_list.set_sensitive(True)
        self.refreshing = False
def save_settings(self, nettype, networkid, networkentry):
""" Verifies and saves the settings for the network entry. """
entry = networkentry.advanced_dialog
opt_entlist = []
req_entlist = []
# First make sure all the Addresses entered are valid.
if entry.chkbox_static_ip.get_active():
req_entlist = [entry.txt_ip, entry.txt_netmask]
opt_entlist = [entry.txt_gateway]
if entry.chkbox_static_dns.get_active() and \
not entry.chkbox_global_dns.get_active():
for ent in [entry.txt_dns_1, entry.txt_dns_2, entry.txt_dns_3]:
opt_entlist.append(ent)
# Required entries.
for lblent in req_entlist:
lblent.set_text(lblent.get_text().strip())
if not misc.IsValidIP(lblent.get_text()):
error(self.window, language['invalid_address'].
replace('$A', lblent.label.get_label()))
return False
# Optional entries, only check for validity if they're entered.
for lblent in opt_entlist:
lblent.set_text(lblent.get_text().strip())
if lblent.get_text() and not misc.IsValidIP(lblent.get_text()):
error(self.window, language['invalid_address'].
replace('$A', lblent.label.get_label()))
return False
# Now save the settings.
if nettype == "wireless":
if not networkentry.save_wireless_settings(networkid):
return False
elif nettype == "wired":
if not networkentry.save_wired_settings():
return False
return True
def edit_advanced(self, widget, ttype, networkid, networkentry):
""" Display the advanced settings dialog.
Displays the advanced settings dialog and saves any changes made.
If errors occur in the settings, an error message will be displayed
and the user won't be able to save the changes until the errors
are fixed.
"""
dialog = networkentry.advanced_dialog
dialog.set_values()
dialog.show_all()
while True:
if self.run_settings_dialog(dialog, ttype, networkid, networkentry):
break
dialog.hide()
def run_settings_dialog(self, dialog, nettype, networkid, networkentry):
""" Runs the settings dialog.
Runs the settings dialog and returns True if settings are saved
successfully, and false otherwise.
"""
result = dialog.run()
if result == gtk.RESPONSE_ACCEPT:
if self.save_settings(nettype, networkid, networkentry):
return True
else:
return False
return True
def check_encryption_valid(self, networkid, entry):
""" Make sure that encryption settings are properly filled in. """
# Make sure no entries are left blank
if entry.chkbox_encryption.get_active():
encryption_info = entry.encryption_info
for entry_info in encryption_info.itervalues():
if entry_info[0].entry.get_text() == "" and \
entry_info[1] == 'required':
error(self.window, "%s (%s)" % (language['encrypt_info_missing'],
entry_info[0].label.get_label())
)
return False
# Make sure the checkbox is checked when it should be
elif not entry.chkbox_encryption.get_active() and \
wireless.GetWirelessProperty(networkid, "encryption"):
error(self.window, language['enable_encryption'])
return False
return True
def _wait_for_connect_thread_start(self):
self.wTree.get_object("progressbar").pulse()
if not self._connect_thread_started:
return True
else:
misc.timeout_add(2, self.update_statusbar)
self.update_statusbar()
return False
def connect(self, widget, nettype, networkid, networkentry):
""" Initiates the connection process in the daemon. """
def handler(*args):
self._connect_thread_started = True
def setup_interface_for_connection():
cancel_button = self.wTree.get_object("cancel_button")
cancel_button.set_sensitive(True)
self.all_network_list.set_sensitive(False)
if self.statusID:
gobject.idle_add(self.status_bar.remove_message, 1, self.statusID)
gobject.idle_add(self.set_status, language["disconnecting_active"])
gobject.idle_add(self.status_area.show_all)
self.wait_for_events()
self._connect_thread_started = False
if nettype == "wireless":
if not self.check_encryption_valid(networkid,
networkentry.advanced_dialog):
self.edit_advanced(None, nettype, networkid, networkentry)
return False
setup_interface_for_connection()
wireless.ConnectWireless(networkid, reply_handler=handler,
error_handler=handler)
elif nettype == "wired":
setup_interface_for_connection()
wired.ConnectWired(reply_handler=handler, error_handler=handler)
gobject.source_remove(self.update_cb)
misc.timeout_add(100, self._wait_for_connect_thread_start, milli=True)
def disconnect(self, widget, nettype, networkid, networkentry):
""" Disconnects from the given network.
Keyword arguments:
widget -- The disconnect button that was pressed.
event -- unused
nettype -- "wired" or "wireless", depending on the network entry type.
networkid -- unused
networkentry -- The NetworkEntry containing the disconnect button.
"""
def handler(*args):
gobject.idle_add(self.all_network_list.set_sensitive, True)
gobject.idle_add(self.network_list.set_sensitive, True)
widget.hide()
networkentry.connect_button.show()
daemon.SetForcedDisconnect(True)
self.network_list.set_sensitive(False)
if nettype == "wired":
wired.DisconnectWired(reply_handler=handler, error_handler=handler)
else:
wireless.DisconnectWireless(reply_handler=handler,
error_handler=handler)
def wait_for_events(self, amt=0):
""" Wait for any pending gtk events to finish before moving on.
Keyword arguments:
amt -- a number specifying the number of ms to wait before checking
for pending events.
"""
time.sleep(amt)
while gtk.events_pending():
gtk.main_iteration()
def exit(self, widget=None, event=None):
""" Hide the wicd GUI.
This method hides the wicd GUI and writes the current window size
to disc for later use. This method normally does NOT actually
destroy the GUI, it just hides it.
"""
self.window.hide()
gobject.source_remove(self.update_cb)
bus.remove_signal_receiver(self._do_statusbar_update, 'StatusChanged',
'org.wicd.daemon')
[width, height] = self.window.get_size()
try:
daemon.SetGUIOpen(False)
except DBusException:
pass
if self.standalone:
sys.exit(0)
self.is_visible = False
return True
def show_win(self):
""" Brings the GUI out of the hidden state.
Method to show the wicd GUI, alert the daemon that it is open,
and refresh the network list.
"""
self.window.present()
self.window.deiconify()
self.wait_for_events()
self.is_visible = True
daemon.SetGUIOpen(True)
self.wait_for_events(0.1)
gobject.idle_add(self.refresh_clicked)
self._do_statusbar_update(*daemon.GetConnectionStatus())
bus.add_signal_receiver(self._do_statusbar_update, 'StatusChanged',
'org.wicd.daemon')
self.update_cb = misc.timeout_add(2, self.update_statusbar)
if __name__ == '__main__':
setup_dbus()
app = appGui(standalone=True)
mainloop = gobject.MainLoop()
mainloop.run()<|fim▁end|>
|
self.all_network_list.show_all()
|
<|file_name|>article_simulated_estimate_mpi.py<|end_file_name|><|fim▁begin|>'''
run with ex: mpiexec -n 10 python article_simulated_estimate_mpi.py
Created on Jul 11, 2014
@author: jonaswallin
'''
from __future__ import division
import time
import scipy.spatial as ss
import article_simulatedata
from mpi4py import MPI
import numpy as np
import BayesFlow as bm
import matplotlib
import matplotlib.pyplot as plt
import numpy.random as npr
import BayesFlow.plot as bm_plot
import matplotlib.ticker as ticker
from article_plotfunctions import plotQ_joint, plotQ, plot_theta
folderFigs = "/Users/jonaswallin/Dropbox/articles/FlowCap/figs/"
sim = 10**2
nCells = 1500
thin = 2
nPers = 80
save_fig = 0
Y = []
####
# COLLECTING THE DATA
####
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
Y,act_komp, mus, Thetas, Sigmas, P = np.array(article_simulatedata.simulate_data_v1(nCells = nCells, nPersons = nPers))
else:
Y = None
act_komp = None
#npr.seed(123546)
####
# Setting up model
####
hGMM = bm.hierarical_mixture_mpi(K = 4)
hGMM.set_data(Y)
hGMM.set_prior_param0()
hGMM.update_GMM()
hGMM.update_prior()
hGMM.set_p_labelswitch(1.)
hGMM.set_prior_actiavation(10)
hGMM.set_nu_MH_param(10,200)
for i,GMM in enumerate(hGMM.GMMs):
GMM._label =i
for i in range(min(sim,2000)):
hGMM.sample()
np.set_printoptions(precision=3)
#hGMM.reset_prior()
bm.distance_sort_MPI(hGMM)
hGMM.set_p_activation([0.7,0.7])
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
theta_sim = []
Q_sim = []
nu_sim = []
Y_sim = []
Y0_sim = []
##############
# MCMC PART
##############
##############
# BURN IN
##############
for i in range(min(np.int(np.ceil(0.1*sim)),8000)):#burn in
hGMM.sample()
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
mus_vec = np.zeros((len(Y), hGMM.K, hGMM.d))
actkomp_vec = np.zeros((len(Y), hGMM.K))
count = 0
hGMM.set_p_labelswitch(.4)
for i in range(sim):#
# sampling the thining
for k in range(thin):
# simulating
hGMM.sample()
##
# since label switching affects the posterior of mu, and active_komp
# it needs to be estimated each time
##
labels = hGMM.get_labelswitches()
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
for j in range(labels.shape[0]):
if labels[j,0] != -1:
mus_vec[j,labels[j,0],:], mus_vec[j,labels[j,1],:] = mus_vec[j,labels[j,1],:], mus_vec[j,labels[j,0],:]
actkomp_vec[j,labels[j,0]], actkomp_vec[j,labels[j,1]] = actkomp_vec[j,labels[j,1]], actkomp_vec[j,labels[j,0]]
###################
# storing data
# for post analysis
###################
mus_ = hGMM.get_mus()
thetas = hGMM.get_thetas()
Qs = hGMM.get_Qs()
nus = hGMM.get_nus()
if sim - i < nCells * nPers:
Y_sample = hGMM.sampleY()
active_komp = hGMM.get_activekompontent()
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
print "iter =%d"%i
count += 1
mus_vec += mus_
actkomp_vec += active_komp
theta_sim.append(thetas)
Q_sim.append(Qs/(nus.reshape(nus.shape[0],1,1)- Qs.shape[1]-1) )
nu_sim.append(nus)
# storing the samples equal to number to the first indiviual
if sim - i < nCells:
Y0_sim.append(hGMM.GMMs[0].simulate_one_obs().reshape(3))
Y_sim.append(Y_sample)
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
actkomp_vec /= count
mus_vec /= count
mus_ = mus_vec
hGMM.save_to_file("/Users/jonaswallin/Dropbox/temp/")
##
# fixing ploting options
##
matplotlib.rcParams['ps.useafm'] = True
matplotlib.rcParams['pdf.use14corefonts'] = True
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['text.latex.preamble']=[r"\usepackage{amsmath}"]
#hGMM.plot_GMM_scatter_all([0, 1])
mus_colors = ['r','b','k','m']
f, ax = hGMM.plot_mus([0,1,2], colors =mus_colors, size_point = 5 )
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
######################
#ordering mus
mus_true_mean = []
mus_mean = []
for k in range(hGMM.K):
mus_true_mean.append(np.array(np.ma.masked_invalid(mus[:,k,:]).mean(0)))
mus_mean.append(np.array(np.ma.masked_invalid(mus_[:,k,:].T).mean(0)))
mus_true_mean = np.array(mus_true_mean)
mus_mean = np.array(mus_mean)
ss_mat = ss.distance.cdist( mus_true_mean, mus_mean, "euclidean")
#print ss_mat
col_index = []
for k in range(hGMM.K):
col_index.append( np.argmin(ss_mat[k,:]))
#print col_index
#####################
######################
theta_sim = np.array(theta_sim)
Q_sim = np.array(Q_sim)
nu_sim = np.array(nu_sim)
np.set_printoptions(precision=2)
perc_theta = []
perc_Q_vec = []
for k in range(hGMM.K):
perc_ = np.percentile(theta_sim[:,col_index[k],:] - Thetas[k],[2.5,50,97.5],axis=0)
perc_theta.append(np.array(perc_).T)
#print "%d & %s & %s & %s & \\hline" %(k, np.mean(theta_sim[:,col_index[k],:],0) - Thetas[k],perc_[0],perc_[1])
perc_Q = np.percentile(Q_sim[:,col_index[k],:] - Sigmas[k],[2.5,50,97.5],axis=0)
#print "Q = %s"%(np.mean(Q_sim[:,col_index[k],:],0))
perc_Q_vec.append(perc_Q)
theta_string = ""
Q_string = ""
theta_diff = np.mean(theta_sim[:,col_index[k],:],0) - Thetas[k]
Q_diff = np.mean(Q_sim[:,col_index[k],:] - Sigmas[k] ,0)
for d in range(hGMM.d):
theta_string += " %.2f (%.2f, %.2f) &"%(perc_[1][d], perc_[0][d], perc_[2][d])
for dd in range(hGMM.d):
Q_string += " %.3f (%.3f, %.3f) &"%(perc_Q[1][d,dd],perc_Q[0][d,dd],perc_Q[2][d,dd] )
Q_string = Q_string[:-1]
Q_string +="\\\ \n"
theta_string = theta_string[:-1]
print "theta[%d]= \n%s\n"%(k,theta_string)
print "Q[%d]= \n%s "%(k,Q_string)
perc_nu = np.percentile(nu_sim[:,col_index[k]] - 100,[2.5,50,97.5],axis=0)
print "nu = %.2f (%d, %d)"%(perc_nu[1],perc_nu[0],perc_nu[2])
Y_sim = np.array(Y_sim)
Y0_sim = np.array(Y0_sim)
for k in range(hGMM.K):
k_ = np.where(np.array(col_index)==k)[0][0]
print("k_ == %s"%k_)
mu_k = mus[:,k_,:].T
#print actkomp_vec[:,col_index[k]]
index = np.isnan(mu_k[:,0])==False
ax.scatter(mu_k[index,0],mu_k[index,1],mu_k[index,2], s=50, edgecolor=mus_colors[k],facecolors='none')
ax.view_init(48,22)<|fim▁hole|> fig_nu = plt.figure(figsize=(6,0.5))
ax_nu = fig_nu.add_subplot(111)
for k in range(hGMM.K):
ax_nu.plot(nu_sim[:,col_index[k]])
f_histY = bm_plot.histnd(Y_sim, 50, [0, 100], [0,100])
f_histY0 = bm_plot.histnd(Y0_sim, 50, [0, 100], [0,100])
f_theta = plot_theta(np.array(perc_theta))
figs_Q = plotQ(perc_Q_vec)
fig_Q_joint = plotQ_joint(perc_Q_vec)
np.set_printoptions(precision=4, suppress=True)
for i, GMM in enumerate(hGMM.GMMs):
#print("p[%d,%d] = %s"%(hGMM.comm.Get_rank(),i,GMM.p))
hGMM.comm.Barrier()
if MPI.COMM_WORLD.Get_rank() == 0 and save_fig: # @UndefinedVariable
print col_index
fig_nu.savefig(folderFigs + "nus_simulated.eps", type="eps",transparent=True,bbox_inches='tight')
fig_nu.savefig(folderFigs + "nus_simulated.pdf", type="pdf",transparent=True,bbox_inches='tight')
f.savefig(folderFigs + "dcluster_centers_simulated.eps", type="eps",transparent=True,bbox_inches='tight')
f.savefig(folderFigs + "dcluster_centers_simulated.pdf", type="pdf",transparent=True,bbox_inches='tight')
f_histY.savefig(folderFigs + "hist2d_simulated.eps", type="eps",bbox_inches='tight')
f_histY.savefig(folderFigs + "hist2d_simulated.pdf", type="pdf",bbox_inches='tight')
f_histY0.savefig(folderFigs + "hist2d_indv_simulated.eps", type="eps",bbox_inches='tight')
f_histY0.savefig(folderFigs + "hist2d_indv_simulated.pdf", type="pdf",bbox_inches='tight')
f_theta.savefig(folderFigs + "theta_simulated.pdf", type="pdf",transparent=True,bbox_inches='tight')
f_theta.savefig(folderFigs + "theta_simulated.eps", type="eps",transparent=True,bbox_inches='tight')
fig_Q_joint.savefig(folderFigs + "Qjoint_simulated.pdf", type="pdf",transparent=True,bbox_inches='tight')
fig_Q_joint.savefig(folderFigs + "Qjoint_simulated.eps", type="eps",transparent=True,bbox_inches='tight')
for i,f_Q in enumerate(figs_Q):
f_Q.savefig(folderFigs + "Q%d_simulated.pdf"%(i+1), type="pdf",transparent=True,bbox_inches='tight')
f_Q.savefig(folderFigs + "Q%d_simulated.eps"%(i+1), type="eps",transparent=True,bbox_inches='tight')
else:
plt.show()<|fim▁end|>
| |
<|file_name|>conflict_resolver.py<|end_file_name|><|fim▁begin|># ===============================================================================
# Copyright 2015 Jake Ross
#<|fim▁hole|># http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traits.api import HasTraits, Str, List, Instance
from traitsui.api import View, UItem, Item, TableEditor
from traitsui.table_column import ObjectColumn
from pychron.core.helpers.traitsui_shortcuts import okcancel_view
from pychron.core.ui.enum_editor import myEnumEditor
class Conflict(HasTraits):
queue_name = Str
runspec = Instance('pychron.experiment.automated_run.spec.AutomatedRunSpec')
identifier = Str
position = Str
repository_identifier = Str
repository_ids = Str
available_ids = List
class ConflictResolver(HasTraits):
conflicts = List
available_ids = List
def apply(self):
for c in self.conflicts:
c.runspec.repository_identifier = c.repository_identifier
def add_conflicts(self, qname, cs):
for ai, exps in cs:
self.conflicts.append(Conflict(queue_name=qname,
runspec=ai,
position=ai.position,
repository_identifier=ai.repository_identifier,
identifier=ai.identifier,
repository_ids=','.join(exps),
available_ids=self.available_ids))
def traits_view(self):
cols = [ObjectColumn(name='queue_name', editable=False),
ObjectColumn(name='identifier', editable=False),
ObjectColumn(name='position', editable=False),
ObjectColumn(name='repository_identifier',
label='Assigned Repository',
tooltip='Repository assigned to this analysis in the Experiment Queue',
editor=myEnumEditor(name='available_ids')),
ObjectColumn(name='repository_ids',
label='Existing Repositories',
tooltip='Set of repositories that already contain this L#',
editable=False)]
v = okcancel_view(UItem('conflicts', editor=TableEditor(columns=cols)),
title='Resolve Repository Conflicts')
return v
if __name__ == '__main__':
def main():
from pychron.paths import paths
paths.build('_dev')
from pychron.core.helpers.logger_setup import logging_setup
from pychron.experiment.automated_run.spec import AutomatedRunSpec
logging_setup('dvcdb')
from pychron.dvc.dvc_database import DVCDatabase
from itertools import groupby
db = DVCDatabase(kind='mysql', host='localhost', username='root', name='pychronmeta', password='Argon')
db.connect()
identifiers = ['63290', '63291']
runs = [AutomatedRunSpec(identifier='63290', repository_identifier='Cather_McIntoshd')]
cr = ConflictResolver()
experiments = {}
cr.available_ids = db.get_repository_identifiers()
eas = db.get_associated_repositories(identifiers)
for idn, exps in groupby(eas, key=lambda x: x[1]):
experiments[idn] = [e[0] for e in exps]
conflicts = []
for ai in runs:
identifier = ai.identifier
es = experiments[identifier]
if ai.repository_identifier not in es:
conflicts.append((ai, es))
if conflicts:
cr.add_conflicts('Foo', conflicts)
if cr.conflicts:
info = cr.edit_traits(kind='livemodal')
if info.result:
cr.apply()
# for ci in runs:
# print ci.identifier, ci.experiment_identifier
from traits.api import Button
class Demo(HasTraits):
test = Button
def traits_view(self):
return View(Item('test'))
def _test_fired(self):
main()
d = Demo()
d.configure_traits()
# ============= EOF =============================================<|fim▁end|>
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)]
use std::collections::{BTreeMap, HashMap};
use std::iter::FromIterator;
type DocId = i32;
const MIN_DOC_ID: DocId = std::i32::MIN;
const MAX_DOC_ID: DocId = std::i32::MAX;
#[derive(Clone, Debug, PartialEq)]
struct ScoredDoc {<|fim▁hole|>
struct Term<'a> {
field: &'a str,
token: &'a str,
weight: f32,
}
// used to populate an index in a simple way
type IndexEntry<'a> = (&'a str, &'a str, DocId, f32);
type DensePostingList = Vec<ScoredDoc>;
type WeightedPostingListIterator<'a> = (f32, std::slice::Iter<'a, ScoredDoc>);
#[derive(Debug)]
struct InvertedIndexNaive<'a> {
// TODO: dictionaries and IDF's, etc as well
posting_lists: HashMap<&'a str, HashMap<&'a str, DensePostingList>>,
}
impl<'a> InvertedIndexNaive<'a> {
// TODO: figure out the lifetime stuff to make this return an iterator and can early terminate on
fn get_lists_for_terms(&self, terms: &[Term<'a>]) -> Vec<Option<(f32, &DensePostingList)>> {
terms
.iter()
.map(|term| {
self.posting_lists.get(term.field).and_then(|field_result| {
field_result.get(term.token).map(|list| (term.weight, list))
})
})
.collect()
}
}
impl<'a> FromIterator<IndexEntry<'a>> for InvertedIndexNaive<'a> {
fn from_iter<T: IntoIterator<Item = IndexEntry<'a>>>(iter: T) -> Self {
// insert field -> (token -> (doc id -> score))
let mut nested_maps = HashMap::new();
for (field, token, doc_id, payload) in iter {
let field_entry = nested_maps.entry(field).or_insert_with(HashMap::new);
let token_entry = field_entry.entry(token).or_insert_with(BTreeMap::new);
token_entry.insert(doc_id, payload);
}
let mut posting_lists = HashMap::new();
// turn BTreeMaps into DesnPostingLists
for (field, token_map) in nested_maps {
let field_entry = posting_lists.entry(field).or_insert_with(HashMap::new);
for (token, score_map) in token_map {
let posting_list = score_map
.iter()
.map(|x| ScoredDoc {
id: *x.0,
score: *x.1,
})
.collect();
field_entry.insert(token, posting_list);
}
}
InvertedIndexNaive { posting_lists }
}
}
#[derive(Clone, Copy, Debug, PartialEq)]
enum AndState {
Searching,
IterationsAfterEmptyList(usize),
Done,
}
impl AndState {
fn next_state(&self, frontier_length: usize) -> Self {
match self {
AndState::Searching => AndState::IterationsAfterEmptyList(0),
AndState::IterationsAfterEmptyList(i) => {
if *i == frontier_length - 1 {
AndState::Done
} else {
AndState::IterationsAfterEmptyList(i + 1)
}
}
AndState::Done => AndState::Done,
}
}
}
impl Default for AndState {
fn default() -> Self {
AndState::Done
}
}
#[derive(Default)]
struct And<'a> {
state: AndState,
frontier: Vec<WeightedPostingListIterator<'a>>,
last_viable_id: i32,
}
// TODO: ctor that takes in existing iterators
impl<'a> And<'a> {
fn new(index: &'a InvertedIndexNaive, terms: &[Term<'a>]) -> Self {
let mut frontier = Vec::with_capacity(terms.len());
for lookup in index.get_lists_for_terms(terms) {
match lookup {
None => return And::default(),
Some((weight, list)) => frontier.push((weight, list.iter())),
}
}
And {
state: AndState::Searching,
frontier,
last_viable_id: MIN_DOC_ID,
}
}
}
impl Iterator for And<'_> {
type Item = ScoredDoc;
fn next(&mut self) -> Option<ScoredDoc> {
let frontier_length = self.frontier.len();
let mut matched_doc_count = 0;
let mut score = 0.0;
while self.state != AndState::Done {
for (weight, posting_list) in self.frontier.iter_mut() {
let mut current_state = self.state;
let mut last_viable_id = self.last_viable_id;
let mut list_iter = posting_list.skip_while(|doc| doc.id < last_viable_id);
match list_iter.next() {
None => current_state = current_state.next_state(frontier_length),
Some(doc) => {
let score_contribution = *weight * doc.score;
if doc.id == last_viable_id {
score += score_contribution;
matched_doc_count += 1;
} else {
score = score_contribution;
matched_doc_count = 0;
last_viable_id = doc.id;
}
}
}
if matched_doc_count == frontier_length - 1 {
return Some(ScoredDoc {
id: last_viable_id,
score,
});
}
self.last_viable_id = last_viable_id;
self.state = current_state;
if self.state == AndState::Done {
break;
}
}
}
None
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn empty_and() {
let inverted_index = &InvertedIndexNaive {
posting_lists: HashMap::new(),
};
let term = &vec![Term {
field: "your",
token: "mom",
weight: 9000.1,
}];
let and = And::new(inverted_index, term);
assert_eq!(and.count(), 0);
}
#[test]
fn nonempty_and() {
let inverted_index: InvertedIndexNaive = [
("description", "very", 1, 1.0),
("description", "very", 5, 1.0),
("description", "very", 6, 1.0),
("description", "human", 2, 1.0),
("description", "human", 5, 2.0),
("description", "human", 6, 1.0),
("description", "like", 3, 1.0),
("description", "like", 5, 3.0),
("description", "eyes", 4, 1.0),
("description", "eyes", 5, 4.0),
("description", "eyes", 6, 1.0),
("title", "manul", 1, 1.0),
("title", "manul", 5, 5.0),
("title", "manul", 6, 1.0),
("title", "cat", 2, 1.0),
("title", "cat", 5, 6.0),
("title", "cat", 6, 1.0),
("title", "facial", 3, 1.0),
("title", "facial", 5, 7.0),
("title", "facial", 6, 1.0),
("title", "expression", 4, 1.0),
("title", "expression", 5, 8.0),
("title", "expression", 6, 1.0),
]
.iter()
.cloned()
.collect();
let terms = [
Term {
field: "description",
token: "very",
weight: 1.0,
},
Term {
field: "description",
token: "human",
weight: 10.0,
},
Term {
field: "description",
token: "like",
weight: 100.0,
},
Term {
field: "description",
token: "eyes",
weight: 1_000.0,
},
Term {
field: "title",
token: "manul",
weight: 10_000.0,
},
Term {
field: "title",
token: "cat",
weight: 100_000.0,
},
Term {
field: "title",
token: "facial",
weight: 1_000_000.0,
},
Term {
field: "title",
token: "expression",
weight: 10_000_000.0,
},
];
let and = And::new(&inverted_index, &terms);
let actual: Vec<ScoredDoc> = and.collect();
assert_eq!(
actual,
vec![ScoredDoc {
id: 5,
score: 87_654_321.0
}]
)
}
}<|fim▁end|>
|
id: DocId,
score: f32,
}
|
<|file_name|>vector.py<|end_file_name|><|fim▁begin|>from victor.exceptions import (
FieldValidationException,
FieldTypeConversionError,
FieldRequiredError,
VectorInputTypeError
)
class Field(object):
required = True
"""Field is required and an exception will be raised if missing"""
missing_value = None
"""Value to use when field is missing and not required"""
strict = False
"""Field value must pass validation or an exception will be raised"""
cast_cls = None
data = None
def __init__(self, required=True, missing_value=None, strict=False):
self.required = required
self.missing_value = missing_value
self.strict = strict
def _validate(self, value):
return True
def _cast_type(self, value):
return self.cast_cls(value)
def set_data(self, value):
if self.strict:
if not self._validate(value):
raise FieldValidationException('%s does not '
'except this value'
% self.__class__.__name__)
elif self.cast_cls is not None:
value = self._cast_type(value)
self.data = value
class CharField(Field):
pass
class StringField(Field):
cast_cls = str
def _validate(self, value):
if not isinstance(value, (str, unicode)):
return False
return True
class IntField(Field):
cast_cls = int
_cast_fallback_value = 0
def __init__(self, *args, **kwargs):
super(IntField, self).__init__(*args, **kwargs)
if self.missing_value is None:
self.missing_value = self._cast_fallback_value
def _cast_type(self, value):<|fim▁hole|> return self.cast_cls(value)
except ValueError, exc:
if self.missing_value is False:
raise FieldTypeConversionError('Could not convert '
'data or use missing_value: %s'
% exc)
return self.missing_value
class FloatField(IntField):
cast_class = float
_cast_fallback_value = 0.0
class ListField(Field):
cls = None
"""Field class to represent list items"""
def __init__(self, cls, *args, **kwargs):
assert isinstance(cls, Field), 'cls is not a valid Field instance'
self.cls = cls
super(ListField, self).__init__(*args, **kwargs)
def _validate(self, value):
if not isinstance(value, (list, tuple)):
raise FieldValidationException('ListField requires data '
'to be a sequence type')
for x in value:
self.cls.set_data(value)
self.data = value
return True
class Vector(object):
def __init__(self):
self.input_data = {}
self._fields = {}
self._map = {}
self._required = []
self._setup_fields()
def get_name(self):
return self.__class__.__name__
def __call__(self, data):
return self.input(data)
def input(self, data):
self._map = {}
if not isinstance(data, dict):
raise VectorInputTypeError('Vector input not a dictionary')
self._validate(data)
self._map_attrs(data)
def _setup_fields(self):
self._fields = {}
for a in dir(self):
v = getattr(self, a)
if isinstance(v, Field):
self._fields[a] = v
if v.required:
self._required.append(a)
self._reset_fields()
def _reset_fields(self):
for f in self.get_fields():
setattr(self, f, None)
def _validate(self, input_data):
for f in self._required:
if f not in input_data:
raise FieldRequiredError('Missing field %s is a required field'
% f)
for k, v in input_data.iteritems():
if k in self.get_fields():
f = self.get_field(k)
f.set_data(v)
def _map_attrs(self, input_data):
self.input_data = input_data
for k, v in self.input_data.iteritems():
if k in self.get_fields():
# setattr(self, k, self.get_field(k).data)
self._map[k] = self.get_field(k).data
else:
# setattr(self, k, v)
self._map[k] = v
for k, v in self._map.iteritems():
setattr(self, k, v)
def get_fields(self):
return self._fields
def get_field(self, name):
return self._fields[name]
@property
def data(self):
return self._map<|fim▁end|>
|
try:
|
<|file_name|>E0401.rs<|end_file_name|><|fim▁begin|>trait Baz<T> {}<|fim▁hole|>
fn foo<T>(x: T) {
fn bfnr<U, V: Baz<U>, W: Fn()>(y: T) { //~ ERROR E0401
}
fn baz<U,
V: Baz<U>,
W: Fn()>
(y: T) { //~ ERROR E0401
}
bfnr(x); //~ ERROR type annotations needed
}
struct A<T> {
inner: T,
}
impl<T> Iterator for A<T> {
type Item = u8;
fn next(&mut self) -> Option<u8> {
fn helper(sel: &Self) -> u8 { //~ ERROR E0401
unimplemented!();
}
Some(helper(self))
}
}
fn main() {
}<|fim▁end|>
| |
<|file_name|>RemoveTileAtWorldXY.js<|end_file_name|><|fim▁begin|>/**
* @author Richard Davey <[email protected]>
* @copyright 2018 Photon Storm Ltd.
* @license {@link https://github.com/photonstorm/phaser/blob/master/license.txt|MIT License}
*/
var RemoveTileAt = require('./RemoveTileAt');
var WorldToTileX = require('./WorldToTileX');
var WorldToTileY = require('./WorldToTileY');
/**
* Removes the tile at the given world coordinates in the specified layer and updates the layer's
* collision information.
*
* @function Phaser.Tilemaps.Components.RemoveTileAtWorldXY
* @private
* @since 3.0.0
*
* @param {number} worldX - [description]
* @param {number} worldY - [description]
* @param {boolean} [replaceWithNull=true] - If true, this will replace the tile at the specified
* location with null instead of a Tile with an index of -1.
* @param {boolean} [recalculateFaces=true] - [description]
* @param {Phaser.Cameras.Scene2D.Camera} [camera=main camera] - [description]
* @param {Phaser.Tilemaps.LayerData} layer - The Tilemap Layer to act upon.
*
* @return {Phaser.Tilemaps.Tile} The Tile object that was removed.
*/
var RemoveTileAtWorldXY = function (worldX, worldY, replaceWithNull, recalculateFaces, camera, layer)<|fim▁hole|> var tileX = WorldToTileX(worldX, true, camera, layer);
var tileY = WorldToTileY(worldY, true, camera, layer);
return RemoveTileAt(tileX, tileY, replaceWithNull, recalculateFaces, layer);
};
module.exports = RemoveTileAtWorldXY;<|fim▁end|>
|
{
|
<|file_name|>timeoutsAsyncScript.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = timeoutsAsyncScript;
<|fim▁hole|>
var _depcrecationWarning2 = _interopRequireDefault(_depcrecationWarning);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
*
* Set the amount of time, in milliseconds, that asynchronous scripts executed
* by /session/:sessionId/execute_async are permitted to run before they are
* aborted and a |Timeout| error is returned to the client.
*
* Deprecated! Please use the `timeouts` command instead.
*
* @see https://github.com/SeleniumHQ/selenium/wiki/JsonWireProtocol#sessionsessionidtimeoutsasync_script
*
* @param {Number} ms The amount of time, in milliseconds, that time-limited commands are permitted to run.
* @type protocol
* @deprecated
*
*/
function timeoutsAsyncScript(ms) {
/*!
* parameter check
*/
if (typeof ms !== 'number') {
throw new _ErrorHandler.ProtocolError('number or type of arguments don\'t agree with timeoutsAsyncScript protocol command');
}
(0, _depcrecationWarning2.default)('timeoutsAsyncScript');
return this.requestHandler.create('/session/:sessionId/timeouts/async_script', { ms: ms });
}
module.exports = exports['default'];<|fim▁end|>
|
var _ErrorHandler = require('../utils/ErrorHandler');
var _depcrecationWarning = require('../helpers/depcrecationWarning');
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># Copyright 2016 Yanis Guenane <[email protected]>
# Author: Yanis Guenane <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import os
import setuptools
from lecm import version
def _get_requirements():
requirements_path = '%s/%s' % (os.path.dirname(os.path.abspath(__file__)),
'requirements.txt')
with open(requirements_path, 'r') as f:
requirements = f.read()
# remove the dependencies which comes from url source because
# it's not supported by install_requires
return [dep for dep in requirements.split('\n')
if not dep.startswith('-e')]
def _get_readme():
    """Return the contents of README.rst (UTF-8) for use as the long description."""
    base_dir = os.path.dirname(os.path.abspath(__file__))
    readme_path = '{0}/{1}'.format(base_dir, 'README.rst')
    with codecs.open(readme_path, 'r', encoding='utf8') as f:
        return f.read()
setuptools.setup(
name='lecm',
version=version.__version__,
packages=setuptools.find_packages(),
author='Yanis Guenane',
author_email='[email protected]',
description='Tool to manage Let''s Encrypt certificates \
from configuration file',
long_description=_get_readme(),
install_requires=_get_requirements(),
url='https://github.com/Spredzy/lecm',
license='Apache v2.0',
include_package_data=True,
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',<|fim▁hole|> 'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
entry_points={
'console_scripts': [
'lecm = lecm.shell:main'
],
}
)<|fim▁end|>
|
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
|
<|file_name|>difftool.py<|end_file_name|><|fim▁begin|>from __future__ import division, absolute_import, unicode_literals
from qtpy import QtWidgets
from qtpy.QtCore import Qt
from . import cmds
from . import gitcmds
from . import hotkeys
from . import icons
from . import qtutils
from . import utils
from .i18n import N_
from .widgets import completion
from .widgets import defs
from .widgets import filetree
from .widgets import standard<|fim▁hole|> """Show a dialog for diffing two commits"""
dlg = Difftool(parent, a=a, b=b, context=context)
dlg.show()
dlg.raise_()
return dlg.exec_() == QtWidgets.QDialog.Accepted
def diff_expression(parent, expr,
                    create_widget=False, hide_expr=False,
                    focus_tree=False, context=None):
    """Show a diff dialog for diff expressions"""
    dlg = Difftool(parent,
                   expr=expr, hide_expr=hide_expr,
                   focus_tree=focus_tree, context=context)
    # When requested, hand the dialog back to the caller instead of
    # running it modally here.
    if create_widget:
        return dlg
    dlg.show()
    dlg.raise_()
    # True when the dialog was accepted
    return dlg.exec_() == QtWidgets.QDialog.Accepted
class Difftool(standard.Dialog):
    """Dialog listing the files touched by a diff expression or commit pair.

    Selected files can be compared with the configured difftool or opened
    in the editor.
    """

    def __init__(self, parent, a=None, b=None, expr=None, title=None,
                 hide_expr=False, focus_tree=False, context=None):
        """Show files with differences and launch difftool"""

        standard.Dialog.__init__(self, parent=parent)

        # Diff endpoints: "a" and "b" are commits/refs; "expr" is a
        # free-form diff expression that takes precedence when provided.
        self.a = a
        self.b = b
        self.diff_expr = expr
        self.context = context

        if title is None:
            title = N_('git-cola diff')

        self.setWindowTitle(title)
        self.setWindowModality(Qt.WindowModal)

        # Git-ref expression input with completion
        self.expr = completion.GitRefLineEdit(parent=self)
        if expr is not None:
            self.expr.setText(expr)

        if expr is None or hide_expr:
            self.expr.hide()

        self.tree = filetree.FileTree(parent=self)

        self.diff_button = qtutils.create_button(text=N_('Compare'),
                                                 icon=icons.diff(),
                                                 enabled=False,
                                                 default=True)
        self.diff_button.setShortcut(hotkeys.DIFF)

        self.diff_all_button = qtutils.create_button(text=N_('Compare All'),
                                                     icon=icons.diff())
        self.edit_button = qtutils.edit_button()
        self.edit_button.setShortcut(hotkeys.EDIT)

        self.close_button = qtutils.close_button()

        self.button_layout = qtutils.hbox(defs.no_margin, defs.spacing,
                                          self.close_button,
                                          qtutils.STRETCH,
                                          self.edit_button,
                                          self.diff_all_button,
                                          self.diff_button)

        self.main_layout = qtutils.vbox(defs.margin, defs.spacing,
                                        self.expr, self.tree,
                                        self.button_layout)
        self.setLayout(self.main_layout)

        # Wire up tree/expression signals and button actions
        self.tree.itemSelectionChanged.connect(self.tree_selection_changed)
        self.tree.itemDoubleClicked.connect(self.tree_double_clicked)
        self.tree.up.connect(self.focus_input)

        self.expr.textChanged.connect(self.text_changed)
        self.expr.activated.connect(self.focus_tree)
        self.expr.down.connect(self.focus_tree)
        self.expr.enter.connect(self.focus_tree)

        qtutils.connect_button(self.diff_button, self.diff)
        qtutils.connect_button(self.diff_all_button,
                               lambda: self.diff(dir_diff=True))
        qtutils.connect_button(self.edit_button, self.edit)
        qtutils.connect_button(self.close_button, self.close)

        qtutils.add_action(self, 'Focus Input', self.focus_input, hotkeys.FOCUS)
        qtutils.add_action(self, 'Diff All', lambda: self.diff(dir_diff=True),
                           hotkeys.CTRL_ENTER, hotkeys.CTRL_RETURN)
        qtutils.add_close_action(self)

        self.init_state(None, self.resize_widget, parent)

        self.refresh()
        if focus_tree:
            self.focus_tree()

    def resize_widget(self, parent):
        """Set the initial size of the widget"""
        width, height = qtutils.default_size(parent, 720, 420)
        self.resize(width, height)

    def focus_tree(self):
        """Focus the files tree"""
        self.tree.setFocus()

    def focus_input(self):
        """Focus the expression input"""
        self.expr.setFocus()

    def text_changed(self, txt):
        """Track expression edits and re-run the diff"""
        self.diff_expr = txt
        self.refresh()

    def refresh(self):
        """Redo the diff when the expression changes"""
        # Prefer the free-form expression; otherwise fall back to the
        # commit arguments supplied at construction time.
        if self.diff_expr is not None:
            self.diff_arg = utils.shell_split(self.diff_expr)
        elif self.b is None:
            self.diff_arg = [self.a]
        else:
            self.diff_arg = [self.a, self.b]
        self.refresh_filenames()

    def refresh_filenames(self):
        """Populate the tree with the filenames for the current diff"""
        if self.a and self.b is None:
            filenames = gitcmds.diff_index_filenames(self.a)
        else:
            filenames = gitcmds.diff(self.diff_arg)
        self.tree.set_filenames(filenames, select=True)

    def tree_selection_changed(self):
        """Enable the compare buttons only when files are selected"""
        has_selection = self.tree.has_selection()
        self.diff_button.setEnabled(has_selection)
        self.diff_all_button.setEnabled(has_selection)

    def tree_double_clicked(self, item, column):
        """Launch the difftool for the double-clicked file"""
        path = self.tree.filename_from_item(item)
        left, right = self._left_right_args()
        cmds.difftool_launch(left=left, right=right, paths=[path],
                             context=self.context)

    def diff(self, dir_diff=False):
        """Launch the difftool for the selected files"""
        paths = self.tree.selected_filenames()
        left, right = self._left_right_args()
        cmds.difftool_launch(left=left, right=right, paths=paths,
                             dir_diff=dir_diff, context=self.context)

    def _left_right_args(self):
        """Return the (left, right) endpoints of the current diff arguments"""
        if self.diff_arg:
            left = self.diff_arg[0]
        else:
            left = None
        if len(self.diff_arg) > 1:
            right = self.diff_arg[1]
        else:
            right = None
        return (left, right)

    def edit(self):
        """Open the selected files in the configured editor"""
        paths = self.tree.selected_filenames()
        cmds.do(cmds.Edit, paths)
|
def diff_commits(parent, a, b, context=None):
|
<|file_name|>DoubleRippleImageOp.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2009 Piotr Piastucki
*
* This file is part of Patchca CAPTCHA library.
*
* Patchca is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by<|fim▁hole|> * Patchca is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Patchca. If not, see <http://www.gnu.org/licenses/>.
*/
package cn.osworks.aos.core.captcha.filter.library;
public class DoubleRippleImageOp extends RippleImageOp {
@Override
protected void transform(int x, int y, double[] t) {
double tx = Math.sin((double) y / yWavelength + yRandom) + 1.3 * Math.sin((double) 0.6 * y / yWavelength + yRandom);
double ty = Math.cos((double) x / xWavelength + xRandom) + 1.3 * Math.cos((double) 0.6 * x / xWavelength + xRandom);
t[0] = x + xAmplitude * tx;
t[1] = y + yAmplitude * ty;
}
}<|fim▁end|>
|
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
|
<|file_name|>nodelist.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use crate::dom::bindings::codegen::Bindings::NodeListBinding;
use crate::dom::bindings::codegen::Bindings::NodeListBinding::NodeListMethods;
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::bindings::str::DOMString;
use crate::dom::document::Document;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::htmlformelement::HTMLFormElement;
use crate::dom::node::{ChildrenMutation, Node};
use crate::dom::window::Window;
use dom_struct::dom_struct;
use servo_atoms::Atom;
use std::cell::Cell;
#[derive(JSTraceable, MallocSizeOf)]
#[unrooted_must_root_lint::must_root]
pub enum NodeListType {
    /// A static snapshot of nodes.
    Simple(Vec<Dom<Node>>),
    /// Live list of a node's children (backed by `ChildrenList`).
    Children(ChildrenList),
    /// Live list backed by `LabelsList` (an element's associated labels).
    Labels(LabelsList),
    /// Live list of a form's radio-list controls for a given name.
    Radio(RadioList),
    /// Live list of a document's elements sharing a `name` value.
    ElementsByName(ElementsByNameList),
}
// https://dom.spec.whatwg.org/#interface-nodelist
#[dom_struct]
pub struct NodeList {
reflector_: Reflector,
list_type: NodeListType,
}
impl NodeList {
#[allow(unrooted_must_root)]
pub fn new_inherited(list_type: NodeListType) -> NodeList {
NodeList {
reflector_: Reflector::new(),
list_type: list_type,
}
}
#[allow(unrooted_must_root)]
pub fn new(window: &Window, list_type: NodeListType) -> DomRoot<NodeList> {
reflect_dom_object(
Box::new(NodeList::new_inherited(list_type)),
window,
NodeListBinding::Wrap,
)
}
pub fn new_simple_list<T>(window: &Window, iter: T) -> DomRoot<NodeList>
where
T: Iterator<Item = DomRoot<Node>>,
{
NodeList::new(
window,
NodeListType::Simple(iter.map(|r| Dom::from_ref(&*r)).collect()),
)
}
pub fn new_simple_list_slice(window: &Window, slice: &[&Node]) -> DomRoot<NodeList> {
NodeList::new(
window,
NodeListType::Simple(slice.iter().map(|r| Dom::from_ref(*r)).collect()),
)
}
pub fn new_child_list(window: &Window, node: &Node) -> DomRoot<NodeList> {
NodeList::new(window, NodeListType::Children(ChildrenList::new(node)))
}
pub fn new_labels_list(window: &Window, element: &HTMLElement) -> DomRoot<NodeList> {
NodeList::new(window, NodeListType::Labels(LabelsList::new(element)))
}
pub fn new_elements_by_name_list(
window: &Window,
document: &Document,
name: DOMString,
) -> DomRoot<NodeList> {
NodeList::new(
window,
NodeListType::ElementsByName(ElementsByNameList::new(document, name)),
)
}
pub fn empty(window: &Window) -> DomRoot<NodeList> {
NodeList::new(window, NodeListType::Simple(vec![]))
}
}
impl NodeListMethods for NodeList {
// https://dom.spec.whatwg.org/#dom-nodelist-length
fn Length(&self) -> u32 {
match self.list_type {
NodeListType::Simple(ref elems) => elems.len() as u32,
NodeListType::Children(ref list) => list.len(),
NodeListType::Labels(ref list) => list.len(),
NodeListType::Radio(ref list) => list.len(),
NodeListType::ElementsByName(ref list) => list.len(),
}
}
// https://dom.spec.whatwg.org/#dom-nodelist-item
fn Item(&self, index: u32) -> Option<DomRoot<Node>> {
match self.list_type {
NodeListType::Simple(ref elems) => elems
.get(index as usize)
.map(|node| DomRoot::from_ref(&**node)),
NodeListType::Children(ref list) => list.item(index),
NodeListType::Labels(ref list) => list.item(index),
NodeListType::Radio(ref list) => list.item(index),
NodeListType::ElementsByName(ref list) => list.item(index),
}
}
// https://dom.spec.whatwg.org/#dom-nodelist-item
fn IndexedGetter(&self, index: u32) -> Option<DomRoot<Node>> {
self.Item(index)
}
}
impl NodeList {
pub fn as_children_list(&self) -> &ChildrenList {
if let NodeListType::Children(ref list) = self.list_type {
list
} else {
panic!("called as_children_list() on a non-children node list")
}
}
pub fn as_radio_list(&self) -> &RadioList {
if let NodeListType::Radio(ref list) = self.list_type {
list
} else {
panic!("called as_radio_list() on a non-radio node list")
}
}
pub fn iter<'a>(&'a self) -> impl Iterator<Item = DomRoot<Node>> + 'a {
let len = self.Length();
// There is room for optimization here in non-simple cases,
// as calling Item repeatedly on a live list can involve redundant work.
(0..len).flat_map(move |i| self.Item(i))
}
}
#[derive(JSTraceable, MallocSizeOf)]
#[unrooted_must_root_lint::must_root]
pub struct ChildrenList {
node: Dom<Node>,
#[ignore_malloc_size_of = "Defined in rust-mozjs"]
last_visited: MutNullableDom<Node>,
last_index: Cell<u32>,
}
impl ChildrenList {
pub fn new(node: &Node) -> ChildrenList {
let last_visited = node.GetFirstChild();
ChildrenList {
node: Dom::from_ref(node),
last_visited: MutNullableDom::new(last_visited.as_deref()),
last_index: Cell::new(0u32),
}
}
pub fn len(&self) -> u32 {
self.node.children_count()
}
pub fn item(&self, index: u32) -> Option<DomRoot<Node>> {
// This always start traversing the children from the closest element
// among parent's first and last children and the last visited one.
let len = self.len() as u32;
if index >= len {
return None;
}
if index == 0u32 {
// Item is first child if any, not worth updating last visited.
return self.node.GetFirstChild();
}
let last_index = self.last_index.get();
if index == last_index {
// Item is last visited child, no need to update last visited.
return Some(self.last_visited.get().unwrap());
}
let last_visited = if index - 1u32 == last_index {
// Item is last visited's next sibling.
self.last_visited.get().unwrap().GetNextSibling().unwrap()
} else if last_index > 0 && index == last_index - 1u32 {
// Item is last visited's previous sibling.
self.last_visited
.get()
.unwrap()
.GetPreviousSibling()
.unwrap()
} else if index > last_index {
if index == len - 1u32 {
// Item is parent's last child, not worth updating last visited.
return Some(self.node.GetLastChild().unwrap());
}
if index <= last_index + (len - last_index) / 2u32 {
// Item is closer to the last visited child and follows it.
self.last_visited
.get()
.unwrap()
.inclusively_following_siblings()
.nth((index - last_index) as usize)
.unwrap()
} else {
// Item is closer to parent's last child and obviously
// precedes it.
self.node
.GetLastChild()
.unwrap()
.inclusively_preceding_siblings()
.nth((len - index - 1u32) as usize)
.unwrap()
}
} else if index >= last_index / 2u32 {
// Item is closer to the last visited child and precedes it.
self.last_visited
.get()
.unwrap()
.inclusively_preceding_siblings()
.nth((last_index - index) as usize)
.unwrap()
} else {
// Item is closer to parent's first child and obviously follows it.
debug_assert!(index < last_index / 2u32);
self.node
.GetFirstChild()
.unwrap()
.inclusively_following_siblings()
.nth(index as usize)
.unwrap()
};
self.last_visited.set(Some(&last_visited));
self.last_index.set(index);
Some(last_visited)
}
pub fn children_changed(&self, mutation: &ChildrenMutation) {
fn prepend(list: &ChildrenList, added: &[&Node], next: &Node) {
let len = added.len() as u32;
if len == 0u32 {
return;
}
let index = list.last_index.get();
if index < len {
list.last_visited.set(Some(added[index as usize]));
} else if index / 2u32 >= len {
// If last index is twice as large as the number of added nodes,
// updating only it means that less nodes will be traversed if
// caller is traversing the node list linearly.
list.last_index.set(len + index);
} else {
// If last index is not twice as large but still larger,
// it's better to update it to the number of added nodes.
list.last_visited.set(Some(next));
list.last_index.set(len);
}
}
fn replace(
list: &ChildrenList,
prev: Option<&Node>,
removed: &Node,
added: &[&Node],
next: Option<&Node>,
) {
let index = list.last_index.get();
if removed == &*list.last_visited.get().unwrap() {
let visited = match (prev, added, next) {
(None, _, None) => {
// Such cases where parent had only one child should
// have been changed into ChildrenMutation::ReplaceAll
// by ChildrenMutation::replace().
unreachable!()
},
(_, added, _) if !added.is_empty() => added[0],
(_, _, Some(next)) => next,
(Some(prev), _, None) => {
list.last_index.set(index - 1u32);
prev
},
};
list.last_visited.set(Some(visited));
} else if added.len() != 1 {
// The replaced child isn't the last visited one, and there are
// 0 or more than 1 nodes to replace it. Special care must be
// given to update the state of that ChildrenList.
match (prev, next) {
(Some(_), None) => {},
(None, Some(next)) => {
list.last_index.set(index - 1);
prepend(list, added, next);
},
(Some(_), Some(_)) => {
list.reset();
},
(None, None) => unreachable!(),
}
}
}
match *mutation {
ChildrenMutation::Append { .. } => {},
ChildrenMutation::Insert { .. } => {
self.reset();
},
ChildrenMutation::Prepend { added, next } => {
prepend(self, added, next);
},
ChildrenMutation::Replace {
prev,
removed,
added,
next,
} => {
replace(self, prev, removed, added, next);
},
ChildrenMutation::ReplaceAll { added, .. } => {
let len = added.len();
let index = self.last_index.get();
if len == 0 {
self.last_visited.set(None);
self.last_index.set(0u32);
} else if index < len as u32 {
self.last_visited.set(Some(added[index as usize]));
} else {
// Setting last visited to parent's last child serves no purpose,
// so the middle is arbitrarily chosen here in case the caller
// wants random access.
let middle = len / 2;
self.last_visited.set(Some(added[middle]));
self.last_index.set(middle as u32);
}
},
ChildrenMutation::ChangeText => {},
}
}
fn reset(&self) {
self.last_visited.set(self.node.GetFirstChild().as_deref());
self.last_index.set(0u32);
}
}
// Labels lists: There might be room for performance optimization
// analogous to the ChildrenMutation case of a children list,
// in which we can keep information from an older access live
// if we know nothing has happened that would change it.
// However, label relationships can happen from further away
// in the DOM than parent-child relationships, so it's not as simple,
// and it's possible that tracking label moves would end up no faster
// than recalculating labels.
#[derive(JSTraceable, MallocSizeOf)]
#[unrooted_must_root_lint::must_root]
pub struct LabelsList {
element: Dom<HTMLElement>,
}
impl LabelsList {
pub fn new(element: &HTMLElement) -> LabelsList {
LabelsList {
element: Dom::from_ref(element),
}
}
<|fim▁hole|>
pub fn item(&self, index: u32) -> Option<DomRoot<Node>> {
self.element.label_at(index)
}
}
// Radio node lists: There is room for performance improvement here;
// a form is already aware of changes to its set of controls,
// so a radio list can cache and cache-invalidate its contents
// just by hooking into what the form already knows without a
// separate mutation observer. FIXME #25482
#[derive(Clone, Copy, JSTraceable, MallocSizeOf)]
pub enum RadioListMode {
ControlsExceptImageInputs,
Images,
}
#[derive(JSTraceable, MallocSizeOf)]
#[unrooted_must_root_lint::must_root]
pub struct RadioList {
form: Dom<HTMLFormElement>,
mode: RadioListMode,
name: Atom,
}
impl RadioList {
    /// Creates a new `RadioList` over the controls of `form` that match
    /// `mode` and `name`.
    pub fn new(form: &HTMLFormElement, mode: RadioListMode, name: Atom) -> RadioList {
        RadioList {
            form: Dom::from_ref(form),
            // field-init shorthand (clippy: redundant_field_names)
            mode,
            name,
        }
    }

    /// Number of matching controls; recomputed by the form on every call.
    pub fn len(&self) -> u32 {
        self.form.count_for_radio_list(self.mode, &self.name)
    }

    /// Returns the `index`-th matching control, if any.
    pub fn item(&self, index: u32) -> Option<DomRoot<Node>> {
        self.form.nth_for_radio_list(index, self.mode, &self.name)
    }
}
#[derive(JSTraceable, MallocSizeOf)]
#[unrooted_must_root_lint::must_root]
pub struct ElementsByNameList {
document: Dom<Document>,
name: DOMString,
}
impl ElementsByNameList {
    /// Creates a new live list of `document`'s elements whose name value
    /// equals `name`.
    pub fn new(document: &Document, name: DOMString) -> ElementsByNameList {
        ElementsByNameList {
            document: Dom::from_ref(document),
            // field-init shorthand (clippy: redundant_field_names)
            name,
        }
    }

    /// Number of matching elements; recomputed by the document on each call.
    pub fn len(&self) -> u32 {
        self.document.elements_by_name_count(&self.name)
    }

    /// Returns the `index`-th matching element, if any.
    pub fn item(&self, index: u32) -> Option<DomRoot<Node>> {
        // `map` expresses the Some-wrapping directly
        // (clippy: bind_instead_of_map; was `.and_then(|n| Some(...))`)
        self.document
            .nth_element_by_name(index, &self.name)
            .map(|n| DomRoot::from_ref(&*n))
    }
}<|fim▁end|>
|
pub fn len(&self) -> u32 {
self.element.labels_count()
}
|
<|file_name|>setJustifyContent.test.js<|end_file_name|><|fim▁begin|>import Yoga from '@react-pdf/yoga';
import setJustifyContent from '../../src/node/setJustifyContent';
describe('node setJustifyContent', () => {
const mock = jest.fn();
const node = { _yogaNode: { setJustifyContent: mock } };
beforeEach(() => {
mock.mockReset();
});
test('should return node if no yoga node available', () => {
const emptyNode = { box: { width: 10, height: 20 } };
const result = setJustifyContent(null)(emptyNode);
expect(result).toBe(emptyNode);
});
test('Should set center', () => {
const result = setJustifyContent('center')(node);
expect(mock.mock.calls).toHaveLength(1);
expect(mock.mock.calls[0][0]).toBe(Yoga.JUSTIFY_CENTER);
expect(result).toBe(node);
});
test('Should set flex-end', () => {
const result = setJustifyContent('flex-end')(node);
expect(mock.mock.calls).toHaveLength(1);
expect(mock.mock.calls[0][0]).toBe(Yoga.JUSTIFY_FLEX_END);
expect(result).toBe(node);
});
test('Should set flex-start', () => {
const result = setJustifyContent('flex-start')(node);
expect(mock.mock.calls).toHaveLength(1);<|fim▁hole|>
test('Should set space-between', () => {
const result = setJustifyContent('space-between')(node);
expect(mock.mock.calls).toHaveLength(1);
expect(mock.mock.calls[0][0]).toBe(Yoga.JUSTIFY_SPACE_BETWEEN);
expect(result).toBe(node);
});
test('Should set space-around', () => {
const result = setJustifyContent('space-around')(node);
expect(mock.mock.calls).toHaveLength(1);
expect(mock.mock.calls[0][0]).toBe(Yoga.JUSTIFY_SPACE_AROUND);
expect(result).toBe(node);
});
test('Should set space-evenly', () => {
const result = setJustifyContent('space-evenly')(node);
expect(mock.mock.calls).toHaveLength(1);
expect(mock.mock.calls[0][0]).toBe(Yoga.JUSTIFY_SPACE_EVENLY);
expect(result).toBe(node);
});
});<|fim▁end|>
|
expect(mock.mock.calls[0][0]).toBe(Yoga.JUSTIFY_FLEX_START);
expect(result).toBe(node);
});
|
<|file_name|>SolarSystemUninhabited.java<|end_file_name|><|fim▁begin|>package calc.lib;
/** Class (POJO) used to store Uninhabited Solar System JSON objects
*
* @author Carlin Robertson
* @version 1.0
*
*/
public class SolarSystemUninhabited {
private String[] information;
private String requirePermit;
private Coords coords;
private String name;
public String[] getInformation ()
{
return information;
}
public void setInformation (String[] information)
{
this.information = information;
}
public String getRequirePermit ()
{
return requirePermit;
}
public void setRequirePermit (String requirePermit)
{
this.requirePermit = requirePermit;
}
public Coords getCoords ()
{
return coords;
}
public void setCoords (Coords coords)
{
this.coords = coords;<|fim▁hole|> {
return name;
}
public void setName (String name)
{
this.name = name;
}
}<|fim▁end|>
|
}
public String getName ()
|
<|file_name|>test_vmware_methods.py<|end_file_name|><|fim▁begin|>"""This module contains tests that exercise the canned VMware Automate stuff."""
from textwrap import dedent
import fauxfactory
import pytest
from widgetastic.widget import View
from widgetastic_patternfly import Dropdown
from cfme import test_requirements
from cfme.common import BaseLoggedInPage
from cfme.infrastructure.provider.virtualcenter import VMwareProvider
from cfme.utils.generators import random_vm_name
from cfme.utils.log import logger
from cfme.utils.wait import wait_for
pytestmark = [
test_requirements.automate,
pytest.mark.meta(server_roles="+automate"),
pytest.mark.long_running,
pytest.mark.ignore_stream("upstream"),<|fim▁hole|> pytest.mark.provider(
[VMwareProvider], required_fields=[['provisioning', 'template']],
scope="module")
]
@pytest.fixture(scope="module")
def cls(domain):
    # Copy the canned ManageIQ/System/Request automate class into the test
    # domain so methods/instances can be added without touching the
    # locked ManageIQ domain.
    original_class = domain.parent\
        .instantiate(name='ManageIQ')\
        .namespaces.instantiate(name='System')\
        .classes.instantiate(name='Request')
    original_class.copy_to(domain=domain)
    # Return the copy that now lives inside the test domain
    return domain.namespaces.instantiate(name='System').classes.instantiate(name='Request')
@pytest.fixture(scope="module")
def testing_group(appliance):
    """Create a custom button group for VM/Instance objects; removed at teardown."""
    group_desc = fauxfactory.gen_alphanumeric()
    group = appliance.collections.button_groups.create(
        text=group_desc,
        hover=group_desc,
        type=appliance.collections.button_groups.VM_INSTANCE
    )
    yield group
    # teardown: best-effort delete (no-op if it is already gone)
    group.delete_if_exists()
@pytest.fixture(scope="function")
def testing_vm(setup_provider, provider):
    """Provision a throwaway VM from the provider's full_template.

    Skips the test when the provider data has no full_template entry;
    the VM is always cleaned up on the provider afterwards.
    """
    collection = provider.appliance.provider_based_collection(provider)
    try:
        template_name = provider.data['templates']['full_template']['name']
    except KeyError:
        pytest.skip('Unable to identify full_template for provider: {}'.format(provider))
    vm = collection.instantiate(
        random_vm_name("ae-hd"),
        provider,
        template_name=template_name
    )
    try:
        vm.create_on_provider(find_in_cfme=True, allow_skip="default")
        yield vm
    finally:
        # ensure the VM is removed even if provisioning or the test failed
        vm.cleanup_on_provider()
def test_vmware_vimapi_hotadd_disk(
appliance, request, testing_group, testing_vm, domain, cls):
"""Tests hot adding a disk to vmware vm. This test exercises the `VMware_HotAdd_Disk` method,
located in `/Integration/VMware/VimApi`
Polarion:
assignee: ghubale
initialEstimate: 1/8h
casecomponent: Automate
caseimportance: critical
tags: automate
testSteps:
1. It creates an instance in ``System/Request`` that can be accessible from eg. button
2. Then it creates a button, that refers to the ``VMware_HotAdd_Disk`` in ``Request``.
The button shall belong in the VM and instance button group.
3. After the button is created, it goes to a VM's summary page, clicks the button.
4. The test waits until the capacity of disks is raised.
Bugzilla:
1211627
1311221
"""
meth = cls.methods.create(
name=fauxfactory.gen_alpha(15, start="load_value_"),
script=dedent('''\
# Sets the capacity of the new disk.
$evm.root['size'] = 1 # GB
exit MIQ_OK
'''))
request.addfinalizer(meth.delete_if_exists)
# Instance that calls the method and is accessible from the button
instance = cls.instances.create(
name=fauxfactory.gen_alpha(23, start="VMware_HotAdd_Disk_"),
fields={
"meth4": {'value': meth.name}, # To get the value
"rel5": {'value': "/Integration/VMware/VimApi/VMware_HotAdd_Disk"},
},
)
request.addfinalizer(instance.delete_if_exists)
# Button that will invoke the dialog and action
button_name = fauxfactory.gen_alphanumeric()
button = testing_group.buttons.create(
text=button_name,
hover=button_name,
system="Request",
request=instance.name)
request.addfinalizer(button.delete_if_exists)
def _get_disk_capacity():
view = testing_vm.load_details(refresh=True)
return view.entities.summary('Datastore Allocation Summary').get_text_of('Total Allocation')
original_disk_capacity = _get_disk_capacity()
logger.info('Initial disk allocation: %s', original_disk_capacity)
class CustomButtonView(View):
custom_button = Dropdown(testing_group.text)
view = appliance.browser.create_view(CustomButtonView)
view.custom_button.item_select(button.text)
view = appliance.browser.create_view(BaseLoggedInPage)
view.flash.assert_no_error()
try:
wait_for(
lambda: _get_disk_capacity() > original_disk_capacity, num_sec=180, delay=5)
finally:
logger.info('End disk capacity: %s', _get_disk_capacity())<|fim▁end|>
|
pytest.mark.tier(3),
|
<|file_name|>binary_search_test.cpp<|end_file_name|><|fim▁begin|>#include <gtest/gtest.h>
#include "loquat/misc/binary_search.hpp"
TEST(BinarySearchTest, Integer){
for(int l = -16; l <= 16; ++l){
for(int r = l; r <= 16; ++r){
for(int expect = l; expect <= r; ++expect){
const auto actual = loquat::binary_search(
l, r, [=](int x){ return x < expect; });
EXPECT_EQ(expect, actual);
}<|fim▁hole|> }
}
}
TEST(BinarySearchTest, Floating){
for(int li = -16; li <= 16; ++li){
for(int ri = li; ri <= li; ++ri){
const double l = li * 1.1, r = ri * 1.1;
for(double expect = l; expect < r; expect += 0.2){
const auto actual = loquat::binary_search(
l, r, [=](double x){ return x < expect; });
EXPECT_DOUBLE_EQ(expect, actual);
}
const auto min_actual = loquat::binary_search(
l, r, [=](double){ return true; });
EXPECT_DOUBLE_EQ(l, min_actual);
const auto max_actual = loquat::binary_search(
l, r, [=](double){ return false; });
EXPECT_DOUBLE_EQ(r, max_actual);
}
}
}<|fim▁end|>
| |
<|file_name|>upgma.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <string>
#include <vector>
#include <algorithm>
#include <limits>
#include <getopt.h>

#include "upgma.h"
#include "utils.h"
#include "seq_utils.h"
#include "sequence.h"
#include "seq_reader.h"
#include "node.h"
#include "tree.h"
#include "tree_utils.h"
UPGMA::UPGMA (std::istream* pios):num_taxa_(0), num_char_(0), newickstring_(""),
        tree_(nullptr) {
    // Read the alignment from the stream and cache its dimensions.
    std::string alphaName; // not used, but required by reader
    seqs_ = ingest_alignment(pios, alphaName);
    num_taxa_ = static_cast<int>(seqs_.size());
    // NOTE(review): assumes at least one sequence was read; seqs_[0] is out
    // of bounds on empty input -- confirm the reader guarantees this.
    num_char_ = static_cast<int>(seqs_[0].get_length());
    // check that it is aligned (doesn't make sense otherwise)
    if (!is_aligned(seqs_)) {
        std::cerr << "Error: sequences are not aligned. Exiting." << std::endl;
        exit(0);
    }
    names_ = collect_names(seqs_);
    // Precompute the full pairwise distance matrix used by construct_tree()
    full_distmatrix_ = build_matrix();
}
std::vector< std::vector<double> > UPGMA::build_matrix () {<|fim▁hole|> // 1) skip self comparisons
// 2) only calculate one half of matrix (i.e., no duplicate calcs)
auto nt = static_cast<size_t>(num_taxa_);
std::vector< std::vector<double> > distances(nt,
std::vector<double>(nt, 0.0));
double tempScore = 0.0;
for (size_t i = 0; i < nt; i++) {
std::string seq1 = seqs_[i].get_sequence();
for (size_t j = (i + 1); j < nt; j++) {
std::string seq2 = seqs_[j].get_sequence();
// get distance
tempScore = static_cast<double>(calc_hamming_dist(seq1, seq2));
// put scale in terms of number of sites. original version did not do this
tempScore /= static_cast<double>(num_char_);
// put in both top and bottom of matrix, even though only top is used
distances[i][j] = distances[j][i] = tempScore;
}
}
// just for debugging
/*
std::cout << "\t";
for (unsigned int i = 0; i < names_.size(); i++) {
std::cout << names_[i] << "\t";
}
std::cout << std::endl;
for (int i = 0; i < num_taxa_; i++) {
std::cout << names_[i] << "\t";
for (int j = 0; j < num_taxa_; j++) {
std::cout << distances[i][j] << "\t";
}
std::cout << std::endl;
}
*/
return distances;
}
// find smallest pairwise distance
// will always find this on the top half of the matrix i.e., mini1 < mini2
double UPGMA::get_smallest_distance (const std::vector< std::vector<double> >& dmatrix,
unsigned long& mini1, unsigned long& mini2) {
// super large value
double minD = 99999999999.99;
size_t numseqs = dmatrix.size();
for (size_t i = 0; i < (numseqs - 1); i++) {
auto idx = static_cast<size_t>(std::min_element(dmatrix[i].begin() + (i + 1),
dmatrix[i].end()) - dmatrix[i].begin());
if (dmatrix[i][idx] < minD) {
minD = dmatrix[i][idx];
mini1 = i;
mini2 = idx;
}
}
return minD;
}
void UPGMA::construct_tree () {
// location of minimum distance (top half)
unsigned long ind1 = 0;
unsigned long ind2 = 0;
// initialize
std::vector< std::vector<double> > dMatrix = full_distmatrix_;
Node * anc = nullptr; // new node, ancestor of 2 clusters
Node * left = nullptr;
Node * right = nullptr;
auto nt = static_cast<size_t>(num_taxa_);
size_t numClusters = nt;
// keep list of nodes left to be clustered. initially all terminal nodes
std::vector<Node *> nodes(nt);
for (size_t i = 0; i < nt; i++) {
auto * nd = new Node();
nd->setName(names_[i]);
nd->setHeight(0.0);
nodes[i] = nd;
}
while (numClusters > 1) {
// 1. get smallest distance present in the matrix
double minD = get_smallest_distance(dMatrix, ind1, ind2);
left = nodes[ind1];
right = nodes[ind2];
// 2. create new ancestor node
anc = new Node();
// 3. add nodes in new cluster above as children to new ancestor
anc->addChild(*left); // addChild calls setParent
anc->addChild(*right);
// 4. compute edgelengths: half of the distance
// edgelengths must subtract the existing height
double newHeight = 0.5 * minD;
left->setBL(newHeight - left->getHeight());
right->setBL(newHeight - right->getHeight());
// make sure to set the height of anc for the next iteration to use
anc->setHeight(newHeight);
// 5. compute new distance matrix (1 fewer rows & columns)
// new distances are proportional averages (size of clusters)
// new cluster is placed first (row & column)
std::vector<double> avdists(numClusters, 0.0);
double Lweight = left->isExternal() ? 1.0 : static_cast<double>(left->getChildCount());
double Rweight = right->isExternal() ? 1.0 : static_cast<double>(right->getChildCount());
for (unsigned long i = 0; i < numClusters; i++) {
avdists[i] = ((dMatrix[ind1][i] * Lweight) + (dMatrix[ind2][i] * Rweight)) / (Lweight + Rweight);
}
numClusters--;
std::vector< std::vector<double> > newDistances(numClusters,
std::vector<double>(numClusters, 0.0));
// put in distances to new clusters first
double tempDist = 0.0;
unsigned long count = 0;
for (size_t i = 0; i < nodes.size(); i++) {
if (i != ind1 && i != ind2) {
count++;
tempDist = avdists[i];
newDistances[0][count] = tempDist;
newDistances[count][0] = tempDist;
}
}
// now, fill in remaining
unsigned long icount = 1;
auto ndsize = nodes.size();
for (size_t i = 0; i < ndsize; i++) {
if (i != ind1 && i != ind2) {
size_t jcount = 1;
for (size_t j = 0; j < ndsize; j++) {
if (j != ind1 && j != ind2) {
newDistances[icount][jcount] = dMatrix[i][j];
newDistances[jcount][icount] = dMatrix[i][j];
jcount++;
}
}
icount++;
}
}
// replace distance matrix
dMatrix = newDistances;
// 6. finally, update node vector (1 shorter). new node always goes first)
std::vector<Node *> newNodes(numClusters);
newNodes[0] = anc;
unsigned long counter = 1;
for (unsigned long i = 0; i < ndsize; i++) {
if (i != ind1 && i != ind2) {
newNodes[counter] = nodes[i];
counter++;
}
}
// replace node vector
nodes = newNodes;
}
tree_ = new Tree(anc);
tree_->setEdgeLengthsPresent(true); // used by newick writer
}
// Return the UPGMA tree in newick format.
// The tree is built lazily on first call; the serialized string is cached so
// subsequent calls do not re-serialize the tree. (Previously the cache check
// only gated tree construction, and getNewickString() ran on every call.)
std::string UPGMA::get_newick () {
    if (newickstring_.empty()) {
        construct_tree();
        newickstring_ = getNewickString(tree_);
    }
    return newickstring_;
}
// Accessor: return a copy of the full pairwise distance matrix.
std::vector< std::vector<double> > UPGMA::get_matrix () const {
    auto matrix_copy = full_distmatrix_;
    return matrix_copy;
}
| |
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
// Re-export the CallToAction component as this module's default export.
// NOTE(review): `export default from` is a Babel export-extensions proposal,
// not standard ES module syntax — confirm the build is configured for it.
export default from './CallToAction.jsx';
|
<|file_name|>simple.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
def get(self):
return self.field
a = Foo()
a.set("hello world")
z = a.get()
print z
z
a<|fim▁end|>
|
class Foo(object):
    # Minimal mutable holder: `set` stores a value on the instance as `field`;
    # the paired `get` accessor (defined elsewhere in this file) returns it.
    def set(self, value):
        self.field = value
|
<|file_name|>schema.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------------------
# Copyright (c) 2011-2015, The American Gut Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from psycopg2 import connect
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
import agr
# table definitions, these are of the form: [(table_name, table_definition)].
# the motivation for this structure is to allow for checking if tables exist
# easily (see schema_is_sane)
tables = [
('biom',
"""create table biom (
sample varchar,
biom json,
biomtxt text,
constraint pk_biom primary key(sample)
)"""),
('metadata',
"""create table metadata (
sample varchar,
category varchar,
value varchar,
constraint pk_metadata primary key (sample, category),
constraint fk_metadata foreign key (sample) references biom(sample)
)"""),
('fastq',
"""create table fastq (
sample varchar,
url varchar,
constraint pk_fastq primary key (sample),
constraint fk_fastq foreign key (sample) references biom(sample),
constraint uc_fastq unique (url)
)"""),
('state',
"""create table state (
biom_sha varchar)""")
]
def database_connectivity(user=agr.db_user, password=agr.db_password,
                          host=agr.db_host, dbname=agr.db_name):
    """Determine if we can connect to the database

    Parameters
    ----------
    user : str
        The database username
    password : str
        The password for the user
    host : str
        The database host
    dbname : str
        The name of the database to connect to

    Returns
    -------
    bool
        True if a connection was made, False otherwise
    """
    try:
        c = connect(user=user, password=password, host=host, dbname=dbname)
    except Exception:
        # Any psycopg2 connection failure (bad credentials, unreachable host,
        # missing database) means "no connectivity". A bare `except:` would
        # also swallow KeyboardInterrupt/SystemExit, so catch Exception only.
        return False
    else:
        c.close()
        return True
def database_exists(user=agr.db_user, password=agr.db_password,
                    host=agr.db_host, dbname=agr.db_name):
    """Determine if the database exists

    Parameters
    ----------
    user : str
        The database username
    password : str
        The password for the user
    host : str
        The database host
    dbname : str
        The name of the database to connect to

    Returns
    -------
    bool
        True if the database exists, False otherwise
    """
    try:
        c = connect(user=user, password=password, host=host, dbname=dbname)
    except Exception:
        # Connecting to a nonexistent database raises; treat any connection
        # failure as "does not exist". Avoid bare `except:` so that
        # KeyboardInterrupt/SystemExit still propagate.
        return False
    else:
        c.close()
        return True
def schema_is_sane():
    """Check to see if the expected tables exist

    Notes
    -----
    Assumes we have connectivity and the database exists.

    The structure of the tables is _not_ verified, only checks that the table
    names exist.

    Database credentials are sourced from the agr module (e.g., the environment
    configuration).

    Returns
    -------
    bool
        The expected tables appear to exist
    """
    c = connect(user=agr.db_user, password=agr.db_password,
                host=agr.db_host, dbname=agr.db_name)
    try:
        cur = c.cursor()
        for table_name, _ in tables:
            # Parameterized information_schema lookup; table_name values come
            # from the module-level `tables` list.
            cur.execute("""select exists(select *
                           from information_schema.tables
                           where table_name=%s)""", [table_name])
            if not cur.fetchone()[0]:
                return False
        return True
    finally:
        # Previously the connection was leaked; always close it.
        c.close()
def schema_has_data():
    """Check to see if the schema appears to have data

    Notes
    -----
    Assumes we have connectivity and the database exists.

    The structure of the tables is _not_ verified, only checks that there
    appears to be rows in the tables.

    Database credentials are sourced from the agr module (e.g., the environment
    configuration).

    Returns
    -------
    bool
        If all of the tables appear to have data.
    """
    if not schema_is_sane():
        return False

    c = connect(user=agr.db_user, password=agr.db_password,
                host=agr.db_host, dbname=agr.db_name)
    try:
        cur = c.cursor()
        for table_name, _ in tables:
            # Table names cannot be bound as query parameters; interpolation is
            # safe here because they come from the internal `tables` constant.
            cur.execute("select count(1) from %s" % table_name)
            if cur.fetchone()[0] == 0:
                return False
        return True
    finally:
        # Previously the connection was leaked; always close it.
        c.close()
def create_database():
    """Create the database and the schema

    Notes
    -----
    Assumes we have connectivity.

    Database credentials are sourced from the agr module (e.g., the environment
    configuration).
    """
    c = connect(user=agr.admin_db_user, password=agr.admin_db_password,
                host=agr.db_host)
    # DDL such as drop/create database must run outside a transaction.
    c.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    try:
        cur = c.cursor()
        # Drop the configured database (was hardcoded 'ag_rest', which could
        # silently diverge from agr.db_name).
        cur.execute('drop database if exists %s' % agr.db_name)
        cur.execute('create database %s' % agr.db_name)
        cur.close()
    finally:
        c.close()
    create_tables()
def create_tables():
    """Create the tables defined in the module-level `tables` list."""
    c = connect(user=agr.admin_db_user, password=agr.admin_db_password,
                host=agr.db_host, dbname=agr.db_name)
    c.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    try:
        cur = c.cursor()
        # `tables` is ordered so foreign-key targets are created first.
        for _, table in tables:
            cur.execute(table)
    finally:
        # Previously the connection was leaked; always close it.
        c.close()
if __name__ == '__main__':
    import sys
    # Bail out early if the database server cannot be reached at all.
    if not database_connectivity():
        sys.stderr.write("Cannot connect to the database\n")
        sys.exit(1)
    # Outside of a test environment, only an explicit FORCE_CREATE_TABLES
    # argument is honored; anything else is refused.
    # NOTE(review): sys.argv[1] raises IndexError when no argument is given —
    # confirm callers always pass one.
    if not agr.test_environment:
        if sys.argv[1] == 'FORCE_CREATE_TABLES':
            create_tables()
            sys.exit(0)
        else:
            sys.stderr.write("This does not appear to be a test environment\n")
            sys.exit(1)
    # Test environment: nothing to do when the database already exists, has a
    # sane schema, and contains data; otherwise (re)create it from scratch.
    if database_exists() and schema_is_sane() and schema_has_data():
        sys.exit(0)
    else:
        create_database()
|
Assumes we have connectivity.
|
<|file_name|>reification_macros.rs<|end_file_name|><|fim▁begin|>#![macro_use]
// This isn't as bad as it looks.
// I mean, it's pretty bad, don't get me wrong...
//
// The purpose is to generate `Reifiable` `impl`s
// for any `enum` or `struct`.
//
// Basically, `reify` pattern-matches to destructure the actual Rust value,
// and then constructs a `Value` of the corresponding shape.
//
// And `reflect` does the opposite.
//
// But, in the process, I have to work around
// what feels like every single limitation of `macro_rules!` in Rust,
// as if I were aiming for them.
//
// Who wrote that piece of junk, anyway?
//
// This should be rewritten, now that user-defined derives are stable.
macro_rules! Reifiable {
// HACK: everything is parameterized over 't...
/* struct */
((lifetime) $(#[$($whatever:tt)*])* $(pub)* struct $name:ident<$lifetime:tt> { $($contents:tt)* }) => {
Reifiable!((remove_p_and_a) struct $name<$lifetime> @ { $($contents)*, } );
// HACK: we add commas to the end of the contents, becuase it's easier to parse
// if they end in a comma (this breaks `structs` that already have final commas...)
};
((lifetime) $(#[$($whatever:tt)*])* $(pub)* struct $name:ident<$lifetime:tt $(, $ty_param_ty:ident)*> { $($contents:tt)* }) => {
Reifiable!((remove_p_and_a) struct
$name<$lifetime $(, $ty_param_ty)*> @ <$($ty_param_ty),*>
{ $($contents)*, } );
};
// no lifetime parameter
(() $(#[$($whatever:tt)*])* $(pub)* struct $name:ident$(<$($ty_param_ty:ident),*>)* { $($contents:tt)* }) => {
Reifiable!((remove_p_and_a) struct
$name$(<$($ty_param_ty),*>)* @ $(<$($ty_param_ty),*>)*
{ $($contents)*, } );
};
// TODO: This lacks support for type-parameterized `struct`s ...
// done! Go to `make_impl`!
((remove_p_and_a) $(pub)* struct $name:ident
$(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* { $(,)* }
$( [ $( $accum:tt )* ] )* ) => {
Reifiable!((make_impl) struct $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)*
{ $($($accum)*)* } );
};
// remove `pub`
((remove_p_and_a) $(pub)* struct $name:ident
$(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* {
pub $($contents:tt)*
} $( [ $( $accum:tt )* ] )* ) => {
Reifiable!((remove_p_and_a) struct $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)* {
$( $contents )*
} $( [ $($accum)* ] )* );
};
// remove attributes (such as `///`!)
((remove_p_and_a) $(pub)* struct $name:ident
$(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* {
#[$($whatever:tt)*] $($contents:tt)*
} $( [ $( $accum:tt )* ] )* ) => {
Reifiable!((remove_p_and_a) struct $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)* {
$( $contents )*
} $( [ $($accum)* ] )*);
};
// no `pub` or attr this time
((remove_p_and_a) $(pub)* struct $name:ident
$(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* {
$field_car:ident : $t_car:ty, $($cdr:tt)*
} $( [ $( $accum:tt )* ] )* ) => {
Reifiable!((remove_p_and_a) struct $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)* {
$( $cdr )*
} [ $field_car : $t_car $(, $($accum)* )* ]);
};
((make_impl) struct $name:ident
$(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)*
{ $( $field:ident : $t:ty),* }) => {
impl<'t $($(, $ty_param_ty : crate::runtime::reify::Reifiable)*)*>
crate::runtime::reify::Reifiable
for $name<$($($ty_param),*)*> {
fn ty() -> crate::ast::Ast {
type_defn_wrapper!($(<$($ty_param_ty),*>)* => { "Type" "struct" :
"component_name" => [@"c" $( (at stringify!($field)) ),* ],
"component" =>
// TODO: unless we specify arguments with the same name as parameters,
// we get bogus results
// (we get
// ∀ K V. μ Assoc. struct{ n: Option<AssocNode<ident rust_usize>> }
// rather than
// ∀ K V. μ Assoc. struct{ n: Option<AssocNode<K V>> }
[@"c" $( (, <$t as crate::runtime::reify::Reifiable>::ty_invocation() ) ),*]
})
}
fn ty_name() -> crate::name::Name { crate::name::n(stringify!($name)) }
fn concrete_arguments() -> Option<Vec<crate::ast::Ast>> {
// HACK: at runtime, check to see if we need type parameters by making a vector
let argument_list : Vec<&str> = vec![$( $( stringify!($ty_param_ty) ),* )*];
if argument_list.len() > 0 {
Some(vec![ $( $( <$ty_param_ty>::ty_invocation() ),* )* ])
} else {
None
}
}
fn reify(&self) -> crate::runtime::eval::Value {
crate::runtime::eval::Struct(assoc_n!(
$( (stringify!($field)) => self.$field.reify()),* ))
}
#[allow(unused_variables)]
fn reflect(v: &crate::runtime::eval::Value) -> Self {
extract!((v) crate::runtime::eval::Struct = (ref env) =>
$name {
$( $field :
<$t as crate::runtime::reify::Reifiable>::reflect(
env.find(&crate::name::n(stringify!($field))).unwrap())),*
})
}
}
};
/* enum */
// `lifetime` means that we need to pull off a lifetime argument.
// The whole set of type parameters comes after `name`;
// we make a just-the-types type parameters after the @.
((lifetime) $(#[$($whatever:tt)*])* $(pub)* enum $name:ident<$lifetime:tt> { $( $contents:tt )* }) => {
Reifiable!((remove_attr) enum $name<$lifetime> @ { $( $contents )* , });
};
((lifetime) $(#[$($whatever:tt)*])* $(pub)* enum $name:ident<$lifetime:tt $(, $ty_param_ty:ident)*> {
$( $contents:tt )*
}) => {
Reifiable!((remove_attr) enum $name<$lifetime $(, $ty_param_ty)*> @ <$($ty_param_ty),*> {
$( $contents )* ,
});
};
(() $(#[$($whatever:tt)*])* $(pub)* enum $name:ident$(<$($ty_param_ty:ident),*>)* { $( $contents:tt )* }) => {
Reifiable!((remove_attr) enum $name$(<$($ty_param_ty),*>)* @ $(<$($ty_param_ty),*>),* {
$( $contents )* ,
});
};
// done! (has to go first)
((remove_attr) enum $name:ident$(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* { $(,)* }
$([ $($accum:tt)* ])*) => {
Reifiable!((make_impl) enum $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)*
{ $($( $accum )*)* } );
};
// drop the attribute
((remove_attr) enum $name:ident$(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* {
#[ $($whatever:tt)* ] $( $contents:tt )*
} $([ $($accum:tt)* ])* ) => {
Reifiable!((remove_attr) enum $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)* {
$( $contents )*
} $([ $($accum)* ])* );
};
// no attribute this time
((remove_attr) enum $name:ident$(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* {
$choice:ident$(( $($part:ty),* ))*, $( $contents:tt )*
} $([ $($accum:tt)* ])*) => {
Reifiable!((remove_attr) enum $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)* {
$( $contents )*
} [ $choice $(( $($part),* ))* , $($($accum)*)* ]);
};
// The `$((...))*` and `$(<...>)*` patterns deal with the fact that the `()` and `<>`
// might be completely absent (the `*` matches 0 or 1 times)
// The second set of type parameters are those that are not lifetime params...
((make_impl) enum $name:ident$(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* {
$($choice:ident$(( $($part:ty),* ))* ,)*
}) => {
impl<'t $($(, $ty_param_ty : crate::runtime::reify::Reifiable)*)*>
crate::runtime::reify::Reifiable
for $name<$($($ty_param),*)*> {
fn ty() -> crate::ast::Ast {
type_defn_wrapper!($(<$($ty_param_ty),*>)* => { "Type" "enum" :
"name" => [@"c" $( (at (stringify!($choice))) ),* ],
"component" => [@"c" $( [ $($(
(, <$part as crate::runtime::reify::Reifiable>::ty_invocation() )
),*)*]),*]
})
}
fn ty_name() -> crate::name::Name { crate::name::n(stringify!($name)) }
fn concrete_arguments() -> Option<Vec<crate::ast::Ast>> {
// HACK: at runtime, check to see if we need type parameters by making a vector
let argument_list : Vec<&str> = vec![$( $( stringify!($ty_param_ty) ),* )*];
if argument_list.len() > 0 {
Some(vec![ $( $( <$ty_param_ty>::ty_invocation() ),* )* ])
} else {
None
}
}
#[allow(unused_mut)] // rustc bug! `v` has to be mutable, but it complains
fn reify(&self) -> crate::runtime::eval::Value {
match *self { $(
choice_pat!( ( $($($part),*)* ) (a b c d e f g h i j k l m n o p q r s t)
$name::$choice ; ())
=> {
let mut v = vec![];
choice_vec!( ( $($($part),*)* ) (a b c d e f g h i j k l m n o p q r s t)
v);
crate::runtime::eval::Value::Enum(crate::name::n(stringify!($choice)), v)
}
),* }
}
#[allow(unused_variables)]
fn reflect(v: &crate::runtime::eval::Value) -> Self {
extract!((v) crate::runtime::eval::Enum = (ref choice, ref parts) => {
make_enum_reflect!(choice; parts; $name$(<$($ty_param),*>)*/**/
{ $($choice $(( $($part),* ))*),* } )
})
}
}
}
}
// makes a pattern matching an enum with _n_ components, using the first _n_
// of the input names (be sure to supply enough names!)
macro_rules! choice_pat {
    // Recursive case: one type remains; consume one identifier name and push
    // it onto the accumulator. (The accumulated order ends up reversed;
    // `choice_vec!` recurses before pushing, which compensates.)
    ( ($t_car:ty $(, $t_cdr:ty)* ) ($i_car:ident $($i_cdr:ident)*)
       $choice:path; ($($accum:ident),*)) => {
        choice_pat!( ($($t_cdr),* ) ($($i_cdr)*) $choice; ($i_car $(, $accum)*))
    };
    // Base case, zero components: the pattern is a bare unit variant.
    ( ( ) ($($i_cdr:ident)*) $choice:path; ( ) ) => {
        $choice
    };
    // Base case, one or more components: bind each component by reference.
    ( ( ) ($($i_cdr:ident)*) $choice:path; ( $($accum:ident),+ ) ) => {
        $choice($(ref $accum),*)
    };
}
macro_rules! choice_vec {
    /* the types are ignored, except for how many of them there are */
    // Recurse over the remaining names first, then push; combined with the
    // reversed binding order produced by `choice_pat!`, this reifies the
    // enum's components into `$v` in their original order.
    ( ($t_car:ty $(, $t_cdr:ty)*) ($i_car:ident $($i_cdr:ident)*) $v:expr) => { {
        choice_vec!( ($($t_cdr),*) ($($i_cdr)*) $v);
        $v.push($i_car.reify());
    } };
    // Base case: no components left to push.
    ( ( ) ($($i_cdr:ident)*) $v:expr) => { {} }
}
// workaround for MBE limitation; need to walk choices, but *not* ty_param,
// so we use this to manually walk over the choices
macro_rules! make_enum_reflect {
    // Walk the enum's choices one at a time: if the runtime choice name
    // matches the head choice, unpack its parts; otherwise recurse on the
    // remaining choices.
    ($choice_name:ident; $parts_name:ident; $name:ident$(<$($ty_param:tt),*>)*/**/ {
        $choice_car:ident $(( $($part_cars:ty),* ))*
        $(, $choice_cdr:ident$(( $($part_cdr:ty),* ))*)*
    }) => {
        if $choice_name.is(stringify!($choice_car)) {
            unpack_parts!( $(( $($part_cars),* ))* $parts_name; 0;
                $name::$choice_car$(::< $($ty_param),* >)*; ())
        } else {
            make_enum_reflect!($choice_name; $parts_name; $name$(<$($ty_param),*>)*/**/ {
                $($choice_cdr $(( $($part_cdr),* ))* ),* })
        }
    };
    // All choices exhausted: the runtime value names a choice this enum does
    // not have — internal compiler panic.
    ($choice_name:ident; $parts_name:ident; $name:ident$(<$($ty_param:tt),*>)*/**/ { } ) => {
        icp!("invalid enum choice: {:#?}", $choice_name)
    }
}
macro_rules! unpack_parts {
( ($t_car:ty $(, $t_cdr:ty)*) $v:expr; $idx:expr; $ctor:expr; ($($accum:expr),*)) => {
unpack_parts!( ( $($t_cdr),* ) $v; ($idx + 1); $ctor;
($($accum, )*
<$t_car as crate::runtime::reify::Reifiable>::reflect(& $v[$idx])))
};
( () $v:expr; $idx:expr; $ctor:expr; ($($accum:expr),*)) => {<|fim▁hole|> ( $v:expr; $idx:expr; $ctor:expr; ()) => {
$ctor // special case: a value, not a 0-arg constructor
}
}
// For `ty`
macro_rules! type_defn_wrapper {
( $(<$($ty_param_ty:ident),*>)* => $body:tt ) => {{
// In this context, we want reification of the type parameters
// to produce type variables, not whatever those parameters "actually" are
// (because they're actually `Irr`, since they are irrelevant).
$( $(
struct $ty_param_ty {}
impl crate::runtime::reify::Reifiable for $ty_param_ty {
fn ty_name() -> crate::name::Name { crate::name::n(stringify!($ty_param_ty)) }
fn reify(&self) -> crate::runtime::eval::Value { icp!() }
fn reflect(_: &crate::runtime::eval::Value) -> Self { icp!() }
}
)* )*
// All types will be ∀, even if in Rust they have no parameters;
// this is safe, but a nuisance.
// All types will be μ. I think this is the way things work in most languages.
ast!({"Type" "forall_type" :
"param" => [ $($( (at stringify!($ty_param_ty))),*)*],
"body" => (import [* [forall "param"]] {"Type" "mu_type" :
"param" => [(import [prot "param"]
(vr Self::ty_name()))],
"body" => (import [* [prot "param"]] $body)
})
})
}}
}
macro_rules! refer_to_type {
    // Parameterized type: build a `type_apply` AST node, recursing on each
    // type argument.
    ($name:tt < $( $arg:ty ),* >) => {
        ast!({ "Type" "type_apply" :
            "type_rator" => (, ::ast::Ast::VariableReference(::name::n(stringify!($name))) ),
            "arg" => [ (, $( refer_to_type!($arg)),* )]
        })
    };
    // Bare type name: a plain variable reference.
    ($name:tt) => {
        ::ast::Ast::VariableReference(::name::n(stringify!($name)))
    }
}
|
$ctor($($accum),*)
};
|
// Sequential AJAX queue: queued requests run one at a time, in order.
// When the queue is empty, `run` re-polls after one second.
var ajaxManager = (function() {
    $jq = jQuery.noConflict();
    var requests = [];
    return {
        // Append a jQuery.ajax options object to the queue.
        addReq: function(opt) {
            requests.push(opt);
        },
        // Remove a queued (not yet running) request.
        removeReq: function(opt) {
            if($jq.inArray(opt, requests) > -1) {
                requests.splice($jq.inArray(opt, requests), 1);
            }
        },
        run: function() {
            var self = this, origComplete;
            if(requests.length) {
                // Chain: when the current request completes, call its original
                // `complete` callback (if any), dequeue it, and run the next.
                // (Bug fix: the original declared `orgSuc` but assigned the
                // undeclared `oriSuc`, leaking an implicit global.)
                origComplete = requests[0].complete;
                requests[0].complete = function() {
                    if(typeof origComplete === 'function') {
                        origComplete();
                    }
                    requests.shift();
                    self.run.apply(self, []);
                };
                $jq.ajax(requests[0]);
            } else {
                // Idle: poll again in one second.
                self.tid = setTimeout(function() {
                    self.run.apply(self, []);
                }, 1000);
            }
        },
        // Drop all pending requests and cancel the idle-poll timer.
        stop: function() {
            requests = [];
            clearTimeout(this.tid);
        }
    };
}());
ajaxManager.run();
(function($){
$(document).ready(function(){
$('.purAddToCart, .purAddToCartImage').click(function() {
$(this).attr('disabled', 'disabled');
})
$('.Cart66AjaxWarning').hide();
// Added to remove error on double-click when add to cart is clicked
$('.purAddToCart, .purAddToCartImage').click(function() {
$(this).attr('disabled', 'disabled');
})
$('.ajax-button').click(function() {
$(this).attr('disabled', true);
var id = $(this).attr('id').replace('addToCart_', '');
$('#task_' + id).val('ajax');
var product = C66.products[id];
if(C66.trackInventory) {
inventoryCheck(id, C66.ajaxurl, product.ajax, product.name, product.returnUrl, product.addingText);
}
else {
if(product.ajax === 'no') {
$('#task_' + id).val('addToCart');
$('#cartButtonForm_' + id).submit();
return false;
}
else if(product.ajax === 'yes' || product.ajax === 'true') {
buttonTransform(id, C66.ajaxurl, product.name, product.returnUrl, product.addingText);
}
}
return false;
});
$('.modalClose').click(function() {
$('.Cart66Unavailable, .Cart66Warning, .Cart66Error, .alert-message').fadeOut(800);
});
$('#Cart66CancelPayPalSubscription').click(function() {
return confirm('Are you sure you want to cancel your subscription?\n');
});
var original_methods = $('#shipping_method_id').html();
var selected_country = $('#shipping_country_code').val();
$('.methods-country').each(function() {
if(!$(this).hasClass(selected_country) && !$(this).hasClass('all-countries') && !$(this).hasClass('select')) {
$(this).remove();
}
});
$('#shipping_country_code').change(function() {
var selected_country = $(this).val();
$('#shipping_method_id').html(original_methods);
$('.methods-country').each(function() {
if(!$(this).hasClass(selected_country) && !$(this).hasClass('all-countries') && !$(this).hasClass('select')) {
$(this).remove();
}
});
$("#shipping_method_id option:eq(1)").attr('selected','selected').change();
});
$('#shipping_method_id').change(function() {
$('#Cart66CartForm').submit();
});
$('#live_rates').change(function() {
$('#Cart66CartForm').submit();
});
$('.showEntriesLink').click(function() {
var panel = $(this).attr('rel');
$('#' + panel).toggle();
return false;
});
$('#change_shipping_zip_link').click(function() {
$('#set_shipping_zip_row').toggle();
return false;
});
})
})(jQuery);
// Serialize the add-to-cart form's successful controls into a query string.
// Unchecked checkboxes/radios are skipped; each remaining control contributes
// one URL-encoded name=value pair.
function getCartButtonFormData(formId) {
    $jq = jQuery.noConflict();
    var theForm = $jq('#' + formId);
    var pairs = [];
    $jq('input:not([type=checkbox], :radio), input[type=checkbox]:checked, input:radio:checked, select, textarea', theForm).each(
        function() {
            pairs.push($jq(this).attr('name') + '=' + encodeURIComponent($jq(this).val()));
        }
    );
    return pairs.join('&');
}
function inventoryCheck(formId, ajaxurl, useAjax, productName, productUrl, addingText) {
$jq = jQuery.noConflict();
var mydata = getCartButtonFormData('cartButtonForm_' + formId);
ajaxManager.addReq({
type: "POST",
url: ajaxurl + '=1',
data: mydata,
dataType: 'json',
success: function(response) {
if(response[0]) {
$jq('#task_' + formId).val('addToCart');
if(useAjax == 'no') {
$jq('#cartButtonForm_' + formId).submit();
}
else {
buttonTransform(formId, ajaxurl, productName, productUrl, addingText);
}
}
else {
$jq('.modalClose').show();
$jq('#stock_message_box_' + formId).fadeIn(300);
$jq('#stock_message_' + formId).html(response[1]);
$jq('#addToCart_' + formId).removeAttr('disabled');
}
},
error: function(xhr,err){
alert("readyState: "+xhr.readyState+"\nstatus: "+xhr.status);
}
});
}
function addToCartAjax(formId, ajaxurl, productName, productUrl, buttonText) {
$jq = jQuery.noConflict();
var options1 = $jq('#cartButtonForm_' + formId + ' .cart66Options.options_1').val();
var options2 = $jq('#cartButtonForm_' + formId + ' .cart66Options.options_2').val();
var itemQuantity = $jq('#Cart66UserQuantityInput_' + formId).val();
var itemUserPrice = $jq('#Cart66UserPriceInput_' + formId).val();
var cleanProductId = formId.split('_');<|fim▁hole|> options_1: options1,
options_2: options2,
item_quantity: itemQuantity,
item_user_price: itemUserPrice,
product_url: productUrl
};
ajaxManager.addReq({
type: "POST",
url: ajaxurl + '=2',
data: data,
dataType: 'json',
success: function(response) {
$jq('#addToCart_' + formId).removeAttr('disabled');
$jq('#addToCart_' + formId).removeClass('ajaxPurAddToCart');
$jq('#addToCart_' + formId).val(buttonText);
$jq.hookExecute('addToCartAjaxHook', response);
ajaxUpdateCartWidgets(ajaxurl);
if($jq('.customAjaxAddToCartMessage').length > 0) {
$jq('.customAjaxAddToCartMessage').show().html(response.msg);
$jq.hookExecute('customAjaxAddToCartMessage', response);
}
else {
if((response.msgId) == 0){
$jq('.success_' + formId).fadeIn(300);
$jq('.success_message_' + formId).html(response.msg);
if(typeof response.msgHeader !== 'undefined') {
$jq('.success' + formId + ' .message-header').html(response.msgHeader);
}
$jq('.success_' + formId).delay(2000).fadeOut(300);
}
if((response.msgId) == -1){
$jq('.warning_' + formId).fadeIn(300);
$jq('.warning_message_' + formId).html(response.msg);
if(typeof response.msgHeader !== 'undefined') {
$jq('.warning' + formId + ' .message-header').html(response.msgHeader);
}
}
if((response.msgId) == -2){
$jq('.error_' + formId).fadeIn(300);
$jq('.error_message_' + formId).html(response.msg);
if(typeof response.msgHeader !== 'undefined') {
$jq('.error_' + formId + ' .message-header').html(response.msgHeader);
}
}
}
}
})
}
// Swap the add-to-cart button into its "adding..." state (disabled, styled,
// relabeled), then kick off the AJAX add. The original label is passed along
// so addToCartAjax can restore it when the request completes.
function buttonTransform(formId, ajaxurl, productName, productUrl, addingText) {
    $jq = jQuery.noConflict();
    var buttonText = $jq('#addToCart_' + formId).val();
    $jq('#addToCart_' + formId).attr('disabled', 'disabled');
    $jq('#addToCart_' + formId).addClass('ajaxPurAddToCart');
    $jq('#addToCart_' + formId).val(addingText);
    addToCartAjax(formId, ajaxurl, productName, productUrl, buttonText);
}
function ajaxUpdateCartWidgets(ajaxurl) {
$jq = jQuery.noConflict();
var widgetId = $jq('.Cart66CartWidget').attr('id');
var data = {
action: "ajax_cart_elements"
};
ajaxManager.addReq({
type: "POST",
url: ajaxurl + '=3',
data: data,
dataType: 'json',
success: function(response) {
$jq.hookExecute('cartElementsAjaxHook', response);
$jq('#Cart66AdvancedSidebarAjax, #Cart66WidgetCartContents').show();
$jq('.Cart66WidgetViewCartCheckoutEmpty, #Cart66WidgetCartEmpty').hide();
$jq('#Cart66WidgetCartLink').each(function(){
widgetContent = "<span id=\"Cart66WidgetCartCount\">" + response.summary.count + "</span>";
widgetContent += "<span id=\"Cart66WidgetCartCountText\">" + response.summary.items + "</span>";
widgetContent += "<span id=\"Cart66WidgetCartCountDash\"> – </span>"
widgetContent += "<span id=\"Cart66WidgetCartPrice\">" + response.summary.amount + "</span>";
$jq(this).html(widgetContent).fadeIn('slow');
});
$jq('.Cart66RequireShipping').each(function(){
if(response.shipping == 1) {
$jq(this).show();
}
})
$jq('#Cart66WidgetCartEmptyAdvanced').each(function(){
widgetContent = C66.youHave + ' ' + response.summary.count + " " + response.summary.items + " (" + response.summary.amount + ") " + C66.inYourShoppingCart;
$jq(this).html(widgetContent).fadeIn('slow');
});
$jq("#Cart66AdvancedWidgetCartTable .product_items").remove();
$jq.each(response.products.reverse(), function(index, array){
widgetContent = "<tr class=\"product_items\"><td>";
widgetContent += "<span class=\"Cart66ProductTitle\">" + array.productName + "</span>";
widgetContent += "<span class=\"Cart66QuanPrice\">";
widgetContent += "<span class=\"Cart66ProductQuantity\">" + array.productQuantity + "</span>";
widgetContent += "<span class=\"Cart66MetaSep\"> x </span>";
widgetContent += "<span class=\"Cart66ProductPrice\">" + array.productPrice + "</span>";
widgetContent += "</span>";
widgetContent += "</td><td class=\"Cart66ProductSubtotalColumn\">";
widgetContent += "<span class=\"Cart66ProductSubtotal\">" + array.productSubtotal + "</span>";
widgetContent += "</td></tr>";
$jq("#Cart66AdvancedWidgetCartTable tbody").prepend(widgetContent).fadeIn("slow");
});
$jq('.Cart66Subtotal').each(function(){
$jq(this).html(response.subtotal)
});
$jq('.Cart66Shipping').each(function(){
$jq(this).html(response.shippingAmount)
});
}
})
}
jQuery.extend({
    // Invoke a globally-defined hook function by name, passing the AJAX
    // response. Returns true if the hook existed and was called, else false.
    hookExecute: function (function_name, response){
        if (typeof window[function_name] == "function"){
            window[function_name](response);
            return true;
        }
        else{
            return false;
        }
    }
});
|
cleanProductId = cleanProductId[0];
var data = {
cart66ItemId: cleanProductId,
itemName: productName,
|
<|file_name|>BenchmarkDescriptorDistance.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2021, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.descriptor;
import boofcv.struct.feature.TupleDesc_B;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.TimeValue;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;<|fim▁hole|>import java.util.concurrent.TimeUnit;
@SuppressWarnings("ResultOfMethodCallIgnored") @BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 2)
@Measurement(iterations = 5)
@State(Scope.Benchmark)
@Fork(value = 1)
public class BenchmarkDescriptorDistance {
	// Number of random descriptor pairs scored per benchmark invocation.
	static int NUM_FEATURES = 10000;

	// Parallel lists of random 512-bit binary descriptors; element i of
	// binaryA is compared against element i of binaryB.
	List<TupleDesc_B> binaryA = new ArrayList<>();
	List<TupleDesc_B> binaryB = new ArrayList<>();

	// Precomputed 16-bit hamming-distance lookup table.
	HammingTable16 table = new HammingTable16();

	// Fixed seed so every benchmark run scores the same descriptor pairs.
	@Setup public void setup() {
		Random rand = new Random(234234);
		binaryA = new ArrayList<>();
		binaryB = new ArrayList<>();
		for (int i = 0; i < NUM_FEATURES; i++) {
			binaryA.add(randomFeature(rand));
			binaryB.add(randomFeature(rand));
		}
	}

	// Benchmark: hamming distance via the 16-bit lookup table.
	@Benchmark public void hammingTable() {
		for (int i = 0; i < binaryA.size(); i++) {
			tableScore(binaryA.get(i), binaryB.get(i));
		}
	}

	// Sum table lookups over each 32-bit word, split into two 16-bit halves.
	private int tableScore( TupleDesc_B a, TupleDesc_B b ) {
		int score = 0;
		for (int i = 0; i < a.data.length; i++) {
			int dataA = a.data[i];
			int dataB = b.data[i];
			score += table.lookup((short)dataA, (short)dataB);
			score += table.lookup((short)(dataA >> 16), (short)(dataB >> 16));
		}
		return score;
	}

	// Benchmark: older experimental equation-based implementation.
	@Benchmark public void equationOld() {
		for (int i = 0; i < binaryA.size(); i++) {
			ExperimentalDescriptorDistance.hamming(binaryA.get(i), binaryB.get(i));
		}
	}

	// Benchmark: current equation-based implementation.
	@Benchmark public void equation() {
		for (int i = 0; i < binaryA.size(); i++) {
			DescriptorDistance.hamming(binaryA.get(i), binaryB.get(i));
		}
	}

	// Create one random 512-bit binary descriptor.
	private TupleDesc_B randomFeature( Random rand ) {
		TupleDesc_B feat = new TupleDesc_B(512);
		for (int j = 0; j < feat.data.length; j++) {
			feat.data[j] = rand.nextInt();
		}
		return feat;
	}

	public static void main( String[] args ) throws RunnerException {
		Options opt = new OptionsBuilder()
				.include(BenchmarkDescriptorDistance.class.getSimpleName())
				.warmupTime(TimeValue.seconds(1))
				.measurementTime(TimeValue.seconds(1))
				.build();

		new Runner(opt).run();
	}
}
| |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# django-notes documentation build configuration file, created by
# sphinx-quickstart on Wed May 13 10:34:42 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-notes'
copyright = u'2010, Roman Dolgiy'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
from pdb import set_trace
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from django_notes import __version__ as release
version = '.'.join(release.split('.')[:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the<|fim▁hole|>#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-notes-doc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'django-notes.tex', u'django-notes Documentation',
u'Roman Dolgiy', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True<|fim▁end|>
|
# base URL from which the finished HTML is served.
|
<|file_name|>init.ts<|end_file_name|><|fim▁begin|>/// <reference path="application.ts" />
/// <reference path="pages/IndexPageController.ts" />
/// <reference path="pages/DetailsPageController.ts" />
module ft{
declare var Framework7: any;
interface Framework7View{
}
interface Framework7App{
addView(view:String, callback:Framework7ViewOptions):Framework7View
}
interface Framework7ViewOptions{
dynamicNavbar:boolean;
domCache:boolean;
}
export class Init{
private fw7App:Framework7App;
private mainView:Framework7View;
private fw7ViewOptions:Framework7ViewOptions;
private angApp:application;
<|fim▁hole|> constructor(){
this.configApp();
}
        /**
         * Bootstraps the whole application:
         * 1. creates the Framework7 app shell,
         * 2. registers the main view,
         * 3. creates the Angular module and registers the page controllers.
         */
        private configApp():void {
            // Initialize app — animateNavBackIcon enables the iOS-style
            // back-arrow animation in the navbar.
            this.fw7App = new Framework7({
                animateNavBackIcon: true
            });
            // View options: dynamicNavbar pairs with animateNavBackIcon;
            // domCache keeps inline pages in the DOM between navigations.
            this.fw7ViewOptions = {
                dynamicNavbar: true,
                domCache: true
            }
            // Add view — '.view-main' is the primary Framework7 view element.
            this.mainView = this.fw7App.addView('.view-main', this.fw7ViewOptions);
            // Init Angular module named 'ft' with no extra dependencies.
            this.angApp = new application( 'ft', [] );
            // Init controllers for the index and details pages.
            this.angApp.addController( 'IndexPageController', ft.pages.IndexPageController);
            this.angApp.addController( 'DetailsPageController', ft.pages.DetailsPageController);
        }
}
// Everything starts here
new Init();
}<|fim▁end|>
| |
<|file_name|>split-select.component.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';
import { FormControl } from '@angular/forms';<|fim▁hole|> selector: 'lu-split-select',
templateUrl: './split-select.component.html'
})
export class SplitSelectComponent {
	// Pre-selected demo item shown before any user interaction.
	item = { id: 1, name: 'initial value' };
	// Sample option offered by the select — presumably rendered by the
	// template referenced above (TODO confirm against the template).
	option = { id: 2, name: 'option' };
	// Reactive form control backing the select.
	formControl = new FormControl();
}
|
@Component({
|
<|file_name|>NUG_unstructured_ICON_triangles_PyNGL.py<|end_file_name|><|fim▁begin|>"""
NCL User Guide Python Example: PyNGL_unstructured_ICON_triangles.py
Grid type: unstructured
Model: ICON
Info: - colored triangles
- add labelbar (colorbar)
- wkColorMap
18.02.16 meier-fleischer(at)dkrz.de
"""
import numpy as np
import math, time, sys, os
import Nio, Ngl
t1 = time.time() #-- retrieve start time
#-- define variables
diri = './'
fname = 'ta_ps_850.nc' #-- data path and file name
gname = 'r2b4_amip.nc' #-- grid info file
VarName = 'ta' #-- variable name
#---Test if files exist
if(not os.path.exists(diri+fname) or not os.path.exists(diri+gname)):
print("You do not have the necessary files to run this example, '%s' and '%s'." % (diri+fname,diri+gname))
print("You can get the files from the NCL website at:")
print("http://www.ncl.ucar.edu/Document/Manuals/NCL_User_Guide/Data/")
sys.exit()
#-- open file and read variables
f = Nio.open_file(diri + fname,'r') #-- add data file
g = Nio.open_file(diri + gname,'r') #-- add grid file (not contained in data file!!!)
#-- read a timestep of 'ta'
variable = f.variables['ta'] #-- first time step, lev, ncells
data = variable[0,0,:] #-- ta [time,lev,ncells]; miss _FillValue
var = data - 273.15 #-- convert to degrees Celsius; miss _FillValue
#-- define _FillValue and missing_value if not existing
missing = -1e20
if not hasattr(var,'_FillValue'):
var._FillValue = missing #-- set _FillValue
if not hasattr(var,'missing_value'):
var.missing_value = missing #-- set missing_value
varM = np.ma.array(var, mask=np.equal(var,missing)) #-- mask array with missing values
nummissing = np.count_nonzero(varM.mask) #-- number of missing values
#-- set data intervals, levels, labels, color indices<|fim▁hole|>
levels = range(varMin,varMax,varInt) #-- set levels array
nlevs = len(levels) #-- number of levels
labels = ['{:.2f}'.format(x) for x in levels] #-- convert list of floats to list of strings
#-- print info to stdout
print ''
print 'min/max: %.2f' %np.min(varM) + ' /' + ' %.2f' %np.max(varM)
print ''
print 'varMin: %3d' %varMin
print 'varMax: %3d' %varMax
print 'varInt: %3d' %varInt
print ''
print 'missing_value: ', missing
print 'missing values: ', nummissing
#-------------------------------------------------------------------
#-- define the x-, y-values and the polygon points
#-------------------------------------------------------------------
rad2deg = 45./np.arctan(1.) #-- radians to degrees
x, y = g.variables['clon'][:], g.variables['clat'][:]
vlon, vlat = g.variables['clon_vertices'][:], g.variables['clat_vertices'][:]
x, y = x*rad2deg, y*rad2deg #-- cell center, lon, lat
vlat, vlon = vlat*rad2deg, vlon * rad2deg #-- cell latitude/longitude vertices
ncells, nv = vlon.shape #-- ncells: number of cells; nv: number of edges
#-- print information to stdout
print ''
print 'cell points: ', nv
print 'cells: ', str(ncells)
print ''
#-- rearrange the longitude values to -180.-180.
def rearrange(vlon):
    """Wrap longitude values into the -180..180 degree range.

    Modifies the given NumPy array in place and returns it: values below
    -180 are shifted up by a full circle, values above 180 shifted down.
    """
    vlon[vlon < -180.] += 360.
    vlon[vlon > 180.] -= 360.
    return vlon
vlon = rearrange(vlon) #-- set longitude values to -180.-180. degrees
print 'min/max vlon: ', np.min(vlon), np.max(vlon)
print 'min/max vlat: ', np.min(vlat), np.max(vlat)
print ''
#-- open a workstation for second plot: triangles plot
wkres = Ngl.Resources()
wkres.wkWidth, wkres.wkHeight = 2500, 2500
wks_type = 'png'
wks = Ngl.open_wks(wks_type,'unstructured_ICON_triangles_ngl',wkres)
#-- define colormap
colormap = Ngl.read_colormap_file('WhiteBlueGreenYellowRed')[22::12,:] #-- RGB ! [256,4] -> [20,4]
#-- select every 12th color
colormap[19,:] = [1.,1.,1.,0.] #-- white for missing values
print ''
print 'levels: ',levels
print 'labels: ',labels
print ''
print 'nlevs: %3d' %nlevs
print ''
#-- set map resources
mpres = Ngl.Resources()
mpres.nglDraw = False #-- turn off plot draw and frame advance. We will
mpres.nglFrame = False #-- do it later after adding subtitles.
mpres.mpGridAndLimbOn = False
mpres.mpGeophysicalLineThicknessF = 2.
mpres.pmTitleDisplayMode = 'Always'
mpres.tiMainString = 'PyNGL: unstructured grid ICON'
#-- create only a map
map = Ngl.map(wks,mpres)
Ngl.draw(map)
#-- assign and initialize array which will hold the color indices of the cells
gscolors = -1*(np.ones((ncells,),dtype=np.int)) #-- assign array containing zeros; init to transparent: -1
#-- set color index of all cells in between levels
for m in xrange(0,nlevs):
vind = [] #-- empty list for color indices
for i in xrange(0,ncells-1):
if (varM[i] >= levels[m] and varM[i] < levels[m+1]):
gscolors[i] = m+1 # 1 to nlevs
vind.append(i)
print 'finished level %3d' % m , ' -- %5d ' % len(vind) , ' polygons considered - gscolors %3d' % (m+1)
del vind
gscolors[varM < varMin] = 0 #-- set color index for cells less than level[0]
gscolors[varM >= varMax] = nlevs+1 #-- set color index for cells greater than levels[nlevs-1]
gscolors[np.nonzero(varM.mask)] = -1 #-- set color index for missing locations
#-- set polygon resources
pgres = Ngl.Resources()
pgres.gsEdgesOn = True #-- draw the edges
pgres.gsFillIndex = 0 #-- solid fill
pgres.gsLineColor = 'black' #-- edge line color
pgres.gsLineThicknessF = 0.7 #-- line thickness
pgres.gsColors = colormap[gscolors,:] #-- use color array
pgres.gsSegments = range(0,len(vlon[:,0])*3,3) #-- define segments array for fast draw
lon1d, lat1d = np.ravel(vlon), np.ravel(vlat) #-- convert to 1D-arrays
#-- add polygons to map
polyg = Ngl.add_polygon(wks,map,lon1d,lat1d,pgres)
#-- add a labelbar
lbres = Ngl.Resources()
lbres.vpWidthF = 0.85
lbres.vpHeightF = 0.15
lbres.lbOrientation = 'Horizontal'
lbres.lbFillPattern = 'SolidFill'
lbres.lbMonoFillPattern = 21 #-- must be 21 for color solid fill
lbres.lbMonoFillColor = False #-- use multiple colors
lbres.lbFillColors = colormap
lbres.lbLabelFontHeightF= 0.014
lbres.lbLabelAlignment = 'InteriorEdges'
lbres.lbLabelStrings = labels
lb = Ngl.labelbar_ndc(wks,nlevs+1,labels,0.1,0.24,lbres)
#-- maximize and draw the plot and advance the frame
Ngl.draw(map)
Ngl.frame(wks)
#-- get wallclock time
t2 = time.time()
print ''
print 'Wallclock time: %0.3f seconds' % (t2-t1)
print ''
Ngl.end()<|fim▁end|>
|
varMin, varMax, varInt = -32, 28, 4 #-- set data minimum, maximum, interval
|
<|file_name|>category_links.py<|end_file_name|><|fim▁begin|><|fim▁hole|># This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django import forms
from django.utils.translation import ugettext_lazy as _
from shuup.core.models import Category
from shuup.xtheme import TemplatedPlugin
from shuup.xtheme.plugins.forms import GenericPluginForm, TranslatableField
class CategoryLinksConfigForm(GenericPluginForm):
    """
    A configuration form for the CategoryLinksPlugin
    """

    def populate(self):
        """
        A custom populate method to display category choices.

        First copies the plugin's declared tuple fields (seeding each with
        the saved config value), then adds a multi-select of categories
        visible to anonymous customers (``customer=None``).
        """
        for field in self.plugin.fields:
            if isinstance(field, tuple):
                name, value = field
                # Seed the form field with the previously saved config value.
                value.initial = self.plugin.config.get(name, value.initial)
                self.fields[name] = value
        self.fields["categories"] = forms.ModelMultipleChoiceField(
            queryset=Category.objects.all_visible(customer=None),
            required=False,
            initial=self.plugin.config.get("categories", None),
        )

    def clean(self):
        """
        A custom clean method to save category configuration information in a
        serializable form.

        Replaces the selected ``Category`` instances with their primary keys
        so the plugin config can be stored as plain JSON.
        """
        cleaned_data = super(CategoryLinksConfigForm, self).clean()
        categories = cleaned_data.get("categories", [])
        # Keep only objects that actually have a pk (i.e. saved categories).
        cleaned_data["categories"] = [category.pk for category in categories if hasattr(category, "pk")]
        return cleaned_data
class CategoryLinksPlugin(TemplatedPlugin):
    """
    A plugin for displaying links to visible categories on the shop front
    """
    identifier = "category_links"
    name = _("Category Links")
    template_name = "shuup/xtheme/plugins/category_links.jinja"
    editor_form_class = CategoryLinksConfigForm
    # Editable plugin fields; "categories" is added dynamically by the
    # config form's populate() and therefore listed only by name here.
    fields = [
        ("title", TranslatableField(label=_("Title"), required=False, initial="")),
        ("show_all_categories", forms.BooleanField(
            label=_("Show all categories"),
            required=False,
            initial=True,
            help_text=_("All categories are shown, even if not selected"),
        )),
        "categories",
    ]

    def get_context_data(self, context):
        """
        A custom get_context_data method to return only visible categories
        for request customer.

        Returns a dict with the translated ``title`` and the ``categories``
        queryset, filtered to the stored selection unless
        ``show_all_categories`` is enabled.
        """
        selected_categories = self.config.get("categories", [])
        show_all_categories = self.config.get("show_all_categories", True)
        request = context.get("request")
        # NOTE(review): getattr() without a default raises AttributeError
        # if the request lacks customer/shop — presumably guaranteed by the
        # xtheme rendering middleware; verify against callers.
        categories = Category.objects.all_visible(
            customer=getattr(request, "customer"),
            shop=getattr(request, "shop")
        )
        if not show_all_categories:
            categories = categories.filter(id__in=selected_categories)
        return {
            "title": self.get_translated_value("title"),
            "categories": categories,
        }
|
# -*- coding: utf-8 -*-
|
<|file_name|>viewermaterial.cpp<|end_file_name|><|fim▁begin|>/**************************************************************************
This file is part of JahshakaVR, VR Authoring Toolkit
http://www.jahshaka.com
Copyright (c) 2016 GPLv3 Jahshaka LLC <[email protected]>
This is free software: you may copy, redistribute
and/or modify it under the terms of the GPLv3 License
For more information see the LICENSE file
*************************************************************************/
#include "viewermaterial.h"
#include "irisgl/src/graphics/texture2d.h"
#include "irisgl/src/graphics/material.h"
#include "irisgl/src/graphics/graphicsdevice.h"
// Constructor: compiles the viewer shader pair from the application
// resources, places the material in the opaque render layer and disables
// face culling so the geometry is drawn from both sides.
ViewerMaterial::ViewerMaterial()
{
    createProgramFromShaderSource(":assets/shaders/viewer.vert",
                                  ":assets/shaders/viewer.frag");

    this->setRenderLayer((int)iris::RenderLayer::Opaque);
    renderStates.rasterState = iris::RasterizerState::createCullNone();
}
void ViewerMaterial::setTexture(iris::Texture2DPtr tex)
{
texture = tex;
if(!!tex)
this->addTexture("tex",tex);<|fim▁hole|>
// Returns the texture currently held by this material (may be null if
// none has been set).
iris::Texture2DPtr ViewerMaterial::getTexture()
{
    return texture;
}
// No viewer-specific per-draw setup: defers entirely to the base
// Material binding logic.
void ViewerMaterial::begin(iris::GraphicsDevicePtr device, iris::ScenePtr scene)
{
    Material::begin(device, scene);
}
// No viewer-specific teardown: defers entirely to the base Material.
void ViewerMaterial::end(iris::GraphicsDevicePtr device, iris::ScenePtr scene)
{
    Material::end(device, scene);
}
// Factory helper returning a shared pointer to a freshly constructed
// ViewerMaterial.
ViewerMaterialPtr ViewerMaterial::create()
{
    return ViewerMaterialPtr(new ViewerMaterial());
}
|
else
this->removeTexture("tex");
}
|
<|file_name|>OUNITAI.cpp<|end_file_name|><|fim▁begin|>/*
* Seven Kingdoms: Ancient Adversaries
*
* Copyright 1997,1998 Enlight Software Ltd.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
//Filename : OUNITAI.CPP
//Description : Object Unit AI
#include <OSYS.h>
#include <OSPY.h>
#include <OREBEL.h>
#include <OUNIT.h>
#include <OCONFIG.h>
#include <OREGIONS.h>
#include <OF_CAMP.h>
#include <ONATION.h>
#ifdef NO_DEBUG_UNIT
#undef err_when
#undef err_here
#undef err_if
#undef err_else
#undef err_now
#define err_when(cond)
#define err_here()
#define err_if(cond)
#define err_else
#define err_now(msg)
#undef DEBUG
#endif
//--------- Begin of function Unit::process_ai --------//
//
// [int] forceExecute - whether force execute all AI functions
// without checking day interavals.
// (default: 0)
//
void Unit::process_ai()
{
	// Top-level per-frame AI driver for a computer-controlled unit.
	// Order matters: spy exposure, rewards, flee checks, chase-stopping,
	// fire escape, returning to the home camp, then dispatch to the
	// rank-specific "think" routine.

	err_when( !nation_recno );

	//-*********** simulate aat ************-//
#ifdef DEBUG
	if(debug_sim_game_type)
		return;
#endif
	//-*********** simulate aat ************-//

	//------ the aggressive_mode of AI units is always 1 ------//

	aggressive_mode = 1;

	//------- handle Seek Path failures ------//

	if( ai_handle_seek_path_fail() )
		return;

	//--- if it's a spy from other nation, don't control it ---//

	if( spy_recno && true_nation_recno() != nation_recno )
	{
		//--- a random chance of the AI catching the spy and resign it ---//

		if( is_visible() && m.random(365 * FRAMES_PER_DAY)==0 )	// if the unit stay outside for one year, it will get caught
		{
			stop2();
			resign(COMMAND_AI);
			return;
		}

		if( !spy_array[spy_recno]->notify_cloaked_nation_flag )	// if notify_cloaked_nation_flag is 1, the nation will take it as its own spies
			return;
	}

	//----- think about rewarding this unit -----//
	// sprite_recno acts as a phase so units are spread over different days.

	if( race_id && rank_id != RANK_KING &&
		 info.game_date%5 == sprite_recno%5 )
	{
		think_reward();
	}

	//-----------------------------------------//

	if( !is_visible() )
		return;

	//--- if the unit has stopped, but ai_action_id hasn't been reset ---//

	if( cur_action==SPRITE_IDLE && action_mode==ACTION_STOP &&
		 action_mode2==ACTION_STOP && ai_action_id )
	{
		// Report the dangling action as failed so the nation can clear it.
		nation_array[nation_recno]->action_failure(ai_action_id, sprite_recno);

		err_when( ai_action_id );		// it should have been reset
	}

	//---- King flees under attack or surrounded by enemy ---//

	if( race_id && rank_id==RANK_KING )
	{
		if( think_king_flee() )
			return;
	}

	//---- General flees under attack or surrounded by enemy ---//

	if( race_id && rank_id==RANK_GENERAL &&
		 info.game_date%7 == sprite_recno%7 )
	{
		if( think_general_flee() )
			return;
	}

	//-- let Unit::next_day() process it process original_action_mode --//

	if( original_action_mode )
		return;

	//------ if the unit is not stop right now ------//

	if( !is_ai_all_stop() )
	{
		think_stop_chase();
		return;
	}

	//-----------------------------------------//

	if( mobile_type==UNIT_LAND )
	{
		if( ai_escape_fire() )
			return;
	}

	//---------- if this is your spy --------//

	if( spy_recno && true_nation_recno()==nation_recno )
		think_spy_action();

	//------ if this unit is from a camp --------//

	if( home_camp_firm_recno )
	{
		Firm* firmCamp = firm_array[home_camp_firm_recno];
		int rc;

		// Soldiers return when there is room; leaders return when the
		// commander post is vacant.
		if( rank_id == RANK_SOLDIER )
			rc = firmCamp->worker_count < MAX_WORKER;
		else
			rc = !firmCamp->overseer_recno;

		if( rc )
		{
			if( return_camp() )
				return;
		}

		home_camp_firm_recno = 0;		// the camp is already occupied by somebody
	}

	//----------------------------------------//

	if( race_id && rank_id==RANK_KING )
	{
		think_king_action();
	}
	else if( race_id && rank_id==RANK_GENERAL )
	{
		think_general_action();
	}
	else
	{
		if( unit_res[unit_id]->unit_class == UNIT_CLASS_WEAPON )
		{
			if( info.game_date%15 == sprite_recno%15 )		// don't call too often as the action may fail and it takes a while to call the function each time
			{
				think_weapon_action();		//-- ships AI are called in UnitMarine --//
			}
		}
		else if( race_id )
		{
			//--- if previous attempts for new action failed, don't call think_normal_human_action() so frequently then ---//

			if( ai_no_suitable_action )
			{
				if( info.game_date%15 != sprite_recno%15 )		// don't call too often as the action may fail and it takes a while to call the function each time
					return;
			}

			//---------------------------------//

			if( !think_normal_human_action() )
			{
				ai_no_suitable_action = 1;		// set this flag so think_normal_human_action() won't be called continously

				if( !leader_unit_recno )		// only when the unit is not led by a commander
				{
					resign(COMMAND_AI);
				}
				else
				{
					ai_move_to_nearby_town();
				}
			}
		}
	}
}
//---------- End of function Unit::process_ai --------//
//--------- Begin of function Unit::think_stop_chase --------//
int Unit::think_stop_chase()
{
	//-----------------------------------------------------//
	//
	// Stop the chase if the target is being far away from
	// its original attacking location.
	//
	// Returns 1 if the chase was stopped (or the target is
	// gone/invisible), 0 if the unit should keep chasing.
	//
	//-----------------------------------------------------//

	if( !(action_mode==ACTION_ATTACK_UNIT && ai_original_target_x_loc>=0) )
		return 0;

	// Target no longer exists — abandon the attack.
	if( unit_array.is_deleted(action_para) )
	{
		stop2();
		return 1;
	}

	Unit* targetUnit = unit_array[action_para];

	if( !targetUnit->is_visible() )
	{
		stop2();
		return 1;
	}

	//----------------------------------------//

	int aiChaseDistance = 10 + nation_array[nation_recno]->pref_military_courage/20;	// chase distance: 10 to 15

	int curDistance = m.points_distance( targetUnit->next_x_loc(), targetUnit->next_y_loc(),
							 ai_original_target_x_loc, ai_original_target_y_loc );

	if( curDistance <= aiChaseDistance )
		return 0;

	//--------- stop the unit ----------------//

	stop2();

	//--- if this unit leads a troop, stop the action of all troop members as well ---//

	int leaderUnitRecno;

	if( leader_unit_recno )
		leaderUnitRecno = leader_unit_recno;
	else
		leaderUnitRecno = sprite_recno;		// this unit is itself the leader

	TeamInfo* teamInfo = unit_array[leaderUnitRecno]->team_info;

	if( teamInfo )
	{
		for( int i=teamInfo->member_count-1 ; i>=0 ; i-- )
		{
			int unitRecno = teamInfo->member_unit_array[i];

			if( unit_array.is_deleted(unitRecno) )
				continue;

			unit_array[unitRecno]->stop2();
		}
	}

	return 1;
}
//---------- End of function Unit::think_stop_chase --------//
//--------- Begin of function Unit::is_ai_all_stop --------//
int Unit::is_ai_all_stop()
{
	// A unit is fully stopped for the AI only when the sprite is idle,
	// both action modes are cleared and no AI action is pending.
	if( cur_action != SPRITE_IDLE )
		return 0;

	if( action_mode != ACTION_STOP || action_mode2 != ACTION_STOP )
		return 0;

	return ai_action_id == 0;
}
//---------- End of function Unit::is_ai_all_stop --------//
//--------- Begin of function Unit::ai_move_to_nearby_town --------//
void Unit::ai_move_to_nearby_town()
{
	//---- look for towns to assign to -----//

	// NOTE(review): ownNation is unused below — presumably leftover;
	// the nation_array[] lookup is kept for its debug-build validity checks.
	Nation* ownNation = nation_array[nation_recno];
	Town *townPtr, *bestTown=NULL;
	int  regionId = world.get_region_id( next_x_loc(), next_y_loc() );
	int  curDistance, curRating, bestRating=0;
	int  curXLoc = next_x_loc(), curYLoc = next_y_loc();

	for( int i=town_array.size() ; i>0 ; i-- )		// can't use ai_town_array[] because this function will be called by Unit::betray() when a unit defected to the player's kingdom
	{
		if( town_array.is_deleted(i) )
			continue;

		townPtr = town_array[i];

		if( townPtr->nation_recno != nation_recno )
			continue;

		if( townPtr->region_id != regionId )
			continue;

		//-------------------------------------//

		curDistance = m.points_distance(curXLoc, curYLoc, townPtr->center_x, townPtr->center_y );

		if( curDistance < 10 )		// no need to move if the unit is already close enough to the town.
			return;

		// Rating favours nearby, populous towns.
		curRating = 100 - 100 * curDistance / MAX_WORLD_X_LOC;

		curRating += townPtr->population;

		//-------------------------------------//

		if( curRating > bestRating )
		{
			bestRating = curRating;
			bestTown   = townPtr;
		}
	}

	if( bestTown )
		move_to_town_surround(bestTown->loc_x1, bestTown->loc_y1, sprite_info->loc_width, sprite_info->loc_height);
}
//---------- End of function Unit::ai_move_to_nearby_town --------//
//--------- Begin of function Unit::ai_escape_fire --------//
//
// Move away if the unit currently stands on a burning ground.
//
int Unit::ai_escape_fire()
{
	// Move the unit away if it is standing on burning ground.
	// Returns 1 if a move order towards safe ground was issued, 0 otherwise.

	if(cur_action!=SPRITE_IDLE)
		return 0;

	if(mobile_type!=UNIT_LAND)
		return 0;

	Location *locPtr = world.get_loc(next_x_loc(), next_y_loc());

	if( !locPtr->fire_str() )		// not standing on fire — nothing to do
		return 0;

	//--------------------------------------------//

	int checkLimit = 400; // checking for 400 location
	int xShift, yShift, checkXLoc, checkYLoc;
	int curXLoc = next_x_loc();
	int curYLoc = next_y_loc();

	// Spiral outwards from the unit's position looking for the first
	// walkable location that is not on fire.
	for(int i=2; i<checkLimit; i++)
	{
		m.cal_move_around_a_point(i, 20, 20, xShift, yShift);

		checkXLoc = curXLoc + xShift;
		checkYLoc = curYLoc + yShift;
		if(checkXLoc<0 || checkXLoc>=MAX_WORLD_X_LOC || checkYLoc<0 || checkYLoc>=MAX_WORLD_Y_LOC)
			continue;

		//--- BUG FIX: fetch the candidate location BEFORE testing it. ---//
		//--- Previously can_move() was evaluated on the stale locPtr   ---//
		//--- (the unit's own tile / the previous candidate).           ---//
		locPtr = world.get_loc(checkXLoc, checkYLoc);

		if(!locPtr->can_move(mobile_type))
			continue;

		if( locPtr->fire_str()==0 ) // move to a safe place now
		{
			move_to(checkXLoc, checkYLoc);
			return 1;
		}
	}

	return 0;
}
//---------- End of function Unit::ai_escape_fire --------//
//--------- Begin of function Unit::think_spy_action --------//
// No specialised spy AI yet: simply relocate the spy towards one of our
// own towns so it stays near friendly territory.
void Unit::think_spy_action()
{
	ai_move_to_nearby_town();		// just move it to one of our towns
}
//---------- End of function Unit::think_spy_action --------//
//--------- Begin of function Unit::think_king_action --------//
// The king uses the generic leader logic: try to take command of the best
// reachable camp (see think_leader_action()). Returns its result.
int Unit::think_king_action()
{
	return think_leader_action();
}
//---------- End of function Unit::think_king_action --------//
//--------- Begin of function Unit::think_general_action --------//
int Unit::think_general_action()
{
	// First try the generic leader behaviour (take over a camp).
	if( think_leader_action() )
		return 1;

	//--- if the general is not assigned to a camp due to its low competency ----//

	Nation* ownNation = nation_array[nation_recno];
	int rc = 0;

	// A general leading nobody is not worth keeping.
	if( team_info->member_count <= 1 )
	{
		rc = 1;
	}

	//--- if the skill of the general and the number of soldiers he commands is not large enough to justify building a new camp ---//

	else if( skill.skill_level + team_info->member_count*4
				< 40 + ownNation->pref_keep_general/5 )		// 40 to 60
	{
		rc = 1;
	}

	//-- think about splitting the team and assign them into other forts --//

	else if( ownNation->ai_has_too_many_camp() )
	{
		rc = 1;
	}

	//--------- demote the general to soldier and disband the troop -------//

	if( rc )
	{
		set_rank(RANK_SOLDIER);
		return think_normal_human_action();
	}

	return 0;
}
//---------- End of function Unit::think_general_action --------//
//--------- Begin of function Unit::think_leader_action --------//
//
// Think about the action of a leader (either a general or a king).
//
int Unit::think_leader_action()
{
	// Pick the camp in this region where this leader would raise the
	// commander leadership the most, then move/assign to it.
	// Returns 1 if an assignment was made or queued, 0 otherwise.

	Nation* nationPtr = nation_array[nation_recno];
	FirmCamp *firmCamp, *bestCamp=NULL;
	int	 curRating, bestRating=10, delActionRecno=0;
	int	 curXLoc = next_x_loc(), curYLoc = next_y_loc();
	int	 curRegionId = world.get_region_id( curXLoc, curYLoc );

	if( rank_id == RANK_KING )		// if this unit is the king, always assign it to a camp regardless of whether the king is a better commander than the existing one
		bestRating = -1000;

	err_when( skill.skill_id != SKILL_LEADING );

	//----- think about which camp to move to -----//

	for( int i=nationPtr->ai_camp_count-1 ; i>=0 ; i-- )
	{
		firmCamp = (FirmCamp*) firm_array[ nationPtr->ai_camp_array[i] ];

		if( firmCamp->region_id != curRegionId )
			continue;

		//--- if the commander of this camp is the king, never replace him ---//

		if( firmCamp->overseer_recno == nationPtr->king_unit_recno )
			continue;

		//-------------------------------------//

		// Rating = leadership improvement this unit would bring.
		int curLeadership = firmCamp->cur_commander_leadership();
		int newLeadership = firmCamp->new_commander_leadership(race_id, skill.skill_level);

		curRating = newLeadership - curLeadership;

		//-------------------------------------//

		if( curRating > bestRating )
		{
			//--- if there is already somebody being assigned to it ---//

			int actionRecno=0;

			if( rank_id != RANK_KING )		// don't check this if the current unit is the king
			{
				actionRecno = nationPtr->is_action_exist(firmCamp->loc_x1, firmCamp->loc_y1,
								  -1, -1, ACTION_AI_ASSIGN_OVERSEER, FIRM_CAMP);

				// Skip camps whose pending assignment is already in progress.
				if( actionRecno && nationPtr->get_action(actionRecno)->processing_instance_count )
					continue;
			}

			bestRating = curRating;
			bestCamp = firmCamp;
			delActionRecno = actionRecno;
		}
	}

	if( !bestCamp )
		return 0;

	//----- delete an unprocessed queued action if there is any ----//

	if( delActionRecno )
		nationPtr->del_action(delActionRecno);

	//--------- move to the camp now ---------//

	//-- if there is room in the camp to host all soldiers led by this general --//

	if( team_info->member_count-1 <= MAX_WORKER-bestCamp->worker_count )
	{
		validate_team();

		unit_array.assign( bestCamp->loc_x1, bestCamp->loc_y1, 0, COMMAND_AI,
								 team_info->member_unit_array, team_info->member_count );
		return 1;
	}
	else	//--- otherwise assign the general only ---//
	{
		return nationPtr->add_action(bestCamp->loc_x1, bestCamp->loc_y1, -1, -1, ACTION_AI_ASSIGN_OVERSEER, FIRM_CAMP, 1, sprite_recno);
	}

	return 0;		// unreachable: both branches above return
}
//---------- End of function Unit::think_leader_action --------//
//--------- Begin of function Unit::think_normal_human_action --------//
int Unit::think_normal_human_action()
{
if( home_camp_firm_recno )
return 0;
if( leader_unit_recno &&
unit_array[leader_unit_recno]->is_visible() ) // if the unit is led by a commander, let the commander makes the decision. If the leader has been assigned to a firm, don't consider it as a leader anymore
{
return 0;
}
err_when( !race_id );
err_when( !nation_recno );
//---- think about assign the unit to a firm that needs workers ----//
Nation* ownNation = nation_array[nation_recno];
Firm *firmPtr, *bestFirm=NULL;
int regionId = world.get_region_id( next_x_loc(), next_y_loc() );
int skillId = skill.skill_id;
int skillLevel = skill.skill_level;
int i, curRating, bestRating=0;
int curXLoc = next_x_loc(), curYLoc = next_y_loc();
if( skill.skill_id )
{
for( i=firm_array.size() ; i>0 ; i-- )
{
if( firm_array.is_deleted(i) )
continue;
firmPtr = firm_array[i];
if( firmPtr->nation_recno != nation_recno )
continue;
if( firmPtr->region_id != regionId )
continue;
curRating = 0;
if( skill.skill_id == SKILL_CONSTRUCTION ) // if this is a construction worker
{
if( firmPtr->builder_recno ) // assign the construction worker to this firm as an residental builder
continue;
}
else
{
if( !firmPtr->worker_array ||
firmPtr->firm_skill_id != skillId )
{
continue;
}
//----- if the firm is full of worker ------//
if( firmPtr->is_worker_full() )
{
//---- get the lowest skill worker of the firm -----//
Worker* workerPtr = firmPtr->worker_array;
int minSkill=100;
for( int j=0 ; j<firmPtr->worker_count ; j++, workerPtr++ )
{
if( workerPtr->skill_level < minSkill )
minSkill = workerPtr->skill_level;
}
//------------------------------//
if( firmPtr->majority_race() == race_id )
{
if( skill.skill_level < minSkill+10 )
continue;
}
else //-- for different race, only assign if the skill is significantly higher than the existing ones --//
{
if( skill.skill_level < minSkill+30 )
continue;
}
}
else
{
curRating += 300; // if the firm is not full, rating + 300
}
}
//-------- calculate the rating ---------//
curRating += world.distance_rating( curXLoc, curYLoc, firmPtr->center_x, firmPtr->center_y );
if( firmPtr->majority_race() == race_id )
curRating += 70;
curRating += (MAX_WORKER - firmPtr->worker_count) * 10;
//-------------------------------------//
if( curRating > bestRating )
{
bestRating = curRating;
bestFirm = firmPtr;
}
}
if( bestFirm )
{
assign(bestFirm->loc_x1, bestFirm->loc_y1);
return 1;
}
}
//---- look for towns to assign to -----//
bestRating = 0;
int hasTownInRegion=0;
Town *townPtr, *bestTown=NULL;
for( i=town_array.size() ; i>0 ; i-- ) // can't use ai_town_array[] because this function will be called by Unit::betray() when a unit defected to the player's kingdom
{
if( town_array.is_deleted(i) )
continue;
townPtr = town_array[i];
if( townPtr->nation_recno != nation_recno )
continue;
if( townPtr->region_id != regionId )
continue;
hasTownInRegion = 1;
if( townPtr->population >= MAX_TOWN_POPULATION || !townPtr->is_base_town )
continue;
//--- only assign to towns of the same race ---//
if( ownNation->pref_town_harmony > 50 )
{
if( townPtr->majority_race() != race_id )
continue;
}
//-------- calculate the rating ---------//
curRating = world.distance_rating(curXLoc, curYLoc, townPtr->center_x, townPtr->center_y );
curRating += 300 * townPtr->race_pop_array[race_id-1] / townPtr->population; // racial homogenous bonus
curRating += MAX_TOWN_POPULATION - townPtr->population;
//-------------------------------------//
if( curRating > bestRating )
{
bestRating = curRating;
bestTown = townPtr;
}
}
if( bestTown )
{
assign(bestTown->loc_x1, bestTown->loc_y1);
return 1;
}
//----- if we don't have any existing towns in this region ----//
if( !hasTownInRegion )
{
//#ifdef AMPLUS
// --- if region is too small don't consider this area, stay in the island forever --//
if( region_array[regionId]->region_stat_id == 0 )
return 0;
//#endif
//-- if we also don't have any existing camps in this region --//
if( region_array.get_region_stat(regionId)->camp_nation_count_array[nation_recno-1]==0 )
{
//---- try to build one if this unit can ----//
if( ownNation->cash > firm_res[FIRM_CAMP]->setup_cost &&
firm_res[FIRM_CAMP]->can_build(sprite_recno) &&
!leader_unit_recno ) // if this unit is commanded by a leader, let the leader build the camp
{
ai_build_camp();
}
}
else // if there is already a camp in this region, try to settle a new town next to the camp
{
ai_settle_new_town();
}
return 1; // if we don't have any town in this region, return 1, so this unit won't be resigned and so it can wait for other units to set up camps and villages later ---//
}
return 0;
}
//---------- End of function Unit::think_normal_human_action --------//
//--------- Begin of function Unit::think_weapon_action --------//
int Unit::think_weapon_action()
{
   // A weapon unit is only useful inside a camp: first try to join an
   // existing camp; failing that, try to build a new one.
   // Returns 1 when an action was initiated, 0 otherwise.
   if( think_assign_weapon_to_camp() )
      return 1;

   return think_build_camp() ? 1 : 0;
}
//---------- End of function Unit::think_weapon_action --------//
//--------- Begin of function Unit::think_assign_weapon_to_camp --------//
int Unit::think_assign_weapon_to_camp()
{
Nation *nationPtr = nation_array[nation_recno];
FirmCamp *firmCamp, *bestCamp=NULL;
int curRating=0, bestRating=0;
int regionId = world.get_region_id( next_x_loc(), next_y_loc() );
int curXLoc = next_x_loc(), curYLoc = next_y_loc();
for( int i=0 ; i<nationPtr->ai_camp_count ; i++ )
{
firmCamp = (FirmCamp*) firm_array[ nationPtr->ai_camp_array[i] ];
if( firmCamp->region_id != regionId || firmCamp->is_worker_full() )
continue;
//-------- calculate the rating ---------//
curRating = world.distance_rating(curXLoc, curYLoc, firmCamp->center_x, firmCamp->center_y );
curRating += (MAX_WORKER - firmCamp->worker_count) * 10;
//-------------------------------------//
if( curRating > bestRating )
{
bestRating = curRating;
bestCamp = firmCamp;
}
}
//-----------------------------------//
if( bestCamp )
{
assign(bestCamp->loc_x1, bestCamp->loc_y1);
return 1;
}
return 0;
}
//---------- End of function Unit::think_assign_weapon_to_camp --------//
//--------- Begin of function Unit::think_build_camp --------//
//
// Think about building a camp next to the town which is
// closest to the unit.
//
int Unit::think_build_camp()
{
   //---- select a town to build the camp ---//

   Nation* ownNation = nation_array[nation_recno];
   Town  *townPtr, *bestTown=NULL;
   int   curRating=0, bestRating=0;
   int   regionId = world.get_region_id( next_x_loc(), next_y_loc() );
   int   curXLoc = next_x_loc(), curYLoc = next_y_loc();

   // Only towns in the unit's region that are base towns and still have
   // free neighboring space can host a new camp.
   for( int i=ownNation->ai_town_count-1 ; i>=0 ; i-- )
   {
      townPtr = town_array[ ownNation->ai_town_array[i] ];

      if( townPtr->region_id != regionId )
         continue;

      if( !townPtr->is_base_town || townPtr->no_neighbor_space )
         continue;

      // distance_rating is larger for closer towns, so this picks the
      // nearest qualifying town (see function header: build next to the
      // town closest to the unit).
      curRating = world.distance_rating(curXLoc, curYLoc, townPtr->center_x, townPtr->center_y );

      if( curRating > bestRating )
      {
         bestRating = curRating;
         bestTown = townPtr;
      }
   }

   // Delegate the actual camp placement to the town.
   if( bestTown )
      return bestTown->ai_build_neighbor_firm(FIRM_CAMP);

   return 0;
}
//---------- End of function Unit::think_build_camp --------//
//--------- Begin of function Unit::think_reward --------//
int Unit::think_reward()
{
   Nation* ownNation = nation_array[nation_recno];

   //----------------------------------------------------------//
   // The need to secure high loyalty on this unit is based on:
   // -its skill
   // -its combat level
   // -soldiers commanded by this unit
   //----------------------------------------------------------//

   if( spy_recno && true_nation_recno() == nation_recno )    // if this is a spy of ours
   {
      return 0;   // Spy::think_reward() will handle this.
   }

   int curLoyalty = loyalty;
   int neededLoyalty;

   //----- if this unit is on a mission ------//
   // Keep it just above the betrayal threshold so the mission isn't lost.

   if( ai_action_id )
   {
      neededLoyalty = UNIT_BETRAY_LOYALTY+10;
   }

   //----- otherwise only reward soldiers and generals ------//

   else if( skill.skill_id == SKILL_LEADING )
   {
      //----- calculate the needed loyalty --------//
      // The more soldiers commanded and the higher the leadership skill,
      // the costlier a betrayal, so demand a higher loyalty floor.

      neededLoyalty = commanded_soldier_count()*5 + skill.skill_level;

      if( unit_mode == UNIT_MODE_OVERSEE )      // if this unit is an overseer
      {
         if( loyalty < UNIT_BETRAY_LOYALTY )    // if this unit's loyalty is < betrayal level, reward immediately
         {
            reward(nation_recno);   // reward it immediately if it's an overseer, don't check ai_should_spend()
            return 1;
         }

         neededLoyalty += 30;
      }

      neededLoyalty = MAX( UNIT_BETRAY_LOYALTY+10, neededLoyalty );    // 10 points above the betray loyalty level to prevent betrayal
      neededLoyalty = MIN( 100, neededLoyalty );
   }
   else
   {
      return 0;
   }

   //------- if the loyalty is already high enough ------//

   if( curLoyalty >= neededLoyalty )
      return 0;

   //---------- see how many cash & profit we have now ---------//
   // Urgency scales with the loyalty shortfall; units within 5 points of
   // the betrayal level get an extra +50 weighting.

   int rewardNeedRating = neededLoyalty - curLoyalty;

   if( curLoyalty < UNIT_BETRAY_LOYALTY+5 )
      rewardNeedRating += 50;

   if( ownNation->ai_should_spend(rewardNeedRating) )
   {
      reward(nation_recno);
      return 1;
   }

   return 0;
}
//---------- End of function Unit::think_reward --------//
//--------- Begin of function Unit::ai_leader_being_attacked --------//
//
// This function is called when the king is under attack.
//
// <int> attackerUnitRecno - recno of the attacker unit.
//
void Unit::ai_leader_being_attacked(int attackerUnitRecno)
{
   err_when( !team_info );

   if( unit_array[attackerUnitRecno]->nation_recno == nation_recno )   // this can happen when the unit has just changed nation
      return;

   //------------------------------------//
   // Decide whether this leader is important enough to request a
   // nation-level defense, and how often such requests may be made.

   int rc=0, callIntervalDays;

   if( rank_id == RANK_KING )
   {
      // The king always calls for defense.
      rc = 1;
      callIntervalDays = 7;
   }
   else if( rank_id == RANK_GENERAL )
   {
      // Only sufficiently skilled generals call for help; the required
      // skill drops as the nation's preference for keeping generals rises.
      rc = skill.skill_level >= 30 + (100-nation_array[nation_recno]->pref_keep_general)/2;    // 30 to 80
      callIntervalDays = 15;     // don't call too frequently
   }

   if( rc )
   {
      // Throttle: only one defense request per team per interval.
      if( info.game_date > team_info->ai_last_request_defense_date + callIntervalDays )
      {
         team_info->ai_last_request_defense_date = info.game_date;

         nation_array[nation_recno]->ai_defend(attackerUnitRecno);
      }
   }
}
//---------- End of function Unit::ai_leader_being_attacked --------//
//--------- Begin of function Unit::think_king_flee --------//
//
// Note: we still need to keep think_king_action() because
// between these two functions, a number of things
// may be done, like returning home camp. We only
// carry out actions in this function if the king
// is in danger and urgently need to flee.
//
int Unit::think_king_flee()
{
   if( force_move_flag && cur_action != SPRITE_IDLE )     // the king is already fleeing now
      return 1;

   //------- if the king is alone --------//

   Nation* ownNation = nation_array[nation_recno];

   //------------------------------------------------//
   // When the king is alone and there is no assigned action OR
   // when the king is injured, the king will flee
   // back to its camp.
   //------------------------------------------------//
   // Injury trigger: 125 - pref_military_courage/4, i.e. roughly 100-125
   // hit points depending on the nation's courage preference.

   if( ( team_info->member_count==0 && !ai_action_id ) ||
       hit_points < 125-ownNation->pref_military_courage/4 )
   {
      //------------------------------------------//
      //
      // If the king is currently under attack, flee
      // to the nearest camp with the maximum protection.
      //
      //------------------------------------------//

      Firm *firmCamp, *bestCamp=NULL;
      int  curRating, bestRating=0;
      int  curXLoc = next_x_loc(), curYLoc = next_y_loc();
      int  curRegionId = world.get_region_id( curXLoc, curYLoc );

      if( cur_action == SPRITE_ATTACK )
      {
         // Under attack: pick the closest friendly camp in this region.
         for( int i=ownNation->ai_camp_count-1 ; i>=0 ; i-- )
         {
            firmCamp = (FirmCamp*) firm_array[ ownNation->ai_camp_array[i] ];

            if( firmCamp->region_id != curRegionId )
               continue;

            if( firmCamp->overseer_recno && rank_id!=RANK_KING )    // if there is already a commander in this camp. However if this is the king, than ingore this
               continue;

            curRating = world.distance_rating( curXLoc, curYLoc,
                           firmCamp->center_x, firmCamp->center_y );

            if( curRating > bestRating )
            {
               bestRating = curRating;
               bestCamp   = firmCamp;
            }
         }
      }
      else if( home_camp_firm_recno )    // if there is a home for the king
      {
         bestCamp = firm_array[home_camp_firm_recno];
      }

      //------------------------------------//

      if( bestCamp )
      {
         // Aggressive AIs force the move so the king won't stop to fight
         // on the way.
         if( config.ai_aggressiveness > OPTION_LOW )
            force_move_flag = 1;

         assign( bestCamp->loc_x1, bestCamp->loc_y1 );
      }
      else     // if the king is neither under attack or has a home camp, then call the standard think_leader_action()
      {
         think_leader_action();
      }

      return cur_action != SPRITE_IDLE;
   }

   return 0;
}
//---------- End of function Unit::think_king_flee --------//
//--------- Begin of function Unit::think_general_flee --------//
int Unit::think_general_flee()
{
   if( force_move_flag && cur_action != SPRITE_IDLE )     // the general is already fleeing now
      return 1;

   //------- if the general is alone --------//

   Nation* ownNation = nation_array[nation_recno];

   //------------------------------------------------//
   // When the general is alone and there is no assigned action OR
   // when the general is injured, the general will flee
   // back to its camp.
   //------------------------------------------------//
   // Injury trigger: (75 + courage/2)/200 of max HP, i.e. 37.5%-62.5%.
   // NOTE(review): higher courage RAISES this threshold (flees earlier),
   // the opposite of the king's formula in think_king_flee() - verify
   // this is intended.

   if( ( team_info->member_count==0 && !ai_action_id ) ||
       hit_points < max_hit_points * (75+ownNation->pref_military_courage/2) / 200 )    // 75 to 125 / 200
   {
      //------------------------------------------//
      //
      // If the general is currently under attack, flee
      // to the nearest camp with the maximum protection.
      //
      //------------------------------------------//

      Firm *firmCamp, *bestCamp=NULL;
      int  curRating, bestRating=0;
      int  curXLoc = next_x_loc(), curYLoc = next_y_loc();
      int  curRegionId = world.get_region_id( curXLoc, curYLoc );

      if( cur_action == SPRITE_ATTACK )
      {
         // Under attack: head for the closest friendly camp in this region.
         for( int i=ownNation->ai_camp_count-1 ; i>=0 ; i-- )
         {
            firmCamp = (FirmCamp*) firm_array[ ownNation->ai_camp_array[i] ];

            if( firmCamp->region_id != curRegionId )
               continue;

            curRating = world.distance_rating( curXLoc, curYLoc,
                           firmCamp->center_x, firmCamp->center_y );

            if( curRating > bestRating )
            {
               bestRating = curRating;
               bestCamp   = firmCamp;
            }
         }
      }
      else if( home_camp_firm_recno )    // if there is a home for the general
      {
         bestCamp = firm_array[home_camp_firm_recno];
      }

      //------------------------------------//

      if( bestCamp )
      {
         if( bestCamp->overseer_recno )     // if there is already an overseer there, just move close to the camp for protection
         {
            if( config.ai_aggressiveness > OPTION_LOW )
               force_move_flag = 1;

            move_to( bestCamp->loc_x1, bestCamp->loc_y1 );
         }
         else
            assign( bestCamp->loc_x1, bestCamp->loc_y1 );
      }
      else     // if the general is neither under attack or has a home camp, then call the standard think_leader_action()
      {
         think_leader_action();
      }

      return cur_action != SPRITE_IDLE;
   }

   return 0;
}
//---------- End of function Unit::think_general_flee --------//
//--------- Begin of function Unit::ai_build_camp --------//
//
// Order this unit to build a camp in its region.
//
int Unit::ai_build_camp()
{
   //--- to prevent building more than one camp at the same time ---//

   int curRegionId = region_id();

   Nation* ownNation = nation_array[nation_recno];

   if( ownNation->is_action_exist( ACTION_AI_BUILD_FIRM, FIRM_CAMP, curRegionId ) )
      return 0;

   //------- locate a place for the camp --------//

   FirmInfo* firmInfo = firm_res[FIRM_CAMP];
   int  xLoc=0, yLoc=0;
   char teraMask = UnitRes::mobile_type_to_mask(UNIT_LAND);

   // Search the whole map for a random buildable land spot inside this
   // region; +2 on each dimension keeps one free tile around the camp.
   if( world.locate_space_random(xLoc, yLoc, MAX_WORLD_X_LOC-1,
       MAX_WORLD_Y_LOC-1, firmInfo->loc_width+2, firmInfo->loc_height+2,      // leave at least one location space around the building
       MAX_WORLD_X_LOC*MAX_WORLD_Y_LOC, curRegionId, 1, teraMask) )
   {
      // Queue the build as a nation AI action, carried out by this unit.
      return ownNation->add_action( xLoc, yLoc, -1, -1,
                ACTION_AI_BUILD_FIRM, FIRM_CAMP, 1, sprite_recno );
   }

   return 0;
}
//---------- End of function Unit::ai_build_camp --------//
//--------- Begin of function Unit::ai_settle_new_town --------//
//
// Settle a new village next to one of the camps in this region.
//
int Unit::ai_settle_new_town()
{
//----- locate a suitable camp for the new town to settle next to ----//
Nation* ownNation = nation_array[nation_recno];
FirmCamp* firmCamp, *bestCamp=NULL;
int curRegionId = region_id();
int curRating, bestRating=0;
for( int i=ownNation->ai_camp_count-1 ; i>=0 ; i-- )
{
firmCamp = (FirmCamp*) firm_array[ ownNation->ai_camp_array[i] ];
if( firmCamp->region_id != curRegionId )
continue;
curRating = firmCamp->total_combat_level();
if( curRating > bestRating )
{
bestRating = curRating;
bestCamp = firmCamp;
}
}
if( !bestCamp )
return 0;
//--------- settle a new town now ---------//
int xLoc=bestCamp->loc_x1;
int yLoc=bestCamp->loc_y1;
if( world.locate_space(xLoc, yLoc, bestCamp->loc_x2, bestCamp->loc_y2,
STD_TOWN_LOC_WIDTH, STD_TOWN_LOC_HEIGHT,
UNIT_LAND, curRegionId, 1 ) ) // 1-build flag
{
settle( xLoc, yLoc );
return 1;
}
return 0;
}
//---------- End of function Unit::ai_settle_new_town --------//<|fim▁hole|>// This function is used for handling cases when AI units are not
// able to seek a path successfully.
//
int Unit::ai_handle_seek_path_fail()
{
if( seek_path_fail_count < 5 ) // wait unit it has failed many times
return 0;
//----- try to move to a new location -----//
if( seek_path_fail_count==5 )
{
stop2(); // stop the unit and think for new action
return 0;
}
//--- if the seek path has failed too many times, resign the unit ---//
int resignFlag = 0;
if( rank_id == RANK_SOLDIER && !leader_unit_recno )
{
if( seek_path_fail_count>=7 )
resignFlag = 1;
}
else if( rank_id == RANK_GENERAL )
{
if( seek_path_fail_count >= 7+skill.skill_level/10 )
resignFlag = 1;
}
if( resignFlag && is_visible() )
{
resign(COMMAND_AI);
return 1;
}
else
return 0;
}
//---------- End of function Unit::ai_handle_seek_path_fail --------//<|fim▁end|>
|
//--------- Begin of function Unit::ai_handle_seek_path_fail --------//
//
|
<|file_name|>tasks.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*-
from __future__ import absolute_import
import os
import logging
from celery import task, current_task
from django.conf import settings
from .models import Video
from .quvi import Quvi
from .utils import download_thumbnail, celery_download, encode_videos
LOGGER = logging.getLogger(__name__)
<|fim▁hole|>@task
def fetch_video(quvi_dump, video_id):
    """Celery task: download a media stream resolved by quvi.

    :param quvi_dump: serialized Quvi state (restored via ``Quvi(dump=...)``)
        describing the stream and thumbnail URLs.
    :param video_id: primary key of the ``megascops.models.Video`` row to
        populate; its state is set to ``"READY"`` once the download finishes.
    """
    quvi = Quvi(dump=quvi_dump)
    video = Video.objects.get(pk=video_id)
    # Fetch the thumbnail first, if the source exposes one.
    if quvi.thumbnail_url:
        video.thumbnail = download_thumbnail(quvi.thumbnail_url)
    dest_path = os.path.join(settings.MEDIA_ROOT, video.path)
    # current_task is handed to the downloader, presumably so it can report
    # progress on the running task -- TODO confirm against celery_download.
    celery_download(quvi.stream.url, dest_path, current_task)
    video.state = "READY"
    video.save()
@task
def encode_task(video_id):
    """Celery task: encode the stored video file using avconv.

    :param video_id: primary key of the Video row to encode; a missing row
        is logged and the task exits without raising.
    """
    try:
        video = Video.objects.get(pk=video_id)
    except Video.DoesNotExist:
        # Use the module logger instead of a bare Python 2 ``print``
        # statement (a syntax error under Python 3, and invisible in logs).
        LOGGER.error("The requested video does not exist")
        return
    encode_videos(video)
| |
<|file_name|>Messages.java<|end_file_name|><|fim▁begin|>/**
* Aptana Studio
* Copyright (c) 2005-2011 by Appcelerator, Inc. All Rights Reserved.
* Licensed under the terms of the GNU Public License (GPL) v3 (with exceptions).
* Please see the license.html included with this distribution for details.
* Any modifications to this file must keep this entire header intact.
*/<|fim▁hole|>
/**
*
* @author Ingo Muschenetz
*
*/
public final class Messages extends NLS
{
private static final String BUNDLE_NAME = "com.aptana.core.internal.resources.messages"; //$NON-NLS-1$
private Messages()
{
}
static
{
// initialize resource bundle
NLS.initializeMessages(BUNDLE_NAME, Messages.class);
}
/**
* MarkerManager_MarkerIDIsDefined
*/
public static String MarkerManager_MarkerIDIsDefined;
/**
* UniformResourceMarker_UniformResourceMarketInfoNull
*/
public static String UniformResourceMarker_UniformResourceMarketInfoNull;
}<|fim▁end|>
|
package com.aptana.core.internal.resources;
import org.eclipse.osgi.util.NLS;
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/**
* React components for kanna projects.
* @module kanna-lib-components
*/
"use strict";
module.exports = {
/**
* @name KnAccordionArrow
*/
get KnAccordionArrow() { return require('./kn_accordion_arrow'); },
/**
* @name KnAccordionBody
*/
get KnAccordionBody() { return require('./kn_accordion_body'); },
/**
* @name KnAccordionHeader
*/
get KnAccordionHeader() { return require('./kn_accordion_header'); },
/**
* @name KnAccordion
*/
get KnAccordion() { return require('./kn_accordion'); },
/**
* @name KnAnalogClock
*/
get KnAnalogClock() { return require('./kn_analog_clock'); },
/**
* @name KnBody
*/
get KnBody() { return require('./kn_body'); },
/**
* @name KnButton
*/
get KnButton() { return require('./kn_button'); },
/**
* @name KnCheckbox
*/
get KnCheckbox() { return require('./kn_checkbox'); },
/**
* @name KnClock
*/
get KnClock() { return require('./kn_clock'); },
/**
* @name KnContainer
*/
get KnContainer() { return require('./kn_container'); },
/**
* @name KnDesktopShowcase
*/
get KnDesktopShowcase() { return require('./kn_desktop_showcase'); },
/**
* @name KnDigitalClock
*/
get KnDigitalClock() { return require('./kn_digital_clock'); },
/**
* @name KnFaIcon
*/
get KnFaIcon() { return require('./kn_fa_icon'); },
/**
* @name KnFooter
*/
get KnFooter() { return require('./kn_footer'); },
/**
* @name KnHead
*/
get KnHead() { return require('./kn_head'); },
/**
* @name KnHeaderLogo
*/
get KnHeaderLogo() { return require('./kn_header_logo'); },
/**
* @name KnHeaderTabItem
*/
get KnHeaderTabItem() { return require('./kn_header_tab_item'); },
/**
* @name KnHeaderTab
*/
get KnHeaderTab() { return require('./kn_header_tab'); },
/**
* @name KnHeader
*/
get KnHeader() { return require('./kn_header'); },
/**
* @name KnHtml
*/
get KnHtml() { return require('./kn_html'); },
/**
* @name KnIcon
*/
get KnIcon() { return require('./kn_icon'); },
/**
* @name KnImage
*/
get KnImage() { return require('./kn_image'); },
/**
* @name KnIonIcon
*/
get KnIonIcon() { return require('./kn_ion_icon'); },
/**
* @name KnLabel
*/
get KnLabel() { return require('./kn_label'); },
/**
* @name KnLinks
*/
get KnLinks() { return require('./kn_links'); },
/**
* @name KnListItemArrowIcon
*/
get KnListItemArrowIcon() { return require('./kn_list_item_arrow_icon'); },
/**
* @name KnListItemIcon
*/
get KnListItemIcon() { return require('./kn_list_item_icon'); },
/**
* @name KnListItemText
*/
get KnListItemText() { return require('./kn_list_item_text'); },
/**
* @name KnListItem
*/
get KnListItem() { return require('./kn_list_item'); },
/**
* @name KnList
*/
get KnList() { return require('./kn_list'); },
/**
* @name KnMain
*/
get KnMain() { return require('./kn_main'); },
/**
* @name KnMobileShowcase
*/
get KnMobileShowcase() { return require('./kn_mobile_showcase'); },
/**
* @name KnNote
*/
get KnNote() { return require('./kn_note'); },
/**
* @name KnPassword
*/
get KnPassword() { return require('./kn_password'); },
/**
* @name KnRadio
*/
get KnRadio() { return require('./kn_radio'); },
/**
* @name KnRange
*/
get KnRange() { return require('./kn_range'); },
/**
* @name KnShowcase
*/
get KnShowcase() { return require('./kn_showcase'); },
/**
* @name KnSlider
*/
get KnSlider() { return require('./kn_slider'); },
/**
* @name KnSlideshow
*/
get KnSlideshow() { return require('./kn_slideshow'); },
/**
* @name KnSpinner
*/
get KnSpinner() { return require('./kn_spinner'); },
/**
* @name KnTabItem
*/<|fim▁hole|> */
get KnTab() { return require('./kn_tab'); },
/**
* @name KnText
*/
get KnText() { return require('./kn_text'); },
/**
* @name KnThemeStyle
*/
get KnThemeStyle() { return require('./kn_theme_style'); }
};<|fim▁end|>
|
get KnTabItem() { return require('./kn_tab_item'); },
/**
* @name KnTab
|
<|file_name|>instr_aesdeclast.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
<|fim▁hole|>
// AESDECLAST xmm, m128 (32-bit mode): last round of AES decryption; the
// expected bytes start with the 66 0F 38 DF opcode, here with SIB+disp32
// memory addressing.
#[test]
fn aesdeclast_2() {
    run_test(&Instruction { mnemonic: Mnemonic::AESDECLAST, operand1: Some(Direct(XMM4)), operand2: Some(IndirectScaledDisplaced(EAX, Four, 1919230320, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 223, 36, 133, 112, 33, 101, 114], OperandSize::Dword)
}

// AESDECLAST xmm, xmm (64-bit mode): register-direct form.
#[test]
fn aesdeclast_3() {
    run_test(&Instruction { mnemonic: Mnemonic::AESDECLAST, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM4)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 223, 212], OperandSize::Qword)
}
#[test]
fn aesdeclast_4() {
run_test(&Instruction { mnemonic: Mnemonic::AESDECLAST, operand1: Some(Direct(XMM6)), operand2: Some(IndirectScaledIndexed(RBX, RSI, Two, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 223, 52, 115], OperandSize::Qword)
}<|fim▁end|>
|
#[test]
fn aesdeclast_1() {
run_test(&Instruction { mnemonic: Mnemonic::AESDECLAST, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM2)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 223, 202], OperandSize::Dword)
}
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::env;
use std::io::Read;
use std::net::*;
use std::sync::atomic::*;
use std::thread;
use std::time::Duration;
static AMT: AtomicUsize = ATOMIC_USIZE_INIT;
fn main() {
thread::spawn(|| {
loop {
thread::sleep(Duration::new(1, 0));
println!("{}", AMT.swap(0, Ordering::SeqCst));
}
});<|fim▁hole|> let addr = addr.parse::<SocketAddr>().unwrap();
let mut c = TcpStream::connect(&addr).unwrap();
let mut b = [0; 64 * 1024];
loop {
match c.read(&mut b).unwrap() {
0 => break,
n => { AMT.fetch_add(n, Ordering::SeqCst); }
}
}
}<|fim▁end|>
|
let addr = env::args().nth(1).unwrap_or("127.0.0.1:12345".to_string());
|
<|file_name|>strategies.py<|end_file_name|><|fim▁begin|>"""
Implementation of various trading strategies.
"""
from cointrol.core.models import (
Order, TradingSession,
RelativeStrategyProfile, FixedStrategyProfile
)
class TradeAction:
    """A single buy/sell decision at a specific price."""

    BUY, SELL = Order.BUY, Order.SELL

    def __init__(self, action, price):
        self.action = action
        self.price = price

    def __str__(self):
        label = Order.TYPES[self.action]
        return '{action} at ${price}'.format(action=label, price=self.price)
class BaseTradingStrategy:
    """Base class deciding the next trade for a session.

    Subclasses supply the pricing policy via ``get_buy_price()`` and
    ``get_sell_price()``.
    """

    def __init__(self,
                 session: TradingSession,
                 last_order: Order):
        self.session = session
        self.profile = session.profile
        self.last_order = last_order

    def get_trade_action(self) -> TradeAction:
        # Alternate sides: after a sell we buy, after a buy we sell.
        if self.last_order.type == Order.SELL:
            return TradeAction(action=Order.BUY,
                               price=self.get_buy_price())
        else:
            return TradeAction(action=Order.SELL,
                               price=self.get_sell_price())

    def get_buy_price(self):
        # Price at which to place a buy order; subclass responsibility.
        raise NotImplementedError

    def get_sell_price(self):
        # Price at which to place a sell order; subclass responsibility.
        raise NotImplementedError
class FixedStrategy(BaseTradingStrategy):
    """Trade at the fixed, absolute prices stored on the profile."""

    profile = None
    """:type: FixedStrategyProfile"""

    def get_buy_price(self):
        return self.profile.buy

    def get_sell_price(self):
        return self.profile.sell
class RelativeStrategy(BaseTradingStrategy):
    """Trade at prices expressed as percentages of the last order's price."""

    profile = None
    """:type: RelativeStrategyProfile"""

    def get_buy_price(self):
        # profile.buy is a percentage (e.g. 98 -> 98% of the last price).
        return self.last_order.price * (self.profile.buy / 100)

    def get_sell_price(self):
        # profile.sell is a percentage of the last order price.
        return self.last_order.price * (self.profile.sell / 100)
# {Profile model class: implementation class}
MAPPING = {
    FixedStrategyProfile: FixedStrategy,
    RelativeStrategyProfile: RelativeStrategy,
}


def get_for_session(session, latest_order) -> BaseTradingStrategy:
    """Instantiate the strategy implementation for the session's profile.

    :raises KeyError: if the profile type has no registered implementation.
    """
    implementation_class = MAPPING[type(session.profile)]
    return implementation_class(session, latest_order)
<|file_name|>test_tensor_functions.py<|end_file_name|><|fim▁begin|>from sympy import symbols, Dij, LeviCivita
x, y = symbols('x,y')
def test_Dij():
    # Kronecker delta: 1 when both arguments are equal, 0 otherwise.
    assert Dij(1, 1) == 1
    assert Dij(1, 2) == 0
    assert Dij(x, x) == 1
    assert Dij(x**2 - y**2, x**2 - y**2) == 1
def test_levicivita():
    # Concrete permutations of (1, 2, 3): even -> 1, odd -> -1, repeat -> 0.
    assert LeviCivita(1, 2, 3) == 1
    assert LeviCivita(1, 3, 2) == -1
    assert LeviCivita(1, 2, 2) == 0
    i, j, k = symbols('i j k')
    # Symbolic indices stay unevaluated until doit() is called.
    assert LeviCivita(i, j, k) == LeviCivita(i, j, k, evaluate=False)
    assert LeviCivita(i, j, i) == 0
    assert LeviCivita(1, i, i) == 0
    assert LeviCivita(i, j, k).doit() == (j - i)*(k - i)*(k - j)/2
    # Higher-rank epsilon tensors.
    assert LeviCivita(1, 2, 3, 1) == 0
    assert LeviCivita(4, 5, 1, 2, 3) == 1
    assert LeviCivita(4, 5, 2, 1, 3) == -1
<|file_name|>karma.conf.js<|end_file_name|><|fim▁begin|>// Karma configuration
// http://karma-runner.github.io/0.10/config/configuration-file.html
module.exports = function(config) {
config.set({
// base path, that will be used to resolve files and exclude
basePath: '',
// testing framework to use (jasmine/mocha/qunit/...)
frameworks: ['mocha', 'chai', 'sinon'],
// list of files / patterns to load in the browser<|fim▁hole|> 'app/scripts/*.js',
'app/scripts/**/*.js',
'test/mock/**/*.js',
'test/spec/**/*.js'
],
// list of files / patterns to exclude
exclude: [],
// web server port
port: 8080,
// level of logging
// possible values: LOG_DISABLE || LOG_ERROR || LOG_WARN || LOG_INFO || LOG_DEBUG
logLevel: config.LOG_INFO,
// enable / disable watching file and executing tests whenever any file changes
autoWatch: false,
// Start these browsers, currently available:
// - Chrome
// - ChromeCanary
// - Firefox
// - Opera
// - Safari (only Mac)
// - PhantomJS
// - IE (only Windows)
browsers: ['Chrome'],
// Continuous Integration mode
// if true, it capture browsers, run tests and exit
singleRun: false
});
};<|fim▁end|>
|
files: [
'app/bower_components/angular/angular.js',
'app/bower_components/angular-mocks/angular-mocks.js',
|
<|file_name|>WorkerRuntimeAgent.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2011 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#if ENABLE(INSPECTOR) && ENABLE(WORKERS)
#include "WorkerRuntimeAgent.h"
#include "InjectedScript.h"
#include "InstrumentingAgents.h"
#include "ScriptState.h"
#include "WorkerContext.h"
#include "WorkerDebuggerAgent.h"
#include "WorkerRunLoop.h"
#include "WorkerThread.h"
namespace WebCore {
// Registers this agent with the instrumenting agents on construction so
// instrumentation hooks can reach it; starts in the non-paused state.
WorkerRuntimeAgent::WorkerRuntimeAgent(InstrumentingAgents* instrumentingAgents, InspectorCompositeState* state, InjectedScriptManager* injectedScriptManager, WorkerContext* workerContext)
    : InspectorRuntimeAgent(instrumentingAgents, state, injectedScriptManager)
    , m_workerContext(workerContext)
    , m_paused(false)
{
    m_instrumentingAgents->setWorkerRuntimeAgent(this);
}

WorkerRuntimeAgent::~WorkerRuntimeAgent()
{
    // Clear the back-pointer so nothing calls into a destroyed agent.
    m_instrumentingAgents->setWorkerRuntimeAgent(0);
}
// Resolves the injected script used to evaluate expressions in this worker.
// Workers have exactly one execution context, so passing an explicit
// context id is reported as an error (an empty InjectedScript is returned).
InjectedScript WorkerRuntimeAgent::injectedScriptForEval(ErrorString* error, const int* executionContextId)
{
    if (executionContextId) {
        *error = "Execution context id is not supported for workers as there is only one execution context.";
        return InjectedScript();
    }
    ScriptState* scriptState = scriptStateFromWorkerContext(m_workerContext);
    return injectedScriptManager()->injectedScriptFor(scriptState);
}
void WorkerRuntimeAgent::muteConsole()
{
    // We don't need to mute console for workers.
}

void WorkerRuntimeAgent::unmuteConsole()
{
    // We don't need to mute console for workers.
}

// Protocol command: resume a worker paused by pauseWorkerContext().
void WorkerRuntimeAgent::run(ErrorString*)
{
    // Clearing the flag makes the nested run loop in pauseWorkerContext()
    // exit after the current debugger message is processed.
    m_paused = false;
}
#if ENABLE(JAVASCRIPT_DEBUGGER)
// Blocks the worker by spinning a nested run loop that only services
// debugger-task-mode messages, until run() clears m_paused (or the run
// loop stops delivering messages).
void WorkerRuntimeAgent::pauseWorkerContext(WorkerContext* context)
{
    m_paused = true;
    MessageQueueWaitResult result;
    do {
        result = context->thread()->runLoop().runInMode(context, WorkerDebuggerAgent::debuggerTaskMode);
        // Keep waiting until execution is resumed.
    } while (result == MessageQueueMessageReceived && m_paused);
}
#endif // ENABLE(JAVASCRIPT_DEBUGGER)
<|fim▁hole|><|fim▁end|>
|
} // namespace WebCore
#endif // ENABLE(INSPECTOR) && ENABLE(WORKERS)
|
<|file_name|>ColumnListItem.js<|end_file_name|><|fim▁begin|>/*!
* ${copyright}
*/
// Provides control sap.m.ColumnListItem.
sap.ui.define([
"sap/ui/core/Element",
"sap/ui/core/library",
"./library",
"./ListItemBase",
"./ColumnListItemRenderer",
"sap/ui/thirdparty/jquery",
// jQuery custom selectors ":sapFocusable", ":sapTabbable"
"sap/ui/dom/jquery/Selectors"
],
function(Element, coreLibrary, library, ListItemBase, ColumnListItemRenderer, jQuery) {
"use strict";
// shortcut for sap.m.ListType
var ListItemType = library.ListType;
// shortcut for sap.ui.core.VerticalAlign
var VerticalAlign = coreLibrary.VerticalAlign;
/**
* Constructor for a new ColumnListItem.
*
* @param {string} [sId] Id for the new control, generated automatically if no id is given
* @param {object} [mSettings] Initial settings for the new control
*
* @class
* <code>sap.m.ColumnListItem</code> can be used with the <code>cells</code> aggregation to create rows for the <code>sap.m.Table</code> control.
* The <code>columns</code> aggregation of the <code>sap.m.Table</code> should match with the cells aggregation.
*
* <b>Note:</b> This control should only be used within the <code>sap.m.Table</code> control.
* The inherited <code>counter</code> property of <code>sap.m.ListItemBase</code> is not supported.
*
* @extends sap.m.ListItemBase
*
* @author SAP SE
* @version ${version}
*
* @constructor
* @public
* @since 1.12
* @alias sap.m.ColumnListItem
* @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel
*/
var ColumnListItem = ListItemBase.extend("sap.m.ColumnListItem", /** @lends sap.m.ColumnListItem.prototype */ { metadata : {
library : "sap.m",
properties : {
/**
* Sets the vertical alignment of all the cells within the table row (including selection and navigation).
* <b>Note:</b> <code>vAlign</code> property of <code>sap.m.Column</code> overrides the property for cell vertical alignment if both are set.
* @since 1.20
*/
vAlign : {type : "sap.ui.core.VerticalAlign", group : "Appearance", defaultValue : VerticalAlign.Inherit}
},
defaultAggregation : "cells",
aggregations : {
/**
* Every <code>control</code> inside the <code>cells</code> aggregation defines one cell of the row.
* <b>Note:</b> The order of the <code>cells</code> aggregation must match the order of the <code>columns</code> aggregation of <code>sap.m.Table</code>.
*/<|fim▁hole|> cells : {type : "sap.ui.core.Control", multiple : true, singularName : "cell", bindable : "bindable"}
}
}});
/**
 * TablePopin element that handles own events.
 *
 * Rendered as the extra row below a ColumnListItem holding the columns that
 * were moved into the "pop-in". It routes interaction back to the owning row
 * and mirrors the row's hover styling.
 */
var TablePopin = Element.extend("sap.m.TablePopin", {
	ontap: function(oEvent) {
		// prevent the tap event if selection is done within the popin control and mark the event
		if (oEvent.isMarked() || ListItemBase.detectTextSelection(this.getDomRef())) {
			return oEvent.stopImmediatePropagation(true);
		}

		// focus to the main row if there is nothing to focus in the popin
		if (oEvent.srcControl === this || !jQuery(oEvent.target).is(":sapFocusable")) {
			this.getParent().focus();
		}
	},

	// jQuery event handler ("this" is the popin DOM node): mirror the hover
	// highlight of the preceding sibling (the main row) onto the popin.
	_onMouseEnter: function() {
		var $this = jQuery(this),
			$parent = $this.prev();

		if (!$parent.length || !$parent.hasClass("sapMLIBHoverable") || $parent.hasClass("sapMPopinHovered")) {
			return;
		}

		$parent.addClass("sapMPopinHovered");
	},

	// jQuery event handler: remove the mirrored hover highlight again.
	_onMouseLeave: function() {
		var $this = jQuery(this),
			$parent = $this.prev();

		if (!$parent.length || !$parent.hasClass("sapMLIBHoverable") || !$parent.hasClass("sapMPopinHovered")) {
			return;
		}

		$parent.removeClass("sapMPopinHovered");
	}
});
// defines tag name: rows render as <tr> elements
ColumnListItem.prototype.TagName = "tr";

// enable the ACC announcement for "not selected"
ColumnListItem.prototype._bAnnounceNotSelected = true;

// Initializes base list-item state plus this item's own bookkeeping.
ColumnListItem.prototype.init = function() {
	ListItemBase.prototype.init.call(this);
	this._bNeedsTypeColumn = false; // cached "needs type column" state, see _checkTypeColumn
	this._aClonedHeaders = []; // headers cloned for the pop-in, cleaned up in exit
};
ColumnListItem.prototype.onAfterRendering = function() {
	ListItemBase.prototype.onAfterRendering.call(this);
	// re-evaluate whether the table needs the type column for this row
	this._checkTypeColumn();

	var oPopin = this.hasPopin(); // returns the TablePopin instance (truthy) or undefined
	if (oPopin) {
		// keep hover styling of the main row and its popin row in sync
		this.$Popin().on("mouseenter", oPopin._onMouseEnter).on("mouseleave", oPopin._onMouseLeave);
	}
};
// Cleanup on destruction: tell the table no type column is required anymore,
// destroy the cloned pop-in headers and the pop-in element itself.
ColumnListItem.prototype.exit = function() {
	ListItemBase.prototype.exit.call(this);
	this._checkTypeColumn(false);
	this._destroyClonedHeaders();

	if (this._oPopin) {
		this._oPopin.destroy(true); // true: suppress invalidation during destroy
		this._oPopin = null;
	}
};
// Extends the base visibility handling: a row that becomes invisible must
// not leave its separately rendered pop-in row behind in the DOM.
ColumnListItem.prototype.setVisible = function(bVisible) {
	ListItemBase.prototype.setVisible.call(this, bVisible);
	if (bVisible || !this.hasPopin()) {
		return this;
	}

	this.removePopin();
	return this;
};
// Returns the sap.m.Table this item is aggregated in, or undefined when the
// parent is absent or not a table.
ColumnListItem.prototype.getTable = function() {
	var oCandidate = this.getParent();
	return (oCandidate && oCandidate.isA("sap.m.Table")) ? oCandidate : undefined;
};
/**
 * Returns the pop-in element, creating it lazily on first access.
 *
 * The pop-in delegates its UI events back to this list item so the row and
 * its pop-in behave as one interactive unit.
 *
 * @returns {sap.m.TablePopin} the lazily created pop-in element
 * @protected
 * @since 1.30.9
 */
ColumnListItem.prototype.getPopin = function() {
	if (!this._oPopin) {
		this._oPopin = new TablePopin({
			id: this.getId() + "-sub"
		}).addDelegate({
			// handle the events of pop-in by forwarding them to this row
			ontouchstart: this.ontouchstart,
			ontouchmove: this.ontouchmove,
			ontap: this.ontap,
			ontouchend: this.ontouchend,
			ontouchcancel: this.ontouchcancel,
			onsaptabnext: this.onsaptabnext,
			onsaptabprevious: this.onsaptabprevious,
			onsapup: this.onsapup,
			onsapdown: this.onsapdown,
			oncontextmenu: this.oncontextmenu
		}, this).setParent(this, null, true); // bSuppressInvalidate = true
	}

	return this._oPopin;
};
/**
 * Returns pop-in DOMRef as a jQuery Object.
 *
 * @returns {jQuery} jQuery object wrapping the "<id>-sub" DOM element
 * @protected
 * @since 1.26
 */
ColumnListItem.prototype.$Popin = function() {
	return this.$("sub");
};
/**
 * Determines whether control has pop-in or not.
 *
 * Note: this returns the pop-in instance itself (truthy) or undefined rather
 * than a strict boolean — callers (e.g. onAfterRendering) use the returned
 * instance directly.
 * @protected
 */
ColumnListItem.prototype.hasPopin = function() {
	return this._oPopin;
};
/**
 * Removes the pop-in from the DOM. The pop-in element itself is kept and can
 * be rendered again later.
 * @protected
 */
ColumnListItem.prototype.removePopin = function() {
	this._oPopin && this.$Popin().remove();
};
/**
 * Returns the tabbable DOM elements as a jQuery collection.
 * When a pop-in is available, its separately rendered DOM is included too.
 *
 * @returns {jQuery} jQuery collection of tabbable elements
 * @protected
 * @since 1.26
 */
ColumnListItem.prototype.getTabbables = function() {
	return this.$().add(this.$Popin()).find(":sapTabbable");
};
// Screen-reader announcement of the control type: "row".
ColumnListItem.prototype.getAccessibilityType = function(oBundle) {
	return oBundle.getText("ACC_CTR_TYPE_ROW");
};
// Builds the screen-reader announcement of the row content: for every
// relevant column (in rendered order) the header text followed by the cell
// text, joined with " . ".
ColumnListItem.prototype.getContentAnnouncement = function(oBundle) {
	var oTable = this.getTable();
	if (!oTable) {
		// no surrounding table: nothing meaningful to announce
		return;
	}

	var aOutput = [],
		aCells = this.getCells(),
		aColumns = oTable.getColumns(true);

	// sort columns by their current index; columns with a negative index are
	// pushed to the end, equal indices keep their relative order
	aColumns.sort(function(oCol1, oCol2) {
		var iCol1Index = oCol1.getIndex(), iCol2Index = oCol2.getIndex(), iIndexDiff = iCol1Index - iCol2Index;
		if (iIndexDiff == 0) { return 0; }
		if (iCol1Index < 0) { return 1; }
		if (iCol2Index < 0) { return -1; }
		return iIndexDiff;
	}).forEach(function(oColumn) {
		// cells are matched to columns via the column's initial order
		var oCell = aCells[oColumn.getInitialOrder()];
		// skip missing cells, invisible columns, and hidden columns that are not in the pop-in
		if (!oCell || !oColumn.getVisible() || (oColumn.isHidden() && !oColumn.isPopin())) {
			return;
		}

		var oHeader = oColumn.getHeader();
		if (oHeader && oHeader.getVisible()) {
			aOutput.push(ListItemBase.getAccessibilityText(oHeader) + " " + ListItemBase.getAccessibilityText(oCell, true));
		} else {
			aOutput.push(ListItemBase.getAccessibilityText(oCell, true));
		}
	});

	return aOutput.join(" . ").trim();
};
// Update the aria-selected state; the separately rendered pop-in row needs
// the same attribute as the main row.
ColumnListItem.prototype.updateSelectedDOM = function(bSelected, $This) {
	ListItemBase.prototype.updateSelectedDOM.apply(this, arguments);

	// update popin as well
	if (this.hasPopin()) {
		this.$Popin().attr("aria-selected", bSelected);
	}
};
ColumnListItem.prototype.onfocusin = function(oEvent) {
	// event was already handled elsewhere
	if (oEvent.isMarked()) {
		return;
	}

	if (oEvent.srcControl === this) {
		// while the row itself is focused, take tabbables inside duplicated
		// cells out of the tab chain
		this.$().children(".sapMListTblCellDup").find(":sapTabbable").attr("tabindex", -1);
	}

	ListItemBase.prototype.onfocusin.apply(this, arguments);
};
// Informs the table when this item's type-column requirement changes.
// With no argument the requirement is recomputed; otherwise the given value
// is taken as-is. The table is only notified on an actual change.
ColumnListItem.prototype._checkTypeColumn = function(bNeedsTypeColumn) {
	var bRequired = (bNeedsTypeColumn == undefined) ? this._needsTypeColumn() : bNeedsTypeColumn;
	if (this._bNeedsTypeColumn == bRequired) {
		return;
	}

	this._bNeedsTypeColumn = bRequired;
	this.informList("TypeColumnChange", bRequired);
};
// Determines whether the table must render the type column for this item:
// only visible rows whose type shows an extra control (Detail, Navigation,
// DetailAndActive) need it.
ColumnListItem.prototype._needsTypeColumn = function() {
	if (!this.getVisible()) {
		return false;
	}

	switch (this.getType()) {
		case ListItemType.Detail:
		case ListItemType.Navigation:
		case ListItemType.DetailAndActive:
			return true;
		default:
			return false;
	}
};
// Adds a cloned header to the local collection; returns the new array length
// (the return value of Array.prototype.push).
ColumnListItem.prototype._addClonedHeader = function(oHeader) {
	return this._aClonedHeaders.push(oHeader);
};
// Destroys every header control that was cloned for the pop-in and resets
// the bookkeeping array. The "KeepDom" argument is passed through to
// Control#destroy exactly as before.
ColumnListItem.prototype._destroyClonedHeaders = function() {
	if (!this._aClonedHeaders.length) {
		return;
	}

	for (var i = 0; i < this._aClonedHeaders.length; i++) {
		this._aClonedHeaders[i].destroy("KeepDom");
	}
	this._aClonedHeaders = [];
};
// active feedback for pop-in: apply the pressed styling to the pop-in row too
ColumnListItem.prototype._activeHandlingInheritor = function() {
	this._toggleActiveClass(true);
};
// inactive feedback for pop-in: remove the pressed styling from the pop-in row
ColumnListItem.prototype._inactiveHandlingInheritor = function() {
	this._toggleActiveClass(false);
};
// Toggles the active (pressed) CSS class of the pop-in, if one exists.
ColumnListItem.prototype._toggleActiveClass = function(bSwitch) {
	if (this.hasPopin()) {
		this.$Popin().toggleClass("sapMLIBActive", bSwitch);
	}
};
return ColumnListItem;
});<|fim▁end|>
| |
<|file_name|>test_str.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Tests for the backported class:`str` class.
"""
from __future__ import absolute_import, unicode_literals, print_function
from future.builtins import *
from future import utils
from future.tests.base import unittest, expectedFailurePY2
import os
TEST_UNICODE_STR = u'ℝεα∂@ßʟ℮ ☂ℯṧт υηḯ¢☺ḓ℮'
class TestStr(unittest.TestCase):
def test_str(self):
    # NOTE(review): a second method named ``test_str`` is defined further
    # down in this class and shadows this one, so unittest never runs
    # these assertions.
    self.assertFalse(str is bytes)
    # u'' prefix: Py3.3 and Py2 only
    self.assertEqual(str('blah'), u'blah')
    # repr of native bytes mirrors Python 3 behaviour
    self.assertEqual(str(b'1234'), "b'1234'")
def test_bool_str(self):
    # Truthiness of the backported str matches native unicode:
    # non-empty is True, empty is False.
    s1 = str(u'abc')
    s2 = u'abc'
    s3 = str(u'')
    s4 = u''
    self.assertEqual(bool(s1), bool(s2))
    self.assertEqual(bool(s3), bool(s4))
def test_os_path_join(self):
    """
    Issue #15: can't os.path.join(u'abc', str(u'def'))
    """
    # Regression test: the backported str must be accepted by
    # os.path.join alongside native unicode strings.
    self.assertEqual(os.path.join(u'abc', str(u'def')),
                     u'abc{0}def'.format(os.sep))
def test_str_encode_utf8(self):
b = str(TEST_UNICODE_STR).encode('utf-8')
self.assertTrue(isinstance(b, bytes))
self.assertFalse(isinstance(b, str))
s = b.decode('utf-8')
self.assertTrue(isinstance(s, str))
self.assertEqual(s, TEST_UNICODE_STR)
def test_str_encode_cp1251(self):
b1 = b'\xcd\xeb\xff'
s1 = str(b1, 'cp1251')
self.assertEqual(s1, u'Нля')
b2 = bytes(b'\xcd\xeb\xff')
s2 = str(b2, 'cp1251')
self.assertEqual(s2, u'Нля')
def test_str_encode_decode_with_py2_str_arg(self):
# Try passing a standard Py2 string (as if unicode_literals weren't imported)
b = str(TEST_UNICODE_STR).encode(utils.bytes_to_native_str(b'utf-8'))
self.assertTrue(isinstance(b, bytes))
self.assertFalse(isinstance(b, str))
s = b.decode(utils.bytes_to_native_str(b'utf-8'))
self.assertTrue(isinstance(s, str))
self.assertEqual(s, TEST_UNICODE_STR)
def test_str_encode_decode_big5(self):
a = u'Unicode string: \u5b54\u5b50'
self.assertEqual(str(a), a.encode('big5').decode('big5'))
def test_str_empty(self):
"""
str() -> u''
"""
self.assertEqual(str(), u'')
def test_str_iterable_of_ints(self):
self.assertEqual(str([65, 66, 67]), '[65, 66, 67]')
self.assertNotEqual(str([65, 66, 67]), 'ABC')
def test_str_str(self):
self.assertEqual(str('ABC'), u'ABC')
self.assertEqual(str('ABC'), 'ABC')
def test_str_is_str(self):
s = str(u'ABC')
self.assertTrue(str(s) is s)
self.assertEqual(repr(str(s)), "'ABC'")
def test_str_fromhex(self):
self.assertFalse(hasattr(str, 'fromhex'))
def test_str_hasattr_decode(self):
"""
This test tests whether hasattr(s, 'decode') is False, like it is on Py3.
Sometimes code (such as http.client in Py3.3) checks hasattr(mystring,
'decode') to determine if a string-like thing needs encoding. It would
be nice to have this return False so the string can be treated on Py2
like a Py3 string.
"""
s = str(u'abcd')
self.assertFalse(hasattr(s, 'decode'))
self.assertTrue(hasattr(s, 'encode'))
def test_isinstance_str(self):
self.assertTrue(isinstance(str('blah'), str))
def test_isinstance_str_subclass(self):
"""
Issue #89
"""
value = str(u'abc')
class Magic(str):
pass
self.assertTrue(isinstance(value, str))
self.assertFalse(isinstance(value, Magic))
def test_str_getitem(self):
s = str('ABCD')
self.assertNotEqual(s[0], 65)
self.assertEqual(s[0], 'A')
self.assertEqual(s[-1], 'D')
self.assertEqual(s[0:1], 'A')
self.assertEqual(s[:], u'ABCD')
@unittest.expectedFailure
def test_u_literal_creates_newstr_object(self):
    """
    It would nice if the u'' or '' literal syntax could be coaxed
    into producing our new str objects somehow ...
    """
    s = u'ABCD'
    self.assertTrue(isinstance(s, str))
    # NOTE(review): ``b`` is undefined here (probably meant ``s``), so
    # this line raises NameError, which @expectedFailure masks. Changing
    # it to ``repr(s)`` would make the test pass on Py3 and turn the
    # expected failure into an unexpected success — confirm intent
    # before fixing.
    self.assertFalse(repr(b).startswith('b'))
def test_repr(self):
s = str('ABCD')
self.assertFalse(repr(s).startswith('b'))
def test_str_conversion(self):
    """
    str() applied to an existing newstr returns equal text.

    Renamed from ``test_str``: the old name silently shadowed the
    ``test_str`` method defined at the top of this class, so that test
    never ran.
    """
    b = str('ABCD')
    # Was ``self.assertTrue(str(b), 'ABCD')``, which can never fail:
    # assertTrue's second argument is only the failure *message*.
    self.assertEqual(str(b), 'ABCD')
def test_str_setitem(self):
s = 'ABCD'
with self.assertRaises(TypeError):
s[0] = b'B'
def test_str_iteration(self):
s = str('ABCD')
for item in s:
self.assertFalse(isinstance(item, int))
self.assertTrue(isinstance(item, str))
self.assertNotEqual(list(s), [65, 66, 67, 68])
self.assertEqual(list(s), ['A', 'B', 'C', 'D'])
def test_str_plus_bytes(self):
s = str(u'ABCD')
b = b'EFGH'
# We allow this now:
# with self.assertRaises(TypeError):
# s + b
# str objects don't have an __radd__ method, so the following
# does not raise a TypeError. Is this a problem?
# with self.assertRaises(TypeError):
# b + s
# Now with our custom bytes object:
b2 = bytes(b'EFGH')
with self.assertRaises(TypeError):
s + b2
with self.assertRaises(TypeError):
b2 + s
def test_str_plus_str(self):
s1 = str('ABCD')
s2 = s1 + s1
self.assertEqual(s2, u'ABCDABCD')
self.assertTrue(isinstance(s2, str))
s3 = s1 + u'ZYXW'
self.assertEqual(s3, 'ABCDZYXW')
self.assertTrue(isinstance(s3, str))
s4 = 'ZYXW' + s1
self.assertEqual(s4, 'ZYXWABCD')
self.assertTrue(isinstance(s4, str))
def test_str_join_str(self):
s = str(' * ')
strings = ['AB', 'EFGH', 'IJKL', TEST_UNICODE_STR]
result = s.join(strings)
self.assertEqual(result, 'AB * EFGH * IJKL * ' + TEST_UNICODE_STR)
self.assertTrue(isinstance(result, str))
def test_str_join_bytes(self):
s = str('ABCD')
byte_strings1 = [b'EFGH', u'IJKL']
# We allow this on Python 2 for compatibility with old libraries:
if utils.PY2:
self.assertEqual(s.join(byte_strings1), u'EFGHABCDIJKL')
byte_strings2 = [bytes(b'EFGH'), u'IJKL']
with self.assertRaises(TypeError):
s.join(byte_strings2)
def test_str_join_staticmethod(self):
"""
Issue #33
"""
c = str.join('-', ['a', 'b'])
self.assertEqual(c, 'a-b')
self.assertEqual(type(c), str)
def test_str_join_staticmethod_workaround_1(self):
"""
Issue #33
"""
c = str('-').join(['a', 'b'])
self.assertEqual(c, 'a-b')
self.assertEqual(type(c), str)
def test_str_join_staticmethod_workaround_2(self):
"""
Issue #33
"""
c = str.join(str('-'), ['a', 'b'])
self.assertEqual(c, 'a-b')
self.assertEqual(type(c), str)
def test_str_replace(self):
s = str('ABCD')
c = s.replace('A', 'F')
self.assertEqual(c, 'FBCD')
self.assertTrue(isinstance(c, str))
with self.assertRaises(TypeError):
s.replace(bytes(b'A'), u'F')
with self.assertRaises(TypeError):
s.replace(u'A', bytes(b'F'))
def test_str_partition(self):
s1 = str('ABCD')
parts = s1.partition('B')
self.assertEqual(parts, ('A', 'B', 'CD'))
self.assertTrue(all([isinstance(p, str) for p in parts]))
s2 = str('ABCDABCD')
parts = s2.partition('B')
self.assertEqual(parts, ('A', 'B', 'CDABCD'))
def test_str_rpartition(self):
s2 = str('ABCDABCD')
parts = s2.rpartition('B')
self.assertEqual(parts, ('ABCDA', 'B', 'CD'))
self.assertTrue(all([isinstance(p, str) for p in parts]))
def test_str_contains_something(self):
s = str('ABCD')
self.assertTrue('A' in s)
if utils.PY2:
self.assertTrue(b'A' in s)
with self.assertRaises(TypeError):
bytes(b'A') in s
with self.assertRaises(TypeError):
65 in s # unlike bytes
self.assertTrue('AB' in s)
self.assertFalse(str([65, 66]) in s) # unlike bytes
self.assertFalse('AC' in s)
self.assertFalse('Z' in s)
def test_str_index(self):
s = str('ABCD')
self.assertEqual(s.index('B'), 1)
with self.assertRaises(TypeError):
s.index(67)
with self.assertRaises(TypeError):
s.index(bytes(b'C'))
def test_startswith(self):
s = str('abcd')
self.assertTrue(s.startswith('a'))
self.assertTrue(s.startswith(('a', 'd')))
self.assertTrue(s.startswith(str('ab')))
if utils.PY2:
# We allow this, because e.g. Python 2 os.path.join concatenates
# its arg with a byte-string '/' indiscriminately.
self.assertFalse(s.startswith(b'A'))
self.assertTrue(s.startswith(b'a'))
with self.assertRaises(TypeError) as cm:
self.assertFalse(s.startswith(bytes(b'A')))
with self.assertRaises(TypeError) as cm:
s.startswith((bytes(b'A'), bytes(b'B')))
with self.assertRaises(TypeError) as cm:
s.startswith(65)
def test_join(self):
sep = str('-')
self.assertEqual(sep.join('abcd'), 'a-b-c-d')
if utils.PY2:
sep.join(b'abcd')
with self.assertRaises(TypeError) as cm:
sep.join(bytes(b'abcd'))
def test_endswith(self):
s = str('abcd')
self.assertTrue(s.endswith('d'))
self.assertTrue(s.endswith(('b', 'd')))
self.assertTrue(s.endswith(str('cd')))
self.assertFalse(s.endswith(('A', 'B')))<|fim▁hole|> self.assertTrue(s.endswith((b'D', b'd')))
with self.assertRaises(TypeError) as cm:
s.endswith(65)
with self.assertRaises(TypeError) as cm:
s.endswith((bytes(b'D'),))
def test_split(self):
s = str('ABCD')
self.assertEqual(s.split('B'), ['A', 'CD'])
if utils.PY2:
self.assertEqual(s.split(b'B'), ['A', 'CD'])
with self.assertRaises(TypeError) as cm:
s.split(bytes(b'B'))
def test_rsplit(self):
s = str('ABCD')
self.assertEqual(s.rsplit('B'), ['A', 'CD'])
if utils.PY2:
self.assertEqual(s.rsplit(b'B'), ['A', 'CD'])
with self.assertRaises(TypeError) as cm:
s.rsplit(bytes(b'B'))
def test_eq_bytes(self):
s = str('ABCD')
b = bytes(b'ABCD')
self.assertNotEqual(s, b)
self.assertNotEqual(str(''), bytes(b''))
native_s = 'ABCD'
native_b = b'ABCD'
self.assertFalse(b == native_s)
self.assertTrue(b != native_s)
# Fails on Py2:
# self.assertNotEqual(native_s, b)
# with no obvious way to change this.
# For backward compatibility with broken string-handling code in
# Py2 libraries, we allow the following:
if utils.PY2:
self.assertTrue(native_b == s)
self.assertFalse(s != native_b)
def test_eq(self):
s = str('ABCD')
self.assertEqual('ABCD', s)
self.assertEqual(s, 'ABCD')
self.assertEqual(s, s)
self.assertTrue(u'ABCD' == s)
if utils.PY2:
self.assertTrue(b'ABCD' == s)
else:
self.assertFalse(b'ABCD' == s)
self.assertFalse(bytes(b'ABCD') == s)
# We want to ensure comparison against unknown types return
# NotImplemented so that the interpreter can rerun the test with the
# other class. We expect the operator to return False if both return
# NotImplemented.
class OurCustomString(object):
def __init__(self, string):
self.string = string
def __eq__(self, other):
return NotImplemented
our_str = OurCustomString("foobar")
new_str = str("foobar")
self.assertFalse(our_str == new_str)
self.assertFalse(new_str == our_str)
self.assertIs(new_str.__eq__(our_str), NotImplemented)
self.assertIs(our_str.__eq__(new_str), NotImplemented)
def test_hash(self):
s = str('ABCD')
self.assertIsInstance(hash(s),int)
def test_ne(self):
s = str('ABCD')
self.assertNotEqual('A', s)
self.assertNotEqual(s, 'A')
self.assertNotEqual(s, 5)
self.assertNotEqual(2.7, s)
self.assertNotEqual(s, ['A', 'B', 'C', 'D'])
if utils.PY2:
self.assertFalse(b'ABCD' != s)
else:
self.assertTrue(b'ABCD' != s)
self.assertTrue(bytes(b'ABCD') != s)
def test_cmp(self):
s = str(u'ABC')
with self.assertRaises(TypeError):
s > 3
with self.assertRaises(TypeError):
s < 1000
with self.assertRaises(TypeError):
s <= 3
with self.assertRaises(TypeError):
s >= int(3)
with self.assertRaises(TypeError):
s < 3.3
with self.assertRaises(TypeError):
s > (3.3 + 3j)
with self.assertRaises(TypeError):
s >= (1, 2)
with self.assertRaises(TypeError):
s <= [1, 2]
def test_mul(self):
s = str(u'ABC')
c = s * 4
self.assertTrue(isinstance(c, str))
self.assertEqual(c, u'ABCABCABCABC')
d = s * int(4)
self.assertTrue(isinstance(d, str))
self.assertEqual(d, u'ABCABCABCABC')
if utils.PY2:
e = s * long(4)
self.assertTrue(isinstance(e, str))
self.assertEqual(e, u'ABCABCABCABC')
with self.assertRaises(TypeError):
s * 3.3
with self.assertRaises(TypeError):
s * (3.3 + 3j)
def test_rmul(self):
s = str(u'XYZ')
c = 3 * s
self.assertTrue(isinstance(c, str))
self.assertEqual(c, u'XYZXYZXYZ')
d = s * int(3)
self.assertTrue(isinstance(d, str))
self.assertEqual(d, u'XYZXYZXYZ')
if utils.PY2:
e = long(3) * s
self.assertTrue(isinstance(e, str))
self.assertEqual(e, u'XYZXYZXYZ')
with self.assertRaises(TypeError):
3.3 * s
with self.assertRaises(TypeError):
(3.3 + 3j) * s
@unittest.skip('Fails on Python <= 2.7.6 due to string subclass slicing bug')
def test_slice(self):
"""
Do slices return newstr objects?
"""
s = str(u'abcd')
self.assertEqual(s[:2], u'ab')
self.assertEqual(type(s[:2]), str)
self.assertEqual(s[-2:], u'cd')
self.assertEqual(type(s[-2:]), str)
@unittest.skip('Fails on Python <= 2.7.6 due to string subclass slicing bug')
def test_subclassing(self):
"""
Can newstr be subclassed and do str methods then return instances of
the same class? (This is the Py3 behaviour).
"""
class SubClass(str):
pass
s = SubClass(u'abcd')
self.assertEqual(type(s), SubClass)
self.assertEqual(type(s + s), str)
self.assertEqual(type(s[0]), str)
self.assertEqual(type(s[:2]), str)
self.assertEqual(type(s.join([u'_', u'_', u'_'])), str)
def test_subclassing_2(self):
"""
Tests __new__ method in subclasses. Fails in versions <= 0.11.4
"""
class SubClass(str):
def __new__(cls, *args, **kwargs):
self = str.__new__(cls, *args, **kwargs)
assert type(self) == SubClass
return self
s = SubClass(u'abcd')
self.assertTrue(True)
# From Python 3.3: test_unicode.py
def checkequalnofix(self, result, object, methodname, *args):
    # Helper ported from CPython 3.3 test_unicode.py (see comment above):
    # asserts that ``object.methodname(*args)`` returns ``result`` with
    # exactly the same type, and that when the method returns the object
    # itself, a str subclass never gets the original handed back.
    method = getattr(object, methodname)
    realresult = method(*args)
    self.assertEqual(realresult, result)
    self.assertTrue(type(realresult) is type(result))

    # if the original is returned make sure that
    # this doesn't happen with subclasses
    if realresult is object:
        class usub(str):
            def __repr__(self):
                return 'usub(%r)' % str.__repr__(self)
        object = usub(object)
        method = getattr(object, methodname)
        realresult = method(*args)
        self.assertEqual(realresult, result)
        self.assertTrue(object is not realresult)
type2test = str
def test_maketrans_translate(self):
# these work with plain translate()
self.checkequalnofix('bbbc', 'abababc', 'translate',
{ord('a'): None})
self.checkequalnofix('iiic', 'abababc', 'translate',
{ord('a'): None, ord('b'): ord('i')})
self.checkequalnofix('iiix', 'abababc', 'translate',
{ord('a'): None, ord('b'): ord('i'), ord('c'): 'x'})
self.checkequalnofix('c', 'abababc', 'translate',
{ord('a'): None, ord('b'): ''})
self.checkequalnofix('xyyx', 'xzx', 'translate',
{ord('z'): 'yy'})
# this needs maketrans()
self.checkequalnofix('abababc', 'abababc', 'translate',
{'b': '<i>'})
tbl = self.type2test.maketrans({'a': None, 'b': '<i>'})
self.checkequalnofix('<i><i><i>c', 'abababc', 'translate', tbl)
# test alternative way of calling maketrans()
tbl = self.type2test.maketrans('abc', 'xyz', 'd')
self.checkequalnofix('xyzzy', 'abdcdcbdddd', 'translate', tbl)
self.assertRaises(TypeError, self.type2test.maketrans)
self.assertRaises(ValueError, self.type2test.maketrans, 'abc', 'defg')
self.assertRaises(TypeError, self.type2test.maketrans, 2, 'def')
self.assertRaises(TypeError, self.type2test.maketrans, 'abc', 2)
self.assertRaises(TypeError, self.type2test.maketrans, 'abc', 'def', 2)
self.assertRaises(ValueError, self.type2test.maketrans, {'xy': 2})
self.assertRaises(TypeError, self.type2test.maketrans, {(1,): 2})
self.assertRaises(TypeError, 'hello'.translate)
self.assertRaises(TypeError, 'abababc'.translate, 'abc', 'xyz')
@expectedFailurePY2
def test_multiple_inheritance(self):
"""
Issue #96 (for newstr instead of newobject)
"""
if utils.PY2:
from collections import Container
else:
from collections.abc import Container
class Base(str):
pass
class Foo(Base, Container):
def __contains__(self, item):
return False
@expectedFailurePY2
def test_with_metaclass_and_str(self):
"""
Issue #91 (for newstr instead of newobject)
"""
from future.utils import with_metaclass
class MetaClass(type):
pass
class TestClass(with_metaclass(MetaClass, str)):
pass
def test_surrogateescape_encoding(self):
"""
Tests whether surrogateescape encoding works correctly.
"""
pairs = [(u'\udcc3', b'\xc3'),
(u'\udcff', b'\xff')]
for (s, b) in pairs:
encoded = str(s).encode('utf-8', 'surrogateescape')
self.assertEqual(b, encoded)
self.assertTrue(isinstance(encoded, bytes))
self.assertEqual(s, encoded.decode('utf-8', 'surrogateescape'))
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
if utils.PY2:
self.assertFalse(s.endswith(b'D'))
|
<|file_name|>ViolationBuilder.java<|end_file_name|><|fim▁begin|>package ft.sim.monitoring;
import static ft.sim.monitoring.ViolationSeverity.*;
import static ft.sim.monitoring.ViolationType.*;
import ft.sim.world.train.Train;
import ft.sim.world.connectables.Connectable;
import ft.sim.world.connectables.Section;
import ft.sim.world.connectables.Station;
import ft.sim.world.connectables.Switch;
import ft.sim.world.connectables.Track;
/**
* Created by sina on 10/04/2017.
*/
public class ViolationBuilder {
/**
 * Records a CRITICAL crash violation: two trains occupying the same section.
 * All world IDs are resolved so the report is human-readable.
 */
public static void createTrainCrashViolation(Oracle o, Train t1, Train t2, Section section) {
    int idTrain1 = o.getWorld().getTrainID(t1);
    int idTrain2 = o.getWorld().getTrainID(t2);
    int trackID = o.getWorld().getTrackIDforSection(section);
    // section position is resolved relative to its owning track
    int sectionID = o.getWorld().getTrack(trackID).getSectionPosition(section);
    String violationDescription = String
        .format("Train %s and Train %s were on the same Section %s on track %s",
            idTrain1, idTrain2, sectionID, trackID);
    o.addViolation(new Violation(CRASH, CRITICAL, o.getTick(), violationDescription));
}
/**
 * Records a CRITICAL fixed-block violation: two trains on the same
 * connectable. Only Track and Switch connectables get a descriptive name;
 * anything else is reported as "Unknown".
 */
public static void createFixedBlockViolation(Oracle o, Train t1, Train t2,
    Connectable connectable) {
    String connectableName = "Unknown";
    if (connectable instanceof Track) {
        connectableName = "Track-" + o.getWorld().getTrackID((Track) connectable);
    } else if (connectable instanceof Switch) {
        connectableName = "Switch-" + o.getWorld().getSwitchID((Switch) connectable);
    }
    String violationDescription = String
        .format("Train %s and Train %s were on the same Connectable %s",
            o.getWorld().getTrainID(t1), o.getWorld().getTrainID(t2), connectableName);
    o.addViolation(new Violation(FIXED_BLOCK, CRITICAL, o.getTick(), violationDescription));
}
/**
 * Records a HIGH severity violation when two trains are within less than
 * braking distance of each other (variable/moving block).
 */
public static void createVariableBlockViolation(Oracle o, Train t1, Train t2, double distance) {
    int firstTrainID = o.getWorld().getTrainID(t1);
    int secondTrainID = o.getWorld().getTrainID(t2);
    String description = String.format(
        "Train %s and Train %s were within less than braking distance from each other (%s)",
        firstTrainID, secondTrainID, distance);
    o.addViolation(new Violation(VARIABLE_BLOCK, HIGH, o.getTick(), description));
}
public static void createOverfullStationViolation(Oracle o, Station station) {
int stationID = o.getWorld().getStationID(station);
String violationDescription = String<|fim▁hole|> stationID, station.usedCapacity(), station.getCapacity());
o.addViolation(new Violation(OVERFULL_STATION, CRITICAL, o.getTick(), violationDescription));
}
}<|fim▁end|>
|
.format("Station %s is over capacity (%s out of %s)",
|
<|file_name|>AbstractResourceRootConfigItem.java<|end_file_name|><|fim▁begin|>package io.cattle.platform.configitem.server.model.impl;
import java.io.IOException;
import io.cattle.platform.configitem.server.model.RefreshableConfigItem;
import io.cattle.platform.configitem.server.resource.ResourceRoot;
import io.cattle.platform.configitem.version.ConfigItemStatusManager;
public abstract class AbstractResourceRootConfigItem extends AbstractConfigItem implements RefreshableConfigItem {
ResourceRoot resourceRoot;
/**
 * @param name item name forwarded to the base config item
 * @param versionManager status manager forwarded to the base config item
 * @param resourceRoot resource root that backs this config item
 */
public AbstractResourceRootConfigItem(String name, ConfigItemStatusManager versionManager, ResourceRoot resourceRoot) {
    super(name, versionManager);
    this.resourceRoot = resourceRoot;
}
@Override
public String getSourceRevision() {<|fim▁hole|> public void refresh() throws IOException {
resourceRoot.scan();
}
/** Returns the resource root backing this config item. */
public ResourceRoot getResourceRoot() {
    return resourceRoot;
}
}<|fim▁end|>
|
return resourceRoot.getSourceRevision();
}
@Override
|
<|file_name|>memorystore_test.go<|end_file_name|><|fim▁begin|>package storage
<|fim▁hole|>import (
"crypto/sha256"
"testing"
"github.com/stretchr/testify/require"
"github.com/theupdateframework/notary/tuf/data"
"github.com/theupdateframework/notary/tuf/utils"
)
// TestMemoryStoreMetadataOperations exercises the metadata lifecycle of
// MemoryStore: missing reads fail with ErrMetaNotFound, Set/SetMulti make
// content readable under both the plain role name and its checksummed
// "consistent" name, and RemoveAll wipes everything.
func TestMemoryStoreMetadataOperations(t *testing.T) {
	s := NewMemoryStore(nil)

	// GetSized of a non-existent metadata fails
	_, err := s.GetSized("nonexistent", 0)
	require.Error(t, err)
	require.IsType(t, ErrMetaNotFound{}, err)

	// Once SetMeta succeeds, GetSized with the role name and the consistent name
	// should succeed
	metaContent := []byte("content")
	metaSize := int64(len(metaContent))
	shasum := sha256.Sum256(metaContent)
	// digest of empty input — guaranteed not to match metaContent's digest
	invalidShasum := sha256.Sum256([]byte{})

	require.NoError(t, s.Set("exists", metaContent))
	require.NoError(t, s.SetMulti(map[string][]byte{"multi1": metaContent, "multi2": metaContent}))

	for _, metaName := range []string{"exists", "multi1", "multi2"} {
		role := data.RoleName(metaName)

		// lookup by plain role name
		meta, err := s.GetSized(metaName, metaSize)
		require.NoError(t, err)
		require.Equal(t, metaContent, meta)

		// lookup by the checksum-qualified (consistent) name
		meta, err = s.GetSized(utils.ConsistentName(role.String(), shasum[:]), metaSize)
		require.NoError(t, err)
		require.Equal(t, metaContent, meta)

		// a consistent name with the wrong checksum must not resolve
		_, err = s.GetSized(utils.ConsistentName(role.String(), invalidShasum[:]), metaSize)
		require.Error(t, err)
		require.IsType(t, ErrMetaNotFound{}, err)
	}

	// Once Metadata is removed, it's no longer accessible
	err = s.RemoveAll()
	require.NoError(t, err)

	_, err = s.GetSized("exists", 0)
	require.Error(t, err)
	require.IsType(t, ErrMetaNotFound{}, err)
}
// TestMemoryStoreGetSized verifies the size-limiting semantics of GetSized:
// partial reads, zero-length reads, NoSizeLimit (-1), and a limit larger
// than the stored content (which returns everything).
func TestMemoryStoreGetSized(t *testing.T) {
	content := []byte("content")
	s := NewMemoryStore(map[data.RoleName][]byte{"content": content})

	// we can get partial size
	meta, err := s.GetSized("content", 3)
	require.NoError(t, err)
	require.Equal(t, []byte("con"), meta)

	// we can get zero size
	meta, err = s.GetSized("content", 0)
	require.NoError(t, err)
	require.Equal(t, []byte{}, meta)

	// we can get the whole thing by passing NoSizeLimit (-1)
	meta, err = s.GetSized("content", NoSizeLimit)
	require.NoError(t, err)
	require.Equal(t, content, meta)

	// a size much larger than the actual length will return the whole thing
	meta, err = s.GetSized("content", 8000)
	require.NoError(t, err)
	require.Equal(t, content, meta)
}
| |
<|file_name|>doxytag.cpp<|end_file_name|><|fim▁begin|>#line 3 "<stdout>"
#define YY_INT_ALIGNED short int
/* A lexical scanner generated by flex */
#define yy_create_buffer tagYY_create_buffer
#define yy_delete_buffer tagYY_delete_buffer
#define yy_flex_debug tagYY_flex_debug
#define yy_init_buffer tagYY_init_buffer
#define yy_flush_buffer tagYY_flush_buffer
#define yy_load_buffer_state tagYY_load_buffer_state
#define yy_switch_to_buffer tagYY_switch_to_buffer
#define yyin tagYYin
#define yyleng tagYYleng
#define yylex tagYYlex
#define yylineno tagYYlineno
#define yyout tagYYout
#define yyrestart tagYYrestart
#define yytext tagYYtext
#define yywrap tagYYwrap
#define yyalloc tagYYalloc
#define yyrealloc tagYYrealloc
#define yyfree tagYYfree
#define FLEX_SCANNER
#define YY_FLEX_MAJOR_VERSION 2
#define YY_FLEX_MINOR_VERSION 5
#define YY_FLEX_SUBMINOR_VERSION 35
#if YY_FLEX_SUBMINOR_VERSION > 0
#define FLEX_BETA
#endif
/* First, we deal with platform-specific or compiler-specific issues. */
/* begin standard C headers. */
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <stdlib.h>
/* end standard C headers. */
/* flex integer type definitions */
#ifndef FLEXINT_H
#define FLEXINT_H
/* C99 systems have <inttypes.h>. Non-C99 systems may or may not. */
#if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
/* C99 says to define __STDC_LIMIT_MACROS before including stdint.h,
* if you want the limit (max/min) macros for int types.
*/
#ifndef __STDC_LIMIT_MACROS
#define __STDC_LIMIT_MACROS 1
#endif
#include <inttypes.h>
typedef int8_t flex_int8_t;
typedef uint8_t flex_uint8_t;
typedef int16_t flex_int16_t;
typedef uint16_t flex_uint16_t;
typedef int32_t flex_int32_t;
typedef uint32_t flex_uint32_t;
#else
typedef signed char flex_int8_t;
typedef short int flex_int16_t;
typedef int flex_int32_t;
typedef unsigned char flex_uint8_t;
typedef unsigned short int flex_uint16_t;
typedef unsigned int flex_uint32_t;
#endif /* ! C99 */
/* Limits of integral types. */
#ifndef INT8_MIN
#define INT8_MIN (-128)
#endif
#ifndef INT16_MIN
#define INT16_MIN (-32767-1)
#endif
#ifndef INT32_MIN
#define INT32_MIN (-2147483647-1)
#endif
#ifndef INT8_MAX
#define INT8_MAX (127)
#endif
#ifndef INT16_MAX
#define INT16_MAX (32767)
#endif
#ifndef INT32_MAX
#define INT32_MAX (2147483647)
#endif
#ifndef UINT8_MAX
#define UINT8_MAX (255U)
#endif
#ifndef UINT16_MAX
#define UINT16_MAX (65535U)
#endif
#ifndef UINT32_MAX
#define UINT32_MAX (4294967295U)
#endif
#endif /* ! FLEXINT_H */
#ifdef __cplusplus
/* The "const" storage-class-modifier is valid. */
#define YY_USE_CONST
#else /* ! __cplusplus */
/* C99 requires __STDC__ to be defined as 1. */
#if defined (__STDC__)
#define YY_USE_CONST
#endif /* defined (__STDC__) */
#endif /* ! __cplusplus */
#ifdef YY_USE_CONST
#define yyconst const
#else
#define yyconst
#endif
/* Returned upon end-of-file. */
#define YY_NULL 0
/* Promotes a possibly negative, possibly signed char to an unsigned
* integer for use as an array index. If the signed char is negative,
* we want to instead treat it as an 8-bit unsigned char, hence the
* double cast.
*/
#define YY_SC_TO_UI(c) ((unsigned int) (unsigned char) c)
/* Enter a start condition. This macro really ought to take a parameter,
* but we do it the disgusting crufty way forced on us by the ()-less
* definition of BEGIN.
*/
#define BEGIN (yy_start) = 1 + 2 *
/* Translate the current start state into a value that can be later handed
* to BEGIN to return to the state. The YYSTATE alias is for lex
* compatibility.
*/
#define YY_START (((yy_start) - 1) / 2)
#define YYSTATE YY_START
/* Action number for EOF rule of a given start state. */
#define YY_STATE_EOF(state) (YY_END_OF_BUFFER + state + 1)
/* Special action meaning "start processing a new file". */
#define YY_NEW_FILE tagYYrestart(tagYYin )
#define YY_END_OF_BUFFER_CHAR 0
/* Size of default input buffer. */
#ifndef YY_BUF_SIZE
#define YY_BUF_SIZE 16384
#endif
/* The state buf must be large enough to hold one state per character in the main buffer.
*/
#define YY_STATE_BUF_SIZE ((YY_BUF_SIZE + 2) * sizeof(yy_state_type))
#ifndef YY_TYPEDEF_YY_BUFFER_STATE
#define YY_TYPEDEF_YY_BUFFER_STATE
typedef struct yy_buffer_state *YY_BUFFER_STATE;
#endif
#ifndef YY_TYPEDEF_YY_SIZE_T
#define YY_TYPEDEF_YY_SIZE_T
typedef size_t yy_size_t;
#endif
extern yy_size_t tagYYleng;
extern FILE *tagYYin, *tagYYout;
#define EOB_ACT_CONTINUE_SCAN 0
#define EOB_ACT_END_OF_FILE 1
#define EOB_ACT_LAST_MATCH 2
#define YY_LESS_LINENO(n)
/* Return all but the first "n" matched characters back to the input stream. */
#define yyless(n) \
do \
{ \
/* Undo effects of setting up tagYYtext. */ \
int yyless_macro_arg = (n); \
YY_LESS_LINENO(yyless_macro_arg);\
*yy_cp = (yy_hold_char); \
YY_RESTORE_YY_MORE_OFFSET \
(yy_c_buf_p) = yy_cp = yy_bp + yyless_macro_arg - YY_MORE_ADJ; \
YY_DO_BEFORE_ACTION; /* set up tagYYtext again */ \
} \
while ( 0 )
#define unput(c) yyunput( c, (yytext_ptr) )
#ifndef YY_STRUCT_YY_BUFFER_STATE
#define YY_STRUCT_YY_BUFFER_STATE
/* Descriptor for one buffered input source.  The scanner keeps a stack of
 * these (yy_buffer_stack) so it can suspend one input and switch to another;
 * see tagYYpush_buffer_state()/tagYYpop_buffer_state().  Machine-generated by
 * flex — do not hand-edit; regenerate from the scanner's .l source. */
struct yy_buffer_state
	{
	FILE *yy_input_file;
	char *yy_ch_buf;		/* input buffer */
	char *yy_buf_pos;		/* current position in input buffer */
	/* Size of input buffer in bytes, not including room for EOB
	 * characters.
	 */
	yy_size_t yy_buf_size;
	/* Number of characters read into yy_ch_buf, not including EOB
	 * characters.
	 */
	yy_size_t yy_n_chars;
	/* Whether we "own" the buffer - i.e., we know we created it,
	 * and can realloc() it to grow it, and should free() it to
	 * delete it.
	 */
	int yy_is_our_buffer;
	/* Whether this is an "interactive" input source; if so, and
	 * if we're using stdio for input, then we want to use getc()
	 * instead of fread(), to make sure we stop fetching input after
	 * each newline.
	 */
	int yy_is_interactive;
	/* Whether we're considered to be at the beginning of a line.
	 * If so, '^' rules will be active on the next match, otherwise
	 * not.
	 */
	int yy_at_bol;
    int yy_bs_lineno; /**< The line count. */
    int yy_bs_column; /**< The column count. */
	/* Whether to try to fill the input buffer when we reach the
	 * end of it.
	 */
	int yy_fill_buffer;
	int yy_buffer_status;
#define YY_BUFFER_NEW 0
#define YY_BUFFER_NORMAL 1
	/* When an EOF's been seen but there's still some text to process
	 * then we mark the buffer as YY_EOF_PENDING, to indicate that we
	 * shouldn't try reading from the input source any more.  We might
	 * still have a bunch of tokens to match, though, because of
	 * possible backing-up.
	 *
	 * When we actually see the EOF, we change the status to "new"
	 * (via tagYYrestart()), so that the user can continue scanning by
	 * just pointing tagYYin at a new input file.
	 */
#define YY_BUFFER_EOF_PENDING 2
	};
#endif /* !YY_STRUCT_YY_BUFFER_STATE */
/* Stack of input buffers. */
static size_t yy_buffer_stack_top = 0; /**< index of top of stack. */
static size_t yy_buffer_stack_max = 0; /**< capacity of stack. */
static YY_BUFFER_STATE * yy_buffer_stack = 0; /**< Stack as an array. */
/* We provide macros for accessing buffer states in case in the
* future we want to put the buffer states in a more general
* "scanner state".
*
* Returns the top of the stack, or NULL.
*/
#define YY_CURRENT_BUFFER ( (yy_buffer_stack) \
? (yy_buffer_stack)[(yy_buffer_stack_top)] \
: NULL)
/* Same as previous macro, but useful when we know that the buffer stack is not
* NULL or when we need an lvalue. For internal use only.
*/
#define YY_CURRENT_BUFFER_LVALUE (yy_buffer_stack)[(yy_buffer_stack_top)]
/* yy_hold_char holds the character lost when tagYYtext is formed. */
static char yy_hold_char;
static yy_size_t yy_n_chars; /* number of characters read into yy_ch_buf */
yy_size_t tagYYleng;
/* Points to current character in buffer. */
static char *yy_c_buf_p = (char *) 0;
static int yy_init = 0; /* whether we need to initialize */
static int yy_start = 0; /* start state number */
/* Flag which is used to allow tagYYwrap()'s to do buffer switches
* instead of setting up a fresh tagYYin. A bit of a hack ...
*/
static int yy_did_buffer_switch_on_eof;
void tagYYrestart (FILE *input_file );
void tagYY_switch_to_buffer (YY_BUFFER_STATE new_buffer );
YY_BUFFER_STATE tagYY_create_buffer (FILE *file,int size );
void tagYY_delete_buffer (YY_BUFFER_STATE b );
void tagYY_flush_buffer (YY_BUFFER_STATE b );
void tagYYpush_buffer_state (YY_BUFFER_STATE new_buffer );
void tagYYpop_buffer_state (void );
static void tagYYensure_buffer_stack (void );
static void tagYY_load_buffer_state (void );
static void tagYY_init_buffer (YY_BUFFER_STATE b,FILE *file );
#define YY_FLUSH_BUFFER tagYY_flush_buffer(YY_CURRENT_BUFFER )
YY_BUFFER_STATE tagYY_scan_buffer (char *base,yy_size_t size );
YY_BUFFER_STATE tagYY_scan_string (yyconst char *yy_str );
YY_BUFFER_STATE tagYY_scan_bytes (yyconst char *bytes,yy_size_t len );
void *tagYYalloc (yy_size_t );
void *tagYYrealloc (void *,yy_size_t );
void tagYYfree (void * );
#define yy_new_buffer tagYY_create_buffer
#define yy_set_interactive(is_interactive) \
{ \
if ( ! YY_CURRENT_BUFFER ){ \
tagYYensure_buffer_stack (); \
YY_CURRENT_BUFFER_LVALUE = \
tagYY_create_buffer(tagYYin,YY_BUF_SIZE ); \
} \
YY_CURRENT_BUFFER_LVALUE->yy_is_interactive = is_interactive; \
}
#define yy_set_bol(at_bol) \
{ \
if ( ! YY_CURRENT_BUFFER ){\
tagYYensure_buffer_stack (); \
YY_CURRENT_BUFFER_LVALUE = \
tagYY_create_buffer(tagYYin,YY_BUF_SIZE ); \
} \
YY_CURRENT_BUFFER_LVALUE->yy_at_bol = at_bol; \
}
#define YY_AT_BOL() (YY_CURRENT_BUFFER_LVALUE->yy_at_bol)
/* Begin user sect3 */
typedef unsigned char YY_CHAR;
FILE *tagYYin = (FILE *) 0, *tagYYout = (FILE *) 0;
typedef int yy_state_type;
extern int tagYYlineno;
int tagYYlineno = 1;
extern char *tagYYtext;
#define yytext_ptr tagYYtext
static yy_state_type yy_get_previous_state (void );
static yy_state_type yy_try_NUL_trans (yy_state_type current_state );
static int yy_get_next_buffer (void );
static void yy_fatal_error (yyconst char msg[] );
/* Done after the current pattern has been matched and before the
* corresponding action - sets up tagYYtext.
*/
#define YY_DO_BEFORE_ACTION \
(yytext_ptr) = yy_bp; \
tagYYleng = (size_t) (yy_cp - yy_bp); \
(yy_hold_char) = *yy_cp; \
*yy_cp = '\0'; \
(yy_c_buf_p) = yy_cp;
#define YY_NUM_RULES 125
#define YY_END_OF_BUFFER 126
/* This struct is not used in this scanner,
but its presence is necessary. */
struct yy_trans_info
{
flex_int32_t yy_verify;
flex_int32_t yy_nxt;
};
/* DFA accept table (machine-generated by flex — do not hand-edit; regenerate
 * from the scanner's .l source).  yy_accept[state] is the number of the rule
 * whose action fires when the DFA halts in that state, or 0 if the state is
 * not an accepting state. */
static yyconst flex_int16_t yy_accept[707] =
    { 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 126, 123, 124, 123, 79, 81, 79, 81,
        123, 101, 100, 99, 99, 99, 96, 102, 123, 104,
        112, 112, 106, 106, 111, 123, 123, 92, 123, 93,
        123, 90, 90, 90, 123, 120, 119, 120, 119, 85,
        121, 122, 84, 83, 123, 81, 13, 13, 16, 14,
        16, 26, 26, 24, 18, 18, 24, 24, 24, 37,
        37, 37, 27, 35, 36, 37, 63, 63, 68, 68,
        56, 56, 61, 61, 70, 70, 75, 75, 39, 39,
        44, 44, 47, 47, 53, 53, 48, 0, 0, 0,
        0, 0, 81, 0, 0, 0, 81, 0, 100, 99,
        99, 99, 96, 102, 0, 0, 112, 112, 0, 0,
        0, 0, 0, 93, 0, 0, 90, 90, 90, 90,
        0, 0, 0, 0, 0, 0, 85, 122, 84, 83,
        0, 81, 0, 14, 0, 0, 0, 18, 0, 0,
        23, 0, 0, 0, 27, 0, 0, 0, 0, 0,
        36, 0, 0, 63, 0, 0, 0, 0, 0, 56,
        0, 0, 0, 0, 0, 70, 0, 0, 0, 0,
        0, 39, 0, 0, 0, 0, 0, 47, 0, 0,
        0, 0, 0, 80, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 81, 0, 99, 99, 103, 0, 112,
        0, 0, 0, 109, 0, 0, 0, 90, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 21, 0, 0, 0, 0, 0, 34, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 6,
        0, 0, 1, 0, 0, 81, 0, 0, 99, 99,
        0, 112, 108, 107, 0, 0, 0, 0, 90, 113,
        0, 116, 115, 0, 0, 0, 0, 0, 25, 25,
        20, 19, 0, 0, 30, 29, 0, 0, 0, 0,
        0, 65, 64, 0, 0, 0, 58, 57, 0, 0,
        0, 72, 71, 0, 0, 0, 41, 40, 0, 0,
        0, 50, 49, 0, 0, 0, 0, 77, 0, 0,
        0, 0, 81, 0, 0, 99, 0, 0, 112, 0,
        91, 0, 0, 90, 114, 0, 0, 0, 0, 0,
        25, 0, 28, 0, 0, 33, 0, 66, 0, 0,
        59, 0, 76, 73, 0, 45, 42, 0, 54, 51,
        0, 0, 0, 0, 0, 0, 0, 0, 81, 0,
        0, 0, 0, 0, 112, 110, 0, 88, 90, 118,
        117, 82, 0, 15, 25, 22, 32, 31, 0, 67,
        0, 60, 74, 43, 52, 0, 0, 0, 0, 0,
        0, 0, 81, 0, 0, 0, 0, 0, 112, 0,
        0, 90, 0, 25, 0, 62, 0, 0, 0, 0,
        0, 0, 81, 0, 0, 0, 0, 105, 0, 87,
        90, 0, 25, 69, 0, 0, 78, 0, 0, 0,
        7, 0, 95, 0, 0, 0, 0, 89, 0, 0,
        89, 89, 89, 90, 90, 89, 12, 25, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 89, 0, 0,
        0, 0, 0, 89, 0, 90, 90, 89, 25, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 86, 0,
        0, 0, 0, 0, 0, 90, 17, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 89, 89,
        0, 90, 0, 0, 0, 0, 0, 9, 0, 0,
        0, 0, 0, 89, 0, 89, 0, 89, 89, 90,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 98,
        0, 0, 89, 0, 0, 89, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 97, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 8, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 5, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 2, 94, 38,
        0, 0, 0, 0, 0, 0, 0, 55, 0, 0,
        0, 10, 0, 0, 0, 0, 46, 0, 0, 11,
        0, 0, 0, 4, 3, 0
    } ;
/* Character equivalence classes (machine-generated by flex — do not
 * hand-edit).  Maps each input byte (0..255) to the class number used to
 * index the transition tables, so characters the grammar never distinguishes
 * share a single table column. */
static yyconst flex_int32_t yy_ec[256] =
    { 0,
        1, 1, 1, 1, 1, 1, 1, 1, 2, 3,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 4, 5, 6, 7, 1, 8, 9, 1, 10,
       11, 12, 13, 14, 15, 16, 17, 18, 19, 18,
       20, 18, 18, 18, 18, 18, 18, 21, 22, 23,
       24, 25, 1, 26, 27, 27, 28, 27, 27, 29,
       27, 27, 30, 27, 27, 27, 27, 27, 27, 27,
       27, 31, 27, 27, 27, 27, 27, 27, 27, 27,
       32, 1, 33, 8, 34, 1, 35, 36, 37, 38,
       39, 40, 41, 42, 43, 27, 27, 44, 45, 46,
       47, 48, 49, 50, 51, 52, 53, 27, 54, 55,
       56, 27, 1, 57, 1, 58, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1
    } ;
/* Meta-equivalence classes (machine-generated by flex — do not hand-edit).
 * Groups equivalence classes that behave identically with respect to the
 * compressed tables; used when resolving template/default transitions. */
static yyconst flex_int32_t yy_meta[59] =
    { 0,
        1, 1, 2, 3, 4, 5, 1, 4, 6, 1,
        1, 4, 7, 8, 7, 9, 10, 11, 11, 11,
       12, 1, 13, 10, 14, 3, 15, 15, 15, 15,
       15, 4, 4, 11, 15, 15, 15, 15, 15, 15,
       15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
       15, 15, 15, 15, 15, 15, 4, 16
    } ;
/* Base-offset table (machine-generated by flex — do not hand-edit).
 * Per flex's comb-compressed scheme, yy_base[state] is the offset into
 * yy_nxt/yy_chk at which that state's transition entries are laid out. */
static yyconst flex_int16_t yy_base[746] =
    { 0,
     2054, 2053, 0, 36, 57, 58, 81, 0, 139, 0,
      197, 255, 60, 61, 313, 0, 371, 0, 429, 0,
      485, 0, 209, 210, 543, 0, 59, 66, 601, 0,
      212, 213, 215, 222, 659, 0, 715, 0, 771, 0,
      829, 0, 214, 223, 887, 913, 939, 0, 995, 1051,
     1109, 0, 67, 68, 238, 265, 267, 280, 287, 288,
      289, 296, 2055, 2058, 2058, 0, 484, 0, 486, 2008,
     2018, 2058, 0, 0, 2008, 2008, 0, 0, 191, 2058,
        0, 2002, 2058, 34, 2058, 2010, 2009, 2058, 2012, 0,
      224, 192, 253, 0, 2029, 2058, 2058, 684, 2058, 0,
     2058, 0, 0, 0, 2028, 1167, 2058, 2009, 2058, 0,
     2018, 2058, 2021, 2058, 0, 199, 2030, 2001, 2000, 2058,
      502, 685, 2058, 2058, 0, 2021, 0, 0, 2033, 492,
        0, 0, 2032, 974, 0, 0, 2031, 976, 0, 0,
     2030, 991, 0, 0, 2029, 995, 2058, 2010, 2016, 1978,
     2025, 1978, 0, 232, 1984, 1988, 1983, 2020, 0, 0,
     1988, 1978, 0, 0, 1996, 1968, 0, 1980, 1966, 1965,
     1972, 1961, 2010, 0, 1975, 1989, 452, 466, 0, 468,
     1976, 1965, 1957, 1956, 1971, 1953, 0, 0, 0, 0,
     1957, 0, 2000, 0, 1980, 1981, 1980, 0, 1948, 1947,
     2058, 1954, 1943, 737, 2058, 1951, 1943, 1942, 1957, 1939,
        0, 1966, 480, 0, 1955, 1944, 1936, 1935, 1933, 0,
     1935, 1939, 1931, 1930, 1928, 0, 1965, 1934, 1926, 1925,
     1923, 0, 1960, 1929, 1921, 1920, 1918, 0, 1955, 1924,
     1916, 1915, 1913, 2058, 1950, 1929, 1917, 1923, 281, 1957,
     1935, 1934, 1935, 1918, 211, 1905, 1916, 2058, 1904, 1903,
     1930, 1929, 1911, 2058, 0, 1902, 1931, 674, 1922, 1898,
     1923, 1922, 1892, 1895, 1891, 1894, 1922, 1891, 1915, 1914,
     1896, 2058, 1886, 1911, 1910, 1880, 1883, 2058, 284, 1879,
     1880, 1905, 1904, 1878, 1885, 1875, 1900, 1899, 1873, 1904,
     1870, 1895, 1894, 1868, 1899, 1865, 1890, 1889, 1863, 1894,
     1860, 1885, 1884, 1858, 1900, 1867, 1867, 1876, 1865, 2058,
     1862, 1875, 2058, 1874, 469, 1846, 1851, 1844, 1842, 1888,
     1844, 1855, 2058, 2058, 1837, 1863, 1832, 1851, 860, 2058,
     1863, 2058, 2058, 1836, 1831, 1843, 1846, 1845, 0, 1831,
     2058, 2058, 1826, 1855, 2058, 2058, 1828, 1823, 1849, 1832,
     1850, 2058, 2058, 1819, 1830, 1847, 2058, 2058, 1816, 1842,
     1844, 2058, 2058, 1813, 1839, 1841, 2058, 2058, 1810, 1836,
     1838, 2058, 2058, 1807, 1820, 1813, 1811, 2058, 1811, 1810,
     1811, 497, 1809, 1816, 1811, 1845, 1817, 1801, 1794, 1806,
     2058, 1788, 1818, 874, 2058, 1820, 1819, 1815, 1794, 1813,
     1798, 1797, 2058, 1813, 1812, 2058, 1782, 2058, 1810, 1807,
     2058, 1808, 2058, 2058, 1807, 2058, 2058, 1806, 2058, 2058,
     1805, 1779, 1786, 1785, 1780, 1787, 1802, 902, 1768, 1768,
     1778, 1786, 1777, 1774, 1767, 2058, 1761, 1789, 482, 2058,
     2058, 2058, 1772, 2058, 1760, 2058, 2058, 2058, 1785, 2058,
     1802, 2058, 2058, 2058, 2058, 1752, 1781, 1781, 1763, 1752,
     1777, 502, 1750, 1749, 1775, 1759, 1757, 1771, 1745, 1759,
     1758, 966, 1768, 1756, 1784, 2058, 1733, 1765, 1781, 1740,
     1734, 704, 1780, 1759, 1776, 1741, 1741, 0, 1738, 2058,
     1224, 1772, 1725, 2058, 1724, 1733, 2058, 1750, 1749, 879,
     2058, 1766, 2058, 1732, 1720, 1745, 1281, 1034, 1002, 1231,
     1254, 2058, 1759, 1018, 1023, 1291, 2058, 1719, 1729, 1741,
     1722, 893, 1028, 1719, 1705, 1715, 1743, 1307, 1708, 1706,
     1684, 1667, 1665, 691, 1675, 1027, 984, 0, 1651, 1659,
     1681, 1645, 1629, 1622, 261, 1609, 1613, 1599, 2058, 1586,
     1575, 1580, 1605, 1604, 1572, 1040, 0, 1602, 700, 1574,
     1571, 1591, 699, 1605, 1564, 1572, 1568, 1584, 997, 1064,
     1583, 1043, 1598, 1553, 1542, 1530, 1575, 1553, 1011, 1571,
     1535, 1531, 1514, 1068, 1524, 1069, 1518, 1221, 1548, 1047,
     1059, 1514, 1521, 1514, 1526, 1513, 1204, 1505, 1507, 2058,
     1505, 1507, 1226, 1311, 1310, 1349, 1316, 1313, 1319, 1316,
     1305, 1305, 1331, 1349, 882, 1302, 2058, 1306, 1328, 1327,
     1312, 1304, 1300, 1298, 1292, 1317, 2058, 1306, 1291, 1208,
     1297, 1287, 1283, 1294, 1296, 1306, 1305, 2058, 1324, 1291,
     1224, 1285, 1272, 1271, 1278, 1308, 1304, 1263, 1263, 979,
     1283, 1279, 1257, 1249, 1260, 1240, 1249, 1241, 976, 1273,
     1271, 1224, 1251, 1235, 1230, 1221, 1235, 1229, 2058, 2058,
     1200, 1234, 1144, 1118, 1068, 1100, 1076, 2058, 1030, 1036,
     1048, 2058, 1049, 1011, 983, 925, 2058, 850, 730, 2058,
      302, 250, 209, 2058, 2058, 2058, 1364, 1380, 1396, 1412,
     1428, 1444, 1460, 1476, 1492, 1508, 1524, 195, 524, 1534,
      881, 1543, 1088, 1238, 1243, 1552, 1562, 1246, 64, 1568,
     1584, 1597, 1603, 1615, 1257, 1631, 1647, 1663, 1679, 1695,
     1711, 1727, 1741, 1746, 1752
    } ;
/* Default-transition table (machine-generated by flex — do not hand-edit).
 * When the yy_chk validation fails for a (state, class) lookup, the scanner
 * falls back to state yy_def[state] and retries the lookup there. */
static yyconst flex_int16_t yy_def[746] =
    { 0,
      707, 707, 706, 3, 707, 707, 706, 7, 706, 9,
      708, 708, 707, 707, 706, 15, 706, 17, 706, 19,
      707, 21, 707, 707, 706, 25, 707, 707, 706, 29,
      707, 707, 709, 709, 706, 35, 707, 37, 706, 39,
      706, 41, 710, 710, 711, 711, 706, 47, 712, 712,
      706, 51, 713, 713, 714, 714, 715, 715, 716, 716,
      717, 717, 706, 706, 706, 718, 706, 719, 706, 719,
      706, 706, 720, 721, 721, 721, 722, 723, 706, 706,
      724, 724, 706, 706, 706, 706, 706, 706, 706, 725,
      706, 726, 726, 727, 706, 706, 706, 706, 706, 728,
      706, 729, 730, 731, 706, 706, 706, 706, 706, 732,
      706, 706, 733, 706, 734, 734, 706, 706, 706, 706,
      706, 706, 706, 706, 735, 736, 737, 737, 706, 706,
      738, 738, 706, 706, 739, 739, 706, 706, 740, 740,
      706, 706, 741, 741, 706, 706, 706, 718, 706, 706,
      706, 706, 719, 706, 706, 706, 719, 706, 720, 721,
      721, 721, 722, 723, 706, 706, 724, 724, 706, 706,
      706, 706, 706, 725, 706, 706, 726, 726, 727, 726,
      706, 706, 706, 706, 706, 706, 728, 729, 730, 731,
      706, 106, 706, 732, 706, 733, 706, 734, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      735, 736, 736, 737, 706, 706, 706, 706, 706, 738,
      706, 706, 706, 706, 706, 739, 706, 706, 706, 706,
      706, 740, 706, 706, 706, 706, 706, 741, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 742, 719, 706, 721, 721, 706, 706, 724,
      706, 706, 706, 706, 743, 706, 706, 726, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 744, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 736, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 742, 742, 719, 706, 706, 721, 721,
      706, 724, 706, 706, 706, 743, 706, 706, 726, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 744, 744,
      706, 706, 706, 706, 706, 706, 706, 706, 736, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 742, 719, 706, 706, 721, 706, 706, 724, 706,
      706, 706, 706, 726, 706, 706, 706, 706, 706, 706,
      744, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 742, 719, 706,
      706, 706, 706, 706, 724, 706, 706, 706, 726, 706,
      706, 706, 706, 706, 744, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 742, 719, 706, 706, 706, 706, 706, 724, 706,
      706, 726, 706, 744, 706, 706, 706, 706, 706, 706,
      706, 742, 719, 706, 706, 706, 706, 724, 706, 706,
      726, 706, 744, 706, 706, 706, 706, 706, 706, 742,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      726, 706, 521, 726, 726, 727, 706, 744, 706, 706,
      706, 706, 742, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 726, 706, 726, 726, 727, 744, 706,
      706, 706, 706, 706, 742, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 726, 744, 706, 706, 706,
      706, 706, 742, 706, 706, 706, 706, 706, 706, 706,
      706, 726, 706, 706, 706, 706, 706, 706, 742, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 726,
      706, 706, 706, 706, 706, 706, 742, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 745, 706, 742, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 742,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      742, 706, 706, 706, 706, 706, 706, 706, 706, 742,
      706, 706, 706, 706, 706, 706, 706, 706, 742, 706,
      706, 706, 706, 706, 706, 706, 706, 742, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 0, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706
    } ;
/* Next-state table (machine-generated by flex — do not hand-edit).
 * Indexed by yy_base[state] + equivalence class of the current character;
 * each entry is valid only when the parallel yy_chk entry at the same index
 * matches the current state (comb compression). */
static yyconst flex_int16_t yy_nxt[2117] =
    { 0,
       64, 64, 65, 64, 64, 64, 64, 64, 66, 64,
       64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
       64, 64, 67, 64, 64, 64, 68, 68, 68, 68,
       68, 64, 64, 68, 68, 68, 68, 68, 68, 68,
       68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
       68, 68, 68, 68, 68, 68, 64, 64, 69, 65,
       65, 65, 65, 65, 91, 70, 72, 72, 65, 128,
      128, 91, 129, 129, 169, 130, 130, 170, 188, 71,
       71, 64, 64, 65, 64, 64, 64, 72, 64, 64,
       64, 64, 64, 73, 64, 73, 73, 64, 73, 73,
       73, 64, 64, 64, 64, 64, 64, 73, 73, 73,
       73, 73, 64, 64, 73, 73, 73, 73, 73, 73,
       73, 73, 73, 73, 73, 73, 73, 73, 73, 73,
       73, 73, 73, 73, 73, 73, 73, 64, 64, 64,
       64, 65, 64, 64, 64, 64, 64, 64, 64, 64,
       64, 64, 64, 64, 64, 64, 74, 74, 74, 64,
       64, 64, 64, 64, 64, 74, 75, 76, 74, 74,
       64, 64, 74, 74, 74, 74, 74, 74, 74, 74,
       74, 74, 74, 74, 74, 74, 74, 74, 74, 74,
       74, 74, 74, 74, 74, 64, 64, 64, 64, 65,
       64, 64, 64, 64, 64, 64, 64, 64, 64, 148,
       64, 88, 88, 64, 65, 65, 65, 97, 64, 64,
       64, 64, 64, 98, 97, 65, 165, 175, 64, 64,
       98, 89, 89, 705, 95, 95, 108, 99, 178, 199,
      132, 166, 200, 133, 99, 108, 134, 327, 176, 179,
      249, 250, 328, 64, 64, 64, 64, 65, 64, 64,
       64, 64, 64, 64, 64, 64, 64, 132, 64, 136,
      133, 64, 137, 134, 704, 138, 64, 64, 64, 64,
       64, 251, 136, 325, 319, 137, 64, 64, 138, 140,
      140, 144, 141, 141, 145, 142, 142, 146, 144, 178,
      180, 145, 147, 359, 146, 320, 573, 703, 288, 147,
      179, 64, 64, 64, 64, 65, 64, 64, 64, 64,
       64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
       78, 78, 78, 64, 64, 64, 64, 64, 64, 78,
       78, 78, 78, 78, 64, 64, 78, 78, 78, 78,
       78, 78, 78, 78, 78, 78, 78, 78, 78, 78,
       78, 78, 78, 78, 78, 78, 78, 78, 78, 64,
       64, 64, 64, 65, 64, 64, 64, 64, 64, 64,
       64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
       64, 64, 64, 79, 64, 64, 64, 80, 80, 80,
       80, 80, 64, 64, 80, 80, 80, 80, 80, 80,
       80, 80, 80, 80, 80, 80, 80, 80, 80, 80,
       80, 80, 80, 80, 80, 80, 80, 64, 80, 64,
       64, 65, 64, 64, 64, 64, 64, 64, 64, 64,
       64, 64, 64, 64, 64, 64, 81, 81, 81, 64,
       64, 64, 64, 64, 64, 81, 81, 81, 81, 81,
       64, 64, 81, 81, 81, 81, 81, 81, 81, 81,
       81, 81, 81, 81, 81, 82, 81, 81, 81, 81,
       81, 81, 81, 81, 81, 64, 64, 65, 149, 83,
      149, 325, 83, 84, 83, 83, 83, 83, 178, 83,
      150, 83, 150, 204, 288, 204, 268, 85, 83, 179,
      392, 205, 178, 180, 178, 438, 83, 83, 151, 325,
      151, 289, 86, 179, 325, 179, 216, 154, 482, 155,
       87, 152, 217, 152, 153, 218, 492, 156, 153, 179,
      219, 83, 83, 64, 64, 65, 64, 64, 64, 64,
       64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
       90, 90, 90, 64, 64, 64, 64, 64, 64, 90,
       90, 90, 90, 90, 64, 64, 90, 90, 90, 90,
       90, 90, 90, 90, 90, 90, 90, 90, 90, 90,
       90, 90, 90, 90, 90, 90, 90, 90, 90, 64,
       64, 64, 64, 65, 64, 64, 64, 64, 64, 64,
       64, 64, 64, 92, 64, 92, 92, 64, 92, 92,
       92, 92, 64, 64, 64, 64, 64, 92, 92, 92,
       92, 92, 64, 64, 92, 92, 92, 92, 92, 92,
       92, 92, 92, 92, 92, 92, 92, 93, 92, 92,
       92, 92, 92, 92, 92, 92, 92, 64, 94, 64,
       64, 65, 64, 64, 64, 64, 64, 64, 64, 64,
       64, 64, 64, 64, 64, 64, 100, 100, 100, 64,
       64, 64, 64, 64, 64, 100, 100, 100, 100, 100,
       64, 64, 100, 100, 100, 100, 100, 100, 100, 100,
      100, 100, 100, 100, 100, 100, 100, 100, 100, 100,
      100, 100, 100, 100, 100, 64, 64, 65, 182, 206,
      178, 325, 589, 339, 183, 207, 325, 184, 208, 185,
      209, 179, 186, 210, 584, 702, 585, 178, 204, 101,
      204, 102, 102, 102, 102, 102, 205, 510, 179, 102,
      102, 102, 102, 102, 102, 102, 102, 102, 102, 102,
      102, 102, 102, 102, 102, 102, 102, 102, 102, 102,
      102, 64, 64, 65, 64, 64, 64, 64, 64, 64,
       64, 64, 64, 64, 64, 64, 64, 64, 103, 103,
      103, 103, 64, 64, 64, 64, 64, 103, 103, 103,
      103, 103, 64, 64, 103, 103, 103, 103, 103, 103,
      103, 103, 103, 103, 103, 103, 103, 103, 103, 103,
      103, 103, 103, 103, 103, 103, 103, 64, 103, 104,
      104, 65, 104, 104, 104, 104, 104, 104, 104, 104,
      104, 104, 104, 104, 104, 104, 104, 104, 104, 104,
      104, 105, 104, 104, 104, 106, 106, 106, 106, 106,
      104, 104, 106, 106, 106, 106, 106, 106, 106, 106,
      106, 106, 106, 106, 106, 106, 106, 106, 106, 106,
      106, 106, 106, 106, 106, 104, 104, 109, 109, 65,
      109, 160, 111, 109, 404, 160, 109, 109, 553, 701,
      109, 325, 109, 109, 325, 472, 178, 109, 109, 109,
      109, 109, 109, 109, 109, 65, 109, 179, 111, 109,
      178, 533, 109, 109, 325, 449, 109, 640, 109, 109,
      700, 179, 554, 109, 109, 109, 109, 109, 109, 112,
      112, 65, 112, 112, 112, 112, 112, 112, 112, 112,
      112, 112, 112, 112, 112, 112, 113, 113, 113, 112,
      112, 112, 112, 112, 112, 113, 113, 113, 113, 113,
      112, 112, 113, 113, 113, 113, 113, 113, 113, 113,
      113, 113, 113, 113, 113, 113, 113, 113, 113, 113,
      113, 113, 113, 113, 113, 112, 112, 65, 325, 115,
      678, 325, 115, 116, 117, 595, 115, 115, 222, 115,
      228, 115, 178, 180, 223, 501, 229, 224, 115, 230,
      596, 699, 225, 179, 231, 234, 115, 115, 669, 240,
      178, 235, 118, 325, 236, 241, 541, 544, 242, 237,
      119, 179, 542, 243, 522, 543, 522, 607, 522, 698,
      325, 115, 115, 65, 697, 115, 546, 522, 115, 116,
      117, 547, 115, 115, 178, 115, 522, 115, 555, 178,
      566, 696, 597, 178, 115, 179, 612, 595, 582, 695,
      179, 694, 115, 115, 179, 544, 178, 598, 118, 178,
      522, 613, 596, 178, 600, 617, 119, 179, 164, 693,
      179, 618, 164, 619, 179, 692, 691, 115, 115, 120,
      121, 65, 121, 120, 120, 120, 120, 122, 123, 124,
      120, 120, 120, 120, 120, 120, 125, 125, 125, 120,
      120, 126, 120, 120, 120, 125, 125, 125, 125, 125,
      120, 120, 125, 125, 125, 125, 125, 125, 125, 125,
      125, 125, 125, 125, 125, 125, 125, 125, 125, 125,
      125, 125, 125, 125, 125, 120, 120, 190, 190, 690,
      190, 190, 190, 190, 190, 190, 190, 190, 190, 190,
      190, 190, 190, 190, 192, 192, 192, 190, 190, 689,
      190, 190, 190, 192, 192, 192, 192, 192, 190, 190,
      192, 192, 192, 192, 192, 192, 192, 192, 192, 192,
      192, 192, 192, 192, 192, 192, 192, 192, 192, 192,
      192, 192, 192, 190, 190, 517, 325, 517, 518, 597,
      325, 518, 519, 520, 612, 518, 521, 522, 523, 688,
      518, 522, 625, 522, 598, 522, 325, 518, 167, 613,
      687, 325, 167, 174, 522, 520, 187, 174, 686, 651,
      187, 524, 660, 522, 522, 685, 544, 211, 544, 525,
      178, 211, 684, 683, 682, 681, 680, 522, 679, 677,
      518, 526, 517, 676, 517, 518, 522, 522, 518, 519,
      520, 675, 518, 518, 522, 538, 674, 518, 673, 672,
      178, 522, 671, 548, 518, 548, 670, 668, 667, 666,
      522, 179, 520, 665, 522, 545, 664, 522, 539, 522,
      663, 522, 662, 522, 661, 659, 540, 658, 657, 656,
      522, 655, 654, 653, 613, 652, 650, 518, 518, 522,
      649, 648, 647, 646, 645, 644, 643, 522, 598, 596,
      642, 641, 639, 638, 636, 635, 634, 633, 632, 631,
      616, 630, 629, 522, 64, 64, 64, 64, 64, 64,
       64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
       77, 77, 77, 77, 77, 77, 77, 77, 77, 77,
       77, 77, 77, 77, 77, 77, 96, 96, 96, 96,
       96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
       96, 96, 107, 107, 107, 107, 107, 107, 107, 107,
      107, 107, 107, 107, 107, 107, 107, 107, 110, 110,
      110, 110, 110, 110, 110, 110, 110, 110, 110, 110,
      110, 110, 110, 110, 114, 114, 114, 114, 114, 114,
      114, 114, 114, 114, 114, 114, 114, 114, 114, 114,
      127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
      127, 127, 127, 127, 127, 127, 131, 131, 131, 131,
      131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
      131, 131, 135, 135, 135, 135, 135, 135, 135, 135,
      135, 135, 135, 135, 135, 135, 135, 135, 139, 139,
      139, 139, 139, 139, 139, 139, 139, 139, 139, 139,
      139, 139, 139, 139, 143, 143, 143, 143, 143, 143,
      143, 143, 143, 143, 143, 143, 143, 143, 143, 143,
      159, 628, 159, 522, 159, 627, 626, 624, 159, 163,
      623, 163, 622, 163, 163, 621, 620, 163, 177, 616,
      177, 615, 177, 177, 614, 611, 177, 177, 179, 610,
      179, 609, 179, 179, 608, 606, 179, 179, 189, 189,
      605, 604, 189, 189, 190, 603, 190, 190, 190, 190,
      190, 190, 190, 190, 190, 190, 602, 190, 190, 190,
      194, 601, 194, 194, 599, 594, 593, 194, 592, 591,
      590, 194, 194, 196, 196, 588, 587, 196, 198, 586,
      198, 198, 583, 581, 198, 580, 579, 578, 522, 577,
      198, 212, 212, 212, 212, 212, 212, 212, 212, 212,
      212, 212, 212, 212, 576, 212, 212, 214, 214, 214,
      214, 575, 574, 214, 214, 214, 214, 214, 214, 214,
      214, 214, 214, 220, 220, 220, 220, 572, 571, 220,
      220, 220, 220, 220, 220, 220, 220, 220, 220, 226,
      226, 226, 226, 570, 569, 226, 226, 226, 226, 226,
      226, 226, 226, 226, 226, 232, 232, 232, 232, 568,
      567, 232, 232, 232, 232, 232, 232, 232, 232, 232,
      232, 238, 238, 238, 238, 565, 564, 238, 563, 238,
      238, 238, 238, 238, 238, 238, 238, 324, 562, 324,
      324, 324, 324, 324, 324, 324, 324, 324, 324, 324,
      324, 324, 324, 336, 561, 336, 560, 336, 559, 336,
      336, 336, 336, 558, 557, 336, 349, 556, 552, 551,
      349, 349, 637, 550, 637, 549, 637, 545, 537, 536,
      535, 534, 532, 531, 530, 529, 528, 527, 516, 515,
      514, 513, 512, 511, 509, 508, 507, 506, 505, 504,
      503, 502, 500, 499, 498, 258, 497, 496, 495, 494,
      493, 320, 491, 490, 489, 488, 487, 486, 485, 484,
      483, 481, 480, 479, 478, 477, 476, 475, 474, 473,
      471, 470, 469, 468, 467, 466, 465, 464, 463, 462,
      461, 460, 459, 458, 457, 456, 455, 454, 453, 452,
      451, 450, 448, 447, 446, 445, 444, 443, 442, 441,
      440, 439, 437, 436, 435, 434, 433, 432, 431, 430,
      429, 428, 427, 426, 425, 424, 423, 422, 421, 420,
      419, 418, 417, 416, 415, 414, 413, 412, 411, 410,
      409, 408, 407, 406, 405, 403, 402, 401, 400, 399,
      398, 397, 396, 395, 394, 393, 325, 391, 390, 389,
      388, 387, 386, 385, 384, 383, 382, 381, 380, 379,
      378, 377, 376, 375, 374, 373, 372, 371, 370, 369,
      368, 367, 366, 365, 364, 363, 362, 361, 360, 358,
      357, 356, 355, 354, 353, 352, 351, 350, 348, 347,
      346, 345, 344, 343, 342, 341, 340, 338, 337, 335,
      334, 333, 332, 331, 330, 329, 326, 325, 323, 322,
      321, 318, 317, 316, 315, 314, 313, 312, 311, 310,
      309, 308, 307, 306, 305, 304, 303, 302, 301, 300,
      299, 298, 297, 296, 295, 294, 293, 292, 291, 290,
      288, 287, 286, 285, 284, 283, 282, 281, 280, 279,
      278, 197, 277, 276, 275, 274, 273, 272, 271, 270,
      269, 267, 266, 265, 264, 263, 262, 261, 260, 259,
      258, 257, 256, 255, 254, 253, 252, 248, 247, 246,
      245, 244, 239, 233, 227, 221, 215, 213, 203, 202,
      201, 197, 195, 193, 191, 181, 173, 172, 171, 168,
      162, 161, 158, 157, 706, 65, 65, 63, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
      706, 706, 706, 706, 706, 706
    } ;
/* Generated by flex -- do not edit by hand.
 * yy_chk[] is the "check" half of the compressed DFA transition tables:
 * a packed entry at offset yy_base[state]+c belongs to `state` only when
 * yy_chk[yy_base[state]+c] == state; otherwise the matching loop in
 * tagYYlex() falls back to yy_def[state] (see the yy_match loop below). */
static yyconst flex_int16_t yy_chk[2117] =
{ 0,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 4, 5,
6, 27, 13, 14, 27, 4, 13, 14, 28, 53,
54, 28, 53, 54, 84, 53, 54, 84, 729, 5,
6, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 718,
11, 23, 24, 11, 31, 32, 43, 33, 11, 11,
11, 11, 11, 33, 34, 44, 79, 91, 11, 11,
34, 23, 24, 703, 31, 32, 43, 33, 92, 116,
55, 79, 116, 55, 34, 44, 55, 255, 91, 92,
154, 154, 255, 11, 11, 12, 12, 12, 12, 12,
12, 12, 12, 12, 12, 12, 12, 56, 12, 57,
56, 12, 57, 56, 702, 57, 12, 12, 12, 12,
12, 154, 58, 555, 249, 58, 12, 12, 58, 59,
60, 61, 59, 60, 61, 59, 60, 61, 62, 93,
93, 62, 61, 289, 62, 249, 555, 701, 289, 62,
93, 12, 12, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 21, 67, 21,
69, 325, 21, 21, 21, 21, 21, 21, 177, 21,
67, 21, 69, 121, 213, 121, 180, 21, 21, 177,
325, 121, 178, 178, 180, 392, 21, 21, 67, 392,
69, 213, 21, 178, 472, 180, 130, 69, 449, 69,
21, 67, 130, 69, 719, 130, 472, 69, 719, 449,
130, 21, 21, 25, 25, 25, 25, 25, 25, 25,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
25, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 35,
35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 37, 98, 122,
268, 573, 573, 268, 98, 122, 492, 98, 122, 98,
122, 268, 98, 122, 569, 699, 569, 544, 204, 37,
204, 37, 37, 37, 37, 37, 204, 492, 544, 37,
37, 37, 37, 37, 37, 37, 37, 37, 37, 37,
37, 37, 37, 37, 37, 37, 37, 37, 37, 37,
37, 39, 39, 39, 39, 39, 39, 39, 39, 39,
39, 39, 39, 39, 39, 39, 39, 39, 39, 39,
39, 39, 39, 39, 39, 39, 39, 39, 39, 39,
39, 39, 39, 39, 39, 39, 39, 39, 39, 39,
39, 39, 39, 39, 39, 39, 39, 39, 39, 39,
39, 39, 39, 39, 39, 39, 39, 39, 39, 41,
41, 41, 41, 41, 41, 41, 41, 41, 41, 41,
41, 41, 41, 41, 41, 41, 41, 41, 41, 41,
41, 41, 41, 41, 41, 41, 41, 41, 41, 41,
41, 41, 41, 41, 41, 41, 41, 41, 41, 41,
41, 41, 41, 41, 41, 41, 41, 41, 41, 41,
41, 41, 41, 41, 41, 41, 41, 45, 45, 45,
45, 721, 45, 45, 339, 721, 45, 45, 532, 698,
45, 510, 45, 45, 625, 438, 339, 45, 45, 45,
45, 45, 45, 46, 46, 46, 46, 339, 46, 46,
404, 510, 46, 46, 438, 404, 46, 625, 46, 46,
696, 404, 532, 46, 46, 46, 46, 46, 46, 47,
47, 47, 47, 47, 47, 47, 47, 47, 47, 47,
47, 47, 47, 47, 47, 47, 47, 47, 47, 47,
47, 47, 47, 47, 47, 47, 47, 47, 47, 47,
47, 47, 47, 47, 47, 47, 47, 47, 47, 47,
47, 47, 47, 47, 47, 47, 47, 47, 47, 47,
47, 47, 47, 47, 47, 47, 47, 49, 669, 49,
669, 660, 49, 49, 49, 579, 49, 49, 134, 49,
138, 49, 482, 482, 134, 482, 138, 134, 49, 138,
579, 695, 134, 482, 138, 142, 49, 49, 660, 146,
547, 142, 49, 589, 142, 146, 519, 547, 146, 142,
49, 547, 519, 146, 518, 519, 518, 589, 518, 694,
533, 49, 49, 50, 693, 50, 524, 518, 50, 50,
50, 525, 50, 50, 524, 50, 518, 50, 533, 525,
546, 691, 580, 546, 50, 524, 594, 596, 566, 690,
525, 689, 50, 50, 546, 600, 566, 580, 50, 582,
518, 594, 596, 600, 582, 601, 50, 566, 723, 687,
582, 601, 723, 601, 600, 686, 685, 50, 50, 51,
51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
51, 51, 51, 51, 51, 51, 51, 106, 106, 684,
106, 106, 106, 106, 106, 106, 106, 106, 106, 106,
106, 106, 106, 106, 106, 106, 106, 106, 106, 683,
106, 106, 106, 106, 106, 106, 106, 106, 106, 106,
106, 106, 106, 106, 106, 106, 106, 106, 106, 106,
106, 106, 106, 106, 106, 106, 106, 106, 106, 106,
106, 106, 106, 106, 106, 501, 607, 501, 501, 598,
640, 501, 501, 501, 613, 501, 501, 501, 501, 682,
501, 520, 607, 520, 598, 520, 651, 501, 724, 613,
681, 678, 724, 725, 520, 501, 728, 725, 677, 640,
728, 501, 651, 520, 521, 676, 521, 735, 521, 501,
501, 735, 675, 674, 673, 672, 671, 521, 670, 668,
501, 501, 517, 667, 517, 517, 521, 520, 517, 517,
517, 666, 517, 517, 517, 517, 665, 517, 664, 663,
521, 526, 662, 526, 517, 526, 661, 659, 658, 657,
521, 521, 517, 656, 526, 538, 655, 538, 517, 538,
654, 538, 653, 526, 652, 650, 517, 649, 647, 646,
538, 645, 644, 643, 642, 641, 639, 517, 517, 538,
638, 636, 635, 634, 633, 632, 631, 526, 630, 629,
628, 626, 624, 623, 622, 621, 620, 619, 618, 617,
616, 615, 614, 538, 707, 707, 707, 707, 707, 707,
707, 707, 707, 707, 707, 707, 707, 707, 707, 707,
708, 708, 708, 708, 708, 708, 708, 708, 708, 708,
708, 708, 708, 708, 708, 708, 709, 709, 709, 709,
709, 709, 709, 709, 709, 709, 709, 709, 709, 709,
709, 709, 710, 710, 710, 710, 710, 710, 710, 710,
710, 710, 710, 710, 710, 710, 710, 710, 711, 711,
711, 711, 711, 711, 711, 711, 711, 711, 711, 711,
711, 711, 711, 711, 712, 712, 712, 712, 712, 712,
712, 712, 712, 712, 712, 712, 712, 712, 712, 712,
713, 713, 713, 713, 713, 713, 713, 713, 713, 713,
713, 713, 713, 713, 713, 713, 714, 714, 714, 714,
714, 714, 714, 714, 714, 714, 714, 714, 714, 714,
714, 714, 715, 715, 715, 715, 715, 715, 715, 715,
715, 715, 715, 715, 715, 715, 715, 715, 716, 716,
716, 716, 716, 716, 716, 716, 716, 716, 716, 716,
716, 716, 716, 716, 717, 717, 717, 717, 717, 717,
717, 717, 717, 717, 717, 717, 717, 717, 717, 717,
720, 612, 720, 611, 720, 609, 608, 606, 720, 722,
605, 722, 604, 722, 722, 603, 602, 722, 726, 599,
726, 597, 726, 726, 595, 593, 726, 726, 727, 592,
727, 591, 727, 727, 590, 588, 727, 727, 730, 730,
587, 586, 730, 730, 731, 585, 731, 731, 731, 731,
731, 731, 731, 731, 731, 731, 584, 731, 731, 731,
732, 583, 732, 732, 581, 578, 577, 732, 576, 575,
574, 732, 732, 733, 733, 572, 571, 733, 734, 570,
734, 734, 568, 565, 734, 564, 563, 562, 561, 560,
734, 736, 736, 736, 736, 736, 736, 736, 736, 736,
736, 736, 736, 736, 558, 736, 736, 737, 737, 737,
737, 557, 556, 737, 737, 737, 737, 737, 737, 737,
737, 737, 737, 738, 738, 738, 738, 554, 553, 738,
738, 738, 738, 738, 738, 738, 738, 738, 738, 739,
739, 739, 739, 552, 551, 739, 739, 739, 739, 739,
739, 739, 739, 739, 739, 740, 740, 740, 740, 550,
549, 740, 740, 740, 740, 740, 740, 740, 740, 740,
740, 741, 741, 741, 741, 545, 543, 741, 542, 741,
741, 741, 741, 741, 741, 741, 741, 742, 541, 742,
742, 742, 742, 742, 742, 742, 742, 742, 742, 742,
742, 742, 742, 743, 540, 743, 539, 743, 537, 743,
743, 743, 743, 536, 535, 743, 744, 534, 531, 530,
744, 744, 745, 529, 745, 528, 745, 523, 516, 515,
514, 512, 509, 508, 506, 505, 503, 502, 499, 497,
496, 495, 494, 493, 491, 490, 489, 488, 487, 485,
484, 483, 481, 480, 479, 478, 477, 476, 475, 474,
473, 471, 470, 469, 468, 467, 466, 461, 459, 455,
453, 448, 447, 445, 444, 443, 442, 441, 440, 439,
437, 436, 435, 434, 433, 432, 431, 428, 425, 422,
420, 419, 417, 415, 414, 412, 411, 410, 409, 408,
407, 406, 403, 402, 400, 399, 398, 397, 396, 395,
394, 393, 391, 390, 389, 387, 386, 385, 384, 381,
380, 379, 376, 375, 374, 371, 370, 369, 366, 365,
364, 361, 360, 359, 358, 357, 354, 353, 350, 348,
347, 346, 345, 344, 341, 338, 337, 336, 335, 332,
331, 330, 329, 328, 327, 326, 324, 322, 321, 319,
318, 317, 316, 315, 314, 313, 312, 311, 310, 309,
308, 307, 306, 305, 304, 303, 302, 301, 300, 299,
298, 297, 296, 295, 294, 293, 292, 291, 290, 287,
286, 285, 284, 283, 281, 280, 279, 278, 277, 276,
275, 274, 273, 272, 271, 270, 269, 267, 266, 263,
262, 261, 260, 259, 257, 256, 254, 253, 252, 251,
250, 248, 247, 246, 245, 243, 242, 241, 240, 239,
237, 236, 235, 234, 233, 231, 230, 229, 228, 227,
225, 224, 223, 222, 221, 219, 218, 217, 216, 215,
212, 210, 209, 208, 207, 206, 203, 202, 200, 199,
197, 196, 195, 193, 191, 186, 185, 184, 183, 182,
181, 176, 175, 173, 172, 171, 170, 169, 168, 166,
165, 162, 161, 158, 157, 156, 155, 152, 151, 150,
149, 148, 145, 141, 137, 133, 129, 126, 119, 118,
117, 113, 111, 108, 105, 95, 89, 87, 86, 82,
76, 75, 71, 70, 63, 2, 1, 706, 706, 706,
706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
706, 706, 706, 706, 706, 706, 706, 706, 706, 706,
706, 706, 706, 706, 706, 706
} ;
/* Backtracking state: when the DFA runs past the last accepting position,
 * the yy_find_action code in tagYYlex() rewinds to these saved values. */
static yy_state_type yy_last_accepting_state;
static char *yy_last_accepting_cpos;
extern int tagYY_flex_debug;
int tagYY_flex_debug = 0;   /* flex rule tracing disabled by default */
/* The intent behind this definition is that it'll catch
 * any uses of REJECT which flex missed.
 */
#define REJECT reject_used_but_not_detected
#define yymore() yymore_used_but_not_detected
#define YY_MORE_ADJ 0
#define YY_RESTORE_YY_MORE_OFFSET
char *tagYYtext;   /* text of the token currently matched by the scanner */
#line 1 "doxytag.l"
/******************************************************************************
*
* $Id: doxytag.l,v 1.27 2001/03/19 19:27:40 root Exp $
*
* Copyright (C) 1997-2010 by Dimitri van Heesch.
*
* Permission to use, copy, modify, and distribute this software and its
* documentation under the terms of the GNU General Public License is hereby
* granted. No representations are made about the suitability of this software
* for any purpose. It is provided "as is" without express or implied warranty.
* See the GNU General Public License for more details.
*
* Documents produced by Doxygen are derivative works derived from the
* input used in their production; they are not affected by this license.
*
*/
#line 19 "doxytag.l"
/*
* includes
*/
#include <stdio.h>
#include <assert.h>
#include <ctype.h>
#include "qtbc.h"
#include <qstrlist.h>
#include <qfileinfo.h>
#include <qfile.h>
#include <qdict.h>
#include <qtextstream.h>
#include <qdir.h>
#include "version.h"
//#include "suffixtree.h"
//#include "searchindex.h"
#include "logos.h"
/// Escapes the XML/HTML special characters in \a s so the result can be
/// written verbatim into an XML tag file.
/// @param s  input C string; may be 0 (NULL), in which case an empty
///           QCString is returned.
/// @return   copy of \a s with <, >, &, ' and " replaced by their
///           character-entity references.
static QCString convertToXML(const char *s)
{
  QCString result;
  if (s==0) return result;
  const char *p=s;
  char c;
  while ((c=*p++))
  {
    switch (c)
    {
      // NOTE: the entity text itself must be emitted here; writing the
      // raw character would produce ill-formed XML output.
      case '<':  result+="&lt;";   break;
      case '>':  result+="&gt;";   break;
      case '&':  result+="&amp;";  break;
      case '\'': result+="&#39;";  break;
      case '"':  result+="&quot;"; break;
      default:   result+=c;        break;
    }
  }
  return result;
}
/* One member (function/variable/enum) scraped from a generated HTML page. */
struct MemberDef
{
QCString name;    // member name (class-qualifier stripped by the scanner)
QCString anchor;  // HTML anchor of the member within its documentation file
QCString args;    // argument list text as scraped from the HTML
};
/* One class (or file) scraped from a generated HTML page, together with
 * the members found on that page.  isFile distinguishes the two cases. */
struct ClassDef
{
QCString name;                // class name (or file name when isFile)
QStrList bases;               // names of the base classes
QCString fileName;            // HTML file the definition was found in
bool isFile;                  // TRUE when this entry describes a file, not a class
QList<MemberDef> memberList;  // members collected for this class/file
};
/* Global scanner state shared between the flex rules and the helpers below. */
QList<ClassDef> classList;     // all classes/files, in discovery order
QDict<ClassDef> classDict(1009);  // name -> ClassDef lookup for classList
QList<ClassDef> fileList;      // NOTE(review): appears unused; addFile() appends to classList
QDict<ClassDef> fileDict(1009);   // NOTE(review): appears unused here
static bool genTag;            // generate a tag file
static bool genIndex;          // generate a search index
static QStrList bases;         // base classes collected for the current class
static QCString inputString;   // whole HTML file currently being scanned
static int inputPosition;      // read cursor into inputString (see yyread)
static QCString yyFileName;    // name of the HTML file being scanned
static int yyLineNr;           // current line number (for messages)
static QCString classFile;     // HTML file a scraped reference points to
static QCString memberRef;     // anchor of the member being scraped
static QCString memberName;    // name of the member being scraped
static QCString memberArgs;    // argument list of the member being scraped
static QCString className;     // name of the class currently being scraped
static QCString baseName;      // base-class name currently being assembled
static QCString docBaseLink;   // base link for search-index references
static QCString docAnchor;     // anchor for search-index references
static QCString docRefName;    // reference name for the search index
static bool nameBug;           // works around a Qt doc generation quirk (see CheckConstructor rules)
//static SearchIndex searchIndex;
#define YY_NEVER_INTERACTIVE 1
/* -----------------------------------------------------------------
*/
#undef YY_INPUT
#define YY_INPUT(buf,result,max_size) result=yyread(buf,max_size);
/* Input routine for the scanner (installed via the YY_INPUT redefinition
 * above): copies up to max_size characters from the global inputString,
 * starting at inputPosition, into buf.
 * Returns the number of characters delivered; 0 signals end of input. */
static int yyread(char *buf,int max_size)
{
  int copied = 0;
  for (; copied < max_size; ++copied)
  {
    char ch = inputString[inputPosition];
    if (!ch) break;               // reached the end of the buffered file
    buf[copied] = ch;
    inputPosition++;
  }
  return copied;
}
static void addClass(const char *clName)
{
if (classDict[clName]==0)
{
//printf("addClass(%s)\n",clName);
ClassDef *cd=new ClassDef;
cd->name=clName;
cd->fileName=yyFileName;
cd->isFile=FALSE;
classList.append(cd);
classDict.insert(clName,cd);
}
}
static void addFile(const char *fName)
{
if (classDict[fName]==0)
{
ClassDef *fd=new ClassDef;
fd->name=fName;
fd->fileName=yyFileName;
fd->isFile=TRUE;
classList.append(fd);
classDict.insert(fName,fd);
}
}
static void addBases(const char *clName)
{
ClassDef *cd=0;
if (clName && (cd=classDict[clName])) cd->bases=bases;
}
static void addMember(const char *memName,const char *memRef,const char *memArgs)
{
//printf("addMember(%s,%s,%s)\n",memName,memRef,memArgs);
ClassDef *cd=classList.last();
if (cd)
{
MemberDef *md;
md=new MemberDef;
md->name=memName;
md->anchor=memRef;
md->args=memArgs;
cd->memberList.append(md);
}
else
{
//printf("Warning: found member %s %s but not class was found to insert it!\n",
// memName,memArgs);
}
}
/* Would record docRefName/docBaseLink/docAnchor in the search index;
 * the index code is disabled, so this is currently a no-op stub kept so
 * the scanner rules that call it need no changes. */
static void addReference()
{
//printf("addReference() key: %s ref:%s\n",
//    docRefName.data(),(docBaseLink+"#"+docAnchor).data());
//if (genIndex && !docRefName.isEmpty() && !docBaseLink.isEmpty())
//{
//  if (docAnchor.isEmpty())
//    searchIndex.addReference(docRefName,docBaseLink);
//  else
//    searchIndex.addReference(docRefName,docBaseLink+"#"+docAnchor);
//  searchIndex.addWord(docRefName,docRefName,TRUE);
//}
}
/// Replaces the HTML character entities &amp;, &lt;, &gt; and &quot;
/// in \a str by the characters they stand for.
/// @param str  NUL-terminated input; an unrecognized entity is copied
///             through starting with its literal '&'.
/// @return     decoded copy of the input.
QCString unhtmlify(const char *str)
{
  QCString result;
  const char *p=str;
  char c;
  while ((c=*p)!='\0')
  {
    if (c!='&') { result+=c; p++; }
    else
    {
      if      (strncmp(p,"&amp;",5)==0)  { result+='&'; p+=5; }
      else if (strncmp(p,"&lt;",4)==0)   { result+='<'; p+=4; }
      else if (strncmp(p,"&gt;",4)==0)   { result+='>'; p+=4; }
      // was p+=4: "&quot;" is 6 characters, so only advancing 4 would
      // re-scan the trailing "t;" as ordinary text.
      else if (strncmp(p,"&quot;",6)==0) { result+='"'; p+=6; }
      else /* should not happen */       { result+='&'; p++; }
    }
  }
  return result;
}
#line 1410 "<stdout>"
#define INITIAL 0
#define Start 1
#define SearchClassFile 2
#define ReadClassFile 3
#define CheckClassName 4
#define ReadClassName 5
#define SearchMemberRef 6
#define ReadMemberRef 7
#define SearchMemberName 8
#define ReadMemberName 9
#define ReadOperator 10
#define SearchBaseClasses 11
#define ReadBaseClass 12
#define SearchRefName 13
#define ReadRefName 14
#define SearchArgs 15
#define ReadArgs 16
#define SearchWords 17
#define SkipHTMLTag 18
#define CheckConstructor 19
#define SkipPreformated 20
#define Qt3ReadType 21
#define Qt3ReadAnchor 22
#define Qt3ReadName 23
#define Qt3ReadOperator 24
#define Qt3ReadArgs 25
#define DoxReadAnchor 26
#define DoxReadName 27
#define DoxReadArgs 28
#define DoxClassName 29
#define DoxClassBase 30
#ifndef YY_NO_UNISTD_H
/* Special case for "unistd.h", since it is non-ANSI. We include it way
* down here because we want the user's section 1 to have been scanned first.
* The user has a chance to override it with an option.
*/
#include <unistd.h>
#endif
#ifndef YY_EXTRA_TYPE
#define YY_EXTRA_TYPE void *
#endif
static int yy_init_globals (void );
/* Accessor methods to globals.
These are made visible to non-reentrant scanners for convenience. */
int tagYYlex_destroy (void );
int tagYYget_debug (void );
void tagYYset_debug (int debug_flag );
YY_EXTRA_TYPE tagYYget_extra (void );
void tagYYset_extra (YY_EXTRA_TYPE user_defined );
FILE *tagYYget_in (void );
void tagYYset_in (FILE * in_str );
FILE *tagYYget_out (void );
void tagYYset_out (FILE * out_str );
yy_size_t tagYYget_leng (void );
char *tagYYget_text (void );
int tagYYget_lineno (void );
void tagYYset_lineno (int line_number );
/* Macros after this point can all be overridden by user definitions in
* section 1.
*/
#ifndef YY_SKIP_YYWRAP
#ifdef __cplusplus
extern "C" int tagYYwrap (void );
#else
extern int tagYYwrap (void );
#endif
#endif
static void yyunput (int c,char *buf_ptr );
#ifndef yytext_ptr
static void yy_flex_strncpy (char *,yyconst char *,int );
#endif
#ifdef YY_NEED_STRLEN
static int yy_flex_strlen (yyconst char * );
#endif
#ifndef YY_NO_INPUT
#ifdef __cplusplus
static int yyinput (void );
#else
static int input (void );
#endif
#endif
/* Amount of stuff to slurp up with each read. */
#ifndef YY_READ_BUF_SIZE
#define YY_READ_BUF_SIZE 8192
#endif
/* Copy whatever the last rule matched to the standard output. */
#ifndef ECHO
/* This used to be an fputs(), but since the string might contain NUL's,
* we now use fwrite().
*/
#define ECHO fwrite( tagYYtext, tagYYleng, 1, tagYYout )
#endif
/* Gets input and stuffs it into "buf". number of characters read, or YY_NULL,
* is returned in "result".
*/
#ifndef YY_INPUT
#define YY_INPUT(buf,result,max_size) \
if ( YY_CURRENT_BUFFER_LVALUE->yy_is_interactive ) \
{ \
int c = '*'; \
yy_size_t n; \
for ( n = 0; n < max_size && \
(c = getc( tagYYin )) != EOF && c != '\n'; ++n ) \
buf[n] = (char) c; \
if ( c == '\n' ) \
buf[n++] = (char) c; \
if ( c == EOF && ferror( tagYYin ) ) \
YY_FATAL_ERROR( "input in flex scanner failed" ); \
result = n; \
} \
else \
{ \
errno=0; \
while ( (result = fread(buf, 1, max_size, tagYYin))==0 && ferror(tagYYin)) \
{ \
if( errno != EINTR) \
{ \
YY_FATAL_ERROR( "input in flex scanner failed" ); \
break; \
} \
errno=0; \
clearerr(tagYYin); \
} \
}\
\
#endif
/* No semi-colon after return; correct usage is to write "yyterminate();" -
* we don't want an extra ';' after the "return" because that will cause
* some compilers to complain about unreachable statements.
*/
#ifndef yyterminate
#define yyterminate() return YY_NULL
#endif
/* Number of entries by which start-condition stack grows. */
#ifndef YY_START_STACK_INCR
#define YY_START_STACK_INCR 25
#endif
/* Report a fatal error. */
#ifndef YY_FATAL_ERROR
#define YY_FATAL_ERROR(msg) yy_fatal_error( msg )
#endif
/* end tables serialization structures and prototypes */
/* Default declaration of generated scanner - a define so the user can
* easily add parameters.
*/
#ifndef YY_DECL
#define YY_DECL_IS_OURS 1
extern int tagYYlex (void);
#define YY_DECL int tagYYlex (void)
#endif /* !YY_DECL */
/* Code executed at the beginning of each rule, after tagYYtext and tagYYleng
* have been set up.
*/
#ifndef YY_USER_ACTION
#define YY_USER_ACTION
#endif
/* Code executed at the end of each rule. */
#ifndef YY_BREAK
#define YY_BREAK break;
#endif
#define YY_RULE_SETUP \
if ( tagYYleng > 0 ) \
YY_CURRENT_BUFFER_LVALUE->yy_at_bol = \
(tagYYtext[tagYYleng - 1] == '\n'); \
YY_USER_ACTION
/** The main scanner function which does all the work.
*/
YY_DECL
{
register yy_state_type yy_current_state;
register char *yy_cp, *yy_bp;
register int yy_act;
#line 241 "doxytag.l"
#line 1628 "<stdout>"
if ( !(yy_init) )
{
(yy_init) = 1;
#ifdef YY_USER_INIT
YY_USER_INIT;
#endif
if ( ! (yy_start) )
(yy_start) = 1; /* first start state */
if ( ! tagYYin )
tagYYin = stdin;
if ( ! tagYYout )
tagYYout = stdout;
if ( ! YY_CURRENT_BUFFER ) {
tagYYensure_buffer_stack ();
YY_CURRENT_BUFFER_LVALUE =
tagYY_create_buffer(tagYYin,YY_BUF_SIZE );
}
tagYY_load_buffer_state( );
}
while ( 1 ) /* loops until end-of-file is reached */
{
yy_cp = (yy_c_buf_p);
/* Support of tagYYtext. */
*yy_cp = (yy_hold_char);
/* yy_bp points to the position in yy_ch_buf of the start of
* the current run.
*/
yy_bp = yy_cp;
yy_current_state = (yy_start);
yy_current_state += YY_AT_BOL();
yy_match:
do
{
register YY_CHAR yy_c = yy_ec[YY_SC_TO_UI(*yy_cp)];
if ( yy_accept[yy_current_state] )
{
(yy_last_accepting_state) = yy_current_state;
(yy_last_accepting_cpos) = yy_cp;
}
while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
{
yy_current_state = (int) yy_def[yy_current_state];
if ( yy_current_state >= 707 )
yy_c = yy_meta[(unsigned int) yy_c];
}
yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c];
++yy_cp;
}
while ( yy_base[yy_current_state] != 2058 );
yy_find_action:
yy_act = yy_accept[yy_current_state];
if ( yy_act == 0 )
{ /* have to back up */
yy_cp = (yy_last_accepting_cpos);
yy_current_state = (yy_last_accepting_state);
yy_act = yy_accept[yy_current_state];
}
YY_DO_BEFORE_ACTION;
do_action: /* This label is used only to access EOF actions. */
switch ( yy_act )
{ /* beginning of action switch */
case 0: /* must back up */
/* undo the effects of YY_DO_BEFORE_ACTION */
*yy_cp = (yy_hold_char);
yy_cp = (yy_last_accepting_cpos);
yy_current_state = (yy_last_accepting_state);
goto yy_find_action;
case 1:
YY_RULE_SETUP
#line 243 "doxytag.l"
{
BEGIN( SearchClassFile );
}
YY_BREAK
case 2:
#line 247 "doxytag.l"
case 3:
#line 248 "doxytag.l"
case 4:
#line 249 "doxytag.l"
case 5:
YY_RULE_SETUP
#line 249 "doxytag.l"
{ // Qt variant
BEGIN( ReadClassName );
}
YY_BREAK
case 6:
YY_RULE_SETUP
#line 252 "doxytag.l"
{ // Doxygen variant
BEGIN( ReadClassName );
}
YY_BREAK
case 7:
YY_RULE_SETUP
#line 255 "doxytag.l"
{
//printf("Inherits found\n");
BEGIN( SearchBaseClasses );
}
YY_BREAK
case 8:
*yy_cp = (yy_hold_char); /* undo effects of setting up tagYYtext */
(yy_c_buf_p) = yy_cp = yy_bp + 15;
YY_DO_BEFORE_ACTION; /* set up tagYYtext again */
YY_RULE_SETUP
#line 259 "doxytag.l"
{ // needed due to inconsistency in the Qt docs
BEGIN( CheckConstructor );
}
YY_BREAK
/* --------------------------------------------------- */
/* Qt member extraction rules */
case 9:
YY_RULE_SETUP
#line 266 "doxytag.l"
{
BEGIN( Qt3ReadType );
}
YY_BREAK
case 10:
YY_RULE_SETUP
#line 269 "doxytag.l"
{ // Qt-3.3+ html file
BEGIN( Qt3ReadAnchor );
}
YY_BREAK
case 11:
YY_RULE_SETUP
#line 272 "doxytag.l"
{ // Qt-4 html file
BEGIN( Qt3ReadAnchor );
}
YY_BREAK
case 12:
YY_RULE_SETUP
#line 275 "doxytag.l"
{
BEGIN( Qt3ReadAnchor );
}
YY_BREAK
case 13:
YY_RULE_SETUP
#line 278 "doxytag.l"
YY_BREAK
case 14:
YY_RULE_SETUP
#line 279 "doxytag.l"
{
memberRef = tagYYtext;
}
YY_BREAK
case 15:
YY_RULE_SETUP
#line 282 "doxytag.l"
{
BEGIN( Qt3ReadName );
}
YY_BREAK
case 16:
YY_RULE_SETUP
#line 285 "doxytag.l"
{
BEGIN( Start );
}
YY_BREAK
case 17:
YY_RULE_SETUP
#line 288 "doxytag.l"
{
memberName="operator";
BEGIN(Qt3ReadOperator);
}
YY_BREAK
case 18:
YY_RULE_SETUP
#line 292 "doxytag.l"
{ memberName+=tagYYtext; }
YY_BREAK
case 19:
YY_RULE_SETUP
#line 293 "doxytag.l"
{ memberName+="<"; }
YY_BREAK
case 20:
YY_RULE_SETUP
#line 294 "doxytag.l"
{ memberName+=">"; }
YY_BREAK
case 21:
YY_RULE_SETUP
#line 295 "doxytag.l"
{ memberName+=" new"; }
YY_BREAK
case 22:
YY_RULE_SETUP
#line 296 "doxytag.l"
{ memberName+=" delete"; }
YY_BREAK
case 23:
YY_RULE_SETUP
#line 297 "doxytag.l"
{ memberName+="()"; BEGIN( Qt3ReadArgs ); }
YY_BREAK
case 24:
YY_RULE_SETUP
#line 298 "doxytag.l"
{ unput(*tagYYtext); BEGIN( Qt3ReadArgs ); }
YY_BREAK
case 25:
YY_RULE_SETUP
#line 299 "doxytag.l"
{
memberName=tagYYtext;
memberName=memberName.mid(memberName.find("::")+2);
BEGIN(Qt3ReadArgs);
}
YY_BREAK
case 26:
YY_RULE_SETUP
#line 304 "doxytag.l"
YY_BREAK
case 27:
YY_RULE_SETUP
#line 305 "doxytag.l"
{
memberArgs+='(';
}
YY_BREAK
case 28:
YY_RULE_SETUP
#line 308 "doxytag.l"
{ memberArgs+='&'; }
YY_BREAK
case 29:
YY_RULE_SETUP
#line 309 "doxytag.l"
{ memberArgs+='<'; }
YY_BREAK
case 30:
YY_RULE_SETUP
#line 310 "doxytag.l"
{ memberArgs+='>'; }
YY_BREAK
case 31:
YY_RULE_SETUP
#line 311 "doxytag.l"
{ memberArgs+='"'; }
YY_BREAK
case 32:
YY_RULE_SETUP
#line 312 "doxytag.l"
{ memberArgs+=' '; }
YY_BREAK
case 33:
YY_RULE_SETUP
#line 313 "doxytag.l"
{
addMember(memberName,memberRef,memberArgs);
memberName.resize(0);
memberRef.resize(0);
memberArgs.resize(0);
BEGIN(Start);
}
YY_BREAK
case 34:
/* rule 34 can match eol */
YY_RULE_SETUP
#line 320 "doxytag.l"
YY_BREAK
case 35:
YY_RULE_SETUP
#line 321 "doxytag.l"
{
memberArgs+=')';
addMember(memberName,memberRef,memberArgs);
memberName.resize(0);
memberRef.resize(0);
memberArgs.resize(0);
BEGIN(Start);
}
YY_BREAK
case 36:
YY_RULE_SETUP
#line 329 "doxytag.l"
{ memberArgs+=tagYYtext; }
YY_BREAK
case 37:
YY_RULE_SETUP
#line 330 "doxytag.l"
{ memberArgs+=*tagYYtext; }
YY_BREAK
/* --------------------------------------------------- */
/* Doxygen class extraction rules */
case 38:
YY_RULE_SETUP
#line 335 "doxytag.l"
{
className.resize(0);
BEGIN(DoxClassName);
}
YY_BREAK
case 39:
/* rule 39 can match eol */
YY_RULE_SETUP
#line 339 "doxytag.l"
{
className=tagYYtext;
addClass(className);
}
YY_BREAK
case 40:
YY_RULE_SETUP
#line 343 "doxytag.l"
{
className+='<';
}
YY_BREAK
case 41:
YY_RULE_SETUP
#line 346 "doxytag.l"
{
className+='>';
}
YY_BREAK
case 42:
YY_RULE_SETUP
#line 349 "doxytag.l"
{
className+='&';
}
YY_BREAK
case 43:
YY_RULE_SETUP
#line 352 "doxytag.l"
{
className+='"';
}
YY_BREAK
case 44:
YY_RULE_SETUP
#line 355 "doxytag.l"
{
className+=*tagYYtext;
}
YY_BREAK
case 45:
YY_RULE_SETUP
#line 358 "doxytag.l"
{
BEGIN(Start);
}
YY_BREAK
/* --------------------------------------------------- */
/* Doxygen inheritance extraction rules */
case 46:
YY_RULE_SETUP
#line 365 "doxytag.l"
{
bases.clear();
baseName.resize(0);
BEGIN(DoxClassBase);
}
YY_BREAK
case 47:
/* rule 47 can match eol */
YY_RULE_SETUP
#line 370 "doxytag.l"
{
baseName+=tagYYtext;
}
YY_BREAK
case 48:
YY_RULE_SETUP
#line 373 "doxytag.l"
{
bases.append(baseName);
baseName.resize(0);
}
YY_BREAK
case 49:
YY_RULE_SETUP
#line 377 "doxytag.l"
{
baseName+='<';
}
YY_BREAK
case 50:
YY_RULE_SETUP
#line 380 "doxytag.l"
{
baseName+='>';
}
YY_BREAK
case 51:
YY_RULE_SETUP
#line 383 "doxytag.l"
{
baseName+='&';
}
YY_BREAK
case 52:
YY_RULE_SETUP
#line 386 "doxytag.l"
{
baseName+='"';
}
YY_BREAK
case 53:
YY_RULE_SETUP
#line 389 "doxytag.l"
{
baseName+=*tagYYtext;
}
YY_BREAK
case 54:
YY_RULE_SETUP
#line 392 "doxytag.l"
{
bases.append(baseName);
baseName.resize(0);
addBases(className);
BEGIN(Start);
}
YY_BREAK
/* --------------------------------------------------- */
/* Doxygen member extraction rules */
case 55:
YY_RULE_SETUP
#line 402 "doxytag.l"
{
memberName.resize(0);
BEGIN(DoxReadName);
}
YY_BREAK
case 56:
/* rule 56 can match eol */
YY_RULE_SETUP
#line 406 "doxytag.l"
{
memberName+=tagYYtext;
}
YY_BREAK
case 57:
YY_RULE_SETUP
#line 409 "doxytag.l"
{
memberName+='<';
}
YY_BREAK
case 58:
YY_RULE_SETUP
#line 412 "doxytag.l"
{
memberName+='>';
}
YY_BREAK
case 59:
YY_RULE_SETUP
#line 415 "doxytag.l"
{
memberName+='&';
}
YY_BREAK
case 60:
YY_RULE_SETUP
#line 418 "doxytag.l"
{
memberName+='"';
}
YY_BREAK
case 61:
YY_RULE_SETUP
#line 421 "doxytag.l"
{
memberName+=*tagYYtext;
}
YY_BREAK
case 62:
YY_RULE_SETUP
#line 424 "doxytag.l"
{
memberName=memberName.mid(memberName.find("::")+2);
memberRef.resize(0);
BEGIN(DoxReadAnchor);
}
YY_BREAK
case 63:
/* rule 63 can match eol */
YY_RULE_SETUP
#line 429 "doxytag.l"
{
memberRef+=tagYYtext;
}
YY_BREAK
case 64:
YY_RULE_SETUP
#line 432 "doxytag.l"
{
memberRef+='<';
}
YY_BREAK
case 65:
YY_RULE_SETUP
#line 435 "doxytag.l"
{
memberRef+='>';
}
YY_BREAK
case 66:
YY_RULE_SETUP
#line 438 "doxytag.l"
{
memberRef+='&';
}
YY_BREAK
case 67:
YY_RULE_SETUP
#line 441 "doxytag.l"
{
memberRef+='"';
}
YY_BREAK
case 68:
YY_RULE_SETUP
#line 444 "doxytag.l"
{
memberRef+=*tagYYtext;
}
YY_BREAK
case 69:
YY_RULE_SETUP
#line 447 "doxytag.l"
{
memberArgs.resize(0);
BEGIN(DoxReadArgs);
}
YY_BREAK
case 70:
/* rule 70 can match eol */
YY_RULE_SETUP
#line 451 "doxytag.l"
{
memberArgs+=tagYYtext;
}
YY_BREAK
case 71:
YY_RULE_SETUP
#line 454 "doxytag.l"
{
memberArgs+='<';
}
YY_BREAK
case 72:
YY_RULE_SETUP
#line 457 "doxytag.l"
{
memberArgs+='>';
}
YY_BREAK
case 73:
YY_RULE_SETUP
#line 460 "doxytag.l"
{
memberArgs+='&';
}
YY_BREAK
case 74:
YY_RULE_SETUP
#line 463 "doxytag.l"
{
memberArgs+='"';
}
YY_BREAK
case 75:
YY_RULE_SETUP
#line 466 "doxytag.l"
{
memberArgs+=*tagYYtext;
}
YY_BREAK
case 76:
YY_RULE_SETUP
#line 469 "doxytag.l"
{
addMember(memberName,memberRef,memberArgs);
memberName.resize(0);
memberRef.resize(0);
memberArgs.resize(0);
BEGIN(Start);
}
YY_BREAK
/** --------------------------------------------------- */
case 77:
YY_RULE_SETUP
#line 479 "doxytag.l"
{
BEGIN( SkipPreformated );
}
YY_BREAK
case 78:
YY_RULE_SETUP
#line 482 "doxytag.l"
{
BEGIN( SearchWords );
}
YY_BREAK
case 79:
YY_RULE_SETUP
#line 485 "doxytag.l"
{
BEGIN( SkipHTMLTag );
}
YY_BREAK
case 80:
YY_RULE_SETUP
#line 488 "doxytag.l"
YY_BREAK
case 81:
YY_RULE_SETUP
#line 489 "doxytag.l"
{
//printf("tag: %s#%s ref: %s word: `%s'\n",
// docBaseLink.data(),docAnchor.data(),
// docRefName.data(),tagYYtext);
//if (genIndex && !docRefName.isEmpty() && tagYYleng>2)
// searchIndex.addWord(docRefName,
// tagYYtext,FALSE
// );
}
YY_BREAK
case 82:
YY_RULE_SETUP
#line 498 "doxytag.l"
{
BEGIN( Start );
}
YY_BREAK
case 83:
YY_RULE_SETUP
#line 501 "doxytag.l"
YY_BREAK
case 84:
YY_RULE_SETUP
#line 502 "doxytag.l"
{
QCString s=tagYYtext;
if (s.find("::")!=-1)
{
docRefName=tagYYtext;
addReference();
nameBug=TRUE;
}
else
{
nameBug=FALSE;
}
BEGIN( Start );
}
YY_BREAK
case 85:
YY_RULE_SETUP
#line 516 "doxytag.l"
{
docAnchor = tagYYtext;
if (docAnchor=="details" ||
docAnchor=="_details")
{
docRefName=className.copy();
addReference();
BEGIN( Start );
}
else
{
BEGIN( SearchRefName );
}
}
YY_BREAK
case 86:
YY_RULE_SETUP
#line 530 "doxytag.l"
{
BEGIN( ReadRefName );
}
YY_BREAK
case 87:
YY_RULE_SETUP
#line 533 "doxytag.l"
{ // HACK: avoid finding links in code fragments
BEGIN( Start );
}
YY_BREAK
case 88:
YY_RULE_SETUP
#line 536 "doxytag.l"
{ // HACK: deal with Qt code
if (nameBug)
BEGIN( Start );
else
BEGIN( ReadRefName );
}
YY_BREAK
case 89:
YY_RULE_SETUP
#line 543 "doxytag.l"
{ // hmm, looks impressive :-)
docRefName=unhtmlify(tagYYtext);
addReference();
BEGIN( Start );
}
YY_BREAK
case 90:
YY_RULE_SETUP
#line 548 "doxytag.l"
{
//printf("ReadRef=%s\n",tagYYtext);
docRefName=tagYYtext;
addReference();
BEGIN( Start );
}
YY_BREAK
case 91:
YY_RULE_SETUP
#line 554 "doxytag.l"
{
//printf("Search %s\n",tagYYtext);
BEGIN( ReadBaseClass );
}
YY_BREAK
case 92:
/* rule 92 can match eol */
YY_RULE_SETUP
#line 558 "doxytag.l"
{
addBases(className);
BEGIN( Start );
}
YY_BREAK
case 93:
YY_RULE_SETUP
#line 562 "doxytag.l"
{
bases.append(tagYYtext);
BEGIN( SearchBaseClasses );
}
YY_BREAK
case 94:
YY_RULE_SETUP
#line 566 "doxytag.l"
{
BEGIN( ReadClassFile );
}
YY_BREAK
case 95:
YY_RULE_SETUP
#line 569 "doxytag.l"
{
BEGIN( ReadClassFile );
}
YY_BREAK
case 96:
YY_RULE_SETUP
#line 572 "doxytag.l"
{
className=tagYYtext;
BEGIN( CheckClassName);
}
YY_BREAK
case 97:
YY_RULE_SETUP
#line 576 "doxytag.l"
{
//printf("className=%s\n",className.data());
addClass(className);
BEGIN( Start );
}
YY_BREAK
case 98:
YY_RULE_SETUP
#line 581 "doxytag.l"
{
//printf("className=%s\n",className.data());
addFile(className);
BEGIN( Start );
}
YY_BREAK
case 99:
YY_RULE_SETUP
#line 586 "doxytag.l"
{ // not a class file
className.resize(0);
BEGIN( Start );
}
YY_BREAK
case 100:
YY_RULE_SETUP
#line 590 "doxytag.l"
{
classFile=tagYYtext;
BEGIN( SearchMemberRef );
}
YY_BREAK
case 101:
YY_RULE_SETUP
#line 594 "doxytag.l"
{
if (YY_START==ReadClassFile)
{
classFile=yyFileName;
}
//BEGIN( ReadMemberRef );
BEGIN( Start );
}
YY_BREAK
case 102:
YY_RULE_SETUP
#line 602 "doxytag.l"
{
memberRef=tagYYtext;
BEGIN( SearchMemberName );
}
YY_BREAK
case 103:
YY_RULE_SETUP
#line 606 "doxytag.l"
{ // <strong> is for qt-1.44, <b> is for qt-2.00
BEGIN( ReadMemberName );
}
YY_BREAK
case 104:
YY_RULE_SETUP
#line 609 "doxytag.l"
{
unput(*tagYYtext);
BEGIN( ReadMemberName );
}
YY_BREAK
case 105:
YY_RULE_SETUP
#line 613 "doxytag.l"
{
memberName="operator";
BEGIN( ReadOperator );
}
YY_BREAK
case 106:
YY_RULE_SETUP
#line 617 "doxytag.l"
{ memberName+=*tagYYtext; }
YY_BREAK
case 107:
YY_RULE_SETUP
#line 618 "doxytag.l"
{ memberName+="<"; }
YY_BREAK
case 108:
YY_RULE_SETUP
#line 619 "doxytag.l"
{ memberName+=">"; }
YY_BREAK
case 109:
YY_RULE_SETUP
#line 620 "doxytag.l"
{ memberName+=" new"; }
YY_BREAK
case 110:
YY_RULE_SETUP
#line 621 "doxytag.l"
{ memberName+=" delete"; }
YY_BREAK
case 111:
YY_RULE_SETUP
#line 622 "doxytag.l"
{ BEGIN( SearchArgs ); }
YY_BREAK
case 112:
YY_RULE_SETUP
#line 623 "doxytag.l"
{
memberName=tagYYtext;
BEGIN( SearchArgs );
}
YY_BREAK
case 113:
YY_RULE_SETUP
#line 627 "doxytag.l"
{
//printf("SearchArg className=%s memberName=%s\n",className.data(),memberName.data());
if (!className.isEmpty() && !memberName.isEmpty())
BEGIN( ReadArgs );
else
BEGIN( Start );
}
YY_BREAK
case 114:
YY_RULE_SETUP
#line 634 "doxytag.l"
{ memberArgs+='&'; }
YY_BREAK
case 115:
YY_RULE_SETUP
#line 635 "doxytag.l"
{ memberArgs+='<'; }
YY_BREAK
case 116:
YY_RULE_SETUP
#line 636 "doxytag.l"
{ memberArgs+='>'; }
YY_BREAK
case 117:
YY_RULE_SETUP
#line 637 "doxytag.l"
{ memberArgs+='"'; }
YY_BREAK
case 118:
YY_RULE_SETUP
#line 638 "doxytag.l"
{ memberArgs+=' '; }
YY_BREAK
/*
<ReadArgs>[{}] { // handle enums
memberArgs.resize(0);
addMember(memberName,memberRef,memberArgs);
if (*tagYYtext=='}')
BEGIN( Start );
else
BEGIN( SearchClassFile );
}
*/
case 119:
/* rule 119 can match eol */
YY_RULE_SETUP
#line 649 "doxytag.l"
{
//printf("adding member %s\n",memberName.data());
memberArgs=memberArgs.stripWhiteSpace();
//if (newClass)
//{
// newClass=FALSE;
// addClass(className);
//}
addMember(memberName,memberRef,memberArgs);
memberName.resize(0);
memberRef.resize(0);
memberArgs.resize(0);
if (*tagYYtext=='<')
BEGIN( SkipHTMLTag);
else
BEGIN( Start );
}
YY_BREAK
case 120:
YY_RULE_SETUP
#line 666 "doxytag.l"
{ memberArgs+=(*tagYYtext)&0x7f; }
YY_BREAK
case 121:
YY_RULE_SETUP
#line 667 "doxytag.l"
{ BEGIN( Start ); }
YY_BREAK
case 122:
YY_RULE_SETUP
#line 668 "doxytag.l"
YY_BREAK
case 123:
YY_RULE_SETUP
#line 669 "doxytag.l"
YY_BREAK
case 124:
/* rule 124 can match eol */
YY_RULE_SETUP
#line 670 "doxytag.l"
{ yyLineNr++;
if (YY_START!=SkipHTMLTag) BEGIN( Start );
}
YY_BREAK
case 125:
YY_RULE_SETUP
#line 674 "doxytag.l"
ECHO;
YY_BREAK
#line 2635 "<stdout>"
case YY_STATE_EOF(INITIAL):
case YY_STATE_EOF(Start):
case YY_STATE_EOF(SearchClassFile):
case YY_STATE_EOF(ReadClassFile):
case YY_STATE_EOF(CheckClassName):
case YY_STATE_EOF(ReadClassName):
case YY_STATE_EOF(SearchMemberRef):
case YY_STATE_EOF(ReadMemberRef):
case YY_STATE_EOF(SearchMemberName):
case YY_STATE_EOF(ReadMemberName):
case YY_STATE_EOF(ReadOperator):
case YY_STATE_EOF(SearchBaseClasses):
case YY_STATE_EOF(ReadBaseClass):
case YY_STATE_EOF(SearchRefName):
case YY_STATE_EOF(ReadRefName):
case YY_STATE_EOF(SearchArgs):
case YY_STATE_EOF(ReadArgs):
case YY_STATE_EOF(SearchWords):
case YY_STATE_EOF(SkipHTMLTag):
case YY_STATE_EOF(CheckConstructor):
case YY_STATE_EOF(SkipPreformated):
case YY_STATE_EOF(Qt3ReadType):
case YY_STATE_EOF(Qt3ReadAnchor):
case YY_STATE_EOF(Qt3ReadName):
case YY_STATE_EOF(Qt3ReadOperator):
case YY_STATE_EOF(Qt3ReadArgs):
case YY_STATE_EOF(DoxReadAnchor):
case YY_STATE_EOF(DoxReadName):
case YY_STATE_EOF(DoxReadArgs):
case YY_STATE_EOF(DoxClassName):
case YY_STATE_EOF(DoxClassBase):
yyterminate();
case YY_END_OF_BUFFER:
{
/* Amount of text matched not including the EOB char. */
int yy_amount_of_matched_text = (int) (yy_cp - (yytext_ptr)) - 1;
/* Undo the effects of YY_DO_BEFORE_ACTION. */
*yy_cp = (yy_hold_char);
YY_RESTORE_YY_MORE_OFFSET
if ( YY_CURRENT_BUFFER_LVALUE->yy_buffer_status == YY_BUFFER_NEW )
{
/* We're scanning a new file or input source. It's
* possible that this happened because the user
* just pointed tagYYin at a new source and called
* tagYYlex(). If so, then we have to assure
* consistency between YY_CURRENT_BUFFER and our
* globals. Here is the right place to do so, because
* this is the first action (other than possibly a
* back-up) that will match for the new input source.
*/
(yy_n_chars) = YY_CURRENT_BUFFER_LVALUE->yy_n_chars;
YY_CURRENT_BUFFER_LVALUE->yy_input_file = tagYYin;
YY_CURRENT_BUFFER_LVALUE->yy_buffer_status = YY_BUFFER_NORMAL;
}
/* Note that here we test for yy_c_buf_p "<=" to the position
* of the first EOB in the buffer, since yy_c_buf_p will
* already have been incremented past the NUL character
* (since all states make transitions on EOB to the
* end-of-buffer state). Contrast this with the test
* in input().
*/
if ( (yy_c_buf_p) <= &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)] )
{ /* This was really a NUL. */
yy_state_type yy_next_state;
(yy_c_buf_p) = (yytext_ptr) + yy_amount_of_matched_text;
yy_current_state = yy_get_previous_state( );
/* Okay, we're now positioned to make the NUL
* transition. We couldn't have
* yy_get_previous_state() go ahead and do it
* for us because it doesn't know how to deal
* with the possibility of jamming (and we don't
* want to build jamming into it because then it
* will run more slowly).
*/
yy_next_state = yy_try_NUL_trans( yy_current_state );
yy_bp = (yytext_ptr) + YY_MORE_ADJ;
if ( yy_next_state )
{
/* Consume the NUL. */
yy_cp = ++(yy_c_buf_p);
yy_current_state = yy_next_state;
goto yy_match;
}
else
{
yy_cp = (yy_c_buf_p);
goto yy_find_action;
}
}
else switch ( yy_get_next_buffer( ) )
{
case EOB_ACT_END_OF_FILE:
{
(yy_did_buffer_switch_on_eof) = 0;
if ( tagYYwrap( ) )
{
/* Note: because we've taken care in
* yy_get_next_buffer() to have set up
* tagYYtext, we can now set up
* yy_c_buf_p so that if some total
* hoser (like flex itself) wants to
* call the scanner after we return the
* YY_NULL, it'll still work - another
* YY_NULL will get returned.
*/
(yy_c_buf_p) = (yytext_ptr) + YY_MORE_ADJ;
yy_act = YY_STATE_EOF(YY_START);
goto do_action;
}
else
{
if ( ! (yy_did_buffer_switch_on_eof) )
YY_NEW_FILE;
}
break;
}
case EOB_ACT_CONTINUE_SCAN:
(yy_c_buf_p) =
(yytext_ptr) + yy_amount_of_matched_text;
yy_current_state = yy_get_previous_state( );
yy_cp = (yy_c_buf_p);
yy_bp = (yytext_ptr) + YY_MORE_ADJ;
goto yy_match;
case EOB_ACT_LAST_MATCH:
(yy_c_buf_p) =
&YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)];
yy_current_state = yy_get_previous_state( );
yy_cp = (yy_c_buf_p);
yy_bp = (yytext_ptr) + YY_MORE_ADJ;
goto yy_find_action;
}
break;
}
default:
YY_FATAL_ERROR(
"fatal flex scanner internal error--no action found" );
} /* end of action switch */
} /* end of scanning one token */
} /* end of tagYYlex */
/* yy_get_next_buffer - try to read in a new buffer
*
* Returns a code representing an action:
* EOB_ACT_LAST_MATCH -
* EOB_ACT_CONTINUE_SCAN - continue scanning from current position
* EOB_ACT_END_OF_FILE - end of file
*/
/* yy_get_next_buffer - flex-generated buffer refill routine.
 * Moves any partially matched token to the front of the current buffer,
 * grows the buffer when needed (only if the scanner owns it), then reads
 * more input via YY_INPUT.  Returns one of the EOB_ACT_* action codes.
 */
static int yy_get_next_buffer (void)
{
	register char *dest = YY_CURRENT_BUFFER_LVALUE->yy_ch_buf;
	register char *source = (yytext_ptr);
	register int number_to_move, i;
	int ret_val;
	if ( (yy_c_buf_p) > &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars) + 1] )
		YY_FATAL_ERROR(
		"fatal flex scanner internal error--end of buffer missed" );
	if ( YY_CURRENT_BUFFER_LVALUE->yy_fill_buffer == 0 )
		{ /* Don't try to fill the buffer, so this is an EOF. */
		if ( (yy_c_buf_p) - (yytext_ptr) - YY_MORE_ADJ == 1 )
			{
			/* We matched a single character, the EOB, so
			 * treat this as a final EOF.
			 */
			return EOB_ACT_END_OF_FILE;
			}
		else
			{
			/* We matched some text prior to the EOB, first
			 * process it.
			 */
			return EOB_ACT_LAST_MATCH;
			}
		}
	/* Try to read more data. */
	/* First move last chars to start of buffer. */
	number_to_move = (int) ((yy_c_buf_p) - (yytext_ptr)) - 1;
	for ( i = 0; i < number_to_move; ++i )
		*(dest++) = *(source++);
	if ( YY_CURRENT_BUFFER_LVALUE->yy_buffer_status == YY_BUFFER_EOF_PENDING )
		/* don't do the read, it's not guaranteed to return an EOF,
		 * just force an EOF
		 */
		YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars) = 0;
	else
		{
			yy_size_t num_to_read =
			YY_CURRENT_BUFFER_LVALUE->yy_buf_size - number_to_move - 1;
		while ( num_to_read <= 0 )
			{ /* Not enough room in the buffer - grow it. */
			/* just a shorter name for the current buffer */
			YY_BUFFER_STATE b = YY_CURRENT_BUFFER;
			int yy_c_buf_p_offset =
				(int) ((yy_c_buf_p) - b->yy_ch_buf);
			if ( b->yy_is_our_buffer )
				{
				yy_size_t new_size = b->yy_buf_size * 2;
				if ( new_size <= 0 )
					b->yy_buf_size += b->yy_buf_size / 8;
				else
					b->yy_buf_size *= 2;
				b->yy_ch_buf = (char *)
					/* Include room in for 2 EOB chars. */
					tagYYrealloc((void *) b->yy_ch_buf,b->yy_buf_size + 2 );
				}
			else
				/* Can't grow it, we don't own it. */
				b->yy_ch_buf = 0;
			if ( ! b->yy_ch_buf )
				YY_FATAL_ERROR(
				"fatal error - scanner input buffer overflow" );
			/* Buffer may have moved; re-derive the scan pointer from the offset. */
			(yy_c_buf_p) = &b->yy_ch_buf[yy_c_buf_p_offset];
			num_to_read = YY_CURRENT_BUFFER_LVALUE->yy_buf_size -
						number_to_move - 1;
			}
		if ( num_to_read > YY_READ_BUF_SIZE )
			num_to_read = YY_READ_BUF_SIZE;
		/* Read in more data. */
		YY_INPUT( (&YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[number_to_move]),
			(yy_n_chars), num_to_read );
		YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars);
		}
	if ( (yy_n_chars) == 0 )
		{
		if ( number_to_move == YY_MORE_ADJ )
			{
			ret_val = EOB_ACT_END_OF_FILE;
			tagYYrestart(tagYYin );
			}
		else
			{
			ret_val = EOB_ACT_LAST_MATCH;
			YY_CURRENT_BUFFER_LVALUE->yy_buffer_status =
				YY_BUFFER_EOF_PENDING;
			}
		}
	else
		ret_val = EOB_ACT_CONTINUE_SCAN;
	if ((yy_size_t) ((yy_n_chars) + number_to_move) > YY_CURRENT_BUFFER_LVALUE->yy_buf_size) {
		/* Extend the array by 50%, plus the number we really need. */
		yy_size_t new_size = (yy_n_chars) + number_to_move + ((yy_n_chars) >> 1);
		YY_CURRENT_BUFFER_LVALUE->yy_ch_buf = (char *) tagYYrealloc((void *) YY_CURRENT_BUFFER_LVALUE->yy_ch_buf,new_size );
		if ( ! YY_CURRENT_BUFFER_LVALUE->yy_ch_buf )
			YY_FATAL_ERROR( "out of dynamic memory in yy_get_next_buffer()" );
	}
	(yy_n_chars) += number_to_move;
	/* Re-plant the two end-of-buffer sentinel characters. */
	YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)] = YY_END_OF_BUFFER_CHAR;
	YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars) + 1] = YY_END_OF_BUFFER_CHAR;
	(yytext_ptr) = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[0];
	return ret_val;
}
/* yy_get_previous_state - get the state just before the EOB char was reached */
/* Re-run the DFA over the already-matched text to recover the state the
 * scanner was in just before it hit the end-of-buffer character.
 */
static yy_state_type yy_get_previous_state (void)
{
	register yy_state_type yy_current_state;
	register char *yy_cp;
	yy_current_state = (yy_start);
	yy_current_state += YY_AT_BOL();	/* start state depends on beginning-of-line flag */
	for ( yy_cp = (yytext_ptr) + YY_MORE_ADJ; yy_cp < (yy_c_buf_p); ++yy_cp )
		{
		/* NUL is not in yy_ec; it is hard-wired to equivalence class 1. */
		register YY_CHAR yy_c = (*yy_cp ? yy_ec[YY_SC_TO_UI(*yy_cp)] : 1);
		if ( yy_accept[yy_current_state] )
			{
			(yy_last_accepting_state) = yy_current_state;
			(yy_last_accepting_cpos) = yy_cp;
			}
		while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
			{
			yy_current_state = (int) yy_def[yy_current_state];
			if ( yy_current_state >= 707 )	/* states >= 707 use meta equivalence classes (generated table layout) */
				yy_c = yy_meta[(unsigned int) yy_c];
			}
		yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c];
		}
	return yy_current_state;
}
/* yy_try_NUL_trans - try to make a transition on the NUL character
*
* synopsis
* next_state = yy_try_NUL_trans( current_state );
*/
static yy_state_type yy_try_NUL_trans  (yy_state_type yy_current_state )
{
	register int yy_is_jam;
	register char *yy_cp = (yy_c_buf_p);
	register YY_CHAR yy_c = 1;	/* NUL is hard-wired to equivalence class 1 */
	if ( yy_accept[yy_current_state] )
		{
		(yy_last_accepting_state) = yy_current_state;
		(yy_last_accepting_cpos) = yy_cp;
		}
	while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
		{
		yy_current_state = (int) yy_def[yy_current_state];
		if ( yy_current_state >= 707 )	/* see yy_get_previous_state(): meta class compression */
			yy_c = yy_meta[(unsigned int) yy_c];
		}
	yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c];
	yy_is_jam = (yy_current_state == 706);	/* 706 is the generated jam (dead) state */
	/* 0 means "no transition possible on NUL" to the caller. */
	return yy_is_jam ? 0 : yy_current_state;
}
/* Push character c back onto the input stream, just before the current
 * token start yy_bp.  If there is no room at the front of the buffer the
 * whole contents are shifted towards the end first.
 */
static void yyunput (int c, register char * yy_bp )
{
	register char *yy_cp;
	yy_cp = (yy_c_buf_p);
	/* undo effects of setting up tagYYtext */
	*yy_cp = (yy_hold_char);
	if ( yy_cp < YY_CURRENT_BUFFER_LVALUE->yy_ch_buf + 2 )
		{ /* need to shift things up to make room */
		/* +2 for EOB chars. */
		register yy_size_t number_to_move = (yy_n_chars) + 2;
		register char *dest = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[
					YY_CURRENT_BUFFER_LVALUE->yy_buf_size + 2];
		register char *source =
				&YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[number_to_move];
		while ( source > YY_CURRENT_BUFFER_LVALUE->yy_ch_buf )
			*--dest = *--source;
		/* Relocate the scan pointers by the distance everything moved. */
		yy_cp += (int) (dest - source);
		yy_bp += (int) (dest - source);
		YY_CURRENT_BUFFER_LVALUE->yy_n_chars =
			(yy_n_chars) = YY_CURRENT_BUFFER_LVALUE->yy_buf_size;
		if ( yy_cp < YY_CURRENT_BUFFER_LVALUE->yy_ch_buf + 2 )
			YY_FATAL_ERROR( "flex scanner push-back overflow" );
		}
	*--yy_cp = (char) c;
	(yytext_ptr) = yy_bp;
	(yy_hold_char) = *yy_cp;
	(yy_c_buf_p) = yy_cp;
}
#ifndef YY_NO_INPUT
#ifdef __cplusplus
/* Read and return the next input character (or 0 on EOF), refilling the
 * buffer when the end-of-buffer sentinel is reached.
 */
    static int yyinput (void)
#else
    static int input  (void)
#endif
{
	int c;
	*(yy_c_buf_p) = (yy_hold_char);
	if ( *(yy_c_buf_p) == YY_END_OF_BUFFER_CHAR )
		{
		/* yy_c_buf_p now points to the character we want to return.
		 * If this occurs *before* the EOB characters, then it's a
		 * valid NUL; if not, then we've hit the end of the buffer.
		 */
		if ( (yy_c_buf_p) < &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)] )
			/* This was really a NUL. */
			*(yy_c_buf_p) = '\0';
		else
			{ /* need more input */
			yy_size_t offset = (yy_c_buf_p) - (yytext_ptr);
			++(yy_c_buf_p);
			switch ( yy_get_next_buffer( ) )
				{
				case EOB_ACT_LAST_MATCH:
					/* This happens because yy_g_n_b()
					 * sees that we've accumulated a
					 * token and flags that we need to
					 * try matching the token before
					 * proceeding. But for input(),
					 * there's no matching to consider.
					 * So convert the EOB_ACT_LAST_MATCH
					 * to EOB_ACT_END_OF_FILE.
					 */
					/* Reset buffer status. */
					tagYYrestart(tagYYin );
					/*FALLTHROUGH*/
				case EOB_ACT_END_OF_FILE:
					{
					if ( tagYYwrap( ) )
						return 0;
					if ( ! (yy_did_buffer_switch_on_eof) )
						YY_NEW_FILE;
#ifdef __cplusplus
					return yyinput();
#else
					return input();
#endif
					}
				case EOB_ACT_CONTINUE_SCAN:
					(yy_c_buf_p) = (yytext_ptr) + offset;
					break;
				}
			}
		}
	c = *(unsigned char *) (yy_c_buf_p);	/* cast for 8-bit char's */
	*(yy_c_buf_p) = '\0';	/* preserve tagYYtext */
	(yy_hold_char) = *++(yy_c_buf_p);
	/* Track beginning-of-line so ^-anchored rules keep working. */
	YY_CURRENT_BUFFER_LVALUE->yy_at_bol = (c == '\n');
	return c;
}
#endif	/* ifndef YY_NO_INPUT */
/** Immediately switch to a different input stream.
* @param input_file A readable stream.
*
* @note This function does not reset the start condition to @c INITIAL .
*/
    void tagYYrestart  (FILE * input_file )
{
	/* Lazily create the buffer stack and a default-sized buffer on first use. */
	if ( ! YY_CURRENT_BUFFER ){
		tagYYensure_buffer_stack ();
		YY_CURRENT_BUFFER_LVALUE =
			tagYY_create_buffer(tagYYin,YY_BUF_SIZE );
	}
	/* Re-point the (possibly reused) buffer at input_file and resync globals. */
	tagYY_init_buffer(YY_CURRENT_BUFFER,input_file );
	tagYY_load_buffer_state( );
}
/** Switch to a different input buffer.
* @param new_buffer The new input buffer.
*
*/
    void tagYY_switch_to_buffer  (YY_BUFFER_STATE  new_buffer )
{
	/* TODO. We should be able to replace this entire function body
	 * with
	 *		tagYYpop_buffer_state();
	 *		tagYYpush_buffer_state(new_buffer);
	 */
	tagYYensure_buffer_stack ();
	if ( YY_CURRENT_BUFFER == new_buffer )
		return;	/* already scanning this buffer */
	if ( YY_CURRENT_BUFFER )
		{
		/* Flush out information for old buffer. */
		*(yy_c_buf_p) = (yy_hold_char);
		YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = (yy_c_buf_p);
		YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars);
		}
	YY_CURRENT_BUFFER_LVALUE = new_buffer;
	tagYY_load_buffer_state( );
	/* We don't actually know whether we did this switch during
	 * EOF (tagYYwrap()) processing, but the only time this flag
	 * is looked at is after tagYYwrap() is called, so it's safe
	 * to go ahead and always set it.
	 */
	(yy_did_buffer_switch_on_eof) = 1;
}
/* Resync the scanner's global cursors (yy_n_chars, yytext_ptr, yy_c_buf_p,
 * tagYYin, yy_hold_char) from the current buffer state.
 */
static void tagYY_load_buffer_state  (void)
{
	(yy_n_chars) = YY_CURRENT_BUFFER_LVALUE->yy_n_chars;
	(yytext_ptr) = (yy_c_buf_p) = YY_CURRENT_BUFFER_LVALUE->yy_buf_pos;
	tagYYin = YY_CURRENT_BUFFER_LVALUE->yy_input_file;
	(yy_hold_char) = *(yy_c_buf_p);
}
/** Allocate and initialize an input buffer state.
* @param file A readable stream.
* @param size The character buffer size in bytes. When in doubt, use @c YY_BUF_SIZE.
*
* @return the allocated buffer state.
*/
    YY_BUFFER_STATE tagYY_create_buffer  (FILE * file, int  size )
{
	YY_BUFFER_STATE b;
	b = (YY_BUFFER_STATE) tagYYalloc(sizeof( struct yy_buffer_state ) );
	if ( ! b )
		YY_FATAL_ERROR( "out of dynamic memory in tagYY_create_buffer()" );
	b->yy_buf_size = size;
	/* yy_ch_buf has to be 2 characters longer than the size given because
	 * we need to put in 2 end-of-buffer characters.
	 */
	b->yy_ch_buf = (char *) tagYYalloc(b->yy_buf_size + 2 );
	if ( ! b->yy_ch_buf )
		YY_FATAL_ERROR( "out of dynamic memory in tagYY_create_buffer()" );
	b->yy_is_our_buffer = 1;	/* scanner owns it: tagYY_delete_buffer may free it */
	tagYY_init_buffer(b,file );
	return b;
}
/** Destroy the buffer.
* @param b a buffer created with tagYY_create_buffer()
*
*/
    void tagYY_delete_buffer (YY_BUFFER_STATE  b )
{
	if ( ! b )
		return;	/* deleting a NULL buffer is a no-op */
	if ( b == YY_CURRENT_BUFFER ) /* Not sure if we should pop here. */
		YY_CURRENT_BUFFER_LVALUE = (YY_BUFFER_STATE) 0;
	if ( b->yy_is_our_buffer )	/* only free character storage the scanner allocated */
		tagYYfree((void *) b->yy_ch_buf );
	tagYYfree((void *) b );
}
#ifndef __cplusplus
extern int isatty (int );
#endif /* __cplusplus */
/* Initializes or reinitializes a buffer.
* This function is sometimes called more than once on the same buffer,
* such as during a tagYYrestart() or at EOF.
*/
    static void tagYY_init_buffer  (YY_BUFFER_STATE  b, FILE * file )
{
	int oerrno = errno;	/* isatty()/fileno() below may clobber errno; restore it on exit */
	tagYY_flush_buffer(b );
	b->yy_input_file = file;
	b->yy_fill_buffer = 1;
	/* If b is the current buffer, then tagYY_init_buffer was _probably_
	 * called from tagYYrestart() or through yy_get_next_buffer.
	 * In that case, we don't want to reset the lineno or column.
	 */
	if (b != YY_CURRENT_BUFFER){
		b->yy_bs_lineno = 1;
		b->yy_bs_column = 0;
	}
	/* Interactive input is read one character at a time by YY_INPUT. */
	b->yy_is_interactive = file ? (isatty( fileno(file) ) > 0) : 0;
	errno = oerrno;
}
/** Discard all buffered characters. On the next scan, YY_INPUT will be called.
* @param b the buffer state to be flushed, usually @c YY_CURRENT_BUFFER.
*
*/
    void tagYY_flush_buffer (YY_BUFFER_STATE  b )
{
	if ( ! b )
		return;
	b->yy_n_chars = 0;
	/* We always need two end-of-buffer characters. The first causes
	 * a transition to the end-of-buffer state. The second causes
	 * a jam in that state.
	 */
	b->yy_ch_buf[0] = YY_END_OF_BUFFER_CHAR;
	b->yy_ch_buf[1] = YY_END_OF_BUFFER_CHAR;
	b->yy_buf_pos = &b->yy_ch_buf[0];
	b->yy_at_bol = 1;	/* an empty buffer is (re)started at beginning-of-line */
	b->yy_buffer_status = YY_BUFFER_NEW;
	/* Keep the global cursors consistent if we just flushed the active buffer. */
	if ( b == YY_CURRENT_BUFFER )
		tagYY_load_buffer_state( );
}
/** Pushes the new state onto the stack. The new state becomes
* the current state. This function will allocate the stack
* if necessary.
* @param new_buffer The new state.
*
*/
void tagYYpush_buffer_state (YY_BUFFER_STATE new_buffer )
{
	if (new_buffer == NULL)
		return;	/* pushing a NULL buffer is a no-op */
	tagYYensure_buffer_stack();
	/* This block is copied from tagYY_switch_to_buffer. */
	if ( YY_CURRENT_BUFFER )
		{
		/* Flush out information for old buffer. */
		*(yy_c_buf_p) = (yy_hold_char);
		YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = (yy_c_buf_p);
		YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars);
		}
	/* Only push if top exists. Otherwise, replace top. */
	if (YY_CURRENT_BUFFER)
		(yy_buffer_stack_top)++;
	YY_CURRENT_BUFFER_LVALUE = new_buffer;
	/* copied from tagYY_switch_to_buffer. */
	tagYY_load_buffer_state( );
	(yy_did_buffer_switch_on_eof) = 1;
}
/** Removes and deletes the top of the stack, if present.
* The next element becomes the new top.
* <|fim▁hole|> return;
tagYY_delete_buffer(YY_CURRENT_BUFFER );
YY_CURRENT_BUFFER_LVALUE = NULL;
if ((yy_buffer_stack_top) > 0)
--(yy_buffer_stack_top);
if (YY_CURRENT_BUFFER) {
tagYY_load_buffer_state( );
(yy_did_buffer_switch_on_eof) = 1;
}
}
/* Allocates the stack if it does not exist.
* Guarantees space for at least one push.
*/
/* Allocate or grow the buffer stack so there is always room for one push. */
static void tagYYensure_buffer_stack (void)
{
	yy_size_t num_to_alloc;
	if (!(yy_buffer_stack)) {
		/* First allocation is just for 2 elements, since we don't know if this
		 * scanner will even need a stack. We use 2 instead of 1 to avoid an
		 * immediate realloc on the next call.
		 * NOTE(review): despite the comment above, the code allocates a
		 * single slot (num_to_alloc = 1) — a long-standing flex quirk.
		 */
		num_to_alloc = 1;
		(yy_buffer_stack) = (struct yy_buffer_state**)tagYYalloc
								(num_to_alloc * sizeof(struct yy_buffer_state*)
								);
		if ( ! (yy_buffer_stack) )
			YY_FATAL_ERROR( "out of dynamic memory in tagYYensure_buffer_stack()" );
		memset((yy_buffer_stack), 0, num_to_alloc * sizeof(struct yy_buffer_state*));
		(yy_buffer_stack_max) = num_to_alloc;
		(yy_buffer_stack_top) = 0;
		return;
	}
	if ((yy_buffer_stack_top) >= ((yy_buffer_stack_max)) - 1){
		/* Increase the buffer to prepare for a possible push. */
		int grow_size = 8 /* arbitrary grow size */;
		num_to_alloc = (yy_buffer_stack_max) + grow_size;
		(yy_buffer_stack) = (struct yy_buffer_state**)tagYYrealloc
								((yy_buffer_stack),
								num_to_alloc * sizeof(struct yy_buffer_state*)
								);
		if ( ! (yy_buffer_stack) )
			YY_FATAL_ERROR( "out of dynamic memory in tagYYensure_buffer_stack()" );
		/* zero only the new slots.*/
		memset((yy_buffer_stack) + (yy_buffer_stack_max), 0, grow_size * sizeof(struct yy_buffer_state*));
		(yy_buffer_stack_max) = num_to_alloc;
	}
}
/** Setup the input buffer state to scan directly from a user-specified character buffer.
* @param base the character buffer
* @param size the size in bytes of the character buffer
*
* @return the newly allocated buffer state object.
*/
/* Scan directly from a caller-owned buffer.  The caller must have placed
 * the two YY_END_OF_BUFFER_CHAR sentinels at the end; returns 0 if not.
 */
YY_BUFFER_STATE tagYY_scan_buffer  (char * base, yy_size_t  size )
{
	YY_BUFFER_STATE b;
	if ( size < 2 ||
	     base[size-2] != YY_END_OF_BUFFER_CHAR ||
	     base[size-1] != YY_END_OF_BUFFER_CHAR )
		/* They forgot to leave room for the EOB's. */
		return 0;
	b = (YY_BUFFER_STATE) tagYYalloc(sizeof( struct yy_buffer_state ) );
	if ( ! b )
		YY_FATAL_ERROR( "out of dynamic memory in tagYY_scan_buffer()" );
	b->yy_buf_size = size - 2;	/* "- 2" to take care of EOB's */
	b->yy_buf_pos = b->yy_ch_buf = base;
	b->yy_is_our_buffer = 0;	/* caller owns the memory; never freed/grown by us */
	b->yy_input_file = 0;
	b->yy_n_chars = b->yy_buf_size;
	b->yy_is_interactive = 0;
	b->yy_at_bol = 1;
	b->yy_fill_buffer = 0;	/* no refilling: EOB here really means EOF */
	b->yy_buffer_status = YY_BUFFER_NEW;
	tagYY_switch_to_buffer(b );
	return b;
}
/** Setup the input buffer state to scan a string. The next call to tagYYlex() will
* scan from a @e copy of @a str.
* @param yystr a NUL-terminated string to scan
*
* @return the newly allocated buffer state object.
* @note If you want to scan bytes that may contain NUL values, then use
* tagYY_scan_bytes() instead.
*/
YY_BUFFER_STATE tagYY_scan_string (yyconst char * yystr )
{
	/* Delegates to tagYY_scan_bytes(), which scans a private copy of yystr. */
	return tagYY_scan_bytes(yystr,strlen(yystr) );
}
/** Setup the input buffer state to scan the given bytes. The next call to tagYYlex() will
* scan from a @e copy of @a bytes.
* @param bytes the byte buffer to scan
* @param len the number of bytes in the buffer pointed to by @a bytes.
*
* @return the newly allocated buffer state object.
*/
YY_BUFFER_STATE tagYY_scan_bytes  (yyconst char * yybytes, yy_size_t  _yybytes_len )
{
	YY_BUFFER_STATE b;
	char *buf;
	yy_size_t n, i;
	/* Get memory for full buffer, including space for trailing EOB's. */
	n = _yybytes_len + 2;
	buf = (char *) tagYYalloc(n );
	if ( ! buf )
		YY_FATAL_ERROR( "out of dynamic memory in tagYY_scan_bytes()" );
	for ( i = 0; i < _yybytes_len; ++i )
		buf[i] = yybytes[i];
	/* Plant the two end-of-buffer sentinels tagYY_scan_buffer() requires. */
	buf[_yybytes_len] = buf[_yybytes_len+1] = YY_END_OF_BUFFER_CHAR;
	b = tagYY_scan_buffer(buf,n );
	if ( ! b )
		YY_FATAL_ERROR( "bad buffer in tagYY_scan_bytes()" );
	/* It's okay to grow etc. this buffer, and we should throw it
	 * away when we're done.
	 */
	b->yy_is_our_buffer = 1;
	return b;
}
#ifndef YY_EXIT_FAILURE
#define YY_EXIT_FAILURE 2
#endif
/* Report an unrecoverable scanner error on stderr and abort the process. */
static void yy_fatal_error (yyconst char* msg )
{
	(void) fprintf( stderr, "%s\n", msg );
	exit( YY_EXIT_FAILURE );
}
/* Redefine yyless() so it works in section 3 code. */
#undef yyless
#define yyless(n) \
do \
{ \
/* Undo effects of setting up tagYYtext. */ \
int yyless_macro_arg = (n); \
YY_LESS_LINENO(yyless_macro_arg);\
tagYYtext[tagYYleng] = (yy_hold_char); \
(yy_c_buf_p) = tagYYtext + yyless_macro_arg; \
(yy_hold_char) = *(yy_c_buf_p); \
*(yy_c_buf_p) = '\0'; \
tagYYleng = yyless_macro_arg; \
} \
while ( 0 )
/* Accessor methods (get/set functions) to struct members. */
/** Get the current line number.
*
*/
/* Returns the current line number (tagYYlineno). */
int tagYYget_lineno  (void)
{
	return tagYYlineno;
}
/** Get the input stream (tagYYin).
 *
 */
FILE *tagYYget_in  (void)
{
	return tagYYin;
}
/** Get the output stream (tagYYout).
 *
 */
FILE *tagYYget_out  (void)
{
	return tagYYout;
}
/** Get the length of the current token (tagYYleng).
 *
 */
yy_size_t tagYYget_leng  (void)
{
	return tagYYleng;
}
/** Get the current token text (tagYYtext).
 *
 */
char *tagYYget_text  (void)
{
	return tagYYtext;
}
/** Set the current line number.
 * @param line_number the new value for tagYYlineno
 *
 */
void tagYYset_lineno (int  line_number )
{
	tagYYlineno = line_number;
}
/** Set the input stream. This does not discard the current
 * input buffer.
 * @param in_str A readable stream.
 *
 * @see tagYY_switch_to_buffer
 */
void tagYYset_in (FILE *  in_str )
{
	tagYYin = in_str ;
}
/** Set the output stream (tagYYout). */
void tagYYset_out (FILE *  out_str )
{
	tagYYout = out_str ;
}
/** Get the flex debug flag. */
int tagYYget_debug  (void)
{
	return tagYY_flex_debug;
}
/** Set the flex debug flag. */
void tagYYset_debug (int  bdebug )
{
	tagYY_flex_debug = bdebug ;
}
/* Reset all scanner globals to their pre-first-use state.  Always returns 0. */
static int yy_init_globals (void)
{
	/* Initialization is the same as for the non-reentrant scanner.
	 * This function is called from tagYYlex_destroy(), so don't allocate here.
	 */
	(yy_buffer_stack) = 0;
	(yy_buffer_stack_top) = 0;
	(yy_buffer_stack_max) = 0;
	(yy_c_buf_p) = (char *) 0;
	(yy_init) = 0;
	(yy_start) = 0;
/* Defined in main.c */
#ifdef YY_STDINIT
	tagYYin = stdin;
	tagYYout = stdout;
#else
	tagYYin = (FILE *) 0;
	tagYYout = (FILE *) 0;
#endif
	/* For future reference: Set errno on error, since we are called by
	 * tagYYlex_init()
	 */
	return 0;
}
/* tagYYlex_destroy is for both reentrant and non-reentrant scanners. */
/* Free every buffer plus the stack itself and reset the globals, so a
 * subsequent tagYYlex() call re-initializes from scratch.  Returns 0.
 */
int tagYYlex_destroy  (void)
{
	/* Pop the buffer stack, destroying each element. */
	while(YY_CURRENT_BUFFER){
		tagYY_delete_buffer(YY_CURRENT_BUFFER );
		YY_CURRENT_BUFFER_LVALUE = NULL;
		tagYYpop_buffer_state();
	}
	/* Destroy the stack itself. */
	tagYYfree((yy_buffer_stack) );
	(yy_buffer_stack) = NULL;
	/* Reset the globals. This is important in a non-reentrant scanner so the next time
	 * tagYYlex() is called, initialization will occur. */
	yy_init_globals( );
	return 0;
}
/*
* Internal utility routines.
*/
#ifndef yytext_ptr
/* Copy exactly n characters from s2 to s1; no NUL terminator is appended. */
static void yy_flex_strncpy (char* s1, yyconst char * s2, int n )
{
	register int i;
	for ( i = 0; i < n; ++i )
		s1[i] = s2[i];
}
#endif
#ifdef YY_NEED_STRLEN
/* Portable strlen() replacement used when the host library lacks one:
 * returns the number of characters preceding the terminating NUL.
 */
static int yy_flex_strlen (yyconst char * s )
{
	yyconst char *p = s;
	while ( *p )
		++p;
	return (int) (p - s);
}
#endif
/* Thin wrappers around the C allocator; all scanner memory goes through
 * these so they form a single override point.
 */
void *tagYYalloc (yy_size_t  size )
{
	return (void *) malloc( size );
}
void *tagYYrealloc  (void * ptr, yy_size_t  size )
{
	/* The cast to (char *) in the following accommodates both
	 * implementations that use char* generic pointers, and those
	 * that use void* generic pointers. It works with the latter
	 * because both ANSI C and C++ allow castless assignment from
	 * any pointer type to void*, and deal with argument conversions
	 * as though doing an assignment.
	 */
	return (void *) realloc( (char *) ptr, size );
}
void tagYYfree (void * ptr )
{
	free( (char *) ptr );	/* see tagYYrealloc() for (char *) cast */
}
#define YYTABLES_NAME "yytables"
#line 674 "doxytag.l"
/*@ ----------------------------------------------------------------------------
*/
/* Lex the HTML text in `s` with the generated scanner.
 * Resets the per-file state (base-class list, line counter, input cursor)
 * first; the scanner rules accumulate results via addClass()/addMember().
 * inputString/inputPosition are presumably consumed by the scanner's
 * YY_INPUT macro defined earlier in this file.
 */
void parse(QCString &s)
{
  bases.clear();
  nameBug = FALSE;
  //newClass = TRUE;
  inputString = s;
  inputPosition = 0;
  yyLineNr = 0;
  tagYYrestart( tagYYin );
  BEGIN( Start );
  tagYYlex();
  //printf("Number of lines scanned: %d\n",yyLineNr);
}
/* Parse a single HTML file: reads the whole file into memory and hands
 * it to parse().  Also records the file name as the link target
 * (docBaseLink/docRefName) for the tags found in it.  A file that cannot
 * be opened only produces a warning on stderr.
 */
void parseFile(QFileInfo &fi)
{
  printf("Parsing file %s...\n",fi.fileName().data());
  QFile f;
  f.setName(fi.absFilePath());
  if (f.open(IO_ReadOnly))
  {
    yyFileName = fi.fileName();
    className.resize(0);
    memberName.resize(0);
    //printf("Parsing file %s...\n",fi.fileName().data());
    QCString input(fi.size()+1);   // +1 for the terminating NUL added below
    docBaseLink=fi.fileName();
    docRefName=fi.fileName().copy();
    //searchIndex.addReference(docRefName,docBaseLink);
    //searchIndex.addWord(docRefName,docRefName,TRUE);
    // NOTE(review): readBlock() return value is unchecked; a short read
    // would leave uninitialized bytes in `input` — confirm acceptable.
    f.readBlock(input.data(),fi.size());
    input.at(fi.size())='\0';
    parse(input);
  }
  else
  {
    fprintf(stderr,"Warning: Cannot open file %s\n",fi.fileName().data());
  }
}
/* Parse `fileName`: a plain file is parsed directly; a directory is
 * scanned (non-recursively) for *.html files, each of which is parsed.
 * A missing path only produces a warning.
 */
void parseFileOrDir(const char *fileName)
{
  QFileInfo fi(fileName);
  if (fi.exists())
  {
    if (fi.isFile())
    {
      parseFile(fi);
    }
    else if (fi.isDir())
    {
      QDir dir(fileName);
      dir.setFilter( QDir::Files );
      dir.setNameFilter( "*.html" );   // only HTML files are of interest
      const QFileInfoList *list = dir.entryInfoList();
      QFileInfoListIterator it( *list );
      QFileInfo *cfi;
      for ( it.toFirst() ; (cfi=it.current()) ; ++it)
      {
        if (cfi->isFile())
        {
          parseFile(*cfi);
        }
      }
    }
  }
  else
  {
    fprintf(stderr,"Warning: File %s does not exist\n",fileName);
  }
}
/* Print version and usage information to stderr, then exit(1).
 * NOTE(review): -s is still accepted by main() but not documented here;
 * presumably intentional, since search-index generation was moved into
 * doxygen itself (see the genIndex error in main()).
 */
void usage(const char *name)
{
  fprintf(stderr,"Doxytag version %s\nCopyright Dimitri van Heesch 1997-2010\n\n",
      versionString);
  fprintf(stderr," Generates a tag file and/or a search index for a set of HTML files\n\n");
  fprintf(stderr,"Usage: %s [-t tag_file] [ html_file [html_file...] ]\n",name);
  fprintf(stderr,"Options:\n");
  fprintf(stderr," -t <tag_file> Generate tag file <tag_file>.\n");
  fprintf(stderr,"If no HTML files are given all files in the current dir that\n"
      "have a .html extension are parsed.\n\n");
  exit(1);
}
/* Return the argument of short option -c, which may either be attached
 * ("-tfoo") or the next argv element ("-t foo"); in the latter case
 * optind is advanced past the consumed element.  Exits with code 1 if
 * no argument is present.
 */
const char *getArg(int argc,char **argv,int &optind,const char c)
{
  const char *result=0;
  if (argv[optind][2]!='\0')        // attached form: "-tfoo"
  {
    result=&argv[optind][2];
  }
  else if (optind+1<argc)           // separated form: "-t foo"
  {
    result=argv[++optind];
  }
  else
  {
    fprintf(stderr,"option -%c requires an argument\n",c);
    exit(1);
  }
  return result;
}
/* doxytag entry point: parses command-line options, scans the given
 * HTML files (or all *.html in the current directory), and writes the
 * collected classes/members as an XML tag file.
 */
int main(int argc,char **argv)
{
  QCString tagName;
  QCString indexName;
  int optind=1;
  const char *arg;
  /* Option processing stops at the first non-option argument. */
  while (optind<argc && argv[optind][0]=='-')
  {
    switch(argv[optind][1])
    {
      case 't':
        arg=getArg(argc,argv,optind,'t');
        tagName=arg;
        break;
      case 's': /* still parsed, but rejected below: index generation moved into doxygen */
        arg=getArg(argc,argv,optind,'s');
        indexName=arg;
        break;
      case 'h':
      case '?':
        usage(argv[0]);
        break;
      default:
        fprintf(stderr,"Unknown option -%c\n",argv[optind][1]);
        usage(argv[0]);
    }
    optind++;
  }
  genTag = !tagName.isEmpty();
  genIndex = !indexName.isEmpty();
  if (!genTag && !genIndex)
  {
    fprintf(stderr,"Nothing to do !\n\n");
    usage(argv[0]);  /* usage() exits */
  }
  int i;
  if (optind>=argc)
  {
    /* No files given: scan the current directory for *.html files. */
    parseFileOrDir(".");
  }
  else
  {
    for (i=optind;i<argc;i++)
    {
      parseFileOrDir(argv[i]);
    }
  }
  if (genIndex)
  {
    fprintf(stderr,"Error: doxytag cannot be used to generate a search index anymore.\n"
                   "This functionality has been integrated into doxygen.\n");
// printf("Writing search index\n");
// if (!searchIndex.saveIndex(indexName))
// {
// fprintf(stderr,"Error: Could not write search index\n");
// }
// QFileInfo fi(indexName);
// if (fi.exists())
// {
// QCString dir=convertToQCString(fi.dir().absPath());
// fi.setFile(dir+"/search.png");
// if (!fi.exists()) writeSearchButton(dir);
// fi.setFile(dir+"/doxygen.png");
// if (!fi.exists()) writeLogo(dir);
// fi.setFile(dir+"/search.cgi");
// if (!fi.exists())
// {
// QFile f;
// f.setName(dir+"/search.cgi");
// if (f.open(IO_WriteOnly))
// {
// QTextStream t(&f);
// t << "#!/bin/sh" << endl
// << "DOXYSEARCH=" << endl
// << "DOXYPATH=" << endl
// << "if [ -f $DOXYSEARCH ]" << endl
// << "then" << endl
// << " $DOXYSEARCH $DOXYPATH" << endl
// << "else" << endl
// << " echo \"Content-Type: text/html\"" << endl
// << " echo \"\"" << endl
// << " echo \"<H1>Error: $DOXYSEARCH not found. Check cgi script!\"" << endl
// << "fi" << endl;
// f.close();
// }
// else
// {
// fprintf(stderr,"Error: could not open file %s for writing\n",(dir+"/search.cgi").data());
// }
// }
// }
  }
  if (genTag)
  {
    /* Emit every collected compound (class or file) with its bases and members. */
    QFile f;
    f.setName(tagName);
    if (f.open(IO_WriteOnly))
    {
      QTextStream t(&f);
      t << "<tagfile>" << endl;
      ClassDef *cd=classList.first();
      while (cd)
      {
        t << " <compound kind=\"";
        if (cd->isFile) t << "file"; else t << "class";
        t << "\">" << endl;
        t << " <name>" << convertToXML(cd->name) << "</name>" << endl;
        char *base=cd->bases.first();
        while (base)
        {
          t << " <base>" << convertToXML(base) << "</base>" << endl;
          base=cd->bases.next();
        }
        t << " <filename>" << convertToXML(cd->fileName) << "</filename>" << endl;
        MemberDef *md=cd->memberList.first();
        while (md)
        {
          /* Enum members are recognized by the "-enum" suffix of their HTML anchor. */
          if (md->anchor.right(5)=="-enum")
          {
            t << " <member kind=\"enum\">" << endl;
          }
          else
          {
            t << " <member kind=\"function\">" << endl;
          }
          t << " <name>" << convertToXML(md->name) << "</name>" << endl;
          t << " <anchor>" << convertToXML(md->anchor) << "</anchor>" << endl;
          t << " <arglist>" << convertToXML(md->args) << "</arglist>" << endl;
          t << " </member>" << endl;
          md=cd->memberList.next();
        }
        t << " </compound>" << endl;
        cd=classList.next();
      }
      t << "</tagfile>" << endl;
    }
    else
    {
      fprintf(stderr,"Error: Could not write tag file %s\n",tagName.data());
    }
  }
  return 0;
}
/* flex wrap callback: returning 1 means there is no further input after EOF. */
extern "C" {
  int tagYYwrap() { return 1 ; }
};
|
*/
void tagYYpop_buffer_state (void)
{
if (!YY_CURRENT_BUFFER)
|
<|file_name|>scan.rs<|end_file_name|><|fim▁begin|>use std::fs;
use std::io;
use std::os::unix::fs::PermissionsExt;
use std::path::PathBuf;
pub struct Scandir {
pub path: PathBuf,
pub interval: u64,
}
<|fim▁hole|>impl Scandir {
    /// Build a `Scandir` for `dir`, canonicalizing the path first.
    ///
    /// `seconds` is stored in the `interval` field (presumably the rescan
    /// interval — it is not used within this fragment).  Any `io::Error`
    /// from `fs::canonicalize` (e.g. a nonexistent directory) is returned
    /// to the caller via `try!`.
    pub fn new(dir: &str, seconds: u64) -> Result<Scandir, io::Error> {
        let path = try!(fs::canonicalize(dir));
        Ok(
            Scandir {
                path: path,
                interval: seconds
            }
        )
    }
    /// List the entries of `self.path`, printing each entry's path, name,
    /// octal permission mode, and whether it is an executable *file*
    /// (any of the owner/group/other execute bits set).
    ///
    /// A `read_dir` error is printed and aborts the scan; per-entry
    /// metadata errors panic via `unwrap()`.
    pub fn scan(&self) {
        let files = match fs::read_dir(&self.path) {
            Err(f) => {
                println!("{}", f);
                return;
            }
            Ok(f) => f
        };
        for f in files {
            let file = f.unwrap();
            let mode = file.metadata().unwrap().permissions().mode();
            // Directories are deliberately reported as non-executable,
            // even though their mode typically has execute bits set.
            let mut is_exec: bool = false;
            if !file.file_type().unwrap().is_dir() {
                is_exec = mode & 0o111 != 0;
            }
            println!("path: {} name: {} mode: {:o} is_exec: {}", file.path().display(), file.file_name().into_string().unwrap(), mode, is_exec);
        }
    }
}<|fim▁end|>
| |
<|file_name|>Solution.py<|end_file_name|><|fim▁begin|>class Solution:
    """
    @param s: The first string
    @param t: The second string
    @return true or false
    """
    # Time: O(nlogn), dominated by the two sorts
    # Space: O(n) for the two sorted copies<|fim▁hole|>        s = sorted(s)
        t = sorted(t)
        return s == t<|fim▁end|>
|
def anagram(self, s, t):
# write your code here
|
<|file_name|>iip_erase_dot_noise_uchar.cpp<|end_file_name|><|fim▁begin|>#include "pri.h"
#include "calcu_erase_dot_noise.h"
#include "iip_erase_dot_noise.h"
void iip_erase_dot_noise::_exec_uchar( long l_width, long l_height, long l_area_xpos, long l_area_ypos, long l_area_xsize, long l_area_ysize, long l_channels, unsigned char *ucharp_in, unsigned char *ucharp_out )
{
long l_start, l_scansize;
long xx,yy;
unsigned char *ucharp_in_y1,*ucharp_in_y2,*ucharp_in_y3;
unsigned char *ucharp_in_x11,*ucharp_in_x12,*ucharp_in_x13,
*ucharp_in_x21,*ucharp_in_x22,*ucharp_in_x23,
*ucharp_in_x31,*ucharp_in_x32,*ucharp_in_x33;
unsigned char *ucharp_out_y1,*ucharp_out_y2;<|fim▁hole|> calcu_erase_dot_noise cl_dot;
l_height;
/* 初期値 */
l_scansize = l_width * l_channels;
l_start = l_area_ypos * l_scansize + l_area_xpos * l_channels;
ucharp_in += l_start;
ucharp_out += l_start;
/* 縦方向ポインター初期化 */
ucharp_in_y1 = ucharp_in;
ucharp_in_y2 = ucharp_in_y3 = NULL;
ucharp_out_y1 = ucharp_out;
ucharp_out_y2 = NULL;
/* 縦方向ループ */
for (yy = 0L; yy < l_area_ysize; ++yy,
/* 縦方向の3連ポインター進める */
ucharp_in_y3 = ucharp_in_y2,
ucharp_in_y2 = ucharp_in_y1,
ucharp_in_y1 += l_scansize,
ucharp_out_y2 = ucharp_out_y1,
ucharp_out_y1 += l_scansize
) {
/* カウントダウン表示中 */
if (ON == this->get_i_cv_sw()) { pri_funct_cv_run(yy); }
/* 3連満ちるまで */
if (NULL == ucharp_in_y3) { continue; }
/* 横方向ポインター初期化 */
ucharp_in_x11 = ucharp_in_y1;
ucharp_in_x12 = ucharp_in_x13 = NULL;
ucharp_in_x21 = ucharp_in_y2;
ucharp_in_x22 = ucharp_in_x23 = NULL;
ucharp_in_x31 = ucharp_in_y3;
ucharp_in_x32 = ucharp_in_x33 = NULL;
ucharp_out_x1 = ucharp_out_y2;
ucharp_out_x2 = NULL;
/* 横方向ループ */
for (xx = 0L; xx < l_area_xsize; ++xx,
/* 横方向の3x3連ポインター進める */
ucharp_in_x33 = ucharp_in_x32,
ucharp_in_x32 = ucharp_in_x31,
ucharp_in_x31 += l_channels,
ucharp_in_x23 = ucharp_in_x22,
ucharp_in_x22 = ucharp_in_x21,
ucharp_in_x21 += l_channels,
ucharp_in_x13 = ucharp_in_x12,
ucharp_in_x12 = ucharp_in_x11,
ucharp_in_x11 += l_channels,
ucharp_out_x2 = ucharp_out_x1,
ucharp_out_x1 += l_channels
) {
/* 3連満ちるまで */
if (NULL == ucharp_in_x13) { continue; }
/* dotをつぶすか判断 */
ucharp_tmp = cl_dot.get_ucharp(
ucharp_in_x11,ucharp_in_x12,ucharp_in_x13,
ucharp_in_x21,ucharp_in_x22,ucharp_in_x23,
ucharp_in_x31,ucharp_in_x32,ucharp_in_x33
);
/* dotをつぶす */
if (NULL != ucharp_tmp) {
ucharp_out_x2[CH_RED] = ucharp_tmp[CH_RED];
ucharp_out_x2[CH_GRE] = ucharp_tmp[CH_GRE];
ucharp_out_x2[CH_BLU] = ucharp_tmp[CH_BLU];
}
}
}
}<|fim▁end|>
|
unsigned char *ucharp_out_x1,*ucharp_out_x2;
unsigned char *ucharp_tmp;
|
<|file_name|>callback.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 Yusuke Sasaki
//
// This software is released under the MIT License.
// See http://opensource.org/licenses/mit-license.php or <LICENSE>.
extern crate gurobi;
use gurobi::*;
use std::io::{BufWriter, Write};
use std::fs::OpenOptions;
/// Entry point: reads a model file named by the first command-line
/// argument, registers a logging / early-termination callback and runs
/// the optimization, finally printing every variable with value > 1e-25.
fn main() {
  let mut env = Env::new("callback.log").unwrap();
  env.set(param::OutputFlag, 0).unwrap();
  env.set(param::Heuristics, 0.0).unwrap();
  let mut model = Model::read_from(&std::env::args().nth(1).unwrap(), &env).unwrap();
  let callback = {
    // Throttle state: last simplex iteration / MIP node that was logged,
    // so progress lines appear only every 100 iterations/nodes.
    let mut lastiter = -INFINITY;
    let mut lastnode = -INFINITY;
    let vars: Vec<_> = model.get_vars().cloned().collect();
    // Gurobi log messages are mirrored into cb.log.
    let file = OpenOptions::new().write(true).create(true).open("cb.log").unwrap();
    let mut writer = BufWriter::new(file);
    move |ctx: Callback| {
      use gurobi::Where::*;
      match ctx.get_where() {<|fim▁hole|>        Polling => {
          // Ignore polling callback
        }
        // Currently performing presolve
        PreSolve { coldel, rowdel, .. } => {
          println!("@PreSolve");
          if coldel > 0 || rowdel > 0 {
            println!("**** {} columns and {} rows are removed. ****",
                     coldel,
                     rowdel);
          }
        }
        // Currently in simplex
        Simplex { ispert, itrcnt, objval, priminf, dualinf } => {
          // Log only every 100 iterations.
          if itrcnt - lastiter >= 100.0 {
            lastiter = itrcnt;
            let ch = match ispert {
              0 => ' ',
              1 => 'S',
              _ => 'P'
            };
            println!("@Simplex: itrcnt={}, objval={}{}, priminf={}, dualinf={}.",
                     itrcnt,
                     objval,
                     ch,
                     priminf,
                     dualinf);
          }
        }
        // Currently in MIP
        MIP { solcnt, cutcnt, objbst, objbnd, nodcnt, nodleft: actnodes, itrcnt } => {
          // Log only every 100 nodes.
          if nodcnt - lastnode >= 100.0 {
            lastnode = nodcnt;
            println!("@MIP: nodcnt={}, actnodes={}, itrcnt={}, objbst={}, objbnd={}, solcnt={}, cutcnt={}.",
                     nodcnt,
                     actnodes,
                     itrcnt,
                     objbst,
                     objbnd,
                     solcnt,
                     cutcnt);
          }
          // Stop once the bound gap drops below 10% of the incumbent.
          if (objbst - objbnd).abs() < 0.1 * (1.0 + objbst.abs()) {
            println!("Stop early - 10% gap achived");
            ctx.terminate();
          }
          // Give up after 10000 nodes, provided some solution exists.
          if nodcnt >= 10000.0 && solcnt != 0.0 {
            println!("Stop early - 10000 nodes explored");
            ctx.terminate();
          }
        }
        // Found a new MIP incumbent
        MIPSol { solcnt, obj, nodcnt, .. } => {
          println!("@MIPSol: ");
          // `try!` is deprecated; use the `?` operator instead.
          let x = ctx.get_solution(vars.as_slice())?;
          println!("**** New solution at node {}, obj {}, sol {}, x[0] = {} ****",
                   nodcnt,
                   obj,
                   solcnt,
                   x[0]);
        }
        // Currently exploring a MIP node
        MIPNode { .. } => {
          println!("@MIPNode");
          println!("**** NEW NODE! ****");
          let x = ctx.get_node_rel(vars.as_slice())?;
          println!("  relaxed solution = {:?}", x);
          ctx.set_solution(vars.as_slice(), x.as_slice())?;
        }
        // Currently in barrier
        Barrier { itrcnt, primobj, dualobj, priminf, dualinf, compl } => {
          println!("@Barrier: itrcnt={}, primobj={}, dualobj={}, priminf={}, dualinf={}, compl={}.",
                   itrcnt,
                   primobj,
                   dualobj,
                   priminf,
                   dualinf,
                   compl);
        }
        // Printing a log message
        Message(message) => {
          writer.write_all(message.as_bytes()).unwrap();
          writer.write_all(&[b'\n']).unwrap();
        }
      }
      Ok(())
    }
  };
  model.optimize_with_callback(callback).unwrap();
  println!("\nOptimization complete");
  if model.get(attr::SolCount).unwrap() == 0 {
    println!("No solution found. optimization status = {:?}",
             model.status());
  } else {
    println!("Solution found. objective = {}",
             model.get(attr::ObjVal).unwrap());
    for v in model.get_vars() {
      let vname = v.get(&model, attr::VarName).unwrap();
      let value = v.get(&model, attr::X).unwrap();
      if value > 1e-25 {
        println!("  {}: {}", vname, value);
      }
    }
  }
}<|fim▁end|>
|
// Periodic polling callback
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.