prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>AlertFadeless.js<|end_file_name|><|fim▁begin|>import React, { useState } from 'react';<|fim▁hole|>export const AlertFadelessExample = (props) => {
const [visible, setVisible] = useState(true);
const onDismiss = () => setVisible(false);
return (
<div>
<Alert color="primary" isOpen={visible} toggle={onDismiss} fade={false}>
I am a primary alert and I can be dismissed without animating!
</Alert>
</div>
);
}
export function UncontrolledAlertFadelessExample() {
return (
<div>
<UncontrolledAlert color="info" fade={false}>
I am an alert and I can be dismissed without animating!
</UncontrolledAlert>
</div>
);
}<|fim▁end|> | import { UncontrolledAlert } from 'reactstrap';
import Alert from '../../../src/Alert';
|
<|file_name|>UserInfoDao.java<|end_file_name|><|fim▁begin|>package com.pmis.manage.dao;
import org.springframework.stereotype.Component;
import com.pmis.common.dao.CommonDao;
@Component
<|fim▁hole|>
}<|fim▁end|> | public class UserInfoDao extends CommonDao{
|
<|file_name|>configuration.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright 2013 Palantir Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import findup = require("findup-sync");
import * as fs from "fs";
import * as path from "path";
import * as resolve from "resolve";
import { FatalError } from "./error";
import {arrayify, objectify, stripComments} from "./utils";
export interface IConfigurationFile {
extends?: string | string[];
jsRules?: any;
linterOptions?: {
typeCheck?: boolean,
};
rulesDirectory?: string | string[];
rules?: any;
}
export interface IConfigurationLoadResult {
path?: string;
results?: IConfigurationFile;
}
export const CONFIG_FILENAME = "tslint.json";
/* tslint:disable:object-literal-key-quotes */
export const DEFAULT_CONFIG = {
"jsRules": {
"class-name": true,
"comment-format": [true, "check-space"],
"indent": [true, "spaces"],
"no-duplicate-variable": true,
"no-eval": true,
"no-trailing-whitespace": true,
"no-unsafe-finally": true,
"one-line": [true, "check-open-brace", "check-whitespace"],
"quotemark": [true, "double"],
"semicolon": [true, "always"],
"triple-equals": [true, "allow-null-check"],
"variable-name": [true, "ban-keywords"],
"whitespace": [true,
"check-branch",
"check-decl",
"check-operator",
"check-separator",
"check-type",
],
},
"rules": {
"class-name": true,
"comment-format": [true, "check-space"],
"indent": [true, "spaces"],
"no-eval": true,
"no-internal-module": true,
"no-trailing-whitespace": true,
"no-unsafe-finally": true,
"no-var-keyword": true,
"one-line": [true, "check-open-brace", "check-whitespace"],
"quotemark": [true, "double"],
"semicolon": [true, "always"],
"triple-equals": [true, "allow-null-check"],
"typedef-whitespace": [
true, {
"call-signature": "nospace",
"index-signature": "nospace",
"parameter": "nospace",
"property-declaration": "nospace",
"variable-declaration": "nospace",
},
],
"variable-name": [true, "ban-keywords"],
"whitespace": [true,
"check-branch",
"check-decl",
"check-operator",
"check-separator",
"check-type",
],
},
};
/* tslint:enable:object-literal-key-quotes */
const BUILT_IN_CONFIG = /^tslint:(.*)$/;
/**
* Searches for a TSLint configuration and returns the data from the config.
* @param configFile A path to a config file, this can be null if the location of a config is not known
* @param inputFileLocation A path to the current file being linted. This is the starting location
* of the search for a configuration.
* @returns Load status for a TSLint configuration object
*/
export function findConfiguration(configFile: string | null, inputFilePath: string): IConfigurationLoadResult {
const path = findConfigurationPath(configFile, inputFilePath);
const loadResult: IConfigurationLoadResult = { path };
try {
loadResult.results = loadConfigurationFromPath(path);
return loadResult;
} catch (error) {
throw new FatalError(`Failed to load ${path}: ${error.message}`, error);
}
}
/**
* Searches for a TSLint configuration and returns the path to it.
* Could return undefined if not configuration is found.
* @param suppliedConfigFilePath A path to an known config file supplied by a user. Pass null here if
* the location of the config file is not known and you want to search for one.
* @param inputFilePath A path to the current file being linted. This is the starting location
* of the search for a configuration.
* @returns An absolute path to a tslint.json file
* or undefined if neither can be found.
*/
export function findConfigurationPath(suppliedConfigFilePath: string | null, inputFilePath: string) {
if (suppliedConfigFilePath != null) {
if (!fs.existsSync(suppliedConfigFilePath)) {
throw new Error(`Could not find config file at: ${path.resolve(suppliedConfigFilePath)}`);
} else {
return path.resolve(suppliedConfigFilePath);
}
} else {
// search for tslint.json from input file location
let configFilePath = findup(CONFIG_FILENAME, { cwd: inputFilePath, nocase: true });
if (configFilePath != null && fs.existsSync(configFilePath)) {
return path.resolve(configFilePath);
}
// search for tslint.json in home directory
const homeDir = getHomeDir();
if (homeDir != null) {
configFilePath = path.join(homeDir, CONFIG_FILENAME);
if (fs.existsSync(configFilePath)) {
return path.resolve(configFilePath);
}
}
// no path could be found
return undefined;
}
}
/**
* Used Node semantics to load a configuration file given configFilePath.
* For example:
* '/path/to/config' will be treated as an absolute path
* './path/to/config' will be treated as a relative path
* 'path/to/config' will attempt to load a to/config file inside a node module named path
* @returns a configuration object for TSLint loaded from the file at configFilePath
*/
export function loadConfigurationFromPath(configFilePath?: string): IConfigurationFile {
if (configFilePath == null) {<|fim▁hole|> if (path.extname(resolvedConfigFilePath) === ".json") {
const fileContent = stripComments(fs.readFileSync(resolvedConfigFilePath)
.toString()
.replace(/^\uFEFF/, ""));
configFile = JSON.parse(fileContent);
} else {
configFile = require(resolvedConfigFilePath);
delete require.cache[resolvedConfigFilePath];
}
const configFileDir = path.dirname(resolvedConfigFilePath);
configFile.rulesDirectory = getRulesDirectories(configFile.rulesDirectory, configFileDir);
// load configurations, in order, using their identifiers or relative paths
// apply the current configuration last by placing it last in this array
const configs = arrayify(configFile.extends).map((name) => {
const nextConfigFilePath = resolveConfigurationPath(name, configFileDir);
return loadConfigurationFromPath(nextConfigFilePath);
}).concat([configFile]);
return configs.reduce(extendConfigurationFile, {});
}
}
/**
* Resolve configuration file path or node_module reference
* @param filePath Relative ("./path"), absolute ("/path"), node module ("path"), or built-in ("tslint:path")
*/
function resolveConfigurationPath(filePath: string, relativeTo?: string) {
const matches = filePath.match(BUILT_IN_CONFIG);
const isBuiltInConfig = matches != null && matches.length > 0;
if (isBuiltInConfig) {
const configName = matches![1];
try {
return require.resolve(`./configs/${configName}`);
} catch (err) {
throw new Error(`${filePath} is not a built-in config, try "tslint:recommended" instead.`);
}
}
const basedir = relativeTo || process.cwd();
try {
return resolve.sync(filePath, { basedir });
} catch (err) {
try {
return require.resolve(filePath);
} catch (err) {
throw new Error(`Invalid "extends" configuration value - could not require "${filePath}". ` +
"Review the Node lookup algorithm (https://nodejs.org/api/modules.html#modules_all_together) " +
"for the approximate method TSLint uses to find the referenced configuration file.");
}
}
}
export function extendConfigurationFile(targetConfig: IConfigurationFile,
nextConfigSource: IConfigurationFile): IConfigurationFile {
const combinedConfig: IConfigurationFile = {};
const configRulesDirectory = arrayify(targetConfig.rulesDirectory);
const nextConfigRulesDirectory = arrayify(nextConfigSource.rulesDirectory);
combinedConfig.rulesDirectory = configRulesDirectory.concat(nextConfigRulesDirectory);
const combineProperties = (targetProperty: any, nextProperty: any) => {
const combinedProperty: any = {};
for (const name of Object.keys(objectify(targetProperty))) {
combinedProperty[name] = targetProperty[name];
}
// next config source overwrites the target config object
for (const name of Object.keys(objectify(nextProperty))) {
combinedProperty[name] = nextProperty[name];
}
return combinedProperty;
};
combinedConfig.rules = combineProperties(targetConfig.rules, nextConfigSource.rules);
combinedConfig.jsRules = combineProperties(targetConfig.jsRules, nextConfigSource.jsRules);
combinedConfig.linterOptions = combineProperties(targetConfig.linterOptions, nextConfigSource.linterOptions);
return combinedConfig;
}
function getHomeDir() {
const environment = global.process.env;
const paths = [
environment.USERPROFILE,
environment.HOME,
environment.HOMEPATH,
environment.HOMEDRIVE + environment.HOMEPATH,
];
for (const homePath of paths) {
if (homePath != null && fs.existsSync(homePath)) {
return homePath;
}
}
}
export function getRelativePath(directory?: string | null, relativeTo?: string) {
if (directory != null) {
const basePath = relativeTo || process.cwd();
return path.resolve(basePath, directory);
}
return undefined;
}
/**
* @param directories A path(s) to a directory of custom rules
* @param relativeTo A path that directories provided are relative to.
* For example, if the directories come from a tslint.json file, this path
* should be the path to the tslint.json file.
* @return An array of absolute paths to directories potentially containing rules
*/
export function getRulesDirectories(directories?: string | string[], relativeTo?: string): string[] {
const rulesDirectories = arrayify(directories)
.map((dir) => getRelativePath(dir, relativeTo))
.filter((dir) => dir !== undefined) as string[];
for (const directory of rulesDirectories) {
if (directory != null && !fs.existsSync(directory)) {
throw new Error(`Could not find custom rule directory: ${directory}`);
}
}
return rulesDirectories;
}<|fim▁end|> | return DEFAULT_CONFIG;
} else {
const resolvedConfigFilePath = resolveConfigurationPath(configFilePath);
let configFile: IConfigurationFile; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2012 Colin Scott
# Copyright 2012 Andreas Wundsam
# Copyright 2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
IOWorkers provide a convenient IO abstraction. Sends are fire-and-forget,
and read data is buffered and you can get notifications when data is
available.
"""
import sys
import errno
from collections import deque
import socket
from pox.lib.util import assert_type, makePinger
from pox.lib.recoco import Select, Task
from pox.core import core
log = core.getLogger()
_dummy_handler = lambda worker : None
def _call_safe (f, socket=None):
try:
f()
except Exception as e:
if socket:
log.error("Exception on socket %s..." % (socket))
log.exception(e)
class IOWorker (object):
"""
Generic IOWorker class.
Fire and forget semantics for send.
Received data is queued until read.
"""
def __init__(self):
self.send_buf = b""
self.receive_buf = b""
self.closed = False
self._custom_rx_handler = None
self._custom_close_handler = None
self._custom_connect_handler = None
self._connecting = False
self._shutdown_send = False
self.rx_handler = None
self.close_handler = None
self.connect_handler = None
def _handle_rx (self):
""" Can be overridden OR you can just use rx_handler """
self._custom_rx_handler(self)
def _handle_close (self):
""" Can be overridden OR you can just use close_handler """
self._custom_close_handler(self)
def _handle_connect (self):
""" Can be overridden OR you can just use connect_handler """
self._custom_connect_handler(self)
def _do_exception (self, loop):
self.close()
loop._workers.discard(self)
def _try_connect (self, loop):
if not self._connecting: return False
self._connecting = False
try:
self.socket.recv(0)
except socket.error as (s_errno, strerror):
if s_errno == 10035: # WSAEWOULDBLOCK
# Maybe we're still connecting after all...
self._connecting = True
return True
self.close()
loop._workers.discard(self)
return True
_call_safe(self._handle_connect)
return False
def _do_recv (self, loop):
if self._connecting and self._try_connect(loop): return
try:
data = self.socket.recv(loop._BUF_SIZE)
if len(data) == 0:
self.close()
loop._workers.discard(self)
else:
self._push_receive_data(data)
except socket.error as (s_errno, strerror):
if s_errno == errno.ENOENT:
# SSL library does this sometimes
log.error("Socket %s: ENOENT", str(self))
return
log.error("Socket %s error %i during recv: %s", str(self),
s_errno, strerror)
self.close()
loop._workers.discard(self)
def _do_send (self, loop):
if self._connecting and self._try_connect(loop): return
try:
if len(self.send_buf):
l = self.socket.send(self.send_buf)
if l > 0:
self._consume_send_buf(l)
if self._shutdown_send and len(self.send_buf) == 0:
self.socket.shutdown(socket.SHUT_WR)
except socket.error as (s_errno, strerror):
if s_errno != errno.EAGAIN:
log.error("Socket %s error %i during send: %s", str(self),
s_errno, strerror)
self.close()
loop._workers.discard(self)
@property
def available (self):
"""
Number of available bytes to read()
"""
return len(self.receive_buf)
@property
def connect_handler (self):
if self._custom_connect_handler is _dummy_handler:
return None
return self._custom_connect_handler
@connect_handler.setter
def connect_handler (self, callback):
"""
Handler to call when connected
"""
# Not sure if this is a good idea, but it might be...
if self.connect_handler is not None or callback is not None:
log.debug("Resetting connect_handler on %s?", self)
if callback is None: callback = _dummy_handler
self._custom_connect_handler = callback
@property
def close_handler (self):
if self._custom_close_handler is _dummy_handler:
return None
return self._custom_close_handler
@close_handler.setter
def close_handler (self, callback):
"""
Handler to call when closing
"""
# Not sure if this is a good idea, but it might be...
if self.close_handler is not None or callback is not None:
log.debug("Resetting close_handler on %s?", self)
if callback is None: callback = _dummy_handler
self._custom_close_handler = callback
@property
def rx_handler (self):
if self._custom_rx_handler is _dummy_handler:
return None
return self._custom_rx_handler
@rx_handler.setter
def rx_handler (self, callback):
"""
Handler to call when data is available to read
"""
# Not sure if this is a good idea, but it might be...
if self.rx_handler is not None or callback is not None:
log.debug("Resetting rx_handler on %s?", self)
if callback is None: callback = _dummy_handler
self._custom_rx_handler = callback
def send_fast (self, data):
return self.send(data)
def send (self, data):
""" Send data. Fire and forget. """
assert assert_type("data", data, [bytes], none_ok=False)
self.send_buf += data
def _push_receive_data (self, new_data):
# notify client of new received data. called by a Select loop
self.receive_buf += new_data
self._handle_rx()
def peek (self, length = None):
""" Peek up to length bytes from receive buffer. """
if length is None:
return self.receive_buf
else:
return self.receive_buf[:length]
def consume_receive_buf (self, l):
""" Consume receive buffer """
# called from the client
if len(self.receive_buf) < l:
raise RuntimeError("Receive buffer underrun")
self.receive_buf = self.receive_buf[l:]
def read (self, length = None):
"""
Read up to length bytes from receive buffer
(defaults to all)
"""
if length is None:
length = len(self.receive_buf)
r = self.receive_buf[:length]
self.receive_buf = self.receive_buf[length:]
return r
@property
def _ready_to_send (self):
# called by Select loop
return len(self.send_buf) > 0 or self._connecting
def _consume_send_buf (self, l):
# Throw out the first l bytes of the send buffer
# Called by Select loop
assert(len(self.send_buf)>=l)
self.send_buf = self.send_buf[l:]
def close (self):
""" Close this socket """
if self.closed: return
self.closed = True
_call_safe(self._handle_close)
def shutdown (self, send = True, recv = True):
"""
Shut down socket
"""
self._shutdown_send |= send
#TODO: recv
def __repr__ (self):
return "<" + self.__class__.__name__ + ">"
class RecocoIOWorker (IOWorker):
"""
An IOWorker that works with our RecocoIOLoop.
"""
# Set by register
on_close = None
pinger = None
def __init__ (self, socket):
"""
pinger is a pinger that will wake the RecocoIOLoop
on_close is a factory that hides details of Select loop
"""
IOWorker.__init__(self)
self.socket = socket
def fileno (self):
""" Return the wrapped sockets' fileno """
return self.socket.fileno()
def send_fast (self, data):
"""
send data from the client side. fire and forget.
Must only be called from the same cooperative context as the
IOWorker.
"""
if len(self.send_buf)==0 and not self._connecting and not self.closed:
try:
l = self.socket.send(data, socket.MSG_DONTWAIT)
if l == len(self.send_buf):
return
data = data[l]
except socket.error as (s_errno, strerror):
if s_errno != errno.EAGAIN:
log.error("Socket error: " + strerror)
self.close()
return
IOWorker.send(self, data)
self.pinger.ping()
def send (self, data):
IOWorker.send(self, data)
self.pinger.ping()
def close (self):
""" Register this socket to be closed. fire and forget """
# (don't close until Select loop is ready)
if self.closed: return
IOWorker.close(self)
# on_close is a function not a method
try:
self.socket.shutdown(socket.SHUT_RD)
except Exception:
pass
self.on_close(self)
if not hasattr(socket, "MSG_DONTWAIT"):
# Don't have this feature.
RecocoIOWorker.send_fast = RecocoIOWorker.send
log.debug("RecocoIOWorker.send_fast() not available")
else:
pass
def _format_lists (rlist, wlist, elist):
everything = set()
everything.update(rlist)
everything.update(wlist)
everything.update(elist)
if len(everything) == 0: return "None"
everything = list(everything)
everything.sort()
msg = ""
for fd in everything:
msg += str(fd).strip("<>").replace(" ", "-") + "|"
if fd in rlist: msg += "R"
if fd in wlist: msg += "W"
if fd in elist: msg += "X"
msg += " "<|fim▁hole|>
class RecocoIOLoop (Task):
"""
recoco task that handles the actual IO for our IO workers
"""
_select_timeout = 5
_BUF_SIZE = 8192
more_debugging = False
def __init__ (self, worker_type = RecocoIOWorker):
Task.__init__(self)
self._worker_type = worker_type
self._workers = set()
self.pinger = makePinger()
# socket.open() and socket.close() are performed by this Select task
# other threads register open() and close() requests by adding lambdas
# to this thread-safe queue.
self._pending_commands = deque()
def new_worker (self, *args, **kw):
'''
Return an IOWorker wrapping the given socket.
You can create a specific worker type by specifying
_worker_type.
'''
# Called from external threads.
# Does not register the IOWorker immediately with the select loop --
# rather, adds a command to the pending queue
_worker_type = kw.pop("_worker_type", None)
if _worker_type is None:
_worker_type = self._worker_type
assert issubclass(_worker_type, RecocoIOWorker)
worker = _worker_type(*args, **kw)
self.register_worker(worker)
return worker
def register_worker (self, worker):
"""
Register a worker with this ioloop
"""
# Our callback for io_worker.close():
def on_close (worker):
def close_worker (worker):
# Actually close the worker (called by Select loop)
worker.socket.close()
self._workers.discard(worker)
# schedule close_worker to be called by Select loop
self._pending_commands.append(lambda: close_worker(worker))
self.pinger.ping()
worker.on_close = on_close
worker.pinger = self.pinger
# Don't add immediately, since we may be in the wrong thread
self._pending_commands.append(lambda: self._workers.add(worker))
self.pinger.ping()
def stop (self):
self.running = False
self.pinger.ping()
def run (self):
self.running = True
while self.running and core.running:
try:
# First, execute pending commands
while len(self._pending_commands) > 0:
self._pending_commands.popleft()()
# Now grab workers
read_sockets = list(self._workers) + [ self.pinger ]
write_sockets = [ worker for worker in self._workers
if worker._ready_to_send ]
exception_sockets = list(self._workers)
if self.more_debugging:
log.debug("Select In : " + _format_lists(read_sockets,
write_sockets, exception_sockets))
rlist, wlist, elist = yield Select(read_sockets, write_sockets,
exception_sockets, self._select_timeout)
if self.more_debugging:
log.debug("Select Out: " + _format_lists(rlist, wlist, elist))
if self.pinger in rlist:
self.pinger.pongAll()
rlist.remove(self.pinger)
for worker in elist:
worker._do_exception(self)
if worker in rlist:
rlist.remove(worker)
if worker in wlist:
wlist.remove(worker)
for worker in rlist:
worker._do_recv(self)
for worker in wlist:
worker._do_send(self)
except GeneratorExit:
# Must be shutting down
break
except BaseException as e:
log.exception(e)
break<|fim▁end|> | msg = msg.strip()
return msg
|
<|file_name|>saved_object_graph.rs<|end_file_name|><|fim▁begin|>// This file is generated by rust-protobuf 2.25.1. Do not edit
// @generated
// https://github.com/rust-lang/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy::all)]
#![allow(unused_attributes)]
#![cfg_attr(rustfmt, rustfmt::skip)]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unused_imports)]
#![allow(unused_results)]
//! Generated file from `tensorflow/core/protobuf/saved_object_graph.proto`
/// Generated files are compatible only with the same version
/// of protobuf runtime.
// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_25_1;
#[derive(PartialEq,Clone,Default)]
pub struct SavedObjectGraph {
// message fields
pub nodes: ::protobuf::RepeatedField<SavedObject>,
pub concrete_functions: ::std::collections::HashMap<::std::string::String, SavedConcreteFunction>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a SavedObjectGraph {
fn default() -> &'a SavedObjectGraph {
<SavedObjectGraph as ::protobuf::Message>::default_instance()
}
}
impl SavedObjectGraph {
pub fn new() -> SavedObjectGraph {
::std::default::Default::default()
}
// repeated .tensorflow.SavedObject nodes = 1;
pub fn get_nodes(&self) -> &[SavedObject] {
&self.nodes
}
pub fn clear_nodes(&mut self) {
self.nodes.clear();
}
// Param is passed by value, moved
pub fn set_nodes(&mut self, v: ::protobuf::RepeatedField<SavedObject>) {
self.nodes = v;
}
// Mutable pointer to the field.
pub fn mut_nodes(&mut self) -> &mut ::protobuf::RepeatedField<SavedObject> {
&mut self.nodes
}
// Take field
pub fn take_nodes(&mut self) -> ::protobuf::RepeatedField<SavedObject> {
::std::mem::replace(&mut self.nodes, ::protobuf::RepeatedField::new())
}
// repeated .tensorflow.SavedObjectGraph.ConcreteFunctionsEntry concrete_functions = 2;
pub fn get_concrete_functions(&self) -> &::std::collections::HashMap<::std::string::String, SavedConcreteFunction> {
&self.concrete_functions
}
pub fn clear_concrete_functions(&mut self) {
self.concrete_functions.clear();
}
// Param is passed by value, moved
pub fn set_concrete_functions(&mut self, v: ::std::collections::HashMap<::std::string::String, SavedConcreteFunction>) {
self.concrete_functions = v;
}
// Mutable pointer to the field.
pub fn mut_concrete_functions(&mut self) -> &mut ::std::collections::HashMap<::std::string::String, SavedConcreteFunction> {
&mut self.concrete_functions
}
// Take field
pub fn take_concrete_functions(&mut self) -> ::std::collections::HashMap<::std::string::String, SavedConcreteFunction> {
::std::mem::replace(&mut self.concrete_functions, ::std::collections::HashMap::new())
}
}
impl ::protobuf::Message for SavedObjectGraph {
fn is_initialized(&self) -> bool {
for v in &self.nodes {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.nodes)?;
},
2 => {
::protobuf::rt::read_map_into::<::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeMessage<SavedConcreteFunction>>(wire_type, is, &mut self.concrete_functions)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
for value in &self.nodes {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
my_size += ::protobuf::rt::compute_map_size::<::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeMessage<SavedConcreteFunction>>(2, &self.concrete_functions);
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
for v in &self.nodes {
os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
::protobuf::rt::write_map_with_cached_sizes::<::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeMessage<SavedConcreteFunction>>(2, &self.concrete_functions, os)?;
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SavedObjectGraph {
SavedObjectGraph::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<SavedObject>>(
"nodes",
|m: &SavedObjectGraph| { &m.nodes },
|m: &mut SavedObjectGraph| { &mut m.nodes },
));
fields.push(::protobuf::reflect::accessor::make_map_accessor::<_, ::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeMessage<SavedConcreteFunction>>(
"concrete_functions",
|m: &SavedObjectGraph| { &m.concrete_functions },
|m: &mut SavedObjectGraph| { &mut m.concrete_functions },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<SavedObjectGraph>(
"SavedObjectGraph",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SavedObjectGraph {
static instance: ::protobuf::rt::LazyV2<SavedObjectGraph> = ::protobuf::rt::LazyV2::INIT;
instance.get(SavedObjectGraph::new)
}
}
impl ::protobuf::Clear for SavedObjectGraph {
fn clear(&mut self) {
self.nodes.clear();
self.concrete_functions.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for SavedObjectGraph {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for SavedObjectGraph {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct SavedObject {
// message fields
pub children: ::protobuf::RepeatedField<super::trackable_object_graph::TrackableObjectGraph_TrackableObject_ObjectReference>,
pub slot_variables: ::protobuf::RepeatedField<super::trackable_object_graph::TrackableObjectGraph_TrackableObject_SlotVariableReference>,
pub saveable_objects: ::std::collections::HashMap<::std::string::String, SaveableObject>,
// message oneof groups
pub kind: ::std::option::Option<SavedObject_oneof_kind>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a SavedObject {
fn default() -> &'a SavedObject {
<SavedObject as ::protobuf::Message>::default_instance()
}
}
#[derive(Clone,PartialEq,Debug)]
pub enum SavedObject_oneof_kind {
user_object(SavedUserObject),
asset(SavedAsset),
function(SavedFunction),
variable(SavedVariable),
bare_concrete_function(SavedBareConcreteFunction),
constant(SavedConstant),
resource(SavedResource),
captured_tensor(CapturedTensor),
}
// Generated accessors for `SavedObject`.
//
// Repeated fields (`children`, `slot_variables`) and the map field
// (`saveable_objects`) get get/clear/set/mut/take accessors.
//
// Each oneof member (wire fields 4-10 and 12) gets the standard sextet:
// - `get_*`   returns the active variant, or the type's shared default
//             instance when `kind` is unset or holds another variant;
// - `has_*`   reports whether `kind` currently holds that variant;
// - `clear_*` resets the whole oneof to `None`;
// - `set_*`   overwrites the oneof with the given value (moved in);
// - `mut_*`   switches the oneof to that variant, default-initializing
//             it first if it was not already active, then returns &mut;
// - `take_*`  moves the value out (leaving `kind` as `None`), or returns
//             a fresh default when the variant was not active.
impl SavedObject {
pub fn new() -> SavedObject {
::std::default::Default::default()
}
// repeated .tensorflow.TrackableObjectGraph.TrackableObject.ObjectReference children = 1;
pub fn get_children(&self) -> &[super::trackable_object_graph::TrackableObjectGraph_TrackableObject_ObjectReference] {
&self.children
}
pub fn clear_children(&mut self) {
self.children.clear();
}
// Param is passed by value, moved
pub fn set_children(&mut self, v: ::protobuf::RepeatedField<super::trackable_object_graph::TrackableObjectGraph_TrackableObject_ObjectReference>) {
self.children = v;
}
// Mutable pointer to the field.
pub fn mut_children(&mut self) -> &mut ::protobuf::RepeatedField<super::trackable_object_graph::TrackableObjectGraph_TrackableObject_ObjectReference> {
&mut self.children
}
// Take field
pub fn take_children(&mut self) -> ::protobuf::RepeatedField<super::trackable_object_graph::TrackableObjectGraph_TrackableObject_ObjectReference> {
::std::mem::replace(&mut self.children, ::protobuf::RepeatedField::new())
}
// repeated .tensorflow.TrackableObjectGraph.TrackableObject.SlotVariableReference slot_variables = 3;
pub fn get_slot_variables(&self) -> &[super::trackable_object_graph::TrackableObjectGraph_TrackableObject_SlotVariableReference] {
&self.slot_variables
}
pub fn clear_slot_variables(&mut self) {
self.slot_variables.clear();
}
// Param is passed by value, moved
pub fn set_slot_variables(&mut self, v: ::protobuf::RepeatedField<super::trackable_object_graph::TrackableObjectGraph_TrackableObject_SlotVariableReference>) {
self.slot_variables = v;
}
// Mutable pointer to the field.
pub fn mut_slot_variables(&mut self) -> &mut ::protobuf::RepeatedField<super::trackable_object_graph::TrackableObjectGraph_TrackableObject_SlotVariableReference> {
&mut self.slot_variables
}
// Take field
pub fn take_slot_variables(&mut self) -> ::protobuf::RepeatedField<super::trackable_object_graph::TrackableObjectGraph_TrackableObject_SlotVariableReference> {
::std::mem::replace(&mut self.slot_variables, ::protobuf::RepeatedField::new())
}
// .tensorflow.SavedUserObject user_object = 4;
pub fn get_user_object(&self) -> &SavedUserObject {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::user_object(ref v)) => v,
// Not the active variant: fall back to the shared default instance.
_ => <SavedUserObject as ::protobuf::Message>::default_instance(),
}
}
pub fn clear_user_object(&mut self) {
self.kind = ::std::option::Option::None;
}
pub fn has_user_object(&self) -> bool {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::user_object(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_user_object(&mut self, v: SavedUserObject) {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::user_object(v))
}
// Mutable pointer to the field.
pub fn mut_user_object(&mut self) -> &mut SavedUserObject {
// Switch the oneof to this variant if it is not already active.
if let ::std::option::Option::Some(SavedObject_oneof_kind::user_object(_)) = self.kind {
} else {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::user_object(SavedUserObject::new()));
}
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::user_object(ref mut v)) => v,
// Unreachable: the variant was just installed above.
_ => panic!(),
}
}
// Take field
pub fn take_user_object(&mut self) -> SavedUserObject {
if self.has_user_object() {
match self.kind.take() {
::std::option::Option::Some(SavedObject_oneof_kind::user_object(v)) => v,
_ => panic!(),
}
} else {
SavedUserObject::new()
}
}
// .tensorflow.SavedAsset asset = 5;
pub fn get_asset(&self) -> &SavedAsset {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::asset(ref v)) => v,
_ => <SavedAsset as ::protobuf::Message>::default_instance(),
}
}
pub fn clear_asset(&mut self) {
self.kind = ::std::option::Option::None;
}
pub fn has_asset(&self) -> bool {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::asset(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_asset(&mut self, v: SavedAsset) {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::asset(v))
}
// Mutable pointer to the field.
pub fn mut_asset(&mut self) -> &mut SavedAsset {
if let ::std::option::Option::Some(SavedObject_oneof_kind::asset(_)) = self.kind {
} else {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::asset(SavedAsset::new()));
}
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::asset(ref mut v)) => v,
_ => panic!(),
}
}
// Take field
pub fn take_asset(&mut self) -> SavedAsset {
if self.has_asset() {
match self.kind.take() {
::std::option::Option::Some(SavedObject_oneof_kind::asset(v)) => v,
_ => panic!(),
}
} else {
SavedAsset::new()
}
}
// .tensorflow.SavedFunction function = 6;
pub fn get_function(&self) -> &SavedFunction {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::function(ref v)) => v,
_ => <SavedFunction as ::protobuf::Message>::default_instance(),
}
}
pub fn clear_function(&mut self) {
self.kind = ::std::option::Option::None;
}
pub fn has_function(&self) -> bool {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::function(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_function(&mut self, v: SavedFunction) {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::function(v))
}
// Mutable pointer to the field.
pub fn mut_function(&mut self) -> &mut SavedFunction {
if let ::std::option::Option::Some(SavedObject_oneof_kind::function(_)) = self.kind {
} else {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::function(SavedFunction::new()));
}
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::function(ref mut v)) => v,
_ => panic!(),
}
}
// Take field
pub fn take_function(&mut self) -> SavedFunction {
if self.has_function() {
match self.kind.take() {
::std::option::Option::Some(SavedObject_oneof_kind::function(v)) => v,
_ => panic!(),
}
} else {
SavedFunction::new()
}
}
// .tensorflow.SavedVariable variable = 7;
pub fn get_variable(&self) -> &SavedVariable {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::variable(ref v)) => v,
_ => <SavedVariable as ::protobuf::Message>::default_instance(),
}
}
pub fn clear_variable(&mut self) {
self.kind = ::std::option::Option::None;
}
pub fn has_variable(&self) -> bool {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::variable(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_variable(&mut self, v: SavedVariable) {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::variable(v))
}
// Mutable pointer to the field.
pub fn mut_variable(&mut self) -> &mut SavedVariable {
if let ::std::option::Option::Some(SavedObject_oneof_kind::variable(_)) = self.kind {
} else {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::variable(SavedVariable::new()));
}
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::variable(ref mut v)) => v,
_ => panic!(),
}
}
// Take field
pub fn take_variable(&mut self) -> SavedVariable {
if self.has_variable() {
match self.kind.take() {
::std::option::Option::Some(SavedObject_oneof_kind::variable(v)) => v,
_ => panic!(),
}
} else {
SavedVariable::new()
}
}
// .tensorflow.SavedBareConcreteFunction bare_concrete_function = 8;
pub fn get_bare_concrete_function(&self) -> &SavedBareConcreteFunction {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::bare_concrete_function(ref v)) => v,
_ => <SavedBareConcreteFunction as ::protobuf::Message>::default_instance(),
}
}
pub fn clear_bare_concrete_function(&mut self) {
self.kind = ::std::option::Option::None;
}
pub fn has_bare_concrete_function(&self) -> bool {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::bare_concrete_function(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_bare_concrete_function(&mut self, v: SavedBareConcreteFunction) {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::bare_concrete_function(v))
}
// Mutable pointer to the field.
pub fn mut_bare_concrete_function(&mut self) -> &mut SavedBareConcreteFunction {
if let ::std::option::Option::Some(SavedObject_oneof_kind::bare_concrete_function(_)) = self.kind {
} else {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::bare_concrete_function(SavedBareConcreteFunction::new()));
}
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::bare_concrete_function(ref mut v)) => v,
_ => panic!(),
}
}
// Take field
pub fn take_bare_concrete_function(&mut self) -> SavedBareConcreteFunction {
if self.has_bare_concrete_function() {
match self.kind.take() {
::std::option::Option::Some(SavedObject_oneof_kind::bare_concrete_function(v)) => v,
_ => panic!(),
}
} else {
SavedBareConcreteFunction::new()
}
}
// .tensorflow.SavedConstant constant = 9;
pub fn get_constant(&self) -> &SavedConstant {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::constant(ref v)) => v,
_ => <SavedConstant as ::protobuf::Message>::default_instance(),
}
}
pub fn clear_constant(&mut self) {
self.kind = ::std::option::Option::None;
}
pub fn has_constant(&self) -> bool {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::constant(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_constant(&mut self, v: SavedConstant) {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::constant(v))
}
// Mutable pointer to the field.
pub fn mut_constant(&mut self) -> &mut SavedConstant {
if let ::std::option::Option::Some(SavedObject_oneof_kind::constant(_)) = self.kind {
} else {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::constant(SavedConstant::new()));
}
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::constant(ref mut v)) => v,
_ => panic!(),
}
}
// Take field
pub fn take_constant(&mut self) -> SavedConstant {
if self.has_constant() {
match self.kind.take() {
::std::option::Option::Some(SavedObject_oneof_kind::constant(v)) => v,
_ => panic!(),
}
} else {
SavedConstant::new()
}
}
// .tensorflow.SavedResource resource = 10;
pub fn get_resource(&self) -> &SavedResource {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::resource(ref v)) => v,
_ => <SavedResource as ::protobuf::Message>::default_instance(),
}
}
pub fn clear_resource(&mut self) {
self.kind = ::std::option::Option::None;
}
pub fn has_resource(&self) -> bool {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::resource(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_resource(&mut self, v: SavedResource) {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::resource(v))
}
// Mutable pointer to the field.
pub fn mut_resource(&mut self) -> &mut SavedResource {
if let ::std::option::Option::Some(SavedObject_oneof_kind::resource(_)) = self.kind {
} else {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::resource(SavedResource::new()));
}
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::resource(ref mut v)) => v,
_ => panic!(),
}
}
// Take field
pub fn take_resource(&mut self) -> SavedResource {
if self.has_resource() {
match self.kind.take() {
::std::option::Option::Some(SavedObject_oneof_kind::resource(v)) => v,
_ => panic!(),
}
} else {
SavedResource::new()
}
}
// .tensorflow.CapturedTensor captured_tensor = 12;
pub fn get_captured_tensor(&self) -> &CapturedTensor {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::captured_tensor(ref v)) => v,
_ => <CapturedTensor as ::protobuf::Message>::default_instance(),
}
}
pub fn clear_captured_tensor(&mut self) {
self.kind = ::std::option::Option::None;
}
pub fn has_captured_tensor(&self) -> bool {
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::captured_tensor(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_captured_tensor(&mut self, v: CapturedTensor) {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::captured_tensor(v))
}
// Mutable pointer to the field.
pub fn mut_captured_tensor(&mut self) -> &mut CapturedTensor {
if let ::std::option::Option::Some(SavedObject_oneof_kind::captured_tensor(_)) = self.kind {
} else {
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::captured_tensor(CapturedTensor::new()));
}
match self.kind {
::std::option::Option::Some(SavedObject_oneof_kind::captured_tensor(ref mut v)) => v,
_ => panic!(),
}
}
// Take field
pub fn take_captured_tensor(&mut self) -> CapturedTensor {
if self.has_captured_tensor() {
match self.kind.take() {
::std::option::Option::Some(SavedObject_oneof_kind::captured_tensor(v)) => v,
_ => panic!(),
}
} else {
CapturedTensor::new()
}
}
// repeated .tensorflow.SavedObject.SaveableObjectsEntry saveable_objects = 11;
pub fn get_saveable_objects(&self) -> &::std::collections::HashMap<::std::string::String, SaveableObject> {
&self.saveable_objects
}
pub fn clear_saveable_objects(&mut self) {
self.saveable_objects.clear();
}
// Param is passed by value, moved
pub fn set_saveable_objects(&mut self, v: ::std::collections::HashMap<::std::string::String, SaveableObject>) {
self.saveable_objects = v;
}
// Mutable pointer to the field.
pub fn mut_saveable_objects(&mut self) -> &mut ::std::collections::HashMap<::std::string::String, SaveableObject> {
&mut self.saveable_objects
}
// Take field
pub fn take_saveable_objects(&mut self) -> ::std::collections::HashMap<::std::string::String, SaveableObject> {
::std::mem::replace(&mut self.saveable_objects, ::std::collections::HashMap::new())
}
}
// `protobuf::Message` implementation for `SavedObject`: recursive
// initialization check, wire-format parsing (`merge_from`), two-pass
// serialization (`compute_size` then `write_to_with_cached_sizes`), and
// lazily-built reflection metadata (`descriptor_static`).
impl ::protobuf::Message for SavedObject {
fn is_initialized(&self) -> bool {
// Recursively check repeated fields and whichever oneof variant is set.
for v in &self.children {
if !v.is_initialized() {
return false;
}
};
for v in &self.slot_variables {
if !v.is_initialized() {
return false;
}
};
if let Some(SavedObject_oneof_kind::user_object(ref v)) = self.kind {
if !v.is_initialized() {
return false;
}
}
if let Some(SavedObject_oneof_kind::asset(ref v)) = self.kind {
if !v.is_initialized() {
return false;
}
}
if let Some(SavedObject_oneof_kind::function(ref v)) = self.kind {
if !v.is_initialized() {
return false;
}
}
if let Some(SavedObject_oneof_kind::variable(ref v)) = self.kind {
if !v.is_initialized() {
return false;
}
}
if let Some(SavedObject_oneof_kind::bare_concrete_function(ref v)) = self.kind {
if !v.is_initialized() {
return false;
}
}
if let Some(SavedObject_oneof_kind::constant(ref v)) = self.kind {
if !v.is_initialized() {
return false;
}
}
if let Some(SavedObject_oneof_kind::resource(ref v)) = self.kind {
if !v.is_initialized() {
return false;
}
}
if let Some(SavedObject_oneof_kind::captured_tensor(ref v)) = self.kind {
if !v.is_initialized() {
return false;
}
}
true
}
// Parses fields from the input stream, merging into `self`. Oneof
// fields (4-10, 12) overwrite `kind`; unrecognized field numbers are
// preserved in `unknown_fields`.
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.children)?;
},
3 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.slot_variables)?;
},
4 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::user_object(is.read_message()?));
},
5 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::asset(is.read_message()?));
},
6 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::function(is.read_message()?));
},
7 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::variable(is.read_message()?));
},
8 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::bare_concrete_function(is.read_message()?));
},
9 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::constant(is.read_message()?));
},
10 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::resource(is.read_message()?));
},
12 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.kind = ::std::option::Option::Some(SavedObject_oneof_kind::captured_tensor(is.read_message()?));
},
11 => {
::protobuf::rt::read_map_into::<::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeMessage<SaveableObject>>(wire_type, is, &mut self.saveable_objects)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
for value in &self.children {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
for value in &self.slot_variables {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
my_size += ::protobuf::rt::compute_map_size::<::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeMessage<SaveableObject>>(11, &self.saveable_objects);
// Only the active oneof variant contributes to the encoded size.
if let ::std::option::Option::Some(ref v) = self.kind {
match v {
&SavedObject_oneof_kind::user_object(ref v) => {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
&SavedObject_oneof_kind::asset(ref v) => {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
&SavedObject_oneof_kind::function(ref v) => {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
&SavedObject_oneof_kind::variable(ref v) => {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
&SavedObject_oneof_kind::bare_concrete_function(ref v) => {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
&SavedObject_oneof_kind::constant(ref v) => {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
&SavedObject_oneof_kind::resource(ref v) => {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
&SavedObject_oneof_kind::captured_tensor(ref v) => {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
};
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
// Cache the size so write_to_with_cached_sizes can emit length prefixes
// without recomputing.
self.cached_size.set(my_size);
my_size
}
// Serializes using sizes cached by a prior compute_size() pass.
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
for v in &self.children {
os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
for v in &self.slot_variables {
os.write_tag(3, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
::protobuf::rt::write_map_with_cached_sizes::<::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeMessage<SaveableObject>>(11, &self.saveable_objects, os)?;
if let ::std::option::Option::Some(ref v) = self.kind {
match v {
&SavedObject_oneof_kind::user_object(ref v) => {
os.write_tag(4, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
&SavedObject_oneof_kind::asset(ref v) => {
os.write_tag(5, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
&SavedObject_oneof_kind::function(ref v) => {
os.write_tag(6, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
&SavedObject_oneof_kind::variable(ref v) => {
os.write_tag(7, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
&SavedObject_oneof_kind::bare_concrete_function(ref v) => {
os.write_tag(8, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
&SavedObject_oneof_kind::constant(ref v) => {
os.write_tag(9, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
&SavedObject_oneof_kind::resource(ref v) => {
os.write_tag(10, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
&SavedObject_oneof_kind::captured_tensor(ref v) => {
os.write_tag(12, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
};
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SavedObject {
SavedObject::new()
}
// Builds the reflection descriptor once, on first use, and reuses it.
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<super::trackable_object_graph::TrackableObjectGraph_TrackableObject_ObjectReference>>(
"children",
|m: &SavedObject| { &m.children },
|m: &mut SavedObject| { &mut m.children },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<super::trackable_object_graph::TrackableObjectGraph_TrackableObject_SlotVariableReference>>(
"slot_variables",
|m: &SavedObject| { &m.slot_variables },
|m: &mut SavedObject| { &mut m.slot_variables },
));
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, SavedUserObject>(
"user_object",
SavedObject::has_user_object,
SavedObject::get_user_object,
));
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, SavedAsset>(
"asset",
SavedObject::has_asset,
SavedObject::get_asset,
));
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, SavedFunction>(
"function",
SavedObject::has_function,
SavedObject::get_function,
));
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, SavedVariable>(
"variable",
SavedObject::has_variable,
SavedObject::get_variable,
));
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, SavedBareConcreteFunction>(
"bare_concrete_function",
SavedObject::has_bare_concrete_function,
SavedObject::get_bare_concrete_function,
));
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, SavedConstant>(
"constant",
SavedObject::has_constant,
SavedObject::get_constant,
));
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, SavedResource>(
"resource",
SavedObject::has_resource,
SavedObject::get_resource,
));
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, CapturedTensor>(
"captured_tensor",
SavedObject::has_captured_tensor,
SavedObject::get_captured_tensor,
));
fields.push(::protobuf::reflect::accessor::make_map_accessor::<_, ::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeMessage<SaveableObject>>(
"saveable_objects",
|m: &SavedObject| { &m.saveable_objects },
|m: &mut SavedObject| { &mut m.saveable_objects },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<SavedObject>(
"SavedObject",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SavedObject {
static instance: ::protobuf::rt::LazyV2<SavedObject> = ::protobuf::rt::LazyV2::INIT;
instance.get(SavedObject::new)
}
}
// Resets every field of `SavedObject` to its default value.
impl ::protobuf::Clear for SavedObject {
fn clear(&mut self) {
self.children.clear();
self.slot_variables.clear();
// A single assignment clears the entire `kind` oneof. The generator
// emitted `self.kind = None` once per variant (eight identical,
// redundant assignments); one suffices and behavior is unchanged.
self.kind = ::std::option::Option::None;
self.saveable_objects.clear();
self.unknown_fields.clear();
}
}
// Debug output delegates to protobuf text format instead of a derived
// struct-style representation.
impl ::std::fmt::Debug for SavedObject {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
// Exposes `SavedObject` to the protobuf reflection API as a message value.
impl ::protobuf::reflect::ProtobufValue for SavedObject {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct SavedUserObject {
// message fields
pub identifier: ::std::string::String, // string identifier = 1
pub version: ::protobuf::SingularPtrField<super::versions::VersionDef>, // .tensorflow.VersionDef version = 2
pub metadata: ::std::string::String, // string metadata = 3
// special fields
pub unknown_fields: ::protobuf::UnknownFields, // unparsed/unknown wire fields, preserved on round-trip
pub cached_size: ::protobuf::CachedSize, // set by compute_size(), read during serialization
}
// Lets generated code default a `&SavedUserObject` to the process-wide
// shared default instance.
impl<'a> ::std::default::Default for &'a SavedUserObject {
fn default() -> &'a SavedUserObject {
<SavedUserObject as ::protobuf::Message>::default_instance()
}
}
// Generated accessors for `SavedUserObject`: `identifier` (string, field 1),
// `version` (singular message, field 2), `metadata` (string, field 3).
impl SavedUserObject {
pub fn new() -> SavedUserObject {
::std::default::Default::default()
}
// string identifier = 1;
pub fn get_identifier(&self) -> &str {
&self.identifier
}
pub fn clear_identifier(&mut self) {
self.identifier.clear();
}
// Param is passed by value, moved
pub fn set_identifier(&mut self, v: ::std::string::String) {
self.identifier = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_identifier(&mut self) -> &mut ::std::string::String {
&mut self.identifier
}
// Take field
pub fn take_identifier(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.identifier, ::std::string::String::new())
}
// .tensorflow.VersionDef version = 2;
pub fn get_version(&self) -> &super::versions::VersionDef {
// Falls back to the shared default instance when the field is unset.
self.version.as_ref().unwrap_or_else(|| <super::versions::VersionDef as ::protobuf::Message>::default_instance())
}
pub fn clear_version(&mut self) {
self.version.clear();
}
pub fn has_version(&self) -> bool {
self.version.is_some()
}
// Param is passed by value, moved
pub fn set_version(&mut self, v: super::versions::VersionDef) {
self.version = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_version(&mut self) -> &mut super::versions::VersionDef {
if self.version.is_none() {
self.version.set_default();
}
self.version.as_mut().unwrap()
}
// Take field
pub fn take_version(&mut self) -> super::versions::VersionDef {
self.version.take().unwrap_or_else(|| super::versions::VersionDef::new())
}
// string metadata = 3;
pub fn get_metadata(&self) -> &str {
&self.metadata
}
pub fn clear_metadata(&mut self) {
self.metadata.clear();
}
// Param is passed by value, moved
pub fn set_metadata(&mut self, v: ::std::string::String) {
self.metadata = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_metadata(&mut self) -> &mut ::std::string::String {
&mut self.metadata
}
// Take field
pub fn take_metadata(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.metadata, ::std::string::String::new())
}
}
// `protobuf::Message` implementation for `SavedUserObject`: wire-format
// parsing/serialization and lazily-built reflection metadata.
impl ::protobuf::Message for SavedUserObject {
fn is_initialized(&self) -> bool {
for v in &self.version {
if !v.is_initialized() {
return false;
}
};
true
}
// Parses fields 1-3; unrecognized field numbers land in unknown_fields.
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.identifier)?;
},
2 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.version)?;
},
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.metadata)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
// proto3 semantics: empty strings are not serialized and add no size.
if !self.identifier.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.identifier);
}
if let Some(ref v) = self.version.as_ref() {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
if !self.metadata.is_empty() {
my_size += ::protobuf::rt::string_size(3, &self.metadata);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
// Serializes using sizes cached by a prior compute_size() pass.
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.identifier.is_empty() {
os.write_string(1, &self.identifier)?;
}
if let Some(ref v) = self.version.as_ref() {
os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
if !self.metadata.is_empty() {
os.write_string(3, &self.metadata)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SavedUserObject {
SavedUserObject::new()
}
// Builds the reflection descriptor once, on first use, and reuses it.
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"identifier",
|m: &SavedUserObject| { &m.identifier },
|m: &mut SavedUserObject| { &mut m.identifier },
));
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<super::versions::VersionDef>>(
"version",
|m: &SavedUserObject| { &m.version },
|m: &mut SavedUserObject| { &mut m.version },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"metadata",
|m: &SavedUserObject| { &m.metadata },
|m: &mut SavedUserObject| { &mut m.metadata },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<SavedUserObject>(
"SavedUserObject",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SavedUserObject {
static instance: ::protobuf::rt::LazyV2<SavedUserObject> = ::protobuf::rt::LazyV2::INIT;
instance.get(SavedUserObject::new)
}
}
// Resets every field of `SavedUserObject` to its default value.
impl ::protobuf::Clear for SavedUserObject {
fn clear(&mut self) {
self.identifier.clear();
self.version.clear();
self.metadata.clear();
self.unknown_fields.clear();
}
}
// Debug output delegates to protobuf text format.
impl ::std::fmt::Debug for SavedUserObject {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
// Exposes `SavedUserObject` to the protobuf reflection API as a message value.
impl ::protobuf::reflect::ProtobufValue for SavedUserObject {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct SavedAsset {
// message fields
pub asset_file_def_index: i32, // int32 asset_file_def_index = 1
// special fields
pub unknown_fields: ::protobuf::UnknownFields, // unparsed/unknown wire fields, preserved on round-trip
pub cached_size: ::protobuf::CachedSize, // set by compute_size(), read during serialization
}
// Lets generated code default a `&SavedAsset` to the process-wide shared
// default instance.
impl<'a> ::std::default::Default for &'a SavedAsset {
fn default() -> &'a SavedAsset {
<SavedAsset as ::protobuf::Message>::default_instance()
}
}
impl SavedAsset {
pub fn new() -> SavedAsset {
::std::default::Default::default()
}
// int32 asset_file_def_index = 1;
pub fn get_asset_file_def_index(&self) -> i32 {
self.asset_file_def_index
}
pub fn clear_asset_file_def_index(&mut self) {
self.asset_file_def_index = 0;
}
// Param is passed by value, moved
pub fn set_asset_file_def_index(&mut self, v: i32) {
self.asset_file_def_index = v;
}
}
impl ::protobuf::Message for SavedAsset {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int32()?;
self.asset_file_def_index = tmp;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if self.asset_file_def_index != 0 {
my_size += ::protobuf::rt::value_size(1, self.asset_file_def_index, ::protobuf::wire_format::WireTypeVarint);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if self.asset_file_def_index != 0 {
os.write_int32(1, self.asset_file_def_index)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SavedAsset {
SavedAsset::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt32>(
"asset_file_def_index",
|m: &SavedAsset| { &m.asset_file_def_index },
|m: &mut SavedAsset| { &mut m.asset_file_def_index },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<SavedAsset>(
"SavedAsset",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SavedAsset {
static instance: ::protobuf::rt::LazyV2<SavedAsset> = ::protobuf::rt::LazyV2::INIT;
instance.get(SavedAsset::new)
}
}
impl ::protobuf::Clear for SavedAsset {
fn clear(&mut self) {
self.asset_file_def_index = 0;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for SavedAsset {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for SavedAsset {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
// Generated message `SavedFunction`: names of the concrete functions
// backing a saved tf.function, plus an optional FunctionSpec.
#[derive(PartialEq,Clone,Default)]
pub struct SavedFunction {
// message fields
pub concrete_functions: ::protobuf::RepeatedField<::std::string::String>,
pub function_spec: ::protobuf::SingularPtrField<FunctionSpec>,
// special fields
// Fields unknown to this generated schema, preserved for round-tripping.
pub unknown_fields: ::protobuf::UnknownFields,
// Serialized size memoized by compute_size for write_to_with_cached_sizes.
pub cached_size: ::protobuf::CachedSize,
}
// `<&SavedFunction>::default()` borrows the shared default instance.
impl<'a> ::std::default::Default for &'a SavedFunction {
fn default() -> &'a SavedFunction {
<SavedFunction as ::protobuf::Message>::default_instance()
}
}
// Generated accessor boilerplate (get/clear/set/mut/take per field).
impl SavedFunction {
pub fn new() -> SavedFunction {
::std::default::Default::default()
}
// repeated string concrete_functions = 1;
pub fn get_concrete_functions(&self) -> &[::std::string::String] {
&self.concrete_functions
}
pub fn clear_concrete_functions(&mut self) {
self.concrete_functions.clear();
}
// Param is passed by value, moved
pub fn set_concrete_functions(&mut self, v: ::protobuf::RepeatedField<::std::string::String>) {
self.concrete_functions = v;
}
// Mutable pointer to the field.
pub fn mut_concrete_functions(&mut self) -> &mut ::protobuf::RepeatedField<::std::string::String> {
&mut self.concrete_functions
}
// Take field
pub fn take_concrete_functions(&mut self) -> ::protobuf::RepeatedField<::std::string::String> {
::std::mem::replace(&mut self.concrete_functions, ::protobuf::RepeatedField::new())
}
// .tensorflow.FunctionSpec function_spec = 2;
// Getter returns the shared default instance when unset (proto3 semantics).
pub fn get_function_spec(&self) -> &FunctionSpec {
self.function_spec.as_ref().unwrap_or_else(|| <FunctionSpec as ::protobuf::Message>::default_instance())
}
pub fn clear_function_spec(&mut self) {
self.function_spec.clear();
}
pub fn has_function_spec(&self) -> bool {
self.function_spec.is_some()
}
// Param is passed by value, moved
pub fn set_function_spec(&mut self, v: FunctionSpec) {
self.function_spec = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_function_spec(&mut self) -> &mut FunctionSpec {
if self.function_spec.is_none() {
self.function_spec.set_default();
}
self.function_spec.as_mut().unwrap()
}
// Take field
pub fn take_function_spec(&mut self) -> FunctionSpec {
self.function_spec.take().unwrap_or_else(|| FunctionSpec::new())
}
}
// Wire-format (de)serialization and reflection support.
impl ::protobuf::Message for SavedFunction {
// Initialized iff the nested function_spec (when present) is initialized.
fn is_initialized(&self) -> bool {
for v in &self.function_spec {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_repeated_string_into(wire_type, is, &mut self.concrete_functions)?;
},
2 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.function_spec)?;
},
_ => {
// Unrecognized fields are stashed, not dropped.
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
for value in &self.concrete_functions {
my_size += ::protobuf::rt::string_size(1, &value);
};
if let Some(ref v) = self.function_spec.as_ref() {
let len = v.compute_size();
// 1 byte for the tag (field number < 16) + length prefix + payload.
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
// NOTE: relies on a prior compute_size call to have populated nested
// cached sizes (rust-protobuf convention) — v.get_cached_size() below.
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
for v in &self.concrete_functions {
os.write_string(1, &v)?;
};
if let Some(ref v) = self.function_spec.as_ref() {
os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SavedFunction {
SavedFunction::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
// Descriptor is built once and memoized in a lazy static.
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"concrete_functions",
|m: &SavedFunction| { &m.concrete_functions },
|m: &mut SavedFunction| { &mut m.concrete_functions },
));
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<FunctionSpec>>(
"function_spec",
|m: &SavedFunction| { &m.function_spec },
|m: &mut SavedFunction| { &mut m.function_spec },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<SavedFunction>(
"SavedFunction",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SavedFunction {
static instance: ::protobuf::rt::LazyV2<SavedFunction> = ::protobuf::rt::LazyV2::INIT;
instance.get(SavedFunction::new)
}
}
// Reset all fields (including preserved unknown fields) to defaults.
impl ::protobuf::Clear for SavedFunction {
fn clear(&mut self) {
self.concrete_functions.clear();
self.function_spec.clear();
self.unknown_fields.clear();
}
}
// Debug prints the message in protobuf text format.
impl ::std::fmt::Debug for SavedFunction {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for SavedFunction {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
// Generated message `CapturedTensor`: a tensor name paired with the
// concrete function it is captured by/for (exact relationship defined
// in the .proto — TODO confirm there).
#[derive(PartialEq,Clone,Default)]
pub struct CapturedTensor {
// message fields
pub name: ::std::string::String,
pub concrete_function: ::std::string::String,
// special fields
// Fields unknown to this generated schema, preserved for round-tripping.
pub unknown_fields: ::protobuf::UnknownFields,
// Serialized size memoized by compute_size for write_to_with_cached_sizes.
pub cached_size: ::protobuf::CachedSize,
}
// `<&CapturedTensor>::default()` borrows the shared default instance.
impl<'a> ::std::default::Default for &'a CapturedTensor {
fn default() -> &'a CapturedTensor {
<CapturedTensor as ::protobuf::Message>::default_instance()
}
}
// Generated accessor boilerplate (get/clear/set/mut/take per field).
impl CapturedTensor {
pub fn new() -> CapturedTensor {
::std::default::Default::default()
}
// string name = 1;
pub fn get_name(&self) -> &str {
&self.name
}
pub fn clear_name(&mut self) {
self.name.clear();
}
// Param is passed by value, moved
pub fn set_name(&mut self, v: ::std::string::String) {
self.name = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_name(&mut self) -> &mut ::std::string::String {
&mut self.name
}
// Take field
pub fn take_name(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.name, ::std::string::String::new())
}
// string concrete_function = 2;
pub fn get_concrete_function(&self) -> &str {
&self.concrete_function
}
pub fn clear_concrete_function(&mut self) {
self.concrete_function.clear();
}
// Param is passed by value, moved
pub fn set_concrete_function(&mut self, v: ::std::string::String) {
self.concrete_function = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_concrete_function(&mut self) -> &mut ::std::string::String {
&mut self.concrete_function
}
// Take field
pub fn take_concrete_function(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.concrete_function, ::std::string::String::new())
}
}
// Wire-format (de)serialization and reflection support.
impl ::protobuf::Message for CapturedTensor {
// proto3 message with only scalar fields: always initialized.
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.name)?;
},
2 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.concrete_function)?;
},
_ => {
// Unrecognized fields are stashed, not dropped.
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
// proto3: empty strings are omitted from the wire format.
if !self.name.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.name);
}
if !self.concrete_function.is_empty() {
my_size += ::protobuf::rt::string_size(2, &self.concrete_function);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.name.is_empty() {
os.write_string(1, &self.name)?;
}
if !self.concrete_function.is_empty() {
os.write_string(2, &self.concrete_function)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> CapturedTensor {
CapturedTensor::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
// Descriptor is built once and memoized in a lazy static.
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"name",
|m: &CapturedTensor| { &m.name },
|m: &mut CapturedTensor| { &mut m.name },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"concrete_function",
|m: &CapturedTensor| { &m.concrete_function },
|m: &mut CapturedTensor| { &mut m.concrete_function },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<CapturedTensor>(
"CapturedTensor",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static CapturedTensor {
static instance: ::protobuf::rt::LazyV2<CapturedTensor> = ::protobuf::rt::LazyV2::INIT;
instance.get(CapturedTensor::new)
}
}
// Reset all fields (including preserved unknown fields) to defaults.
impl ::protobuf::Clear for CapturedTensor {
fn clear(&mut self) {
self.name.clear();
self.concrete_function.clear();
self.unknown_fields.clear();
}
}
// Debug prints the message in protobuf text format.
impl ::std::fmt::Debug for CapturedTensor {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for CapturedTensor {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
// Generated message `SavedConcreteFunction`: captured-input indices plus
// structured input/output signatures of a concrete function.
// Field numbering starts at 2 — field 1 is unused here, likely reserved
// in the .proto (TODO confirm).
#[derive(PartialEq,Clone,Default)]
pub struct SavedConcreteFunction {
// message fields
pub bound_inputs: ::std::vec::Vec<i32>,
pub canonicalized_input_signature: ::protobuf::SingularPtrField<super::struct_pb::StructuredValue>,
pub output_signature: ::protobuf::SingularPtrField<super::struct_pb::StructuredValue>,
// special fields
// Fields unknown to this generated schema, preserved for round-tripping.
pub unknown_fields: ::protobuf::UnknownFields,
// Serialized size memoized by compute_size for write_to_with_cached_sizes.
pub cached_size: ::protobuf::CachedSize,
}
// `<&SavedConcreteFunction>::default()` borrows the shared default instance.
impl<'a> ::std::default::Default for &'a SavedConcreteFunction {
fn default() -> &'a SavedConcreteFunction {
<SavedConcreteFunction as ::protobuf::Message>::default_instance()
}
}
// Generated accessor boilerplate (get/clear/set/mut/take per field).
impl SavedConcreteFunction {
pub fn new() -> SavedConcreteFunction {
::std::default::Default::default()
}
// repeated int32 bound_inputs = 2;
pub fn get_bound_inputs(&self) -> &[i32] {
&self.bound_inputs
}
pub fn clear_bound_inputs(&mut self) {
self.bound_inputs.clear();
}
// Param is passed by value, moved
pub fn set_bound_inputs(&mut self, v: ::std::vec::Vec<i32>) {
self.bound_inputs = v;
}
// Mutable pointer to the field.
pub fn mut_bound_inputs(&mut self) -> &mut ::std::vec::Vec<i32> {
&mut self.bound_inputs
}
// Take field
pub fn take_bound_inputs(&mut self) -> ::std::vec::Vec<i32> {
::std::mem::replace(&mut self.bound_inputs, ::std::vec::Vec::new())
}
// .tensorflow.StructuredValue canonicalized_input_signature = 3;
// Getter returns the shared default instance when unset (proto3 semantics).
pub fn get_canonicalized_input_signature(&self) -> &super::struct_pb::StructuredValue {
self.canonicalized_input_signature.as_ref().unwrap_or_else(|| <super::struct_pb::StructuredValue as ::protobuf::Message>::default_instance())
}
pub fn clear_canonicalized_input_signature(&mut self) {
self.canonicalized_input_signature.clear();
}
pub fn has_canonicalized_input_signature(&self) -> bool {
self.canonicalized_input_signature.is_some()
}
// Param is passed by value, moved
pub fn set_canonicalized_input_signature(&mut self, v: super::struct_pb::StructuredValue) {
self.canonicalized_input_signature = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_canonicalized_input_signature(&mut self) -> &mut super::struct_pb::StructuredValue {
if self.canonicalized_input_signature.is_none() {
self.canonicalized_input_signature.set_default();
}
self.canonicalized_input_signature.as_mut().unwrap()
}
// Take field
pub fn take_canonicalized_input_signature(&mut self) -> super::struct_pb::StructuredValue {
self.canonicalized_input_signature.take().unwrap_or_else(|| super::struct_pb::StructuredValue::new())
}
// .tensorflow.StructuredValue output_signature = 4;
pub fn get_output_signature(&self) -> &super::struct_pb::StructuredValue {
self.output_signature.as_ref().unwrap_or_else(|| <super::struct_pb::StructuredValue as ::protobuf::Message>::default_instance())
}
pub fn clear_output_signature(&mut self) {
self.output_signature.clear();
}
pub fn has_output_signature(&self) -> bool {
self.output_signature.is_some()
}
// Param is passed by value, moved
pub fn set_output_signature(&mut self, v: super::struct_pb::StructuredValue) {
self.output_signature = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_output_signature(&mut self) -> &mut super::struct_pb::StructuredValue {
if self.output_signature.is_none() {
self.output_signature.set_default();
}
self.output_signature.as_mut().unwrap()
}
// Take field
pub fn take_output_signature(&mut self) -> super::struct_pb::StructuredValue {
self.output_signature.take().unwrap_or_else(|| super::struct_pb::StructuredValue::new())
}
}
// Wire-format (de)serialization and reflection support.
impl ::protobuf::Message for SavedConcreteFunction {
// Initialized iff both nested signatures (when present) are initialized.
fn is_initialized(&self) -> bool {
for v in &self.canonicalized_input_signature {
if !v.is_initialized() {
return false;
}
};
for v in &self.output_signature {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
2 => {
::protobuf::rt::read_repeated_int32_into(wire_type, is, &mut self.bound_inputs)?;
},
3 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.canonicalized_input_signature)?;
},
4 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.output_signature)?;
},
_ => {
// Unrecognized fields are stashed, not dropped.
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
for value in &self.bound_inputs {
my_size += ::protobuf::rt::value_size(2, *value, ::protobuf::wire_format::WireTypeVarint);
};
if let Some(ref v) = self.canonicalized_input_signature.as_ref() {
let len = v.compute_size();
// 1 byte for the tag (field number < 16) + length prefix + payload.
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
if let Some(ref v) = self.output_signature.as_ref() {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
// NOTE: relies on a prior compute_size call to have populated nested
// cached sizes (rust-protobuf convention) — v.get_cached_size() below.
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
for v in &self.bound_inputs {
os.write_int32(2, *v)?;
};
if let Some(ref v) = self.canonicalized_input_signature.as_ref() {
os.write_tag(3, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
if let Some(ref v) = self.output_signature.as_ref() {
os.write_tag(4, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SavedConcreteFunction {
SavedConcreteFunction::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
// Descriptor is built once and memoized in a lazy static.
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_vec_accessor::<_, ::protobuf::types::ProtobufTypeInt32>(
"bound_inputs",
|m: &SavedConcreteFunction| { &m.bound_inputs },
|m: &mut SavedConcreteFunction| { &mut m.bound_inputs },
));
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<super::struct_pb::StructuredValue>>(
"canonicalized_input_signature",
|m: &SavedConcreteFunction| { &m.canonicalized_input_signature },
|m: &mut SavedConcreteFunction| { &mut m.canonicalized_input_signature },
));
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<super::struct_pb::StructuredValue>>(
"output_signature",
|m: &SavedConcreteFunction| { &m.output_signature },
|m: &mut SavedConcreteFunction| { &mut m.output_signature },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<SavedConcreteFunction>(
"SavedConcreteFunction",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SavedConcreteFunction {
static instance: ::protobuf::rt::LazyV2<SavedConcreteFunction> = ::protobuf::rt::LazyV2::INIT;
instance.get(SavedConcreteFunction::new)
}
}
// Reset all fields (including preserved unknown fields) to defaults.
impl ::protobuf::Clear for SavedConcreteFunction {
fn clear(&mut self) {
self.bound_inputs.clear();
self.canonicalized_input_signature.clear();
self.output_signature.clear();
self.unknown_fields.clear();
}
}
// Debug prints the message in protobuf text format.
impl ::std::fmt::Debug for SavedConcreteFunction {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for SavedConcreteFunction {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
// Generated message `SavedBareConcreteFunction`: a concrete function
// referenced by name, with its argument keywords, a count of allowed
// positional arguments, and an optional FunctionSpec.
#[derive(PartialEq,Clone,Default)]
pub struct SavedBareConcreteFunction {
// message fields
pub concrete_function_name: ::std::string::String,
pub argument_keywords: ::protobuf::RepeatedField<::std::string::String>,
pub allowed_positional_arguments: i64,
pub function_spec: ::protobuf::SingularPtrField<FunctionSpec>,
// special fields
// Fields unknown to this generated schema, preserved for round-tripping.
pub unknown_fields: ::protobuf::UnknownFields,
// Serialized size memoized by compute_size for write_to_with_cached_sizes.
pub cached_size: ::protobuf::CachedSize,
}
// `<&SavedBareConcreteFunction>::default()` borrows the shared default instance.
impl<'a> ::std::default::Default for &'a SavedBareConcreteFunction {
fn default() -> &'a SavedBareConcreteFunction {
<SavedBareConcreteFunction as ::protobuf::Message>::default_instance()
}
}
// Generated accessor boilerplate (get/clear/set/mut/take per field).
impl SavedBareConcreteFunction {
pub fn new() -> SavedBareConcreteFunction {
::std::default::Default::default()
}
// string concrete_function_name = 1;
pub fn get_concrete_function_name(&self) -> &str {
&self.concrete_function_name
}
pub fn clear_concrete_function_name(&mut self) {
self.concrete_function_name.clear();
}
// Param is passed by value, moved
pub fn set_concrete_function_name(&mut self, v: ::std::string::String) {
self.concrete_function_name = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_concrete_function_name(&mut self) -> &mut ::std::string::String {
&mut self.concrete_function_name
}
// Take field
pub fn take_concrete_function_name(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.concrete_function_name, ::std::string::String::new())
}
// repeated string argument_keywords = 2;
pub fn get_argument_keywords(&self) -> &[::std::string::String] {
&self.argument_keywords
}
pub fn clear_argument_keywords(&mut self) {
self.argument_keywords.clear();
}
// Param is passed by value, moved
pub fn set_argument_keywords(&mut self, v: ::protobuf::RepeatedField<::std::string::String>) {
self.argument_keywords = v;
}
// Mutable pointer to the field.
pub fn mut_argument_keywords(&mut self) -> &mut ::protobuf::RepeatedField<::std::string::String> {
&mut self.argument_keywords
}
// Take field
pub fn take_argument_keywords(&mut self) -> ::protobuf::RepeatedField<::std::string::String> {
::std::mem::replace(&mut self.argument_keywords, ::protobuf::RepeatedField::new())
}
// int64 allowed_positional_arguments = 3;
pub fn get_allowed_positional_arguments(&self) -> i64 {
self.allowed_positional_arguments
}
pub fn clear_allowed_positional_arguments(&mut self) {
self.allowed_positional_arguments = 0;
}
// Param is passed by value, moved
pub fn set_allowed_positional_arguments(&mut self, v: i64) {
self.allowed_positional_arguments = v;
}
// .tensorflow.FunctionSpec function_spec = 4;
// Getter returns the shared default instance when unset (proto3 semantics).
pub fn get_function_spec(&self) -> &FunctionSpec {
self.function_spec.as_ref().unwrap_or_else(|| <FunctionSpec as ::protobuf::Message>::default_instance())
}
pub fn clear_function_spec(&mut self) {
self.function_spec.clear();
}
pub fn has_function_spec(&self) -> bool {
self.function_spec.is_some()
}
// Param is passed by value, moved
pub fn set_function_spec(&mut self, v: FunctionSpec) {
self.function_spec = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_function_spec(&mut self) -> &mut FunctionSpec {
if self.function_spec.is_none() {
self.function_spec.set_default();
}
self.function_spec.as_mut().unwrap()
}
// Take field
pub fn take_function_spec(&mut self) -> FunctionSpec {
self.function_spec.take().unwrap_or_else(|| FunctionSpec::new())
}
}
// Wire-format (de)serialization and reflection support.
impl ::protobuf::Message for SavedBareConcreteFunction {
// Initialized iff the nested function_spec (when present) is initialized.
fn is_initialized(&self) -> bool {
for v in &self.function_spec {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.concrete_function_name)?;
},
2 => {
::protobuf::rt::read_repeated_string_into(wire_type, is, &mut self.argument_keywords)?;
},
3 => {
// int64 must arrive as a varint; reject malformed input.
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int64()?;
self.allowed_positional_arguments = tmp;
},
4 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.function_spec)?;
},
_ => {
// Unrecognized fields are stashed, not dropped.
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
// proto3: default (empty/zero) values are omitted from the wire format.
if !self.concrete_function_name.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.concrete_function_name);
}
for value in &self.argument_keywords {
my_size += ::protobuf::rt::string_size(2, &value);
};
if self.allowed_positional_arguments != 0 {
my_size += ::protobuf::rt::value_size(3, self.allowed_positional_arguments, ::protobuf::wire_format::WireTypeVarint);
}
if let Some(ref v) = self.function_spec.as_ref() {
let len = v.compute_size();
// 1 byte for the tag (field number < 16) + length prefix + payload.
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
// NOTE: relies on a prior compute_size call to have populated nested
// cached sizes (rust-protobuf convention) — v.get_cached_size() below.
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.concrete_function_name.is_empty() {
os.write_string(1, &self.concrete_function_name)?;
}
for v in &self.argument_keywords {
os.write_string(2, &v)?;
};
if self.allowed_positional_arguments != 0 {
os.write_int64(3, self.allowed_positional_arguments)?;
}
if let Some(ref v) = self.function_spec.as_ref() {
os.write_tag(4, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SavedBareConcreteFunction {
SavedBareConcreteFunction::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
// Descriptor is built once and memoized in a lazy static.
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"concrete_function_name",
|m: &SavedBareConcreteFunction| { &m.concrete_function_name },
|m: &mut SavedBareConcreteFunction| { &mut m.concrete_function_name },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"argument_keywords",
|m: &SavedBareConcreteFunction| { &m.argument_keywords },
|m: &mut SavedBareConcreteFunction| { &mut m.argument_keywords },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt64>(
"allowed_positional_arguments",
|m: &SavedBareConcreteFunction| { &m.allowed_positional_arguments },
|m: &mut SavedBareConcreteFunction| { &mut m.allowed_positional_arguments },
));
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<FunctionSpec>>(
"function_spec",
|m: &SavedBareConcreteFunction| { &m.function_spec },
|m: &mut SavedBareConcreteFunction| { &mut m.function_spec },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<SavedBareConcreteFunction>(
"SavedBareConcreteFunction",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SavedBareConcreteFunction {
static instance: ::protobuf::rt::LazyV2<SavedBareConcreteFunction> = ::protobuf::rt::LazyV2::INIT;
instance.get(SavedBareConcreteFunction::new)
}
}
// Reset all fields (including preserved unknown fields) to defaults.
impl ::protobuf::Clear for SavedBareConcreteFunction {
fn clear(&mut self) {
self.concrete_function_name.clear();
self.argument_keywords.clear();
self.allowed_positional_arguments = 0;
self.function_spec.clear();
self.unknown_fields.clear();
}
}
// Debug prints the message in protobuf text format.
impl ::std::fmt::Debug for SavedBareConcreteFunction {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for SavedBareConcreteFunction {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
// Mirrors the `SavedConstant` message of saved_object_graph.proto: a constant
// tensor identified by the name of the graph operation that produces it.
#[derive(PartialEq,Clone,Default)]
pub struct SavedConstant {
// message fields
pub operation: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
// Lets `&SavedConstant` default to the shared static default instance, so
// generic code can obtain a reference without allocating.
impl<'a> ::std::default::Default for &'a SavedConstant {
fn default() -> &'a SavedConstant {
<SavedConstant as ::protobuf::Message>::default_instance()
}
}
// Generated accessors for `SavedConstant` (get/clear/set/mut/take per field).
impl SavedConstant {
pub fn new() -> SavedConstant {
::std::default::Default::default()
}
// string operation = 1;
pub fn get_operation(&self) -> &str {
&self.operation
}
pub fn clear_operation(&mut self) {
self.operation.clear();
}
// Param is passed by value, moved
pub fn set_operation(&mut self, v: ::std::string::String) {
self.operation = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_operation(&mut self) -> &mut ::std::string::String {
&mut self.operation
}
// Take field
// Moves the value out, leaving an empty string behind.
pub fn take_operation(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.operation, ::std::string::String::new())
}
}
// Wire-format (de)serialization for `SavedConstant`. Field numbers and wire
// types here must match saved_object_graph.proto; unrecognized fields are
// preserved in `unknown_fields` for round-tripping.
// NOTE(review): generated code — the statement order in compute_size /
// write_to_with_cached_sizes encodes the wire layout; do not reorder by hand.
impl ::protobuf::Message for SavedConstant {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.operation)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.operation.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.operation);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
// Cache so write_to_with_cached_sizes can emit length prefixes cheaply.
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.operation.is_empty() {
os.write_string(1, &self.operation)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SavedConstant {
SavedConstant::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"operation",
|m: &SavedConstant| { &m.operation },
|m: &mut SavedConstant| { &mut m.operation },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<SavedConstant>(
"SavedConstant",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SavedConstant {
static instance: ::protobuf::rt::LazyV2<SavedConstant> = ::protobuf::rt::LazyV2::INIT;
instance.get(SavedConstant::new)
}
}
// Resets all fields (and any retained unknown fields) to proto3 defaults.
impl ::protobuf::Clear for SavedConstant {
fn clear(&mut self) {
self.operation.clear();
self.unknown_fields.clear();
}
}
// Debug output uses protobuf text format.
impl ::std::fmt::Debug for SavedConstant {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for SavedConstant {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}<|fim▁hole|> // message fields
pub dtype: super::types::DataType,
pub shape: ::protobuf::SingularPtrField<super::tensor_shape::TensorShapeProto>,
pub trainable: bool,
pub synchronization: super::variable::VariableSynchronization,
pub aggregation: super::variable::VariableAggregation,
pub name: ::std::string::String,
pub device: ::std::string::String,
pub experimental_distributed_variable_components: ::protobuf::RepeatedField<SavedVariable>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
// Lets `&SavedVariable` default to the shared static default instance,
// avoiding an allocation when generic code needs a default by reference.
impl<'a> ::std::default::Default for &'a SavedVariable {
fn default() -> &'a SavedVariable {
<SavedVariable as ::protobuf::Message>::default_instance()
}
}
// Generated accessors for `SavedVariable` (a saved tf.Variable: dtype, shape,
// trainability, distribution metadata, name, device placement, and component
// variables for distributed variables). One get/clear/set (+ mut/take where
// applicable) group per proto field.
impl SavedVariable {
pub fn new() -> SavedVariable {
::std::default::Default::default()
}
// .tensorflow.DataType dtype = 1;
pub fn get_dtype(&self) -> super::types::DataType {
self.dtype
}
pub fn clear_dtype(&mut self) {
self.dtype = super::types::DataType::DT_INVALID;
}
// Param is passed by value, moved
pub fn set_dtype(&mut self, v: super::types::DataType) {
self.dtype = v;
}
// .tensorflow.TensorShapeProto shape = 2;
// Getter falls back to the shared default instance when the field is unset.
pub fn get_shape(&self) -> &super::tensor_shape::TensorShapeProto {
self.shape.as_ref().unwrap_or_else(|| <super::tensor_shape::TensorShapeProto as ::protobuf::Message>::default_instance())
}
pub fn clear_shape(&mut self) {
self.shape.clear();
}
pub fn has_shape(&self) -> bool {
self.shape.is_some()
}
// Param is passed by value, moved
pub fn set_shape(&mut self, v: super::tensor_shape::TensorShapeProto) {
self.shape = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_shape(&mut self) -> &mut super::tensor_shape::TensorShapeProto {
if self.shape.is_none() {
self.shape.set_default();
}
self.shape.as_mut().unwrap()
}
// Take field
pub fn take_shape(&mut self) -> super::tensor_shape::TensorShapeProto {
self.shape.take().unwrap_or_else(|| super::tensor_shape::TensorShapeProto::new())
}
// bool trainable = 3;
pub fn get_trainable(&self) -> bool {
self.trainable
}
pub fn clear_trainable(&mut self) {
self.trainable = false;
}
// Param is passed by value, moved
pub fn set_trainable(&mut self, v: bool) {
self.trainable = v;
}
// .tensorflow.VariableSynchronization synchronization = 4;
pub fn get_synchronization(&self) -> super::variable::VariableSynchronization {
self.synchronization
}
pub fn clear_synchronization(&mut self) {
self.synchronization = super::variable::VariableSynchronization::VARIABLE_SYNCHRONIZATION_AUTO;
}
// Param is passed by value, moved
pub fn set_synchronization(&mut self, v: super::variable::VariableSynchronization) {
self.synchronization = v;
}
// .tensorflow.VariableAggregation aggregation = 5;
pub fn get_aggregation(&self) -> super::variable::VariableAggregation {
self.aggregation
}
pub fn clear_aggregation(&mut self) {
self.aggregation = super::variable::VariableAggregation::VARIABLE_AGGREGATION_NONE;
}
// Param is passed by value, moved
pub fn set_aggregation(&mut self, v: super::variable::VariableAggregation) {
self.aggregation = v;
}
// string name = 6;
pub fn get_name(&self) -> &str {
&self.name
}
pub fn clear_name(&mut self) {
self.name.clear();
}
// Param is passed by value, moved
pub fn set_name(&mut self, v: ::std::string::String) {
self.name = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_name(&mut self) -> &mut ::std::string::String {
&mut self.name
}
// Take field
pub fn take_name(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.name, ::std::string::String::new())
}
// string device = 7;
pub fn get_device(&self) -> &str {
&self.device
}
pub fn clear_device(&mut self) {
self.device.clear();
}
// Param is passed by value, moved
pub fn set_device(&mut self, v: ::std::string::String) {
self.device = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_device(&mut self) -> &mut ::std::string::String {
&mut self.device
}
// Take field
pub fn take_device(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.device, ::std::string::String::new())
}
// repeated .tensorflow.SavedVariable experimental_distributed_variable_components = 8;
pub fn get_experimental_distributed_variable_components(&self) -> &[SavedVariable] {
&self.experimental_distributed_variable_components
}
pub fn clear_experimental_distributed_variable_components(&mut self) {
self.experimental_distributed_variable_components.clear();
}
// Param is passed by value, moved
pub fn set_experimental_distributed_variable_components(&mut self, v: ::protobuf::RepeatedField<SavedVariable>) {
self.experimental_distributed_variable_components = v;
}
// Mutable pointer to the field.
pub fn mut_experimental_distributed_variable_components(&mut self) -> &mut ::protobuf::RepeatedField<SavedVariable> {
&mut self.experimental_distributed_variable_components
}
// Take field
pub fn take_experimental_distributed_variable_components(&mut self) -> ::protobuf::RepeatedField<SavedVariable> {
::std::mem::replace(&mut self.experimental_distributed_variable_components, ::protobuf::RepeatedField::new())
}
}
// Wire-format (de)serialization and reflection for `SavedVariable`. Field
// numbers 1..8 must match saved_object_graph.proto; proto3 default values
// are skipped on the wire (the `!= default` guards below), and unknown
// fields are preserved for round-tripping.
// NOTE(review): generated code — regenerate from the .proto instead of
// editing; the compute_size/write order encodes the wire layout.
impl ::protobuf::Message for SavedVariable {
fn is_initialized(&self) -> bool {
// SingularPtrField/RepeatedField iterate their contained messages.
for v in &self.shape {
if !v.is_initialized() {
return false;
}
};
for v in &self.experimental_distributed_variable_components {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.dtype, 1, &mut self.unknown_fields)?
},
2 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.shape)?;
},
3 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_bool()?;
self.trainable = tmp;
},
4 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.synchronization, 4, &mut self.unknown_fields)?
},
5 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.aggregation, 5, &mut self.unknown_fields)?
},
6 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.name)?;
},
7 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.device)?;
},
8 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.experimental_distributed_variable_components)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if self.dtype != super::types::DataType::DT_INVALID {
my_size += ::protobuf::rt::enum_size(1, self.dtype);
}
if let Some(ref v) = self.shape.as_ref() {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
if self.trainable != false {
// 1 byte tag + 1 byte varint bool.
my_size += 2;
}
if self.synchronization != super::variable::VariableSynchronization::VARIABLE_SYNCHRONIZATION_AUTO {
my_size += ::protobuf::rt::enum_size(4, self.synchronization);
}
if self.aggregation != super::variable::VariableAggregation::VARIABLE_AGGREGATION_NONE {
my_size += ::protobuf::rt::enum_size(5, self.aggregation);
}
if !self.name.is_empty() {
my_size += ::protobuf::rt::string_size(6, &self.name);
}
if !self.device.is_empty() {
my_size += ::protobuf::rt::string_size(7, &self.device);
}
for value in &self.experimental_distributed_variable_components {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
// Cache so nested length prefixes can be written without recomputing.
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if self.dtype != super::types::DataType::DT_INVALID {
os.write_enum(1, ::protobuf::ProtobufEnum::value(&self.dtype))?;
}
if let Some(ref v) = self.shape.as_ref() {
os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
if self.trainable != false {
os.write_bool(3, self.trainable)?;
}
if self.synchronization != super::variable::VariableSynchronization::VARIABLE_SYNCHRONIZATION_AUTO {
os.write_enum(4, ::protobuf::ProtobufEnum::value(&self.synchronization))?;
}
if self.aggregation != super::variable::VariableAggregation::VARIABLE_AGGREGATION_NONE {
os.write_enum(5, ::protobuf::ProtobufEnum::value(&self.aggregation))?;
}
if !self.name.is_empty() {
os.write_string(6, &self.name)?;
}
if !self.device.is_empty() {
os.write_string(7, &self.device)?;
}
for v in &self.experimental_distributed_variable_components {
os.write_tag(8, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SavedVariable {
SavedVariable::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<super::types::DataType>>(
"dtype",
|m: &SavedVariable| { &m.dtype },
|m: &mut SavedVariable| { &mut m.dtype },
));
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<super::tensor_shape::TensorShapeProto>>(
"shape",
|m: &SavedVariable| { &m.shape },
|m: &mut SavedVariable| { &mut m.shape },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBool>(
"trainable",
|m: &SavedVariable| { &m.trainable },
|m: &mut SavedVariable| { &mut m.trainable },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<super::variable::VariableSynchronization>>(
"synchronization",
|m: &SavedVariable| { &m.synchronization },
|m: &mut SavedVariable| { &mut m.synchronization },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<super::variable::VariableAggregation>>(
"aggregation",
|m: &SavedVariable| { &m.aggregation },
|m: &mut SavedVariable| { &mut m.aggregation },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"name",
|m: &SavedVariable| { &m.name },
|m: &mut SavedVariable| { &mut m.name },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"device",
|m: &SavedVariable| { &m.device },
|m: &mut SavedVariable| { &mut m.device },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<SavedVariable>>(
"experimental_distributed_variable_components",
|m: &SavedVariable| { &m.experimental_distributed_variable_components },
|m: &mut SavedVariable| { &mut m.experimental_distributed_variable_components },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<SavedVariable>(
"SavedVariable",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SavedVariable {
static instance: ::protobuf::rt::LazyV2<SavedVariable> = ::protobuf::rt::LazyV2::INIT;
instance.get(SavedVariable::new)
}
}
// Resets every field (including retained unknown fields) to its proto3
// default; must stay in sync with the struct's field list.
impl ::protobuf::Clear for SavedVariable {
fn clear(&mut self) {
self.dtype = super::types::DataType::DT_INVALID;
self.shape.clear();
self.trainable = false;
self.synchronization = super::variable::VariableSynchronization::VARIABLE_SYNCHRONIZATION_AUTO;
self.aggregation = super::variable::VariableAggregation::VARIABLE_AGGREGATION_NONE;
self.name.clear();
self.device.clear();
self.experimental_distributed_variable_components.clear();
self.unknown_fields.clear();
}
}
// Debug output uses protobuf text format.
impl ::std::fmt::Debug for SavedVariable {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
// Enables use of this message as a generic reflection value.
impl ::protobuf::reflect::ProtobufValue for SavedVariable {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
// Mirrors the `FunctionSpec` message of saved_object_graph.proto: the Python
// calling convention of a saved tf.function (argspec, method flag, optional
// input signature, and XLA jit_compile setting). Note the field-number gap:
// input_signature is proto field 5, jit_compile is 6.
#[derive(PartialEq,Clone,Default)]
pub struct FunctionSpec {
// message fields
pub fullargspec: ::protobuf::SingularPtrField<super::struct_pb::StructuredValue>,
pub is_method: bool,
pub input_signature: ::protobuf::SingularPtrField<super::struct_pb::StructuredValue>,
pub jit_compile: FunctionSpec_JitCompile,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
// Lets `&FunctionSpec` default to the shared static default instance.
impl<'a> ::std::default::Default for &'a FunctionSpec {
fn default() -> &'a FunctionSpec {
<FunctionSpec as ::protobuf::Message>::default_instance()
}
}
// Generated accessors for `FunctionSpec` (get/clear/set, plus has/mut/take
// for the singular message fields).
impl FunctionSpec {
pub fn new() -> FunctionSpec {
::std::default::Default::default()
}
// .tensorflow.StructuredValue fullargspec = 1;
// Getter falls back to the shared default instance when the field is unset.
pub fn get_fullargspec(&self) -> &super::struct_pb::StructuredValue {
self.fullargspec.as_ref().unwrap_or_else(|| <super::struct_pb::StructuredValue as ::protobuf::Message>::default_instance())
}
pub fn clear_fullargspec(&mut self) {
self.fullargspec.clear();
}
pub fn has_fullargspec(&self) -> bool {
self.fullargspec.is_some()
}
// Param is passed by value, moved
pub fn set_fullargspec(&mut self, v: super::struct_pb::StructuredValue) {
self.fullargspec = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_fullargspec(&mut self) -> &mut super::struct_pb::StructuredValue {
if self.fullargspec.is_none() {
self.fullargspec.set_default();
}
self.fullargspec.as_mut().unwrap()
}
// Take field
pub fn take_fullargspec(&mut self) -> super::struct_pb::StructuredValue {
self.fullargspec.take().unwrap_or_else(|| super::struct_pb::StructuredValue::new())
}
// bool is_method = 2;
pub fn get_is_method(&self) -> bool {
self.is_method
}
pub fn clear_is_method(&mut self) {
self.is_method = false;
}
// Param is passed by value, moved
pub fn set_is_method(&mut self, v: bool) {
self.is_method = v;
}
// .tensorflow.StructuredValue input_signature = 5;
pub fn get_input_signature(&self) -> &super::struct_pb::StructuredValue {
self.input_signature.as_ref().unwrap_or_else(|| <super::struct_pb::StructuredValue as ::protobuf::Message>::default_instance())
}
pub fn clear_input_signature(&mut self) {
self.input_signature.clear();
}
pub fn has_input_signature(&self) -> bool {
self.input_signature.is_some()
}
// Param is passed by value, moved
pub fn set_input_signature(&mut self, v: super::struct_pb::StructuredValue) {
self.input_signature = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_input_signature(&mut self) -> &mut super::struct_pb::StructuredValue {
if self.input_signature.is_none() {
self.input_signature.set_default();
}
self.input_signature.as_mut().unwrap()
}
// Take field
pub fn take_input_signature(&mut self) -> super::struct_pb::StructuredValue {
self.input_signature.take().unwrap_or_else(|| super::struct_pb::StructuredValue::new())
}
// .tensorflow.FunctionSpec.JitCompile jit_compile = 6;
pub fn get_jit_compile(&self) -> FunctionSpec_JitCompile {
self.jit_compile
}
pub fn clear_jit_compile(&mut self) {
self.jit_compile = FunctionSpec_JitCompile::DEFAULT;
}
// Param is passed by value, moved
pub fn set_jit_compile(&mut self, v: FunctionSpec_JitCompile) {
self.jit_compile = v;
}
}
// Wire-format (de)serialization and reflection for `FunctionSpec`. Field
// numbers (1, 2, 5, 6 — 3 and 4 are reserved/skipped in the proto) must match
// saved_object_graph.proto; unknown fields are preserved for round-tripping.
// NOTE(review): generated code — regenerate from the .proto rather than
// hand-editing.
impl ::protobuf::Message for FunctionSpec {
fn is_initialized(&self) -> bool {
// SingularPtrField iterates zero-or-one contained message.
for v in &self.fullargspec {
if !v.is_initialized() {
return false;
}
};
for v in &self.input_signature {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.fullargspec)?;
},
2 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_bool()?;
self.is_method = tmp;
},
5 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.input_signature)?;
},
6 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.jit_compile, 6, &mut self.unknown_fields)?
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(ref v) = self.fullargspec.as_ref() {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
if self.is_method != false {
// 1 byte tag + 1 byte varint bool.
my_size += 2;
}
if let Some(ref v) = self.input_signature.as_ref() {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
if self.jit_compile != FunctionSpec_JitCompile::DEFAULT {
my_size += ::protobuf::rt::enum_size(6, self.jit_compile);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
// Cache so write_to_with_cached_sizes can emit length prefixes cheaply.
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if let Some(ref v) = self.fullargspec.as_ref() {
os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
if self.is_method != false {
os.write_bool(2, self.is_method)?;
}
if let Some(ref v) = self.input_signature.as_ref() {
os.write_tag(5, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
if self.jit_compile != FunctionSpec_JitCompile::DEFAULT {
os.write_enum(6, ::protobuf::ProtobufEnum::value(&self.jit_compile))?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> FunctionSpec {
FunctionSpec::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<super::struct_pb::StructuredValue>>(
"fullargspec",
|m: &FunctionSpec| { &m.fullargspec },
|m: &mut FunctionSpec| { &mut m.fullargspec },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBool>(
"is_method",
|m: &FunctionSpec| { &m.is_method },
|m: &mut FunctionSpec| { &mut m.is_method },
));
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<super::struct_pb::StructuredValue>>(
"input_signature",
|m: &FunctionSpec| { &m.input_signature },
|m: &mut FunctionSpec| { &mut m.input_signature },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<FunctionSpec_JitCompile>>(
"jit_compile",
|m: &FunctionSpec| { &m.jit_compile },
|m: &mut FunctionSpec| { &mut m.jit_compile },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<FunctionSpec>(
"FunctionSpec",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static FunctionSpec {
static instance: ::protobuf::rt::LazyV2<FunctionSpec> = ::protobuf::rt::LazyV2::INIT;
instance.get(FunctionSpec::new)
}
}
// Resets all fields (and retained unknown fields) to proto3 defaults.
impl ::protobuf::Clear for FunctionSpec {
fn clear(&mut self) {
self.fullargspec.clear();
self.is_method = false;
self.input_signature.clear();
self.jit_compile = FunctionSpec_JitCompile::DEFAULT;
self.unknown_fields.clear();
}
}
// Debug output uses protobuf text format.
impl ::std::fmt::Debug for FunctionSpec {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
// Enables use of this message as a generic reflection value.
impl ::protobuf::reflect::ProtobufValue for FunctionSpec {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
// Mirrors the nested `FunctionSpec.JitCompile` proto enum: whether the saved
// function requests XLA compilation (DEFAULT = unspecified/inherit).
// Discriminant values must match the proto field numbers.
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum FunctionSpec_JitCompile {
DEFAULT = 0,
ON = 1,
OFF = 2,
}
// Protobuf enum plumbing: i32 <-> variant conversion, the full variant list,
// and a lazily-built reflection descriptor. `from_i32` returns None for
// values not defined in the proto (callers keep them as unknown fields).
impl ::protobuf::ProtobufEnum for FunctionSpec_JitCompile {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<FunctionSpec_JitCompile> {
match value {
0 => ::std::option::Option::Some(FunctionSpec_JitCompile::DEFAULT),
1 => ::std::option::Option::Some(FunctionSpec_JitCompile::ON),
2 => ::std::option::Option::Some(FunctionSpec_JitCompile::OFF),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [FunctionSpec_JitCompile] = &[
FunctionSpec_JitCompile::DEFAULT,
FunctionSpec_JitCompile::ON,
FunctionSpec_JitCompile::OFF,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<FunctionSpec_JitCompile>("FunctionSpec.JitCompile", file_descriptor_proto())
})
}
}
// The enum is a plain discriminant, so it is trivially copyable.
impl ::std::marker::Copy for FunctionSpec_JitCompile {
}
// Proto3 enum default is the zero-valued variant.
impl ::std::default::Default for FunctionSpec_JitCompile {
fn default() -> Self {
FunctionSpec_JitCompile::DEFAULT
}
}
// Enables use of this enum as a generic reflection value.
impl ::protobuf::reflect::ProtobufValue for FunctionSpec_JitCompile {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
// Mirrors the `SavedResource` message of saved_object_graph.proto: a resource
// object saved with an optional device placement string.
#[derive(PartialEq,Clone,Default)]
pub struct SavedResource {
// message fields
pub device: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
// Lets `&SavedResource` default to the shared static default instance.
impl<'a> ::std::default::Default for &'a SavedResource {
fn default() -> &'a SavedResource {
<SavedResource as ::protobuf::Message>::default_instance()
}
}
// Generated accessors for `SavedResource` (get/clear/set/mut/take for the
// single `device` string field).
impl SavedResource {
pub fn new() -> SavedResource {
::std::default::Default::default()
}
// string device = 1;
pub fn get_device(&self) -> &str {
&self.device
}
pub fn clear_device(&mut self) {
self.device.clear();
}
// Param is passed by value, moved
pub fn set_device(&mut self, v: ::std::string::String) {
self.device = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_device(&mut self) -> &mut ::std::string::String {
&mut self.device
}
// Take field
// Moves the value out, leaving an empty string behind.
pub fn take_device(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.device, ::std::string::String::new())
}
}
// Wire-format (de)serialization and reflection for `SavedResource` (single
// string field `device` = 1). Unknown fields are preserved for
// round-tripping.
// NOTE(review): generated code — regenerate from the .proto rather than
// hand-editing.
impl ::protobuf::Message for SavedResource {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.device)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.device.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.device);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
// Cache so write_to_with_cached_sizes can emit length prefixes cheaply.
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.device.is_empty() {
os.write_string(1, &self.device)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SavedResource {
SavedResource::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"device",
|m: &SavedResource| { &m.device },
|m: &mut SavedResource| { &mut m.device },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<SavedResource>(
"SavedResource",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SavedResource {
static instance: ::protobuf::rt::LazyV2<SavedResource> = ::protobuf::rt::LazyV2::INIT;
instance.get(SavedResource::new)
}
}
// Resets all fields (and retained unknown fields) to proto3 defaults.
impl ::protobuf::Clear for SavedResource {
fn clear(&mut self) {
self.device.clear();
self.unknown_fields.clear();
}
}
// Debug output uses protobuf text format.
impl ::std::fmt::Debug for SavedResource {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
// Enables use of this message as a generic reflection value.
impl ::protobuf::reflect::ProtobufValue for SavedResource {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
/// Generated message type for `tensorflow.SaveableObject`
/// (`save_function` = field 2, `restore_function` = field 3; see the
/// embedded file descriptor at the bottom of this file).
#[derive(PartialEq,Clone,Default)]
pub struct SaveableObject {
    // message fields
    pub save_function: i32,
    pub restore_function: i32,
    // special fields
    pub unknown_fields: ::protobuf::UnknownFields,
    pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a SaveableObject {
    // Default for a *reference* borrows the shared static default instance.
    fn default() -> &'a SaveableObject {
        <SaveableObject as ::protobuf::Message>::default_instance()
    }
}
impl SaveableObject {
    /// Create a new message with all fields at their proto3 defaults (0).
    pub fn new() -> SaveableObject {
        ::std::default::Default::default()
    }
    // int32 save_function = 2;
    pub fn get_save_function(&self) -> i32 {
        self.save_function
    }
    pub fn clear_save_function(&mut self) {
        self.save_function = 0;
    }
    // Param is passed by value, moved
    pub fn set_save_function(&mut self, v: i32) {
        self.save_function = v;
    }
    // int32 restore_function = 3;
    pub fn get_restore_function(&self) -> i32 {
        self.restore_function
    }
    pub fn clear_restore_function(&mut self) {
        self.restore_function = 0;
    }
    // Param is passed by value, moved
    pub fn set_restore_function(&mut self, v: i32) {
        self.restore_function = v;
    }
}
impl ::protobuf::Message for SaveableObject {
    // proto3: every field has a default, so any value is "initialized".
    fn is_initialized(&self) -> bool {
        true
    }
    // Decode fields from the wire by tag number; fields with unexpected
    // wire types are an error, unrecognized tags are preserved verbatim
    // in unknown_fields.
    fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
        while !is.eof()? {
            let (field_number, wire_type) = is.read_tag_unpack()?;
            match field_number {
                2 => {
                    if wire_type != ::protobuf::wire_format::WireTypeVarint {
                        return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
                    }
                    let tmp = is.read_int32()?;
                    self.save_function = tmp;
                },
                3 => {
                    if wire_type != ::protobuf::wire_format::WireTypeVarint {
                        return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
                    }
                    let tmp = is.read_int32()?;
                    self.restore_function = tmp;
                },
                _ => {
                    ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
                },
            };
        }
        ::std::result::Result::Ok(())
    }
    // Compute sizes of nested messages
    // Fields equal to their proto3 default (0) are skipped on the wire,
    // so they contribute no bytes here either.
    #[allow(unused_variables)]
    fn compute_size(&self) -> u32 {
        let mut my_size = 0;
        if self.save_function != 0 {
            my_size += ::protobuf::rt::value_size(2, self.save_function, ::protobuf::wire_format::WireTypeVarint);
        }
        if self.restore_function != 0 {
            my_size += ::protobuf::rt::value_size(3, self.restore_function, ::protobuf::wire_format::WireTypeVarint);
        }
        my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
        self.cached_size.set(my_size);
        my_size
    }
    // Serialize non-default fields followed by any preserved unknown fields.
    fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
        if self.save_function != 0 {
            os.write_int32(2, self.save_function)?;
        }
        if self.restore_function != 0 {
            os.write_int32(3, self.restore_function)?;
        }
        os.write_unknown_fields(self.get_unknown_fields())?;
        ::std::result::Result::Ok(())
    }
    fn get_cached_size(&self) -> u32 {
        self.cached_size.get()
    }
    fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
        &self.unknown_fields
    }
    fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
        &mut self.unknown_fields
    }
    fn as_any(&self) -> &dyn (::std::any::Any) {
        self as &dyn (::std::any::Any)
    }
    fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
        self as &mut dyn (::std::any::Any)
    }
    fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
        self
    }
    fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
        Self::descriptor_static()
    }
    fn new() -> SaveableObject {
        SaveableObject::new()
    }
    // Lazily build the reflection descriptor: one accessor per field,
    // resolved against the embedded file descriptor.
    fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
        static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
        descriptor.get(|| {
            let mut fields = ::std::vec::Vec::new();
            fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt32>(
                "save_function",
                |m: &SaveableObject| { &m.save_function },
                |m: &mut SaveableObject| { &mut m.save_function },
            ));
            fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt32>(
                "restore_function",
                |m: &SaveableObject| { &m.restore_function },
                |m: &mut SaveableObject| { &mut m.restore_function },
            ));
            ::protobuf::reflect::MessageDescriptor::new_pb_name::<SaveableObject>(
                "SaveableObject",
                fields,
                file_descriptor_proto()
            )
        })
    }
    // Shared immutable default instance, created once on first access.
    fn default_instance() -> &'static SaveableObject {
        static instance: ::protobuf::rt::LazyV2<SaveableObject> = ::protobuf::rt::LazyV2::INIT;
        instance.get(SaveableObject::new)
    }
}
impl ::protobuf::Clear for SaveableObject {
    // Reset every field (and any preserved unknown fields) to defaults.
    fn clear(&mut self) {
        self.save_function = 0;
        self.restore_function = 0;
        self.unknown_fields.clear();
    }
}
impl ::std::fmt::Debug for SaveableObject {
    // Debug output is rendered in protobuf text format rather than the
    // derived struct representation.
    fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
        ::protobuf::text_format::fmt(self, f)
    }
}
impl ::protobuf::reflect::ProtobufValue for SaveableObject {
    // Expose this message to the protobuf reflection API.
    fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
        ::protobuf::reflect::ReflectValueRef::Message(self)
    }
}
static file_descriptor_proto_data: &'static [u8] = b"\
\n1tensorflow/core/protobuf/saved_object_graph.proto\x12\ntensorflow\x1a\
,tensorflow/core/framework/tensor_shape.proto\x1a%tensorflow/core/framew\
ork/types.proto\x1a(tensorflow/core/framework/variable.proto\x1a(tensorf\
low/core/framework/versions.proto\x1a%tensorflow/core/protobuf/struct.pr\
oto\x1a5tensorflow/core/protobuf/trackable_object_graph.proto\"\x8e\x02\
\n\x10SavedObjectGraph\x12-\n\x05nodes\x18\x01\x20\x03(\x0b2\x17.tensorf\
low.SavedObjectR\x05nodes\x12b\n\x12concrete_functions\x18\x02\x20\x03(\
\x0b23.tensorflow.SavedObjectGraph.ConcreteFunctionsEntryR\x11concreteFu\
nctions\x1ag\n\x16ConcreteFunctionsEntry\x12\x10\n\x03key\x18\x01\x20\
\x01(\tR\x03key\x127\n\x05value\x18\x02\x20\x01(\x0b2!.tensorflow.SavedC\
oncreteFunctionR\x05value:\x028\x01\"\xa7\x07\n\x0bSavedObject\x12\\\n\
\x08children\x18\x01\x20\x03(\[email protected]\
kableObject.ObjectReferenceR\x08children\x12m\n\x0eslot_variables\x18\
\x03\x20\x03(\x0b2F.tensorflow.TrackableObjectGraph.TrackableObject.Slot\
VariableReferenceR\rslotVariables\x12>\n\x0buser_object\x18\x04\x20\x01(\
\x0b2\x1b.tensorflow.SavedUserObjectH\0R\nuserObject\x12.\n\x05asset\x18\
\x05\x20\x01(\x0b2\x16.tensorflow.SavedAssetH\0R\x05asset\x127\n\x08func\
tion\x18\x06\x20\x01(\x0b2\x19.tensorflow.SavedFunctionH\0R\x08function\
\x127\n\x08variable\x18\x07\x20\x01(\x0b2\x19.tensorflow.SavedVariableH\
\0R\x08variable\x12]\n\x16bare_concrete_function\x18\x08\x20\x01(\x0b2%.\
tensorflow.SavedBareConcreteFunctionH\0R\x14bareConcreteFunction\x127\n\
\x08constant\x18\t\x20\x01(\x0b2\x19.tensorflow.SavedConstantH\0R\x08con\
stant\x127\n\x08resource\x18\n\x20\x01(\x0b2\x19.tensorflow.SavedResourc\
eH\0R\x08resource\x12E\n\x0fcaptured_tensor\x18\x0c\x20\x01(\x0b2\x1a.te\
nsorflow.CapturedTensorH\0R\x0ecapturedTensor\x12W\n\x10saveable_objects\
\x18\x0b\x20\x03(\x0b2,.tensorflow.SavedObject.SaveableObjectsEntryR\x0f\
saveableObjects\x1a^\n\x14SaveableObjectsEntry\x12\x10\n\x03key\x18\x01\
\x20\x01(\tR\x03key\x120\n\x05value\x18\x02\x20\x01(\x0b2\x1a.tensorflow\
.SaveableObjectR\x05value:\x028\x01B\x06\n\x04kindJ\x04\x08\x02\x10\x03R\
\nattributes\"\x83\x01\n\x0fSavedUserObject\x12\x1e\n\nidentifier\x18\
\x01\x20\x01(\tR\nidentifier\x120\n\x07version\x18\x02\x20\x01(\x0b2\x16\
.tensorflow.VersionDefR\x07version\x12\x1e\n\x08metadata\x18\x03\x20\x01\
(\tR\x08metadataB\x02\x18\x01\"=\n\nSavedAsset\x12/\n\x14asset_file_def_\
index\x18\x01\x20\x01(\x05R\x11assetFileDefIndex\"}\n\rSavedFunction\x12\
-\n\x12concrete_functions\x18\x01\x20\x03(\tR\x11concreteFunctions\x12=\
\n\rfunction_spec\x18\x02\x20\x01(\x0b2\x18.tensorflow.FunctionSpecR\x0c\
functionSpec\"Q\n\x0eCapturedTensor\x12\x12\n\x04name\x18\x01\x20\x01(\t\
R\x04name\x12+\n\x11concrete_function\x18\x02\x20\x01(\tR\x10concreteFun\
ction\"\xe3\x01\n\x15SavedConcreteFunction\x12!\n\x0cbound_inputs\x18\
\x02\x20\x03(\x05R\x0bboundInputs\x12_\n\x1dcanonicalized_input_signatur\
e\x18\x03\x20\x01(\x0b2\x1b.tensorflow.StructuredValueR\x1bcanonicalized\
InputSignature\x12F\n\x10output_signature\x18\x04\x20\x01(\x0b2\x1b.tens\
orflow.StructuredValueR\x0foutputSignature\"\xff\x01\n\x19SavedBareConcr\
eteFunction\x124\n\x16concrete_function_name\x18\x01\x20\x01(\tR\x14conc\
reteFunctionName\x12+\n\x11argument_keywords\x18\x02\x20\x03(\tR\x10argu\
mentKeywords\x12@\n\x1callowed_positional_arguments\x18\x03\x20\x01(\x03\
R\x1aallowedPositionalArguments\x12=\n\rfunction_spec\x18\x04\x20\x01(\
\x0b2\x18.tensorflow.FunctionSpecR\x0cfunctionSpec\"-\n\rSavedConstant\
\x12\x1c\n\toperation\x18\x01\x20\x01(\tR\toperation\"\xc7\x03\n\rSavedV\
ariable\x12*\n\x05dtype\x18\x01\x20\x01(\x0e2\x14.tensorflow.DataTypeR\
\x05dtype\x122\n\x05shape\x18\x02\x20\x01(\x0b2\x1c.tensorflow.TensorSha\
peProtoR\x05shape\x12\x1c\n\ttrainable\x18\x03\x20\x01(\x08R\ttrainable\
\x12M\n\x0fsynchronization\x18\x04\x20\x01(\x0e2#.tensorflow.VariableSyn\
chronizationR\x0fsynchronization\x12A\n\x0baggregation\x18\x05\x20\x01(\
\x0e2\x1f.tensorflow.VariableAggregationR\x0baggregation\x12\x12\n\x04na\
me\x18\x06\x20\x01(\tR\x04name\x12\x16\n\x06device\x18\x07\x20\x01(\tR\
\x06device\x12z\n,experimental_distributed_variable_components\x18\x08\
\x20\x03(\x0b2\x19.tensorflow.SavedVariableR)experimentalDistributedVari\
ableComponents\"\xae\x02\n\x0cFunctionSpec\x12=\n\x0bfullargspec\x18\x01\
\x20\x01(\x0b2\x1b.tensorflow.StructuredValueR\x0bfullargspec\x12\x1b\n\
\tis_method\x18\x02\x20\x01(\x08R\x08isMethod\x12D\n\x0finput_signature\
\x18\x05\x20\x01(\x0b2\x1b.tensorflow.StructuredValueR\x0einputSignature\
\x12D\n\x0bjit_compile\x18\x06\x20\x01(\x0e2#.tensorflow.FunctionSpec.Ji\
tCompileR\njitCompile\"*\n\nJitCompile\x12\x0b\n\x07DEFAULT\x10\0\x12\
\x06\n\x02ON\x10\x01\x12\x07\n\x03OFF\x10\x02J\x04\x08\x03\x10\x04J\x04\
\x08\x04\x10\x05\"'\n\rSavedResource\x12\x16\n\x06device\x18\x01\x20\x01\
(\tR\x06device\"`\n\x0eSaveableObject\x12#\n\rsave_function\x18\x02\x20\
\x01(\x05R\x0csaveFunction\x12)\n\x10restore_function\x18\x03\x20\x01(\
\x05R\x0frestoreFunctionBZZUgithub.com/tensorflow/tensorflow/tensorflow/\
go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01b\x06proto3\
";
static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT;
// Parse the serialized FileDescriptorProto embedded above. An unwrap
// failure here would indicate corrupted generated bytes (a codegen bug),
// so panicking is acceptable.
fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {
    ::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap()
}
/// Lazily-parsed file descriptor shared by all messages in this module.
pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {
    file_descriptor_proto_lazy.get(|| {
        parse_descriptor_proto()
    })
}
#[derive(PartialEq,Clone,Default)]
pub struct SavedVariable { |
<|file_name|>removeuiobjectmembervisitor.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of Qt Creator.
**
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3 as published by the Free Software
** Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-3.0.html.
**
****************************************************************************/
#include "removeuiobjectmembervisitor.h"
#include <qmljs/parser/qmljsast_p.h>
#include <QDebug>
using namespace QmlDesigner;
using namespace QmlDesigner::Internal;
// Visitor that deletes the QML object member whose first token starts at
// \a objectLocation, applying the edit through \a modifier.
RemoveUIObjectMemberVisitor::RemoveUIObjectMemberVisitor(TextModifier &modifier,
                                                         quint32 objectLocation):
    QMLRewriter(modifier),
    objectLocation(objectLocation)
{
}
// Maintain the ancestor stack while walking the AST: "parents" holds the
// path from the root to the current node (top = current node). It is
// consumed by containingArray() to look up enclosing array bindings.
bool RemoveUIObjectMemberVisitor::preVisit(QmlJS::AST::Node *ast)
{
    parents.push(ast);
    return true;
}
void RemoveUIObjectMemberVisitor::postVisit(QmlJS::AST::Node *)
{
    parents.pop();
}
// Every kind of object member is handled uniformly by visitObjectMember().
bool RemoveUIObjectMemberVisitor::visit(QmlJS::AST::UiPublicMember *ast) { return visitObjectMember(ast); }
bool RemoveUIObjectMemberVisitor::visit(QmlJS::AST::UiObjectDefinition *ast) { return visitObjectMember(ast); }
bool RemoveUIObjectMemberVisitor::visit(QmlJS::AST::UiSourceElement *ast) { return visitObjectMember(ast); }
bool RemoveUIObjectMemberVisitor::visit(QmlJS::AST::UiObjectBinding *ast) { return visitObjectMember(ast); }
bool RemoveUIObjectMemberVisitor::visit(QmlJS::AST::UiScriptBinding *ast) { return visitObjectMember(ast); }
bool RemoveUIObjectMemberVisitor::visit(QmlJS::AST::UiArrayBinding *ast) { return visitObjectMember(ast); }
// FIXME: duplicate code in the QmlJS::Rewriter class, remove this
// If \a ast is the member starting at objectLocation, remove its source
// range (extended to a neighbouring comma when inside an array binding,
// otherwise to surrounding whitespace) and stop visiting. Returns false
// once the member has been found or can be ruled out, so traversal is
// pruned as early as possible.
bool RemoveUIObjectMemberVisitor::visitObjectMember(QmlJS::AST::UiObjectMember *ast)
{
    const quint32 memberStart = ast->firstSourceLocation().offset;
    if (memberStart == objectLocation) {
        // found it
        int start = objectLocation;
        int end = ast->lastSourceLocation().end();
        // Members of an array binding are comma-separated, so the comma
        // must be removed along with the member itself.
        if (QmlJS::AST::UiArrayBinding *parentArray = containingArray())
            extendToLeadingOrTrailingComma(parentArray, ast, start, end);
        else
            includeSurroundingWhitespace(start, end);
        includeLeadingEmptyLine(start);
        replace(start, end - start, QStringLiteral(""));
        setDidRewriting(true);
        return false;
    } else if (ast->lastSourceLocation().end() <= objectLocation) {
        // optimization: if the location of the object-to-be-removed is not inside the current member, skip any children
        return false;
    } else {
        // only visit children if the rewriting isn't done yet.
        return !didRewriting();
    }
}
// Return the UiArrayBinding enclosing the current node, or nullptr if
// the node is not an element of an array binding.
//
// During traversal "parents" holds the ancestor path with the current
// node on top, so for an array element the stack ends with:
//   ... -> UiArrayBinding -> UiArrayMemberList -> current node
QmlJS::AST::UiArrayBinding *RemoveUIObjectMemberVisitor::containingArray() const
{
    if (parents.size() > 2) {
        if (QmlJS::AST::cast<QmlJS::AST::UiArrayMemberList*>(parents[parents.size() - 2]))
            return QmlJS::AST::cast<QmlJS::AST::UiArrayBinding*>(parents[parents.size() - 3]);
    }
    return nullptr; // use nullptr, not 0, for pointer returns
}
// FIXME: duplicate code in the QmlJS::Rewriter class, remove this
void RemoveUIObjectMemberVisitor::extendToLeadingOrTrailingComma(QmlJS::AST::UiArrayBinding *parentArray,
QmlJS::AST::UiObjectMember *ast,
int &start,
int &end) const
{
QmlJS::AST::UiArrayMemberList *currentMember = 0;
for (QmlJS::AST::UiArrayMemberList *it = parentArray->members; it; it = it->next) {
if (it->member == ast) {
currentMember = it;
break;
}
}
if (!currentMember)
return;<|fim▁hole|>
if (currentMember->commaToken.isValid()) {
// leading comma
start = currentMember->commaToken.offset;
if (includeSurroundingWhitespace(start, end))
--end;
} else if (currentMember->next && currentMember->next->commaToken.isValid()) {
// trailing comma
end = currentMember->next->commaToken.end();
includeSurroundingWhitespace(start, end);
} else {
// array with 1 element, so remove the complete binding
start = parentArray->firstSourceLocation().offset;
end = parentArray->lastSourceLocation().end();
includeSurroundingWhitespace(start, end);
}
}<|fim▁end|> | |
<|file_name|>expr_after.rs<|end_file_name|><|fim▁begin|>fn main() {
if foo && bar
<caret>
&& foo {}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>Greeting.java<|end_file_name|><|fim▁begin|>package simple.practice;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import simple.practice.service.GreetingService;<|fim▁hole|>
/**
 * Spring-managed bean that builds and prints a greeting for a named user.
 */
@Component
public class Greeting {
    /** Supplies the base greeting text; injected by Spring. */
    @Autowired
    private GreetingService greetingService;
    /**
     * Build the greeting for the given user, print it to stdout and
     * return it.
     *
     * @param name user name appended to the message
     * @return the full greeting that was printed
     */
    public String printGreeting(String name){
        String msg = greetingService.greetingMessage() + " User: " + name;
        System.out.println(msg);
        return msg;
    }
}
<|file_name|>IFormUILogicCode.java<|end_file_name|><|fim▁begin|>//#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
<|fim▁hole|>// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.core.forms.notificationdialog;
/**
 * UI-logic contract for the notification dialog form.
 * Generated placeholder: no operations are defined yet.
 */
public interface IFormUILogicCode
{
	// No methods yet.
}
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
|
<|file_name|>mail.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2012-2013 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from lxml import etree
import cgi
import logging
import lxml.html
import lxml.html.clean as clean
import openerp.pooler as pooler
import random
import re
import socket
import threading
import time
from email.utils import getaddresses
from openerp.loglevels import ustr
_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# HTML Sanitizer
#----------------------------------------------------------
# Tags whose whole subtree is dropped (scripts, frames, embedded objects...).
tags_to_kill = ["script", "head", "meta", "title", "link", "style", "frame", "iframe", "base", "object", "embed"]
# Tags that are removed but whose children/content are kept.
tags_to_remove = ['html', 'body', 'font']
def html_sanitize(src):
    """Sanitize an HTML snippet using lxml's Cleaner.

    Steps: escape bare ``<local@domain>`` email addresses so they are not
    parsed as tags, strip dangerous tags via the Cleaner, then undo the
    percent-encoding of ``$ { } [ ]`` and spaces so Mako expressions
    inside quoted attributes keep working.

    :param src: HTML text in any encoding (decoded with errors='replace')
    :return: sanitized unicode HTML; falsy input is returned unchanged
    """
    if not src:
        return src
    src = ustr(src, errors='replace')
    # html encode email tags
    part = re.compile(r"(<(([^a<>]|a[^<>\s])[^<>]*)@[^<>]+>)", re.IGNORECASE | re.DOTALL)
    src = part.sub(lambda m: cgi.escape(m.group(1)), src)
    # some corner cases make the parser crash (such as <SCRIPT/XSS SRC=\"http://ha.ckers.org/xss.js\"></SCRIPT> in test_mail)
    try:
        cleaner = clean.Cleaner(page_structure=True, style=False, safe_attrs_only=False, forms=False, kill_tags=tags_to_kill, remove_tags=tags_to_remove)
        cleaned = cleaner.clean_html(src)
    except TypeError:
        # lxml.clean version < 2.3.1 does not have a kill_tags attribute
        # to remove in 2014
        cleaner = clean.Cleaner(page_structure=True, style=False, safe_attrs_only=False, forms=False, remove_tags=tags_to_kill + tags_to_remove)
        cleaned = cleaner.clean_html(src)
    except Exception, e:
        # An "empty document" parser error just means there was nothing to
        # sanitize; any other failure degrades to a placeholder paragraph.
        if isinstance(e, etree.ParserError) and 'empty' in str(e):
            return ""
        _logger.warning('html_sanitize failed to parse %s' % (src))
        cleaned = '<p>Impossible to parse</p>'
    # MAKO compatibility: $, { and } inside quotes are escaped, preventing correct mako execution
    cleaned = cleaned.replace('%24', '$')
    cleaned = cleaned.replace('%7B', '{')
    cleaned = cleaned.replace('%7D', '}')
    cleaned = cleaned.replace('%20', ' ')
    cleaned = cleaned.replace('%5B', '[')
    cleaned = cleaned.replace('%5D', ']')
    return cleaned
#----------------------------------------------------------
# HTML Cleaner
#----------------------------------------------------------
def html_email_clean(html):
    """ html_email_clean: clean the html to display in the web client.
        - strip email quotes (remove blockquote nodes)
        - strip signatures (remove --\n{\n)Blahblah), by replacing <br> by
        \n to avoid ignoring signatures converted into html

        :param string html: sanitized html; tags like html or head should not
        be present in the html string. This method therefore takes as input
        html code coming from a sanitized source, like fields.html.
        :return: cleaned html, or the input unchanged if it is falsy or not
        a string
    """
    def _replace_matching_regex(regex, source, replace=''):
        # Replace every non-overlapping match of ``regex`` in ``source``
        # with ``replace``, scanning left to right.
        dest = ''
        idx = 0
        for item in re.finditer(regex, source):
            dest += source[idx:item.start()] + replace
            idx = item.end()
        dest += source[idx:]
        return dest

    if not html or not isinstance(html, basestring):
        return html
    html = ustr(html)

    # 0. remove encoding attribute inside tags
    doctype = re.compile(r'(<[^>]*\s)(encoding=(["\'][^"\']*?["\']|[^\s\n\r>]+)(\s[^>]*|/)?>)', re.IGNORECASE | re.DOTALL)
    html = doctype.sub(r"", html)

    # 1. <br[ /]> -> \n, because otherwise the tree is obfuscated
    br_tags = re.compile(r'([<]\s*[bB][rR]\s*\/?[>])')
    html = _replace_matching_regex(br_tags, html, '__BR_TAG__')

    # 2. form a tree, handle (currently ?) pure-text by enclosing them in a pre
    root = lxml.html.fromstring(html)
    if not len(root) and root.text is None and root.tail is None:
        html = '<div>%s</div>' % html
        root = lxml.html.fromstring(html)

    # 2.5 remove quoted text in nodes
    quote_tags = re.compile(r'(\n(>)+[^\n\r]*)')
    for node in root.getiterator():
        if not node.text:
            continue
        node.text = _replace_matching_regex(quote_tags, node.text)

    # 3. remove blockquotes
    quotes = [el for el in root.getiterator(tag='blockquote')]
    for node in quotes:
        # copy the node tail into parent text
        if node.tail:
            parent = node.getparent()
            # BUGFIX: 'or' binds looser than '+', so the previous
            # ``parent.text or '' + node.tail`` silently dropped the tail
            # whenever parent.text was non-empty.
            parent.text = (parent.text or '') + node.tail
        # remove the node
        node.getparent().remove(node)

    # 4. strip signatures
    signature = re.compile(r'([-]{2}[\s]?[\r\n]{1,2}[^\z]+)')
    for elem in root.getiterator():
        if elem.text:
            match = re.search(signature, elem.text)
            if match:
                elem.text = elem.text[:match.start()] + elem.text[match.end():]
        if elem.tail:
            match = re.search(signature, elem.tail)
            if match:
                elem.tail = elem.tail[:match.start()] + elem.tail[match.end():]

    # 5. \n back to <br/>
    html = etree.tostring(root, pretty_print=True)
    html = html.replace('__BR_TAG__', '<br />')

    # 6. Misc cleaning :
    # - ClEditor seems to love using <div><br /><div> -> replace with <br />
    br_div_tags = re.compile(r'(<div>\s*<br\s*\/>\s*<\/div>)')
    html = _replace_matching_regex(br_div_tags, html, '<br />')
    return html
#----------------------------------------------------------
# HTML/Text management
#----------------------------------------------------------
def html2plaintext(html, body_id=None, encoding='utf-8'):
    """ From an HTML text, convert the HTML to plain text.
    If @param body_id is provided then this is the tag where the
    body (not necessarily <body>) starts.

    Links are replaced by ``text [N]`` markers with a numbered URL list
    appended at the end; basic emphasis/heading tags become their usual
    plain-text markers (*, **, /).

    :param html: html content to convert (any encoding)
    :param body_id: optional id of the element to use as document body;
        NOTE(review): the xpath '//*[@id=%s]' expects body_id to carry its
        own quoting -- confirm against callers.
    :param encoding: encoding used when serializing the lxml tree
    :return: plain-text rendering of the html
    """
    ## (c) Fry-IT, www.fry-it.com, 2007
    ## <[email protected]>
    ## download here: http://www.peterbe.com/plog/html2plaintext
    html = ustr(html)
    tree = etree.fromstring(html, parser=etree.HTMLParser())

    if body_id is not None:
        source = tree.xpath('//*[@id=%s]' % (body_id,))
    else:
        source = tree.xpath('//body')
    if len(source):
        tree = source[0]

    # replace links by "text [N]" and collect the URLs for the footer
    url_index = []
    i = 0
    for link in tree.findall('.//a'):
        url = link.get('href')
        if url:
            i += 1
            link.tag = 'span'
            link.text = '%s [%s]' % (link.text, i)
            url_index.append(url)

    html = ustr(etree.tostring(tree, encoding=encoding))
    # BUGFIX: \r is serialized as the '&#13;' character entity; strip that
    # entity (the previous code had the literal entity garbled into a raw
    # character, which made the intent unreadable).
    html = html.replace('&#13;', '')

    html = html.replace('<strong>', '*').replace('</strong>', '*')
    html = html.replace('<b>', '*').replace('</b>', '*')
    html = html.replace('<h3>', '*').replace('</h3>', '*')
    html = html.replace('<h2>', '**').replace('</h2>', '**')
    html = html.replace('<h1>', '**').replace('</h1>', '**')
    html = html.replace('<em>', '/').replace('</em>', '/')
    html = html.replace('<tr>', '\n')
    html = html.replace('</p>', '\n')
    html = re.sub('<br\s*/?>', '\n', html)
    html = re.sub('<.*?>', ' ', html)
    html = html.replace(' ' * 2, ' ')

    # strip all lines
    html = '\n'.join([x.strip() for x in html.splitlines()])
    html = html.replace('\n' * 2, '\n')

    # append the numbered URL footer collected above
    for i, url in enumerate(url_index):
        if i == 0:
            html += '\n\n'
        html += ustr('[%s] %s\n') % (i + 1, url)
    return html
def plaintext2html(text, container_tag=False):
    """ Convert plaintext into html. Content of the text is escaped to manage
        html entities, using cgi.escape().
        - all \n,\r are replaced by <br />
        - enclose content into <p>
        - 2 or more consecutive <br /> are considered as paragraph breaks

        :param string container_tag: container of the html; by default the
            content is embedded into a <div>
        :return: unicode html string
    """
    text = cgi.escape(ustr(text))

    # 1. replace \n and \r
    text = text.replace('\n', '<br/>')
    text = text.replace('\r', '<br/>')

    # 2-3: form paragraphs
    idx = 0
    final = '<p>'
    # any run of 2+ <br/> tags marks a paragraph boundary
    br_tags = re.compile(r'(([<]\s*[bB][rR]\s*\/?[>]\s*){2,})')
    for item in re.finditer(br_tags, text):
        final += text[idx:item.start()] + '</p><p>'
        idx = item.end()
    final += text[idx:] + '</p>'

    # 4. container
    if container_tag:
        final = '<%s>%s</%s>' % (container_tag, final, container_tag)
    return ustr(final)
def append_content_to_html(html, content, plaintext=True, preserve=False, container_tag=False):
    """ Append extra content at the end of an HTML snippet, trying
        to locate the end of the HTML document (</body>, </html>, or
        EOF), and converting the provided content in html unless ``plaintext``
        is False.

        Content conversion can be done in two ways:
        - wrapping it into a pre (preserve=True)
        - use plaintext2html (preserve=False, using container_tag to wrap the
          whole content)

        A side-effect of this method is to coerce all HTML tags to
        lowercase in ``html``, and strip enclosing <html> or <body> tags in
        content if ``plaintext`` is False.

        :param str html: html tagsoup (doesn't have to be XHTML)
        :param str content: extra content to append
        :param bool plaintext: whether content is plaintext and should
            be wrapped in a <pre/> tag.
        :param bool preserve: if content is plaintext, wrap it into a <pre>
            instead of converting it into html
    """
    html = ustr(html)
    if plaintext and preserve:
        content = u'\n<pre>%s</pre>\n' % ustr(content)
    elif plaintext:
        content = '\n%s\n' % plaintext2html(content, container_tag)
    else:
        # html content: strip enclosing document tags before splicing it in
        content = re.sub(r'(?i)(</?html.*>|</?body.*>|<!\W*DOCTYPE.*>)', '', content)
        content = u'\n%s\n' % ustr(content)
    # Force all tags to lowercase
    html = re.sub(r'(</?)\W*(\w+)([ >])',
        lambda m: '%s%s%s' % (m.group(1), m.group(2).lower(), m.group(3)), html)
    # splice just before </body> (or </html>), falling back to plain append
    insert_location = html.find('</body>')
    if insert_location == -1:
        insert_location = html.find('</html>')
    if insert_location == -1:
        return '%s%s' % (html, content)
    return '%s%s%s' % (html[:insert_location], content, html[insert_location:])
#----------------------------------------------------------
# Emails
#----------------------------------------------------------
email_re = re.compile(r"""
([a-zA-Z][\w\.-]*[a-zA-Z0-9] # username part
@ # mandatory @ sign
[a-zA-Z0-9][\w\.-]* # domain must start with a letter ... Ged> why do we include a 0-9 then?
\.
[a-z]{2,3} # TLD
)
""", re.VERBOSE)
res_re = re.compile(r"\[([0-9]+)\]", re.UNICODE)
command_re = re.compile("^Set-([a-z]+) *: *(.+)$", re.I + re.UNICODE)
# Updated in 7.0 to match the model name as well
# Typical form of references is <timestamp-openerp-record_id-model_name@domain>
# group(1) = the record ID ; group(2) = the model (if any) ; group(3) = the domain
reference_re = re.compile("<.*-open(?:object|erp)-(\\d+)(?:-([\w.]+))?.*@(.*)>", re.UNICODE)
def generate_tracking_message_id(res_id):
    """Returns a string that can be used in the Message-ID RFC822 header field

       Used to track the replies related to a given object thanks to the "In-Reply-To"
       or "References" fields that Mail User Agents will set.

       :param res_id: record id embedded in the message-id (matched later
           by ``reference_re``)
       :return: a unique ``<timestamp.random-openerp-res_id@hostname>`` string
    """
    try:
        # prefer a cryptographically seeded source when the platform has one
        rnd = random.SystemRandom().random()
    except NotImplementedError:
        rnd = random.random()
    rndstr = ("%.15f" % rnd)[2:]
    return "<%.15f.%s-openerp-%s@%s>" % (time.time(), rndstr, res_id, socket.gethostname())
def email_send(email_from, email_to, subject, body, email_cc=None, email_bcc=None, reply_to=False,
attachments=None, message_id=None, references=None, openobject_id=False, debug=False, subtype='plain', headers=None,
smtp_server=None, smtp_port=None, ssl=False, smtp_user=None, smtp_password=None, cr=None, uid=None):
"""Low-level function for sending an email (deprecated).
:deprecate: since OpenERP 6.1, please use ir.mail_server.send_email() instead.
:param email_from: A string used to fill the `From` header, if falsy,
config['email_from'] is used instead. Also used for
the `Reply-To` header if `reply_to` is not provided
:param email_to: a sequence of addresses to send the mail to.
"""
# If not cr, get cr from current thread database
local_cr = None
if not cr:
db_name = getattr(threading.currentThread(), 'dbname', None)
if db_name:
local_cr = cr = pooler.get_db(db_name).cursor()
else:
raise Exception("No database cursor found, please pass one explicitly")
# Send Email
try:
mail_server_pool = pooler.get_pool(cr.dbname).get('ir.mail_server')
res = False
# Pack Message into MIME Object
email_msg = mail_server_pool.build_email(email_from, email_to, subject, body, email_cc, email_bcc, reply_to,
attachments, message_id, references, openobject_id, subtype, headers=headers)
res = mail_server_pool.send_email(cr, uid or 1, email_msg, mail_server_id=None,
smtp_server=smtp_server, smtp_port=smtp_port, smtp_user=smtp_user, smtp_password=smtp_password,
smtp_encryption=('ssl' if ssl else None), smtp_debug=debug)
except Exception:
_logger.exception("tools.email_send failed to deliver email")
return False
finally:<|fim▁hole|>
def email_split(text):
""" Return a list of the email addresses found in ``text`` """
if not text:
return []
return [addr[1] for addr in getaddresses([text])
# getaddresses() returns '' when email parsing fails, and
# sometimes returns emails without at least '@'. The '@'
# is strictly required in RFC2822's `addr-spec`.
if addr[1]
if '@' in addr[1]]<|fim▁end|> | if local_cr:
cr.close()
return res |
<|file_name|>ItemsTable.tsx<|end_file_name|><|fim▁begin|>// tslint:disable-next-line: no-submodule-imports
import React, { useEffect, useMemo } from 'react';
import Moment from 'react-moment';
import { DATE_TIME_FORMAT, TIME_FORMAT } from '../../constants';
import { diffAndFormatShort } from '../../utils';
import { Box, Flex } from '@chakra-ui/layout';
import { Button } from '@chakra-ui/button';
import { Table, Tbody, Td, Tfoot, Th, Thead, Tr } from '@chakra-ui/table';
import { TriangleDownIcon, TriangleUpIcon } from '@chakra-ui/icons';
import { useTable, useSortBy, usePagination, useFilters, useRowSelect } from 'react-table';
import { calculateTotal, fuzzyTextFilterFn } from './TrackItemTable.utils';
import { SelectColumnFilter } from './SelectColumnFilter';
import { DefaultColumnFilter } from './DefaultColumnFilter';
import { IndeterminateCheckbox } from './IndeterminateCheckbox';
import { Portal } from '@chakra-ui/react';
import { TrackItemTableButtons } from './TrackItemTableButtons';
import { TrackItemTablePager } from './TrackItemTablePager';
import { OverflowTextCell } from './OverflowText';
/** Props for the ItemsTable track-item grid. */
interface ItemsTableProps {
    /** Track items rendered as table rows. */
    data: any[];
    /** Ref exposing reset controls — presumably consumed by a portal outside this component; TODO confirm. */
    resetButtonsRef?: any;
    /** When true, Begin/End columns render time-of-day only instead of full date-time. */
    isOneDay: boolean;
    /** Search mode: enables manual (server-driven) pagination and disables column filters. */
    isSearchTable: boolean;
    /** Total page count when pagination is controlled externally (search mode). */
    pageCount?: number;
    /** Current page index when pagination is controlled externally (search mode). */
    pageIndex?: number;
    /** Callback notified of paging changes in controlled mode. */
    changePaging?: any;
    /** Extra react-table column definitions appended after the default columns. */
    extraColumns?: any[];
}
/**
 * Track-item table built on react-table v7 (sorting, filtering, pagination,
 * row selection) rendered with Chakra UI table primitives.
 *
 * Two modes:
 *  - default: client-side filtering/paging over `data`;
 *  - search table (`isSearchTable`): server-side paging — page index/count are
 *    controlled by the caller and page changes are reported via `changePaging`.
 */
export const ItemsTable = ({
    data,
    resetButtonsRef,
    isOneDay,
    isSearchTable,
    pageCount: controlledPageCount,
    pageIndex: controlledPageIndex,
    changePaging,
    extraColumns = [],
}: ItemsTableProps) => {
    // Timestamp cell renderer; single-day views only need the time of day.
    const dateToValue = ({ value }) => {
        return <Moment format={isOneDay ? TIME_FORMAT : DATE_TIME_FORMAT}>{value}</Moment>;
    };

    const defaultColumn = useMemo(
        () => ({
            // Let's set up our default Filter UI
            Filter: DefaultColumnFilter,
        }),
        [],
    );

    const columns = useMemo(
        () => [
            {
                Header: 'App',
                accessor: 'app',
                Filter: SelectColumnFilter,
                filter: 'includes',
                width: 100,
                minWidth: 100,
                maxWidth: 120,
            },
            {
                Header: 'Title',
                accessor: 'title',
                Cell: OverflowTextCell,
                width: 250,
                minWidth: 100,
                maxWidth: 500,
            },
            {
                Header: 'URL',
                accessor: 'url',
                Cell: OverflowTextCell,
                width: 150,
                minWidth: 70,
                maxWidth: 400,
            },
            {
                Header: 'Begin',
                accessor: 'beginDate',
                Cell: dateToValue,
                width: 80,
                minWidth: 80,
                maxWidth: 120,
            },
            {
                Header: 'End',
                accessor: 'endDate',
                Cell: dateToValue,
                width: 80,
                minWidth: 80,
                maxWidth: 120,
            },
            {
                Header: 'Duration',
                accessor: record => diffAndFormatShort(record.beginDate, record.endDate),
                Footer: info => {
                    const total = useMemo(() => calculateTotal(info.data), [info.data]);
                    return <Box pr={4}>Total: {total}</Box>;
                },
                width: 80,
                minWidth: 80,
                maxWidth: 80,
            },
            ...extraColumns,
        ],
        // NOTE(review): empty deps freeze dateToValue/extraColumns at mount —
        // presumably intentional; confirm before changing.
        // eslint-disable-next-line react-hooks/exhaustive-deps
        [],
    );

    const filterTypes = useMemo(
        () => ({
            // Add a new fuzzyTextFilterFn filter type.
            fuzzyText: fuzzyTextFilterFn,
            // Or, override the default text filter to use
            // "startWith"
            text: (rows, id, filterValue) => {
                return rows.filter(row => {
                    const rowValue = row.values[id];
                    return rowValue !== undefined
                        ? String(rowValue)
                              .toLowerCase()
                              .startsWith(String(filterValue).toLowerCase())
                        : true;
                });
            },
        }),
        [],
    );

    // Server-side paging wiring (search mode only).
    const pagingProps = isSearchTable
        ? {
              initialState: { pageIndex: controlledPageIndex },
              disableFilters: true,
              manualPagination: true,
              pageCount: controlledPageCount,
          }
        : {};

    const {
        getTableProps,
        getTableBodyProps,
        headerGroups,
        footerGroups,
        prepareRow,
        page,
        canPreviousPage,
        canNextPage,
        pageOptions,
        pageCount,
        gotoPage,
        nextPage,
        previousPage,
        setPageSize,
        setAllFilters,
        setSortBy,
        selectedFlatRows,
        state: { pageIndex, pageSize, selectedRowIds },
    } = useTable(
        {
            columns,
            defaultColumn,
            filterTypes,
            data,
            ...pagingProps,
        },
        useFilters,
        useSortBy,
        usePagination,
        useRowSelect,
        hooks => {
            // Prepend a checkbox column for (multi) row selection.
            hooks.visibleColumns.push(columns => [
                {
                    id: 'selection',
                    width: 10,
                    minWidth: 10,
                    maxWidth: 10,
                    Header: ({ getToggleAllRowsSelectedProps }) => (
                        <div>
                            <IndeterminateCheckbox {...getToggleAllRowsSelectedProps()} />
                        </div>
                    ),
                    Cell: ({ row }) => (
                        <div>
                            <IndeterminateCheckbox {...row.getToggleRowSelectedProps()} />
                        </div>
                    ),
                },
                ...columns,
            ]);
        },
    );

    // Report paging changes to the server-side data source (search mode).
    useEffect(() => {
        if (isSearchTable) {
            console.info('Change paging', { pageIndex, pageSize });
            changePaging({ pageIndex, pageSize });
        }
        // eslint-disable-next-line react-hooks/exhaustive-deps
    }, [pageIndex, pageSize]);

    return (
        <>
            <Portal containerRef={resetButtonsRef}>
                {!isSearchTable && (
                    <TrackItemTableButtons
                        {...{ setAllFilters, setSortBy, selectedFlatRows, selectedRowIds }}
                    />
                )}
            </Portal>
            <Table {...getTableProps()}>
                <Thead>
                    {headerGroups.map(headerGroup => (
                        <Tr {...headerGroup.getHeaderGroupProps()}>
                            {headerGroup.headers.map(column => (
                                <Th
                                    {...column.getHeaderProps({
                                        style: {
                                            minWidth: column.minWidth,
                                            width: column.width,
                                            maxWidth: column.maxWidth,
                                        },
                                    })}
                                    isNumeric={column.isNumeric}
                                >
                                    {/* NOTE(review): `column.name` is not a standard
                                        react-table column property — likely renders
                                        nothing; confirm intent. */}
                                    {column.name}
                                    {column.id === 'selection' && column.render('Header')}
                                    {column.id !== 'selection' && (
                                        <Flex alignItems="center">
                                            <Button
                                                variant="ghost"
                                                fontWeight="bold"
                                                {...column.getSortByToggleProps()}
                                            >
                                                {column.render('Header')}
                                                <Box pl="4">
                                                    {column.isSorted ? (
                                                        column.isSortedDesc ? (
                                                            <TriangleDownIcon aria-label="sorted descending" />
                                                        ) : (
                                                            <TriangleUpIcon aria-label="sorted ascending" />
                                                        )
                                                    ) : null}
                                                </Box>
                                            </Button>
                                            <Box flex={1} />
                                            {column.canFilter ? column.render('Filter') : null}
                                        </Flex>
                                    )}
                                </Th>
                            ))}
                        </Tr>
                    ))}
                </Thead>
                <Tbody {...getTableBodyProps()}>
                    {page.map(row => {
                        prepareRow(row);
                        return (
                            <Tr {...row.getRowProps()}>
                                {row.cells.map(cell => (
                                    <Td {...cell.getCellProps()} isNumeric={cell.column.isNumeric}>
                                        {cell.render('Cell')}
                                    </Td>
                                ))}
                            </Tr>
                        );
                    })}
                </Tbody>
                <Tfoot>
                    {footerGroups.map(group => (
                        <Tr {...group.getFooterGroupProps()}>
                            {group.headers.map(column => (
                                <Td {...column.getFooterProps()}>{column.render('Footer')}</Td>
                            ))}
                        </Tr>
                    ))}
                </Tfoot>
            </Table>
            <TrackItemTablePager
                {...{
                    gotoPage,
                    canPreviousPage,
                    previousPage,
                    pageIndex,
                    pageOptions,
                    pageSize,
                    nextPage,
                    canNextPage,
                    pageCount,
                    setPageSize,
                }}
            />
        </>
    );
};
<|file_name|>ads-loader.service.ts<|end_file_name|><|fim▁begin|>/**
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Injectable, ComponentFactoryResolver, EventEmitter } from '@angular/core';
import { AdsDisplayContainer } from './ads-display-container';
import { AdsPalSessionEvent } from './ads-events';
import { PalService } from './pal.service';
import { AdsRequest } from './ads-request';
import { HttpClient, HttpErrorResponse, HttpResponse } from '@angular/common/http';
import { forkJoin, from, Observable, of, Subscription, throwError } from 'rxjs';
import { catchError, filter, flatMap, map, switchMap, tap } from 'rxjs/operators';
import { AdsManager } from './ads-manager';
import { PalSession } from './pal-session';
import { randomInt } from './common';
import { VASTWrapper } from './vast-wrapper';
import {VASTDocument} from './vast-document';
@Injectable({
providedIn: 'root'
})
export class AdsLoaderService {
private adDisplayContainerInternal?: AdsDisplayContainer;
palSessionLoaded: EventEmitter<AdsPalSessionEvent>;
get adDisplayContainer(): AdsDisplayContainer | undefined {
return this.adDisplayContainerInternal;
}
set adDisplayContainer(val: AdsDisplayContainer | undefined) {
this.adDisplayContainerInternal = val;
}
constructor(
private palService: PalService,
private http: HttpClient,
private componentFactoryResolver: ComponentFactoryResolver,
) {
this.palSessionLoaded = new EventEmitter<AdsPalSessionEvent>();
}
requestAds(adsRequest: AdsRequest): Observable<AdsManager> {
if (adsRequest.tagUrl === undefined) {
return throwError(new Error('Ad tag url is not specified.'));
}
if (adsRequest.palRequest !== undefined) {
if (adsRequest.palRequest.descriptionUrl === undefined) {
adsRequest.palRequest.descriptionUrl = this.getDescriptionUrl();
}
return this.palService.loadNonce(adsRequest.palRequest)
.pipe(
tap((session: PalSession) => {
this.palSessionLoaded.emit({ session });
}),
switchMap((session: PalSession) => {
return this.requestAdsInternal(adsRequest, session);
}));
} else {
return this.requestAdsInternal(adsRequest);
}
}
private getDescriptionUrl(): string {
return window.location.protocol + '//' + window.location.hostname;
}
private requestAdsInternal(adsRequest: AdsRequest, session?: PalSession):
Observable<AdsManager> {
if (adsRequest.tagUrl === undefined) {
return throwError(new Error('Ad tag url is not specified.'));
} else {
let adTagUrl;
try {
adTagUrl = new URL(adsRequest.tagUrl);
adTagUrl = this.appendCorrelator(adTagUrl);
adTagUrl = this.appendUrlParam(adTagUrl);
} catch (err) {
console.error(err);
return throwError(err);
}<|fim▁hole|>
return this.fetchInternal(adTagUrl, undefined)
.pipe(
flatMap(vast => forkJoin([
of(vast),
...vast.ads.filter(ad => ad instanceof VASTWrapper)
.map(
ad => this.fetchInternal(
(ad as VASTWrapper).vastAdTagURI,
(ad as VASTWrapper)))
])),
map(([
parent,
]) =>
new AdsManager(
parent, this.componentFactoryResolver,
this.adDisplayContainer!, this.http, session)));
}
}
private appendCorrelator(adTagUrl: URL): URL {
let correlator = randomInt(10000, 100000).toString();
correlator += randomInt(1000, 10000).toString();
console.debug('Correlator generated. value=' + correlator);
return this.appendParam(adTagUrl, 'correlator', correlator);
}
private appendUrlParam(adTagUrl: URL): URL {
const url = this.getDescriptionUrl();
return this.appendParam(adTagUrl, 'url', url);
}
private appendParam(url: URL, key: string, val: string): URL {
if (url.searchParams.has(key)) {
const oldValues = url.searchParams.getAll(key);
url.searchParams.delete(key);
console.debug(
'Existing param value deleted. ' + key + '=' + oldValues.toString());
}
url.searchParams.append(key, val);
console.debug('New param value appended. ' + key + '=' + val);
return url;
}
private appendNonce(adTagUrl: URL, nonce: string): URL {
return this.appendParam(adTagUrl, 'paln', nonce);
}
private fetchInternal(adTagURI: URL, wrapper?: VASTWrapper):
Observable<VASTDocument> {
return this.http
.get(adTagURI.toString(), { observe: 'response', responseType: 'text' })
.pipe(
map((resp: HttpResponse<string>) => {
if (resp.body == null) {
throw new Error('Ad tag returned empty body.');
}
console.info('Ad response. ', resp.body);
const doc =
new DOMParser().parseFromString(resp.body, 'text/xml');
const vast = new VASTDocument(doc);
if (wrapper) {
wrapper.vast = vast;
}
return vast;
}),
catchError(error => this.handleFetchError(error)));
}
private handleFetchError(error: HttpErrorResponse): Observable<never> {
if (error.error instanceof ErrorEvent) {
return throwError(new Error(`${error.error.message}`));
} else {
return throwError(new Error(`${error.message}`));
}
}
}<|fim▁end|> |
if (session !== undefined) {
adTagUrl = this.appendNonce(adTagUrl, session.nonce);
} |
<|file_name|>test_alignment.py<|end_file_name|><|fim▁begin|>import IMP
import IMP.test
import IMP.atom
import IMP.core
class Tests(IMP.test.TestCase):

    def _produce_point_sets(self, tr):
        """Return (points, transformed_points): 20 random vectors in the
        unit bounding box and their images under transformation ``tr``."""
        vs = []
        vsr = []
        for i in range(0, 20):
            vs.append(IMP.algebra.get_random_vector_in(
                IMP.algebra.get_unit_bounding_box_3d()))
            vsr.append(tr.get_transformed(vs[-1]))
        return (vs, vsr)

    def test_alignment_selection(self):
        """Testing rigid alignment of point sets"""
        m = IMP.Model()
        r = IMP.algebra.get_random_rotation_3d()
        t = IMP.algebra.get_random_vector_in(
            IMP.algebra.get_unit_bounding_box_3d())
        tr = IMP.algebra.Transformation3D(r, t)
        (vs, vsr) = self._produce_point_sets(tr)
        hroot1 = IMP.atom.Hierarchy(IMP.Particle(m))
        hroot2 = IMP.atom.Hierarchy(IMP.Particle(m))
        # Build two hierarchies: original points and transformed points.
        for v in vs:
            p = IMP.Particle(m)
            d = IMP.core.XYZR.setup_particle(p)
            d.set_coordinates(v)
            d.set_radius(1.0)
            IMP.atom.Mass.setup_particle(p, 1.0)
            hroot1.add_child(p)
        for v in vsr:
            p = IMP.Particle(m)
            d = IMP.core.XYZR.setup_particle(p)
            d.set_coordinates(v)
            d.set_radius(1.0)
            IMP.atom.Mass.setup_particle(p, 1.0)
            hroot2.add_child(p)
        sel1 = IMP.atom.Selection(hroot1)
        sel2 = IMP.atom.Selection(hroot2)
        # The recovered alignment should match the generating transformation.
        tr = IMP.atom.get_transformation_aligning_first_to_second(sel1, sel2)
        self.assertAlmostEqual(IMP.algebra.get_distance(tr.get_rotation(), r),
                               0, delta=.1)
        self.assertAlmostEqual(IMP.algebra.get_distance(tr.get_translation(),
                                                        t),
                               0, delta=.1)


if __name__ == '__main__':
    IMP.test.main()
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|>export * from './PythonHome';
export * from './PythonNav';<|fim▁end|> | |
<|file_name|>MetaRelationTest.java<|end_file_name|><|fim▁begin|>package org.genericsystem.remote;
//package org.genericsystem.kernel;
//
//import java.util.Collections;<|fim▁hole|>//import org.testng.annotations.Test;
//
//@Test
//public class MetaRelationTest extends AbstractTest {
//
// public void test001_setMetaAttribute_engineEngine() {
//
// Root engine = new Root();
// Vertex metaAttribute = engine.setMetaAttribute();
// Vertex metaRelation = engine.setMetaAttribute(Collections.singletonList(engine));
// assert metaRelation.getMeta() == metaAttribute;
// assert metaRelation.inheritsFrom(metaAttribute);
// }
//
// public void test002_setMetaAttribute_relation() {
//
// Root engine = new Root();
// Vertex metaAttribute = engine.setMetaAttribute();
// Vertex metaRelation = engine.setMetaAttribute(Collections.singletonList(engine));
// Vertex car = engine.addInstance("Car");
// Vertex power = engine.addInstance("Power", car);
// Vertex color = engine.addInstance("Color");
// Vertex carColor = engine.addInstance("carColor", new Vertex[] { car, color });
// assert carColor.isInstanceOf(metaRelation);
// assert power.isInstanceOf(metaAttribute);
// }
// }<|fim▁end|> | // |
<|file_name|>start-test-server.ts<|end_file_name|><|fim▁begin|>import { GraphQLHTTPTestEndpoint } from '../helpers/grapqhl-http-test/graphql-http-test-endpoint';
<|fim▁hole|>process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
new GraphQLHTTPTestEndpoint().start(1337).catch(error => {
console.error(error.stack);
});<|fim▁end|> | // to get through firewall |
<|file_name|>taskfarm.py<|end_file_name|><|fim▁begin|># import argcomplete
# import httplib
# import logging
# import simplejson
# import sys
# import urllib2
# from time import strftime, localtime
# from conpaas.core import https<|fim▁hole|># from .base import BaseClient
# from .config import config
# from .service import ServiceCmd
# MODES = ['DEMO', 'REAL']
# TASKFARM_MNG_PORT = 8475
# def http_jsonrpc_post(hostname, uri, method, port=TASKFARM_MNG_PORT, params=None):
# """Perform a plain HTTP JSON RPC post (for task farming)"""
# if params is None:
# params = {}
# url = "http://%s:%s%s" % (hostname, port, uri)
# data = simplejson.dumps({'method': method,
# 'params': params,
# 'jsonrpc': '2.0',
# 'id': 1,
# })
# req = urllib2.Request(url, data, {'Content-Type': 'application/json'})
# res = urllib2.urlopen(req).read()
# return res
# def http_file_upload_post(host, uri, port=TASKFARM_MNG_PORT, params=None, files=None):
# """Perform a plain HTTP file upload post (for task farming)"""
# if params is None:
# params = {}
# if files is None:
# files = []
# content_type, body = https.client._encode_multipart_formdata(params, files)
# h = httplib.HTTP(host, port)
# h.putrequest('POST', uri)
# h.putheader('content-type', content_type)
# h.putheader('content-length', str(len(body)))
# h.endheaders()
# h.send(body)
# _errcode, _errmsg, _headers = h.getreply()
# return h.file.read()
# class TaskFarmCmd(ServiceCmd):
# def __init__(self, parser, client):
# self.initial_expected_state = 'RUNNING'
# ServiceCmd.__init__(self, parser, client, "taskfarm", ['node'],
# "TaskFarm service sub-commands help")
# self._add_get_mode()
# self._add_set_mode()
# self._add_upload()
# self._add_select_schedule()
# def call_manager(self, app_id, service_id, method, data=None):
# """TaskFarm peculiarities:
# 1) it works via plain HTTP
# 2) it uses port 8475
# 3) the 'shutdown' method is called 'terminate_workers'
# 4) it accepts only POST requests
# 5) it does not have to be started or stopped
# """
# if data is None:
# data = {}
# if method == "shutdown":
# method = "terminate_workers"
# service = self.client.service_dict(app_id, service_id)
# res = http_jsonrpc_post(service['application']['manager'], '/', method, params=data)
# try:
# data = simplejson.loads(res[1])
# except ValueError:
# data = simplejson.loads(res)
# return data.get('result', data)
# def _add_start(self):
# """
# TaskFarm does not have to be started.
# Overrides ServiceCmd._add_start().
# """
# pass
# def _add_stop(self):
# """
# TaskFarm does not have to be stopped.
# Overrides ServiceCmd._add_stop()
# """
# pass
# def _print_res(self, res):
# resres = res['result']
# if 'error' in resres:
# self.client.error("%s" % resres['error'])
# elif 'message' in resres:
# print "%s" % resres['message']
# else:
# print "%s" % res
# # ======= get_mode
# def _add_get_mode(self):
# subparser = self.add_parser('get_mode', help="get TaskFarm mode")
# subparser.set_defaults(run_cmd=self.get_mode, parser=subparser)
# subparser.add_argument('app_name_or_id',
# help="Name or identifier of an application")
# subparser.add_argument('serv_name_or_id',
# help="Name or identifier of a service")
# def get_mode(self, args):
# app_id, service_id = self.check_service(args.app_name_or_id, args.serv_name_or_id)
# mode = self.get_string_mode(app_id, service_id)
# print "%s" % mode
# def get_string_mode(self, app_id, service_id):
# res = self.call_manager(app_id, service_id, "get_service_info")
# return res['mode']
# # ======= set_mode
# def _add_set_mode(self):
# subparser = self.add_parser('set_mode', help="set TaskFarm mode")
# subparser.set_defaults(run_cmd=self.set_mode, parser=subparser)
# subparser.add_argument('app_name_or_id',
# help="Name or identifier of an application")
# subparser.add_argument('serv_name_or_id',
# help="Name or identifier of a service")
# subparser.add_argument('mode', choices=MODES, help="mode")
# def set_mode(self, args):
# app_id, service_id = self.check_service(args.app_name_or_id, args.serv_name_or_id)
# old_mode = self.get_string_mode(app_id, service_id)
# if old_mode != 'NA':
# res = {'result': {'error': 'ERROR: mode is already set to %s' % old_mode}}
# else:
# res = self.call_manager(app_id, service_id, "set_service_mode", [args.mode])
# self._print_res(res)
# # ========== upload bag of tasks
# def _add_upload(self):
# subparser = self.add_parser('upload_bot', help="upload bag of tasks")
# subparser.set_defaults(run_cmd=self.upload_bag_of_tasks,
# parser=subparser)
# subparser.add_argument('app_name_or_id',
# help="Name or identifier of an application")
# subparser.add_argument('serv_name_or_id',
# help="Name or identifier of a service")
# subparser.add_argument('filename',
# help="file containing the bag of tasks")
# subparser.add_argument('location',
# help="XtreemFS location, e.g., 192.168.122.1/uc3")
# def upload_bag_of_tasks(self, args):
# app_id, service_id = self.check_service(args.app_name_or_id, args.serv_name_or_id)
# mode = self.get_string_mode(app_id, service_id)
# if mode == 'NA':
# res = {'result': {'error': 'ERROR: to upload bag of task, first specify run mode.'}}
# else:
# service = self.client.service_dict(app_id, service_id)
# params = {'uriLocation': args.location,
# 'method': 'start_sampling'}
# filecontents = open(args.filename).read()
# res = http_file_upload_post(service['application']['manager'], '/', params=params,
# files=[('botFile', args.filename, filecontents)])
# res = simplejson.loads(res)
# self._print_res(res)
# # ========= select_schedule
# def _add_select_schedule(self):
# subparser = self.add_parser('upload_bot', help="upload bag of tasks")
# subparser.set_defaults(run_cmd=self.select_schedule, parser=subparser)
# subparser.add_argument('app_name_or_id',
# help="Name or identifier of an application")
# subparser.add_argument('serv_name_or_id',
# help="Name or identifier of a service")
# subparser.add_argument('schedule', type=int, help="schedule identifier")
# def _select_schedule(self, args):
# app_id, service_id = self.check_service(args.app_name_or_id, args.serv_name_or_id)
# mode = self.get_mode(app_id, service_id)
# if mode == 'NA':
# return {'result': {'error': 'ERROR: to select a schedule, first specify run mode DEMO or REAL, then upload a bag of tasks '}}
# # check schedule availability
# res = self.call_manager(app_id, service_id, "get_service_info")
# if res['noCompletedTasks'] == 0:
# return {'message': "No schedule available yet: try again later..."}
# if res['state'] != 'RUNNING':
# return {'message': "Busy %s: try again later..." % res['phase']}
# sres = self.call_manager(app_id, service_id, "get_sampling_results")
# sdata = simplejson.loads(sres)
# if 'timestamp' in sdata:
# # Sampling is ready, check if bag is ready, or if we have to choose a schedule
# ts = sdata['timestamp']
# print strftime("Bag sampled on %a %d %b %Y at %H:%M:%S %Z", localtime(ts / 1000))
# if 'schedules' in sdata:
# #sch = sdata['schedules']
# #ss = simplejson.dumps(sch)
# # print "schedules: ", ss
# numscheds = len(sdata['schedules'])
# if numscheds == 0:
# return {'result': {'message': "Bag finished during sampling phase"}}
# if res['noTotalTasks'] == res['noCompletedTasks']:
# return {'result': {'message': "Taskfarm already finished"}}
# # check schedule selection
# if (args.schedule < 1) or (args.schedule > numscheds):
# return {'result': {'error': "ERROR: select schedule in interval [1..%d]" % numscheds}}
# # start execution
# # "{"method":"start_execution","params":["1371729870918","2"],"jsonrpc":"2.0","id":1}"
# res = self.call_manager(app_id, service_id, "start_execution", [ts, args.schedule - 1])
# return {'result': res}
# def select_schedule(self, args):
# res = self._select_schedule(args)
# self._print_res(res)
# def main():
# logger = logging.getLogger(__name__)
# console = logging.StreamHandler()
# formatter = logging.Formatter('%(levelname)s - %(message)s')
# console.setFormatter(formatter)
# logger.addHandler(console)
# cmd_client = BaseClient(logger)
# parser, argv = config('Manage ConPaaS PHP services.', logger)
# _serv_cmd = TaskFarmCmd(parser, cmd_client)
# argcomplete.autocomplete(parser)
# args = parser.parse_args(argv)
# cmd_client.set_config(args.director_url, args.username, args.password,
# args.debug)
# try:
# args.run_cmd(args)
# except:
# e = sys.exc_info()[1]
# sys.stderr.write("ERROR: %s\n" % e)
# sys.exit(1)
# if __name__ == '__main__':
# main()<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from threading import Thread
import Queue
from django.core.urlresolvers import reverse
from django.conf import settings
from django import forms
from django.http import HttpRequest
from django.test import TestCase
import haystack
from haystack.forms import model_choices, SearchForm, ModelSearchForm
from haystack.query import EmptySearchQuerySet
from haystack.sites import SearchSite
from haystack.views import SearchView, FacetedSearchView, search_view_factory
from core.models import MockModel, AnotherMockModel
class InitialedSearchForm(SearchForm):
    """SearchForm variant whose ``q`` field carries initial placeholder text."""
    q = forms.CharField(initial='Search for...', required=False, label='Search')
class SearchViewTestCase(TestCase):
    """Tests for haystack's class-based SearchView against the dummy engine.

    setUp swaps in a mock index site and the 'dummy' search engine; tearDown
    restores the stowed globals. Python 2 era code (print statement,
    ``except Exception, e``) — keep as-is.
    """
    def setUp(self):
        super(SearchViewTestCase, self).setUp()
        mock_index_site = SearchSite()
        mock_index_site.register(MockModel)
        mock_index_site.register(AnotherMockModel)
        # Stow.
        self.old_site = haystack.site
        haystack.site = mock_index_site
        self.old_engine = getattr(settings, 'HAYSTACK_SEARCH_ENGINE')
        settings.HAYSTACK_SEARCH_ENGINE = 'dummy'
    def tearDown(self):
        # Restore the globals stowed in setUp.
        haystack.site = self.old_site
        settings.HAYSTACK_SEARCH_ENGINE = self.old_engine
        super(SearchViewTestCase, self).tearDown()
    def test_search_no_query(self):
        response = self.client.get(reverse('haystack_search'))
        self.assertEqual(response.status_code, 200)
    def test_search_query(self):
        # Dummy engine always returns a single 'haystack.dummymodel' hit.
        response = self.client.get(reverse('haystack_search'), {'q': 'hello world'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.context[-1]['page'].object_list), 1)
        self.assertEqual(response.context[-1]['page'].object_list[0].content_type(), 'haystack.dummymodel')
        self.assertEqual(response.context[-1]['page'].object_list[0].pk, 1)
    def test_invalid_page(self):
        # Out-of-range page number should 404.
        response = self.client.get(reverse('haystack_search'), {'q': 'hello world', 'page': '165233'})
        self.assertEqual(response.status_code, 404)
    def test_empty_results(self):
        # Without a query, the view should produce an EmptySearchQuerySet.
        sv = SearchView()
        self.assert_(isinstance(sv.get_results(), EmptySearchQuerySet))
    def test_initial_data(self):
        sv = SearchView(form_class=InitialedSearchForm)
        sv.request = HttpRequest()
        form = sv.build_form()
        self.assert_(isinstance(form, InitialedSearchForm))
        self.assertEqual(form.fields['q'].initial, 'Search for...')
        self.assertEqual(form.as_p(), u'<p><label for="id_q">Search:</label> <input type="text" name="q" value="Search for..." id="id_q" /></p>')
    def test_thread_safety(self):
        # Two concurrent requests through the same view factory must not leak
        # state between each other (each thread must see its own GET params).
        exceptions = []
        def threaded_view(queue, view, request):
            import time; time.sleep(2)
            try:
                inst = view(request)
                queue.put(request.GET['name'])
            except Exception, e:
                exceptions.append(e)
                raise
        class ThreadedSearchView(SearchView):
            def __call__(self, request):
                print "Name: %s" % request.GET['name']
                return super(ThreadedSearchView, self).__call__(request)
        view = search_view_factory(view_class=ThreadedSearchView)
        queue = Queue.Queue()
        request_1 = HttpRequest()
        request_1.GET = {'name': 'foo'}
        request_2 = HttpRequest()
        request_2.GET = {'name': 'bar'}
        th1 = Thread(target=threaded_view, args=(queue, view, request_1))
        th2 = Thread(target=threaded_view, args=(queue, view, request_2))
        th1.start()
        th2.start()
        th1.join()
        th2.join()
        foo = queue.get()
        bar = queue.get()
        # If state leaked, both threads would report the same name.
        self.assertNotEqual(foo, bar)
class ResultsPerPageTestCase(TestCase):
    """Verifies per-view ``results_per_page`` overrides via a custom URLconf."""
    urls = 'core.tests.results_per_page_urls'
    def test_custom_results_per_page(self):
        # /search/ is configured with 1 result per page, /search2/ with 2.
        response = self.client.get('/search/', {'q': 'hello world'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.context[-1]['page'].object_list), 1)
        self.assertEqual(response.context[-1]['paginator'].per_page, 1)
        response = self.client.get('/search2/', {'q': 'hello world'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.context[-1]['page'].object_list), 1)
        self.assertEqual(response.context[-1]['paginator'].per_page, 2)
class FacetedSearchViewTestCase(TestCase):
    """Smoke tests for FacetedSearchView with the dummy engine."""
    def setUp(self):
        super(FacetedSearchViewTestCase, self).setUp()
        mock_index_site = SearchSite()
        mock_index_site.register(MockModel)
        mock_index_site.register(AnotherMockModel)
        # Stow.
        self.old_site = haystack.site
        haystack.site = mock_index_site
        self.old_engine = getattr(settings, 'HAYSTACK_SEARCH_ENGINE')
        settings.HAYSTACK_SEARCH_ENGINE = 'dummy'
    def tearDown(self):
        # Restore the globals stowed in setUp.
        haystack.site = self.old_site
        settings.HAYSTACK_SEARCH_ENGINE = self.old_engine
        super(FacetedSearchViewTestCase, self).tearDown()
    def test_search_no_query(self):
        response = self.client.get(reverse('haystack_faceted_search'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['facets'], {})
    def test_empty_results(self):
        fsv = FacetedSearchView()
        self.assert_(isinstance(fsv.get_results(), EmptySearchQuerySet))
class BasicSearchViewTestCase(TestCase):
    """Tests for haystack's function-based ``basic_search`` view with the
    dummy engine and a mock index site."""

    def setUp(self):
        super(BasicSearchViewTestCase, self).setUp()
        mock_index_site = SearchSite()
        mock_index_site.register(MockModel)
        mock_index_site.register(AnotherMockModel)
        # Stow the global site/engine so tearDown can restore them.
        self.old_site = haystack.site
        haystack.site = mock_index_site
        self.old_engine = getattr(settings, 'HAYSTACK_SEARCH_ENGINE')
        settings.HAYSTACK_SEARCH_ENGINE = 'dummy'

    def tearDown(self):
        haystack.site = self.old_site
        settings.HAYSTACK_SEARCH_ENGINE = self.old_engine
        super(BasicSearchViewTestCase, self).tearDown()

    def test_search_no_query(self):
        response = self.client.get(reverse('haystack_basic_search'))
        self.assertEqual(response.status_code, 200)

    def test_search_query(self):
        # Dummy engine always returns a single 'haystack.dummymodel' hit.
        response = self.client.get(reverse('haystack_basic_search'), {'q': 'hello world'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(type(response.context[-1]['form']), ModelSearchForm)
        self.assertEqual(len(response.context[-1]['page'].object_list), 1)
        self.assertEqual(response.context[-1]['page'].object_list[0].content_type(), 'haystack.dummymodel')
        self.assertEqual(response.context[-1]['page'].object_list[0].pk, 1)
        self.assertEqual(response.context[-1]['query'], 'hello world')

    def test_invalid_page(self):
        # Out-of-range page number should 404.
        response = self.client.get(reverse('haystack_basic_search'), {'q': 'hello world', 'page': '165233'})
        self.assertEqual(response.status_code, 404)
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Phaser Editor documentation build configuration file, created by
# sphinx-quickstart on Thu May 25 08:35:14 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extension modules; the rinohtype PDF frontend is available but disabled.
extensions = [
    #'rinoh.frontend.sphinx'
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Phaser Editor 2D'
copyright = u'2016-2020, Arian Fornaris'
author = u'Arian Fornaris'

# The short X.Y version.
version = u'2.1.7'
# The full version, including alpha/beta/rc tags.
release = u'2.1.7'

# Language for autogenerated content (None = English).
language = None

# Patterns, relative to the source directory, ignored when looking for
# source files. Also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# Custom theme shipped in _themes/.
#import sphinx_rtd_theme
html_theme = "phaser-editor"
# Uncomment for generate Eclipse Offline Help
#html_theme = "eclipse-help"
html_theme_path = ["_themes"]
html_show_sourcelink = False
html_show_sphinx = False
html_favicon = "logo.png"
html_title = "Phaser Editor Help"
html_show_copyright = True
#html_theme = 'classic'

# Code samples in the docs are JavaScript (Phaser API).
highlight_language = 'javascript'

# Theme-specific options.
# html_theme_options = {}

# Custom static files (copied after builtin static files, so a file named
# "default.css" will overwrite the builtin "default.css").
html_static_path = ['_static']

# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'PhaserEditordoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    'preamble': '',
    # Latex figure (float) alignment
    'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass).
latex_documents = [
    (master_doc, 'PhaserEditor2D.tex', u'Phaser Editor 2D Documentation',
     u'Arian Fornaris', 'manual'),
]

# -- Options for Texinfo output -------------------------------------------

# (source start file, target name, title, author, dir menu entry,
#  description, category)
texinfo_documents = [
    (master_doc, 'PhaserEditor2D', u'Phaser Editor 2D Documentation',
     author, 'Arian', 'A friendly HTML5 game IDE.',
     'Miscellaneous'),
]
# |
<|file_name|>IsGreaterOrEqualQueryCriteria.java<|end_file_name|><|fim▁begin|>/*
* This file is part of ToroDB.
*<|fim▁hole|> *
* ToroDB is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with ToroDB. If not, see <http://www.gnu.org/licenses/>.
*
* Copyright (c) 2014, 8Kdata Technology
*
*/
package com.torodb.torod.core.language.querycriteria;
import com.torodb.torod.core.language.AttributeReference;
import com.torodb.torod.core.language.querycriteria.utils.QueryCriteriaVisitor;
import com.torodb.torod.core.subdocument.values.Value;
/**
*
*/
public class IsGreaterOrEqualQueryCriteria extends AttributeAndValueQueryCriteria {
private static final long serialVersionUID = 1L;
public IsGreaterOrEqualQueryCriteria(AttributeReference attributeReference, Value<?> val) {
super(attributeReference, val);
}
@Override
protected int getBaseHash() {
return 5;
}
@Override
public String toString() {
return getAttributeReference() + " >= " + getValue();
}
@Override
public <Result, Arg> Result accept(QueryCriteriaVisitor<Result, Arg> visitor, Arg arg) {
return visitor.visit(this, arg);
}
}<|fim▁end|> | * ToroDB is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version. |
<|file_name|>rollup.config.js<|end_file_name|><|fim▁begin|>import angular from 'rollup-plugin-angular';
import commonjs from 'rollup-plugin-commonjs';
import nodeResolve from 'rollup-plugin-node-resolve';
import typescript from 'rollup-plugin-typescript';
import uglify from 'rollup-plugin-uglify';
import { minify } from 'uglify-es';
// rollup-plugin-angular addons
import sass from 'node-sass';
import CleanCSS from 'clean-css';
import { minify as minifyHtml } from 'html-minifier';
const cssmin = new CleanCSS();
const htmlminOpts = {
caseSensitive: true,
collapseWhitespace: true,
removeComments: true,
};
export default {
input: 'dist/index.js',
output: {
// core output options
file: 'dist/bundle.umd.js', // required
format: 'umd', // required
name: 'ngx-form.element',
globals: {
'@angular/core': 'ng.core',
'rxjs/Subject': 'Subject',
'@angular/forms': 'ng.forms',
'@ngx-core/common': 'ngx-core.common',
'@ngx-form/interface': 'ngx-form.interface',
'@angular/common': 'ng.common',
'@angular/material': 'ng.material'
},
// advanced output options
// paths: ,
// banner: ,
// footer: ,
// intro:,
// outro: ,
sourcemap: true, // true | inline
// sourcemapFile: ,
// interop: ,
// danger zone
exports: 'named',
// amd: ,
// indent: ,
// strict:
},
onwarn,
plugins: [
angular({
preprocessors: {
template: template => minifyHtml(template, htmlminOpts),
style: scss => {
const css = sass.renderSync({ data: scss }).css;
return cssmin.minify(css).styles;
},
}
}),
commonjs(),
nodeResolve({
// use "module" field for ES6 module if possible
module: true, // Default: true
// use "jsnext:main" if possible
// – see https://github.com/rollup/rollup/wiki/jsnext:main
jsnext: true, // Default: false
// use "main" field or index.js, even if it's not an ES6 module
// (needs to be converted from CommonJS to ES6
// – see https://github.com/rollup/rollup-plugin-commonjs
main: true, // Default: true
// some package.json files have a `browser` field which
// specifies alternative files to load for people bundling
// for the browser. If that's you, use this option, otherwise
// pkg.browser will be ignored
browser: true, // Default: false
// not all files you want to resolve are .js files
extensions: [ '.js', '.json' ], // Default: ['.js']
// whether to prefer built-in modules (e.g. `fs`, `path`) or
// local ones with the same names
preferBuiltins: true, // Default: true
// Lock the module search in this path (like a chroot). Module defined
// outside this path will be mark has external
jail: '/src', // Default: '/'
// If true, inspect resolved files to check that they are
// ES2015 modules
modulesOnly: false, // Default: false
// Any additional options that should be passed through
// to node-resolve
customResolveOptions: {}
}),
typescript({
typescript: require('./node_modules/typescript')
}),
uglify({}, minify)
]
};
function onwarn(message) {
const suppressed = [
'UNRESOLVED_IMPORT',
'THIS_IS_UNDEFINED'
];
if (!suppressed.find(code => message.code === code)) {
return console.warn(message.message);
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>a-backend-roles.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* This file exports the main roles AnalyticsBackend uses which are:
* 'developer', 'teacher' and 'student'.
*
* Also indicates the anonymous routes used by the gleaner-tracker module to
* send data to the collector server.
*/
exports.app = {
roles: [
{
roles: 'student',
allows: [
{
resources: [
'/games/public',
'/games/:gameId/versions',
'/games/:gameId/versions/:versionId',
'/games/:gameId/versions/:versionId/sessions/my',
'/sessions/:sessionId/results'
],
permissions: [
'get'
]
},
{
resources: [
'/sessions/:sessionId'
],
permissions: [
'put',
'get'
]
}
]
},
{
roles: 'teacher',
allows: [
{
resources: [
'/games/public',
'/games/:gameId/versions',
'/games/:gameId/versions/:versionId',
'/games/:gameId/versions/:versionId/sessions/my',
'/sessions/:sessionId/results'
],
permissions: [
'get'
]
},
{
resources: [
'/sessions/:sessionId',
'/sessions/:sessionId/remove',
'/sessions/:sessionId/results/:resultsId'
],
permissions: [
'*'
]
},
{
resources: [
'/games/:gameId/versions/:versionId/sessions',
'/sessions/:sessionId/event/:event'
],
permissions: [
'post'
]
}
]
},
{
roles: 'developer',
allows: [
{
resources: [
'/games/my',
'/games/:gameId',
'/games/:gameId/versions',
'/games/:gameId/versions/:versionId'
],
permissions: [
'*'
]
},
{
resources: [
'/games/:gameId/versions/:versionId/sessions',
'/sessions/:sessionId'
],
permissions: [
'get'
]
},
{
resources: [<|fim▁hole|> 'post'
]
}
]
}
],
anonymous: [
'/collector/start/:trackingCode',
'/collector/track'
],
autoroles: [
'student',
'teacher',
'developer'
]
};<|fim▁end|> | '/games'
],
permissions: [ |
<|file_name|>spi.go<|end_file_name|><|fim▁begin|><|fim▁hole|>
import (
"github.com/docker/infrakit/pkg/spi"
"github.com/docker/infrakit/pkg/types"
)
// InterfaceSpec is the current name and version of the Resource API.
var InterfaceSpec = spi.InterfaceSpec{
Name: "Resource",
Version: "0.1.1",
}
// ID is the unique identifier for a collection of resources.
type ID string
// Spec is a specification of resources to provision.
type Spec struct {
// ID is the unique identifier for the collection of resources.
ID ID
// Properties is the opaque configuration for the resources.
Properties *types.Any
}
// Plugin defines the functions for a Resource plugin.
type Plugin interface {
Commit(spec Spec, pretend bool) (string, error)
Destroy(spec Spec, pretend bool) (string, error)
DescribeResources(spec Spec) (string, error)
}<|fim▁end|> | package resource |
<|file_name|>authActions.spec.ts<|end_file_name|><|fim▁begin|>import {login, signup} from '../../src/app/actions/authActions';
import ActionsConstants from '../../src/common/constants/actionsConstants';
describe('auth actions', () => {
describe('if we create a login action', () => {
let userId = 'TestUser';
it('should generate action with payload', () => {
expect(login(userId)).toEqual({
type: ActionsConstants.Login,
payload: userId
});
});
});
describe('if we create a login action without a userId', () => {
const error = new TypeError('not a string');
it('should fail', () => {
expect(login(error)).toEqual({
type: ActionsConstants.Login,
payload: error,
error: true
});
});
});
<|fim▁hole|> });
});
});
});<|fim▁end|> | describe('if we create a signup action', () => {
it('should generate action with payload', () => {
expect(signup()).toEqual({
type: ActionsConstants.SignUp |
<|file_name|>bench_basic.py<|end_file_name|><|fim▁begin|>from __future__ import division, print_function, absolute_import
import sys
from numpy.testing import *
import numpy.linalg as linalg
def random(size):
return rand(*size)
class TestSolve(TestCase):
def bench_random(self):
basic_solve = linalg.solve
print()
print(' Solving system of linear equations')
print(' ==================================')
print(' | contiguous | non-contiguous ')
print('----------------------------------------------')
print(' size | scipy | basic | scipy | basic ')
for size,repeat in [(20,1000),(100,150),(500,2),(1000,1)][:-1]:
repeat *= 2
print('%5s' % size, end=' ')
sys.stdout.flush()
a = random([size,size])
# larger diagonal ensures non-singularity:
for i in range(size): a[i,i] = 10*(.1+a[i,i])
b = random([size])
print('| %6.2f ' % measure('solve(a,b)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_solve(a,b)',repeat), end=' ')
sys.stdout.flush()
a = a[-1::-1,-1::-1] # turn into a non-contiguous array
assert_(not a.flags['CONTIGUOUS'])
print('| %6.2f ' % measure('solve(a,b)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_solve(a,b)',repeat), end=' ')
sys.stdout.flush()
print(' (secs for %s calls)' % (repeat))
class TestInv(TestCase):
def bench_random(self):
basic_inv = linalg.inv
print()
print(' Finding matrix inverse')
print(' ==================================')
print(' | contiguous | non-contiguous ')
print('----------------------------------------------')
print(' size | scipy | basic | scipy | basic')
<|fim▁hole|> print('%5s' % size, end=' ')
sys.stdout.flush()
a = random([size,size])
# large diagonal ensures non-singularity:
for i in range(size): a[i,i] = 10*(.1+a[i,i])
print('| %6.2f ' % measure('inv(a)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_inv(a)',repeat), end=' ')
sys.stdout.flush()
a = a[-1::-1,-1::-1] # turn into a non-contiguous array
assert_(not a.flags['CONTIGUOUS'])
print('| %6.2f ' % measure('inv(a)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_inv(a)',repeat), end=' ')
sys.stdout.flush()
print(' (secs for %s calls)' % (repeat))
class TestDet(TestCase):
def bench_random(self):
basic_det = linalg.det
print()
print(' Finding matrix determinant')
print(' ==================================')
print(' | contiguous | non-contiguous ')
print('----------------------------------------------')
print(' size | scipy | basic | scipy | basic ')
for size,repeat in [(20,1000),(100,150),(500,2),(1000,1)][:-1]:
repeat *= 2
print('%5s' % size, end=' ')
sys.stdout.flush()
a = random([size,size])
print('| %6.2f ' % measure('det(a)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_det(a)',repeat), end=' ')
sys.stdout.flush()
a = a[-1::-1,-1::-1] # turn into a non-contiguous array
assert_(not a.flags['CONTIGUOUS'])
print('| %6.2f ' % measure('det(a)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_det(a)',repeat), end=' ')
sys.stdout.flush()
print(' (secs for %s calls)' % (repeat))
if __name__ == "__main__":
run_module_suite()<|fim▁end|> | for size,repeat in [(20,1000),(100,150),(500,2),(1000,1)][:-1]:
repeat *= 2 |
<|file_name|>rdfpipe.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
A commandline tool for parsing RDF in different formats and serializing the
resulting graph to a chosen format.
"""
import sys
from optparse import OptionParser
import logging
import rdflib
from rdflib import plugin
from rdflib.store import Store
from rdflib.graph import Graph
from rdflib.namespace import Namespace, RDF, RDFS, OWL, XSD
from rdflib.parser import Parser
from rdflib.serializer import Serializer
from rdfextras.tools.pathutils import guess_format
STORE_CONNECTION = ''
STORE_TYPE = 'IOMemory'
DEFAULT_INPUT_FORMAT = 'xml'
DEFAULT_OUTPUT_FORMAT = 'n3'
NS_BINDINGS = {
'rdf': RDF,
'rdfs': RDFS,
'owl': OWL,
'xsd': XSD,
'dc': "http://purl.org/dc/elements/1.1/",
'dct': "http://purl.org/dc/terms/",
'foaf': "http://xmlns.com/foaf/0.1/",
'wot': "http://xmlns.com/wot/0.1/"
}
def parse_and_serialize(input_files, input_format, guess,
outfile, output_format, ns_bindings,
store_conn=STORE_CONNECTION, store_type=STORE_TYPE):
store = plugin.get(store_type, Store)()
store.open(store_conn)
graph = Graph(store)
for prefix, uri in ns_bindings.items():
graph.namespace_manager.bind(prefix, uri, override=False)
for fpath in input_files:
use_format, kws = _format_and_kws(input_format)
if fpath == '-':
fpath = sys.stdin
elif not input_format and guess:
use_format = guess_format(fpath) or DEFAULT_INPUT_FORMAT
graph.parse(fpath, format=use_format, **kws)
if outfile:
output_format, kws = _format_and_kws(output_format)
graph.serialize(destination=outfile, format=output_format, base=None, **kws)
store.rollback()
def _format_and_kws(fmt):
"""
>>> _format_and_kws("fmt")
('fmt', {})
>>> _format_and_kws("fmt:+a")
('fmt', {'a': True})
>>> _format_and_kws("fmt:a")
('fmt', {'a': True})
>>> _format_and_kws("fmt:+a,-b")
('fmt', {'a': True, 'b': False})
>>> _format_and_kws("fmt:c=d")
('fmt', {'c': 'd'})
"""
fmt, kws = fmt, {}
if fmt and ':' in fmt:
fmt, kwrepr = fmt.split(':')
for kw in kwrepr.split(','):
if '=' in kw:
k, v = kw.split('=')
kws[k] = v
elif kw.startswith('-'):
kws[kw[1:]] = False
elif kw.startswith('+'):
kws[kw[1:]] = True
else: # same as "+"
kws[kw] = True
return fmt, kws
def make_option_parser():
parser_names = _get_plugin_names(Parser)
serializer_names = _get_plugin_names(Serializer)
kw_example = "FORMAT:(+)KW1,-KW2,KW3=VALUE"
oparser = OptionParser(
"%prog [-h] [-i INPUT_FORMAT] [-o OUTPUT_FORMAT] [--ns=PFX=NS ...] [-] [FILE ...]",
description=__doc__.strip() + (
" Reads file system paths, URLs or from stdin if '-' is given."
" The result is serialized to stdout."),
version="%prog " + "(using rdflib %s)" % rdflib.__version__)
oparser.add_option('-i', '--input-format',
type=str, #default=DEFAULT_INPUT_FORMAT,
help="Format of the input document(s)."
" Available input formats are: %s." % parser_names +
" If no format is given, it will be guessed from the file name extension."
" Keywords to parser can be given after format like: %s." % kw_example
,
metavar="INPUT_FORMAT")
oparser.add_option('-o', '--output-format',
type=str, default=DEFAULT_OUTPUT_FORMAT,
help="Format of the graph serialization."
" Available output formats are: %s."
% serializer_names +
" Default format is: '%default'." +
" Keywords to serializer can be given after format like: %s." % kw_example
,
metavar="OUTPUT_FORMAT")
oparser.add_option('--ns',
action="append", type=str,
help="Register a namespace binding (QName prefix to a base URI). "
"This can be used more than once.",
metavar="PREFIX=NAMESPACE")
oparser.add_option('--no-guess', dest='guess',
action='store_false', default=True,
help="Don't guess format based on file suffix.")
oparser.add_option('--no-out',
action='store_true', default=False,
help="Don't output the resulting graph (useful for checking validity of input).")
oparser.add_option('-w', '--warn',<|fim▁hole|> return oparser
_get_plugin_names = lambda kind: ", ".join(p.name for p in plugin.plugins(kind=kind))
def main():
oparser = make_option_parser()
opts, args = oparser.parse_args()
if len(args) < 1:
oparser.print_usage()
oparser.exit()
if opts.warn:
loglevel = logging.WARNING
else:
loglevel = logging.CRITICAL
logging.basicConfig(level=loglevel)
ns_bindings = dict(NS_BINDINGS)
if opts.ns:
for ns_kw in opts.ns:
pfx, uri = ns_kw.split('=')
ns_bindings[pfx] = uri
outfile = sys.stdout
if opts.no_out:
outfile = None
parse_and_serialize(args, opts.input_format, opts.guess,
outfile, opts.output_format, ns_bindings)
if __name__ == "__main__":
main()<|fim▁end|> | action='store_true', default=False,
help="Output warnings to stderr (by default only critical errors).")
|
<|file_name|>count_column.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*-
"CountColumn filter"
from .abstract import AbstractFilter<|fim▁hole|> "Count a flux's column and put the result in a variable"
name = 'Compter colonnes'
description = "Compte le nombre de colonnes d'un flux et met le résultat dans une variable"
node_in = ['cible']
parameters = [
{
'name': 'Variable',
'key': 'target',
'type': 'integer'
}
]
def run(self):
"Execute the filter"
target = self._model.config('target')
value = len(self._flux_in['cible']['headers'])
self._registery.set(target, value)<|fim▁end|> |
class CountColumn(AbstractFilter): |
<|file_name|>htmlimageelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::document_loader::{LoadBlocker, LoadType};
use crate::dom::activation::Activatable;
use crate::dom::attr::Attr;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::DOMRectBinding::DOMRectBinding::DOMRectMethods;
use crate::dom::bindings::codegen::Bindings::ElementBinding::ElementBinding::ElementMethods;
use crate::dom::bindings::codegen::Bindings::HTMLImageElementBinding;
use crate::dom::bindings::codegen::Bindings::HTMLImageElementBinding::HTMLImageElementMethods;
use crate::dom::bindings::codegen::Bindings::MouseEventBinding::MouseEventMethods;
use crate::dom::bindings::codegen::Bindings::NodeBinding::NodeBinding::NodeMethods;
use crate::dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use crate::dom::bindings::error::Fallible;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::{DomRoot, LayoutDom, MutNullableDom};
use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::document::Document;
use crate::dom::element::{cors_setting_for_element, referrer_policy_for_element};
use crate::dom::element::{reflect_cross_origin_attribute, set_cross_origin_attribute};
use crate::dom::element::{AttributeMutation, Element, RawLayoutElementHelpers};
use crate::dom::event::Event;
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlareaelement::HTMLAreaElement;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::htmlformelement::{FormControl, HTMLFormElement};
use crate::dom::htmlmapelement::HTMLMapElement;
use crate::dom::htmlpictureelement::HTMLPictureElement;
use crate::dom::htmlsourceelement::HTMLSourceElement;
use crate::dom::mouseevent::MouseEvent;
use crate::dom::node::UnbindContext;
use crate::dom::node::{
document_from_node, window_from_node, BindContext, Node, NodeDamage, ShadowIncluding,
};
use crate::dom::performanceresourcetiming::InitiatorType;
use crate::dom::values::UNSIGNED_LONG_MAX;
use crate::dom::virtualmethods::VirtualMethods;
use crate::dom::window::Window;
use crate::fetch::create_a_potential_CORS_request;
use crate::image_listener::{add_cache_listener_for_element, ImageCacheListener};
use crate::microtask::{Microtask, MicrotaskRunnable};
use crate::network_listener::{self, NetworkListener, PreInvoke, ResourceTimingListener};
use crate::script_thread::ScriptThread;
use crate::task_source::TaskSource;
use app_units::{Au, AU_PER_PX};
use cssparser::{Parser, ParserInput};
use dom_struct::dom_struct;
use euclid::Point2D;
use html5ever::{LocalName, Prefix};
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use mime::{self, Mime};
use msg::constellation_msg::PipelineId;
use net_traits::image::base::{Image, ImageMetadata};
use net_traits::image_cache::UsePlaceholder;
use net_traits::image_cache::{CanRequestImages, CorsStatus, ImageCache, ImageOrMetadataAvailable};
use net_traits::image_cache::{ImageResponder, ImageResponse, ImageState, PendingImageId};
use net_traits::request::{CorsSettings, Destination, Initiator, RequestBuilder};
use net_traits::{FetchMetadata, FetchResponseListener, FetchResponseMsg, NetworkError};
use net_traits::{ReferrerPolicy, ResourceFetchTiming, ResourceTimingType};
use num_traits::ToPrimitive;
use servo_url::origin::ImmutableOrigin;
use servo_url::origin::MutableOrigin;
use servo_url::ServoUrl;
use std::cell::{Cell, RefMut};
use std::char;
use std::collections::HashSet;
use std::default::Default;
use std::i32;
use std::mem;
use std::sync::{Arc, Mutex};
use style::attr::{
parse_double, parse_length, parse_unsigned_integer, AttrValue, LengthOrPercentageOrAuto,
};
use style::context::QuirksMode;
use style::media_queries::MediaList;
use style::parser::ParserContext;
use style::str::is_ascii_digit;
use style::stylesheets::{CssRuleType, Origin};
use style::values::specified::length::{Length, NoCalcLength};
use style::values::specified::{source_size_list::SourceSizeList, AbsoluteLength};
use style_traits::ParsingMode;
enum ParseState {
InDescriptor,
InParens,
AfterDescriptor,
}
pub struct SourceSet {
image_sources: Vec<ImageSource>,
source_size: SourceSizeList,
}
impl SourceSet {
fn new() -> SourceSet {
SourceSet {
image_sources: Vec::new(),
source_size: SourceSizeList::empty(),
}
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct ImageSource {
pub url: String,
pub descriptor: Descriptor,
}
#[derive(Clone, Debug, PartialEq)]
pub struct Descriptor {
pub wid: Option<u32>,
pub den: Option<f64>,
}
#[derive(Clone, Copy, JSTraceable, MallocSizeOf)]
#[allow(dead_code)]
enum State {
Unavailable,
PartiallyAvailable,
CompletelyAvailable,
Broken,
}
#[derive(Clone, Copy, JSTraceable, MallocSizeOf)]
enum ImageRequestPhase {
Pending,
Current,
}
#[derive(JSTraceable, MallocSizeOf)]
#[must_root]
struct ImageRequest {
state: State,
parsed_url: Option<ServoUrl>,
source_url: Option<USVString>,
blocker: Option<LoadBlocker>,
#[ignore_malloc_size_of = "Arc"]
image: Option<Arc<Image>>,
metadata: Option<ImageMetadata>,
final_url: Option<ServoUrl>,
current_pixel_density: Option<f64>,
}
#[dom_struct]
pub struct HTMLImageElement {
htmlelement: HTMLElement,
image_request: Cell<ImageRequestPhase>,
current_request: DomRefCell<ImageRequest>,
pending_request: DomRefCell<ImageRequest>,
form_owner: MutNullableDom<HTMLFormElement>,
generation: Cell<u32>,
#[ignore_malloc_size_of = "SourceSet"]
source_set: DomRefCell<SourceSet>,
last_selected_source: DomRefCell<Option<USVString>>,
}
impl HTMLImageElement {
pub fn get_url(&self) -> Option<ServoUrl> {
self.current_request.borrow().parsed_url.clone()
}
}
/// The context required for asynchronously loading an external image.
struct ImageContext {
/// Reference to the script thread image cache.
image_cache: Arc<dyn ImageCache>,
/// Indicates whether the request failed, and why
status: Result<(), NetworkError>,
/// The cache ID for this request.
id: PendingImageId,
/// Used to mark abort
aborted: bool,
/// The document associated with this request
doc: Trusted<Document>,
/// timing data for this resource
resource_timing: ResourceFetchTiming,
url: ServoUrl,
}
impl FetchResponseListener for ImageContext {
fn process_request_body(&mut self) {}
fn process_request_eof(&mut self) {}
fn process_response(&mut self, metadata: Result<FetchMetadata, NetworkError>) {
debug!("got {:?} for {:?}", metadata.as_ref().map(|_| ()), self.url);
self.image_cache
.notify_pending_response(self.id, FetchResponseMsg::ProcessResponse(metadata.clone()));
let metadata = metadata.ok().map(|meta| match meta {
FetchMetadata::Unfiltered(m) => m,
FetchMetadata::Filtered { unsafe_, .. } => unsafe_,
});
// Step 14.5 of https://html.spec.whatwg.org/multipage/#img-environment-changes
if let Some(metadata) = metadata.as_ref() {
if let Some(ref content_type) = metadata.content_type {
let mime: Mime = content_type.clone().into_inner().into();
if mime.type_() == mime::MULTIPART && mime.subtype().as_str() == "x-mixed-replace" {
self.aborted = true;
}
}
}
let status_code = metadata
.as_ref()
.and_then(|m| m.status.as_ref().map(|&(code, _)| code))
.unwrap_or(0);
self.status = match status_code {
0 => Err(NetworkError::Internal(
"No http status code received".to_owned(),
)),
200..=299 => Ok(()), // HTTP ok status codes
_ => Err(NetworkError::Internal(format!(
"HTTP error code {}",
status_code
))),
};
}
fn process_response_chunk(&mut self, payload: Vec<u8>) {
if self.status.is_ok() {
self.image_cache
.notify_pending_response(self.id, FetchResponseMsg::ProcessResponseChunk(payload));
}
}
fn process_response_eof(&mut self, response: Result<ResourceFetchTiming, NetworkError>) {
self.image_cache
.notify_pending_response(self.id, FetchResponseMsg::ProcessResponseEOF(response));
}
fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming {
&mut self.resource_timing
}
fn resource_timing(&self) -> &ResourceFetchTiming {
&self.resource_timing
}
fn submit_resource_timing(&mut self) {
network_listener::submit_timing(self)
}
}
impl ResourceTimingListener for ImageContext {
fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
(
InitiatorType::LocalName("img".to_string()),
self.url.clone(),
)
}
fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
self.doc.root().global()
}
}
impl PreInvoke for ImageContext {
fn should_invoke(&self) -> bool {
!self.aborted
}
}
#[derive(PartialEq)]
pub(crate) enum FromPictureOrSrcSet {
Yes,
No,
}
// https://html.spec.whatwg.org/multipage/#update-the-image-data steps 17-20
// This function is also used to prefetch an image in `script::dom::servoparser::prefetch`.
pub(crate) fn image_fetch_request(
img_url: ServoUrl,
origin: ImmutableOrigin,
pipeline_id: PipelineId,
cors_setting: Option<CorsSettings>,
referrer_policy: Option<ReferrerPolicy>,
from_picture_or_srcset: FromPictureOrSrcSet,
) -> RequestBuilder {
let mut request =
create_a_potential_CORS_request(img_url, Destination::Image, cors_setting, None)
.origin(origin)
.pipeline_id(Some(pipeline_id))
.referrer_policy(referrer_policy);
if from_picture_or_srcset == FromPictureOrSrcSet::Yes {
request = request.initiator(Initiator::ImageSet);
}
request
}
impl HTMLImageElement {
/// Update the current image with a valid URL.
fn fetch_image(&self, img_url: &ServoUrl) {
let window = window_from_node(self);
let image_cache = window.image_cache();
let response = image_cache.find_image_or_metadata(
img_url.clone().into(),
window.origin().immutable().clone(),
cors_setting_for_element(self.upcast()),
UsePlaceholder::Yes,
CanRequestImages::Yes,
);
match response {
Ok(ImageOrMetadataAvailable::ImageAvailable(image, url)) => {
self.process_image_response(ImageResponse::Loaded(image, url));
},
Ok(ImageOrMetadataAvailable::MetadataAvailable(m)) => {
self.process_image_response(ImageResponse::MetadataLoaded(m));
},
Err(ImageState::Pending(id)) => {
add_cache_listener_for_element(image_cache, id, self);
},
Err(ImageState::LoadError) => {
self.process_image_response(ImageResponse::None);
},
Err(ImageState::NotRequested(id)) => {
add_cache_listener_for_element(image_cache, id, self);
self.fetch_request(img_url, id);
},
}
}
fn fetch_request(&self, img_url: &ServoUrl, id: PendingImageId) {
let document = document_from_node(self);
let window = window_from_node(self);
let context = Arc::new(Mutex::new(ImageContext {
image_cache: window.image_cache(),
status: Ok(()),
id: id,
aborted: false,
doc: Trusted::new(&document),
resource_timing: ResourceFetchTiming::new(ResourceTimingType::Resource),
url: img_url.clone(),
}));
let (action_sender, action_receiver) = ipc::channel().unwrap();
let (task_source, canceller) = document
.window()
.task_manager()
.networking_task_source_with_canceller();
let listener = NetworkListener {
context,
task_source,
canceller: Some(canceller),
};
ROUTER.add_route(
action_receiver.to_opaque(),
Box::new(move |message| {
listener.notify_fetch(message.to().unwrap());
}),
);
let request = image_fetch_request(
img_url.clone(),
document.origin().immutable().clone(),
document.global().pipeline_id(),
cors_setting_for_element(self.upcast()),
referrer_policy_for_element(self.upcast()),
if Self::uses_srcset_or_picture(self.upcast()) {
FromPictureOrSrcSet::Yes
} else {
FromPictureOrSrcSet::No
},
);
// This is a background load because the load blocker already fulfills the
// purpose of delaying the document's load event.
document
.loader_mut()
.fetch_async_background(request, action_sender);
}
// Steps common to when an image has been loaded.
fn handle_loaded_image(&self, image: Arc<Image>, url: ServoUrl) {
self.current_request.borrow_mut().metadata = Some(ImageMetadata {
height: image.height,
width: image.width,
});
self.current_request.borrow_mut().final_url = Some(url);
self.current_request.borrow_mut().image = Some(image);
self.current_request.borrow_mut().state = State::CompletelyAvailable;
LoadBlocker::terminate(&mut self.current_request.borrow_mut().blocker);
// Mark the node dirty
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
}
/// Step 24 of https://html.spec.whatwg.org/multipage/#update-the-image-data
fn process_image_response(&self, image: ImageResponse) {
// TODO: Handle multipart/x-mixed-replace
let (trigger_image_load, trigger_image_error) = match (image, self.image_request.get()) {
(ImageResponse::Loaded(image, url), ImageRequestPhase::Current) => {
self.handle_loaded_image(image, url);
(true, false)
},
(ImageResponse::PlaceholderLoaded(image, url), ImageRequestPhase::Current) => {
self.handle_loaded_image(image, url);
(false, true)
},
(ImageResponse::Loaded(image, url), ImageRequestPhase::Pending) => {
self.abort_request(State::Unavailable, ImageRequestPhase::Pending);
self.image_request.set(ImageRequestPhase::Current);
self.handle_loaded_image(image, url);
(true, false)
},
(ImageResponse::PlaceholderLoaded(image, url), ImageRequestPhase::Pending) => {
self.abort_request(State::Unavailable, ImageRequestPhase::Pending);
self.image_request.set(ImageRequestPhase::Current);
self.handle_loaded_image(image, url);
(false, true)
},
(ImageResponse::MetadataLoaded(meta), ImageRequestPhase::Current) => {
self.current_request.borrow_mut().state = State::PartiallyAvailable;
self.current_request.borrow_mut().metadata = Some(meta);
(false, false)
},
(ImageResponse::MetadataLoaded(_), ImageRequestPhase::Pending) => {
self.pending_request.borrow_mut().state = State::PartiallyAvailable;
(false, false)
},
(ImageResponse::None, ImageRequestPhase::Current) => {
self.abort_request(State::Broken, ImageRequestPhase::Current);
(false, true)
},
(ImageResponse::None, ImageRequestPhase::Pending) => {
self.abort_request(State::Broken, ImageRequestPhase::Current);
self.abort_request(State::Broken, ImageRequestPhase::Pending);
self.image_request.set(ImageRequestPhase::Current);
(false, true)
},
};
// Fire image.onload and loadend
if trigger_image_load {
// TODO: https://html.spec.whatwg.org/multipage/#fire-a-progress-event-or-event
self.upcast::<EventTarget>().fire_event(atom!("load"));
self.upcast::<EventTarget>().fire_event(atom!("loadend"));
}
// Fire image.onerror
if trigger_image_error {
self.upcast::<EventTarget>().fire_event(atom!("error"));
self.upcast::<EventTarget>().fire_event(atom!("loadend"));
}
// Trigger reflow
let window = window_from_node(self);
window.add_pending_reflow();
}
fn process_image_response_for_environment_change(
&self,
image: ImageResponse,
src: USVString,
generation: u32,
selected_pixel_density: f64,
) {
match image {
ImageResponse::Loaded(image, url) | ImageResponse::PlaceholderLoaded(image, url) => {
self.pending_request.borrow_mut().metadata = Some(ImageMetadata {
height: image.height,
width: image.width,
});
self.pending_request.borrow_mut().final_url = Some(url);
self.pending_request.borrow_mut().image = Some(image);
self.finish_reacting_to_environment_change(src, generation, selected_pixel_density);
},
ImageResponse::MetadataLoaded(meta) => {
self.pending_request.borrow_mut().metadata = Some(meta);
},
ImageResponse::None => {
self.abort_request(State::Unavailable, ImageRequestPhase::Pending);
},
};
}
/// <https://html.spec.whatwg.org/multipage/#abort-the-image-request>
fn abort_request(&self, state: State, request: ImageRequestPhase) {
let mut request = match request {
ImageRequestPhase::Current => self.current_request.borrow_mut(),
ImageRequestPhase::Pending => self.pending_request.borrow_mut(),
};
LoadBlocker::terminate(&mut request.blocker);
request.state = state;
request.image = None;
request.metadata = None;
}
/// <https://html.spec.whatwg.org/multipage/#update-the-source-set>
///
/// Rebuilds `self.source_set` from this element's `srcset`/`sizes`/`src`
/// attributes or, when the parent is a `<picture>`, from the first
/// matching `<source>` sibling.
fn update_source_set(&self) {
    // Step 1
    *self.source_set.borrow_mut() = SourceSet::new();
    // Step 2: the candidate elements are this <img> alone, or every child
    // of the parent <picture> (this element included) in tree order.
    let elem = self.upcast::<Element>();
    let parent = elem.upcast::<Node>().GetParentElement();
    let nodes;
    let elements = match parent.as_ref() {
        Some(p) => {
            if p.is::<HTMLPictureElement>() {
                nodes = p.upcast::<Node>().children();
                nodes
                    .filter_map(DomRoot::downcast::<Element>)
                    .map(|n| DomRoot::from_ref(&*n))
                    .collect()
            } else {
                vec![DomRoot::from_ref(&*elem)]
            }
        },
        None => vec![DomRoot::from_ref(&*elem)],
    };
    // Step 3: only an absolute `width` attribute contributes to density
    // normalisation; percentages and `auto` yield None.
    let width = match elem.get_attribute(&ns!(), &local_name!("width")) {
        Some(x) => match parse_length(&x.value()) {
            LengthOrPercentageOrAuto::Length(x) => {
                let abs_length = AbsoluteLength::Px(x.to_f32_px());
                Some(Length::NoCalc(NoCalcLength::Absolute(abs_length)))
            },
            _ => None,
        },
        None => None,
    };
    // Step 4
    for element in &elements {
        // Step 4.1: reaching the <img> itself means no earlier <source>
        // matched; build the set from its own attributes and stop.
        if *element == DomRoot::from_ref(&*elem) {
            let mut source_set = SourceSet::new();
            // Step 4.1.1
            if let Some(x) = element.get_attribute(&ns!(), &local_name!("srcset")) {
                source_set.image_sources = parse_a_srcset_attribute(&x.value());
            }
            // Step 4.1.2
            if let Some(x) = element.get_attribute(&ns!(), &local_name!("sizes")) {
                source_set.source_size =
                    parse_a_sizes_attribute(DOMString::from_string(x.value().to_string()));
            }
            // Step 4.1.3: fall back to `src` unless some srcset entry
            // already claims density 1x or carries a width descriptor.
            let src_attribute = element.get_string_attribute(&local_name!("src"));
            let is_src_empty = src_attribute.is_empty();
            let no_density_source_of_1 = source_set
                .image_sources
                .iter()
                .all(|source| source.descriptor.den != Some(1.));
            let no_width_descriptor = source_set
                .image_sources
                .iter()
                .all(|source| source.descriptor.wid.is_none());
            if !is_src_empty && no_density_source_of_1 && no_width_descriptor {
                source_set.image_sources.push(ImageSource {
                    url: src_attribute.to_string(),
                    descriptor: Descriptor {
                        wid: None,
                        den: None,
                    },
                })
            }
            // Step 4.1.4
            self.normalise_source_densities(&mut source_set, width);
            // Step 4.1.5
            *self.source_set.borrow_mut() = source_set;
            // Step 4.1.6
            return;
        }
        // Step 4.2: only <source> siblings participate.
        if !element.is::<HTMLSourceElement>() {
            continue;
        }
        // Step 4.3 - 4.4: a <source> without srcset is skipped entirely.
        let mut source_set = SourceSet::new();
        match element.get_attribute(&ns!(), &local_name!("srcset")) {
            Some(x) => {
                source_set.image_sources = parse_a_srcset_attribute(&x.value());
            },
            _ => continue,
        }
        // Step 4.5
        if source_set.image_sources.is_empty() {
            continue;
        }
        // Step 4.6: the media attribute must match the environment.
        if let Some(x) = element.get_attribute(&ns!(), &local_name!("media")) {
            if !self.matches_environment(x.value().to_string()) {
                continue;
            }
        }
        // Step 4.7
        if let Some(x) = element.get_attribute(&ns!(), &local_name!("sizes")) {
            source_set.source_size =
                parse_a_sizes_attribute(DOMString::from_string(x.value().to_string()));
        }
        // Step 4.8: a `type` attribute must name an image MIME type.
        if let Some(x) = element.get_attribute(&ns!(), &local_name!("type")) {
            // TODO Handle unsupported mime type
            let mime = x.value().parse::<Mime>();
            match mime {
                Ok(m) => match m.type_() {
                    mime::IMAGE => (),
                    _ => continue,
                },
                _ => continue,
            }
        }
        // Step 4.9
        self.normalise_source_densities(&mut source_set, width);
        // Step 4.10
        *self.source_set.borrow_mut() = source_set;
        return;
    }
}
/// Evaluates a parsed `sizes` list against the current device, yielding
/// the source size as an app-unit length.
fn evaluate_source_size_list(
    &self,
    source_size_list: &mut SourceSizeList,
    _width: Option<Length>,
) -> Au {
    let document = document_from_node(self);
    //FIXME https://github.com/whatwg/html/issues/3832
    source_size_list.evaluate(&document.device(), document.quirks_mode())
}
/// <https://html.spec.whatwg.org/multipage/#matches-the-environment>
///
/// Parses `media_query` as a media list and evaluates it against this
/// document's device, returning whether the environment matches.
fn matches_environment(&self, media_query: String) -> bool {
    let document = document_from_node(self);
    let quirks_mode = document.quirks_mode();
    let document_url = &document.url();
    // FIXME(emilio): This should do the same that we do for other media
    // lists regarding the rule type and such, though it doesn't really
    // matter right now...
    //
    // Also, ParsingMode::all() is wrong, and should be DEFAULT.
    let context = ParserContext::new(
        Origin::Author,
        document_url,
        Some(CssRuleType::Style),
        ParsingMode::all(),
        quirks_mode,
        None,
        None,
    );
    // Renamed from `parserInput`: Rust locals are snake_case
    // (the original triggered the non_snake_case lint).
    let mut parser_input = ParserInput::new(&media_query);
    let mut parser = Parser::new(&mut parser_input);
    let media_list = MediaList::parse(&context, &mut parser);
    media_list.evaluate(&document.device(), quirks_mode)
}
/// <https://html.spec.whatwg.org/multipage/#normalise-the-source-densities>
///
/// Ensures every image source in `source_set` carries a pixel-density
/// descriptor, deriving one from its width descriptor where present.
fn normalise_source_densities(&self, source_set: &mut SourceSet, width: Option<Length>) {
    // Step 1: evaluate the source size once; every entry shares it.
    let source_size_px = self
        .evaluate_source_size_list(&mut source_set.source_size, width)
        .to_f64_px();
    // Step 2
    for source in source_set.image_sources.iter_mut() {
        // Step 2.1: an explicit density descriptor wins.
        if source.descriptor.den.is_some() {
            continue;
        }
        // Step 2.2: width descriptor => density = wid / source size.
        // Step 2.3: otherwise default to 1x.
        source.descriptor.den = Some(match source.descriptor.wid {
            Some(wid) => wid as f64 / source_size_px,
            None => 1.0,
        });
    }
}
/// <https://html.spec.whatwg.org/multipage/#select-an-image-source>
///
/// Returns the chosen URL and its pixel density, or `None` when the
/// (freshly rebuilt) source set is empty.
fn select_image_source(&self) -> Option<(USVString, f64)> {
    // Step 1, 3
    self.update_source_set();
    let source_set = &*self.source_set.borrow_mut();
    let len = source_set.image_sources.len();
    // Step 2
    if len == 0 {
        return None;
    }
    // Step 4: mark later sources that repeat an earlier source's density.
    // NOTE: `den` is always Some here because update_source_set ran
    // normalise_source_densities on this set.
    let mut repeat_indices = HashSet::new();
    for outer_index in 0..len {
        if repeat_indices.contains(&outer_index) {
            continue;
        }
        let imgsource = &source_set.image_sources[outer_index];
        let pixel_density = imgsource.descriptor.den.unwrap();
        for inner_index in (outer_index + 1)..len {
            let imgsource2 = &source_set.image_sources[inner_index];
            if pixel_density == imgsource2.descriptor.den.unwrap() {
                repeat_indices.insert(inner_index);
            }
        }
    }
    // Collect the de-duplicated sources, tracking the densest one as the
    // fallback candidate (density, index-into-img_sources).
    let mut max = (0f64, 0);
    let img_sources = &mut vec![];
    for (index, image_source) in source_set.image_sources.iter().enumerate() {
        if repeat_indices.contains(&index) {
            continue;
        }
        let den = image_source.descriptor.den.unwrap();
        if max.0 < den {
            max = (den, img_sources.len());
        }
        img_sources.push(image_source);
    }
    // Step 5: prefer the smallest density that still meets the device's
    // pixel ratio; otherwise keep the densest source found above.
    let mut best_candidate = max;
    let device = document_from_node(self).device();
    let device_den = device.device_pixel_ratio().get() as f64;
    for (index, image_source) in img_sources.iter().enumerate() {
        let current_den = image_source.descriptor.den.unwrap();
        if current_den < best_candidate.0 && current_den >= device_den {
            best_candidate = (current_den, index);
        }
    }
    let selected_source = img_sources.remove(best_candidate.1).clone();
    Some((
        USVString(selected_source.url),
        selected_source.descriptor.den.unwrap() as f64,
    ))
}
/// Resets `request` so it tracks a brand-new fetch of `url` (requested via
/// the source string `src`), replacing any previously held load blocker.
fn init_image_request(
    &self,
    request: &mut RefMut<ImageRequest>,
    url: &ServoUrl,
    src: &USVString,
) {
    // Drop stale image data and record where the new request points.
    request.image = None;
    request.metadata = None;
    request.parsed_url = Some(url.clone());
    request.source_url = Some(src.clone());
    // Swap the old document load blocker for one covering the new URL.
    LoadBlocker::terminate(&mut request.blocker);
    let document = document_from_node(self);
    request.blocker = Some(LoadBlocker::new(&*document, LoadType::Image(url.clone())));
}
/// Step 13-17 of html.spec.whatwg.org/multipage/#update-the-image-data
///
/// Decides which request slot (current or pending) the new fetch of `url`
/// belongs in, initialises it, and then starts the fetch.
fn prepare_image_request(&self, url: &ServoUrl, src: &USVString, selected_pixel_density: f64) {
    match self.image_request.get() {
        ImageRequestPhase::Pending => {
            if let Some(pending_url) = self.pending_request.borrow().parsed_url.clone() {
                // Step 13: already fetching this exact URL; nothing to do.
                if pending_url == *url {
                    return;
                }
            }
        },
        ImageRequestPhase::Current => {
            let mut current_request = self.current_request.borrow_mut();
            let mut pending_request = self.pending_request.borrow_mut();
            // step 16, create a new "image_request"
            match (current_request.parsed_url.clone(), current_request.state) {
                (Some(parsed_url), State::PartiallyAvailable) => {
                    // Step 14: same URL as the partially-loaded current
                    // request; keep it and drop any pending switch.
                    if parsed_url == *url {
                        // Step 15 abort pending request
                        pending_request.image = None;
                        pending_request.parsed_url = None;
                        LoadBlocker::terminate(&mut pending_request.blocker);
                        // TODO: queue a task to restart animation, if restart-animation is set
                        return;
                    }
                    pending_request.current_pixel_density = Some(selected_pixel_density);
                    self.image_request.set(ImageRequestPhase::Pending);
                    self.init_image_request(&mut pending_request, &url, &src);
                },
                (_, State::Broken) | (_, State::Unavailable) => {
                    // Step 17: nothing usable is displayed, so replace the
                    // current request in place.
                    current_request.current_pixel_density = Some(selected_pixel_density);
                    self.init_image_request(&mut current_request, &url, &src);
                },
                (_, _) => {
                    // step 17: keep showing the current image and load the
                    // new URL through the pending slot instead.
                    pending_request.current_pixel_density = Some(selected_pixel_density);
                    self.image_request.set(ImageRequestPhase::Pending);
                    self.init_image_request(&mut pending_request, &url, &src);
                },
            }
        },
    }
    self.fetch_image(&url);
}
/// Step 8-12 of html.spec.whatwg.org/multipage/#update-the-image-data
fn update_the_image_data_sync_steps(&self) {
let document = document_from_node(self);
let window = document.window();
let task_source = window.task_manager().dom_manipulation_task_source();
let this = Trusted::new(self);
let (src, pixel_density) = match self.select_image_source() {
// Step 8
Some(data) => data,
None => {
// Step 9.
// FIXME(nox): Why are errors silenced here?
let _ = task_source.queue(
task!(image_null_source_error: move || {
let this = this.root();
{
let mut current_request =
this.current_request.borrow_mut();
current_request.source_url = None;
current_request.parsed_url = None;
}
let elem = this.upcast::<Element>();
let src_present = elem.has_attribute(&local_name!("src"));
if src_present || Self::uses_srcset_or_picture(elem) {
this.upcast::<EventTarget>().fire_event(atom!("error"));
}
// FIXME(nox): According to the spec, setting the current
// request to the broken state is done prior to queuing a
// task, why is this here?
this.abort_request(State::Broken, ImageRequestPhase::Current);
this.abort_request(State::Broken, ImageRequestPhase::Pending);
}),
window.upcast(),
);
return;
},
};
// Step 11
let base_url = document.base_url();
let parsed_url = base_url.join(&src.0);
match parsed_url {
Ok(url) => {
// Step 13-17
self.prepare_image_request(&url, &src, pixel_density);
},
Err(_) => {
// Step 12.1-12.5.
let src = src.0;
// FIXME(nox): Why are errors silenced here?
let _ = task_source.queue(
task!(image_selected_source_error: move || {
let this = this.root();
{
let mut current_request =
this.current_request.borrow_mut();
current_request.source_url = Some(USVString(src))
}
this.upcast::<EventTarget>().fire_event(atom!("error"));
// FIXME(nox): According to the spec, setting the current
// request to the broken state is done prior to queuing a
// task, why is this here?
this.abort_request(State::Broken, ImageRequestPhase::Current);
this.abort_request(State::Broken, ImageRequestPhase::Pending);
}),
window.upcast(),
);
},<|fim▁hole|>
/// <https://html.spec.whatwg.org/multipage/#update-the-image-data>
///
/// Entry point for (re)loading the image whenever a relevant attribute or
/// tree position changes. Synchronously serves cache hits for the simple
/// `src`-only case; everything else is deferred to a stable state.
pub fn update_the_image_data(&self) {
    let document = document_from_node(self);
    let window = document.window();
    let elem = self.upcast::<Element>();
    let src = elem.get_url_attribute(&local_name!("src"));
    let base_url = document.base_url();
    // https://html.spec.whatwg.org/multipage/#reacting-to-dom-mutations
    // Always first set the current request to unavailable,
    // ensuring img.complete is false.
    {
        let mut current_request = self.current_request.borrow_mut();
        current_request.state = State::Unavailable;
    }
    if !document.is_active() {
        // Step 1 (if the document is inactive)
        // TODO: use GlobalScope::enqueue_microtask,
        // to queue micro task to come back to this algorithm
    }
    // Step 2 abort if user-agent does not supports images
    // NOTE: Servo only supports images, skipping this step
    // Step 3, 4: a bare `src` (no srcset, no <picture> parent) selects
    // itself at density 1x without the full selection algorithm.
    let mut selected_source = None;
    let mut pixel_density = None;
    let src_set = elem.get_url_attribute(&local_name!("srcset"));
    let is_parent_picture = elem
        .upcast::<Node>()
        .GetParentElement()
        .map_or(false, |p| p.is::<HTMLPictureElement>());
    if src_set.is_empty() && !is_parent_picture && !src.is_empty() {
        selected_source = Some(src.clone());
        pixel_density = Some(1 as f64);
    };
    // Step 5
    *self.last_selected_source.borrow_mut() = selected_source.clone();
    // Step 6, check the list of available images
    // NOTE(review): this guard only rejects a Some("") source, so a None
    // source also falls through to the cache probe below — confirm this
    // is the intended reading of step 6.
    if !selected_source
        .as_ref()
        .map_or(false, |source| source.is_empty())
    {
        if let Ok(img_url) = base_url.join(&src) {
            let image_cache = window.image_cache();
            // Probe the cache without triggering a new network request.
            let response = image_cache.find_image_or_metadata(
                img_url.clone().into(),
                window.origin().immutable().clone(),
                cors_setting_for_element(self.upcast()),
                UsePlaceholder::No,
                CanRequestImages::No,
            );
            if let Ok(ImageOrMetadataAvailable::ImageAvailable(image, url)) = response {
                // Cancel any outstanding tasks that were queued before the src was
                // set on this element.
                self.generation.set(self.generation.get() + 1);
                // Step 6.3
                let metadata = ImageMetadata {
                    height: image.height,
                    width: image.width,
                };
                // Step 6.3.2 abort requests
                self.abort_request(State::CompletelyAvailable, ImageRequestPhase::Current);
                self.abort_request(State::Unavailable, ImageRequestPhase::Pending);
                let mut current_request = self.current_request.borrow_mut();
                current_request.final_url = Some(url);
                current_request.image = Some(image.clone());
                current_request.metadata = Some(metadata);
                // Step 6.3.6
                current_request.current_pixel_density = pixel_density;
                let this = Trusted::new(self);
                let src = src.0;
                // Queue the load event; the URL fields are filled in by
                // the task so they appear atomically with the event.
                let _ = window.task_manager().dom_manipulation_task_source().queue(
                    task!(image_load_event: move || {
                        let this = this.root();
                        {
                            let mut current_request =
                                this.current_request.borrow_mut();
                            current_request.parsed_url = Some(img_url);
                            current_request.source_url = Some(USVString(src));
                        }
                        // TODO: restart animation, if set.
                        this.upcast::<EventTarget>().fire_event(atom!("load"));
                    }),
                    window.upcast(),
                );
                return;
            }
        }
    }
    // step 7, await a stable state.
    // Bumping the generation invalidates any earlier queued microtask.
    self.generation.set(self.generation.get() + 1);
    let task = ImageElementMicrotask::StableStateUpdateImageDataTask {
        elem: DomRoot::from_ref(self),
        generation: self.generation.get(),
    };
    ScriptThread::await_stable_state(Microtask::ImageElement(task));
}
/// <https://html.spec.whatwg.org/multipage/#img-environment-changes>
pub fn react_to_environment_changes(&self) {
    // Step 1: defer the real work to a stable state. The generation
    // snapshot lets the microtask detect intervening mutations.
    ScriptThread::await_stable_state(Microtask::ImageElement(
        ImageElementMicrotask::EnvironmentChangesTask {
            elem: DomRoot::from_ref(self),
            generation: self.generation.get(),
        },
    ));
}
/// Step 2-12 of https://html.spec.whatwg.org/multipage/#img-environment-changes
///
/// Re-runs source selection after a viewport/device change and, if the
/// selection changed, loads the new source via the pending request slot.
fn react_to_environment_changes_sync_steps(&self, generation: u32) {
    // TODO reduce duplicacy of this code
    // Registers an IPC route so that image-cache progress for `id` is
    // forwarded back to this element on the script thread.
    fn add_cache_listener_for_element(
        image_cache: Arc<dyn ImageCache>,
        id: PendingImageId,
        elem: &HTMLImageElement,
        selected_source: String,
        selected_pixel_density: f64,
    ) {
        let trusted_node = Trusted::new(elem);
        let (responder_sender, responder_receiver) = ipc::channel().unwrap();
        let window = window_from_node(elem);
        let (task_source, canceller) = window
            .task_manager()
            .networking_task_source_with_canceller();
        let generation = elem.generation.get();
        ROUTER.add_route(responder_receiver.to_opaque(), Box::new(move |message| {
            debug!("Got image {:?}", message);
            // Return the image via a message to the script thread, which marks
            // the element as dirty and triggers a reflow.
            let element = trusted_node.clone();
            let image = message.to().unwrap();
            let selected_source_clone = selected_source.clone();
            let _ = task_source.queue_with_canceller(
                task!(process_image_response_for_environment_change: move || {
                    let element = element.root();
                    // Ignore any image response for a previous request that has been discarded.
                    if generation == element.generation.get() {
                        element.process_image_response_for_environment_change(image,
                            USVString::from(selected_source_clone), generation, selected_pixel_density);
                    }
                }),
                &canceller,
            );
        }));
        image_cache.add_listener(id, ImageResponder::new(responder_sender, id));
    }
    let elem = self.upcast::<Element>();
    let document = document_from_node(elem);
    let has_pending_request = match self.image_request.get() {
        ImageRequestPhase::Pending => true,
        _ => false,
    };
    // Step 2: bail out unless the element is responsive, in an active
    // document, and not already mid-switch.
    if !document.is_active() || !Self::uses_srcset_or_picture(elem) || has_pending_request {
        return;
    }
    // Steps 3-4
    let (selected_source, selected_pixel_density) = match self.select_image_source() {
        Some(selected) => selected,
        None => return,
    };
    // Step 5: nothing to do when the selection did not actually change.
    let same_source = match *self.last_selected_source.borrow() {
        Some(ref last_src) => *last_src == selected_source,
        _ => false,
    };
    let same_selected_pixel_density = match self.current_request.borrow().current_pixel_density
    {
        Some(den) => selected_pixel_density == den,
        _ => false,
    };
    if same_source && same_selected_pixel_density {
        return;
    }
    let base_url = document.base_url();
    // Step 6
    let img_url = match base_url.join(&selected_source.0) {
        Ok(url) => url,
        Err(_) => return,
    };
    // Step 12: stage the new fetch in the pending request slot.
    self.image_request.set(ImageRequestPhase::Pending);
    self.init_image_request(
        &mut self.pending_request.borrow_mut(),
        &img_url,
        &selected_source,
    );
    let window = window_from_node(self);
    let image_cache = window.image_cache();
    // Step 14
    let response = image_cache.find_image_or_metadata(
        img_url.clone().into(),
        window.origin().immutable().clone(),
        cors_setting_for_element(self.upcast()),
        UsePlaceholder::No,
        CanRequestImages::Yes,
    );
    match response {
        Ok(ImageOrMetadataAvailable::ImageAvailable(_image, _url)) => {
            // Step 15: already cached; promote the pending request.
            self.finish_reacting_to_environment_change(
                selected_source,
                generation,
                selected_pixel_density,
            );
        },
        // Dimensions known but pixel data still pending.
        Ok(ImageOrMetadataAvailable::MetadataAvailable(m)) => {
            self.process_image_response_for_environment_change(
                ImageResponse::MetadataLoaded(m),
                selected_source,
                generation,
                selected_pixel_density,
            );
        },
        // A fetch is already in flight: listen for its completion.
        Err(ImageState::Pending(id)) => {
            add_cache_listener_for_element(
                image_cache.clone(),
                id,
                self,
                selected_source.0,
                selected_pixel_density,
            );
        },
        Err(ImageState::LoadError) => {
            self.process_image_response_for_environment_change(
                ImageResponse::None,
                selected_source,
                generation,
                selected_pixel_density,
            );
        },
        // Not requested yet: listen, then start the fetch ourselves.
        Err(ImageState::NotRequested(id)) => {
            add_cache_listener_for_element(
                image_cache,
                id,
                self,
                selected_source.0,
                selected_pixel_density,
            );
            self.fetch_request(&img_url, id);
        },
    }
}
/// Step 15 for <https://html.spec.whatwg.org/multipage/#img-environment-changes>
///
/// Queues a task that promotes the pending request to be the current
/// request and fires `load`, unless the element mutated (generation
/// changed) since this reaction started.
fn finish_reacting_to_environment_change(
    &self,
    src: USVString,
    generation: u32,
    selected_pixel_density: f64,
) {
    let this = Trusted::new(self);
    let window = window_from_node(self);
    let src = src.0;
    let _ = window.task_manager().dom_manipulation_task_source().queue(
        task!(image_load_event: move || {
            let this = this.root();
            let relevant_mutation = this.generation.get() != generation;
            // Step 15.1: a newer update superseded this one; drop it.
            if relevant_mutation {
                this.abort_request(State::Unavailable, ImageRequestPhase::Pending);
                return;
            }
            // Step 15.2
            *this.last_selected_source.borrow_mut() = Some(USVString(src));
            {
                let mut pending_request = this.pending_request.borrow_mut();
                pending_request.current_pixel_density = Some(selected_pixel_density);
                // Step 15.3
                pending_request.state = State::CompletelyAvailable;
                // Step 15.4
                // Already a part of the list of available images due to Step 14
                // Step 15.5: move the pending request's data into the
                // current request. The explicit `&mut *` derefs are
                // essential: swapping the `RefMut` guards themselves (as
                // the previous code did) leaves both cells' contents
                // untouched, so the freshly loaded image would never
                // become current and would then be wiped below.
                mem::swap(
                    &mut *this.current_request.borrow_mut(),
                    &mut *pending_request,
                );
                // Release the borrow before abort_request re-borrows
                // pending_request, which would otherwise panic.
                drop(pending_request);
                this.abort_request(State::Unavailable, ImageRequestPhase::Pending);
            }
            // Step 15.6
            this.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
            // Step 15.7
            this.upcast::<EventTarget>().fire_event(atom!("load"));
        }),
        window.upcast(),
    );
}
/// True when source selection is responsive: the element carries a
/// `srcset` attribute or sits inside a `<picture>` parent.
fn uses_srcset_or_picture(elem: &Element) -> bool {
    if elem.has_attribute(&local_name!("srcset")) {
        return true;
    }
    elem.upcast::<Node>()
        .GetParentElement()
        .map_or(false, |parent| parent.is::<HTMLPictureElement>())
}
/// Builds the plain (un-reflected) element with both request slots empty
/// and the request phase pointing at the current slot.
fn new_inherited(
    local_name: LocalName,
    prefix: Option<Prefix>,
    document: &Document,
) -> HTMLImageElement {
    // Both request slots start out identical: empty and unavailable.
    let blank_request = || ImageRequest {
        state: State::Unavailable,
        parsed_url: None,
        source_url: None,
        image: None,
        metadata: None,
        blocker: None,
        final_url: None,
        current_pixel_density: None,
    };
    HTMLImageElement {
        htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
        image_request: Cell::new(ImageRequestPhase::Current),
        current_request: DomRefCell::new(blank_request()),
        pending_request: DomRefCell::new(blank_request()),
        form_owner: Default::default(),
        generation: Default::default(),
        source_set: DomRefCell::new(SourceSet::new()),
        last_selected_source: DomRefCell::new(None),
    }
}
#[allow(unrooted_must_root)]
/// Creates a reflected, rooted `<img>` element belonging to `document`.
pub fn new(
    local_name: LocalName,
    prefix: Option<Prefix>,
    document: &Document,
) -> DomRoot<HTMLImageElement> {
    Node::reflect_node(
        Box::new(HTMLImageElement::new_inherited(
            local_name, prefix, document,
        )),
        document,
        HTMLImageElementBinding::Wrap,
    )
}
/// The JS `Image(width, height)` constructor: creates an `<img>` in the
/// window's document and reflects the optional arguments onto the
/// `width`/`height` content attributes.
pub fn Image(
    window: &Window,
    width: Option<u32>,
    height: Option<u32>,
) -> Fallible<DomRoot<HTMLImageElement>> {
    let image = HTMLImageElement::new(local_name!("img"), None, &window.Document());
    if let Some(width) = width {
        image.SetWidth(width);
    }
    if let Some(height) = height {
        image.SetHeight(height);
    }
    Ok(image)
}
/// Resolves this image's `usemap` attribute ("#name") to the `<area>`
/// elements of the first matching `<map>` in the document, if any.
pub fn areas(&self) -> Option<Vec<DomRoot<HTMLAreaElement>>> {
    let elem = self.upcast::<Element>();
    let usemap_attr = elem.get_attribute(&ns!(), &local_name!("usemap"))?;
    let value = usemap_attr.value();
    // The value must be "#" followed by a non-empty map name; the char
    // boundary check guards the byte-based split below.
    if value.is_empty() || !value.is_char_boundary(1) {
        return None;
    }
    let (first, last) = value.split_at(1);
    if first != "#" || last.is_empty() {
        return None;
    }
    // Find the first <map> whose name matches, in tree order.
    // (Locals renamed from camelCase to satisfy the non_snake_case lint.)
    let map_element = document_from_node(self)
        .upcast::<Node>()
        .traverse_preorder(ShadowIncluding::No)
        .filter_map(DomRoot::downcast::<HTMLMapElement>)
        .find(|n| {
            n.upcast::<Element>()
                .get_string_attribute(&LocalName::from("name")) ==
                last
        });
    map_element.map(|map| map.get_area_elements())
}
pub fn same_origin(&self, origin: &MutableOrigin) -> bool {
if let Some(ref image) = self.current_request.borrow().image {
return image.cors_status == CorsStatus::Safe;
}
self.current_request
.borrow()
.final_url
.as_ref()
.map_or(false, |url| {
url.scheme() == "data" || url.origin().same_origin(origin)
})
}
}
#[derive(JSTraceable, MallocSizeOf)]
/// Deferred per-`<img>` work that runs as a microtask at a stable state.
/// Each variant snapshots the element's generation so stale tasks can be
/// detected and skipped.
pub enum ImageElementMicrotask {
    /// Continuation (step 7 onward) of "update the image data".
    StableStateUpdateImageDataTask {
        elem: DomRoot<HTMLImageElement>,
        generation: u32,
    },
    /// Continuation of "reacting to environment changes".
    EnvironmentChangesTask {
        elem: DomRoot<HTMLImageElement>,
        generation: u32,
    },
}
impl MicrotaskRunnable for ImageElementMicrotask {
    /// Runs the deferred image work once a stable state is reached.
    fn handler(&self) {
        match self {
            &ImageElementMicrotask::StableStateUpdateImageDataTask {
                ref elem,
                ref generation,
            } => {
                // Step 7 of https://html.spec.whatwg.org/multipage/#update-the-image-data,
                // stop here if other instances of this algorithm have been scheduled
                if elem.generation.get() == *generation {
                    elem.update_the_image_data_sync_steps();
                }
            },
            &ImageElementMicrotask::EnvironmentChangesTask {
                ref elem,
                ref generation,
            } => {
                // Staleness is re-checked inside the sync steps using the
                // generation argument, so no guard is needed here.
                elem.react_to_environment_changes_sync_steps(*generation);
            },
        }
    }
}
/// Layout-thread access to an image element's current request. The unsafe
/// methods read the script-owned cells without borrow bookkeeping, so they
/// must only be called under the layout/script synchronization that
/// `borrow_for_layout` relies on.
pub trait LayoutHTMLImageElementHelpers {
    /// The decoded image of the current request, if available.
    #[allow(unsafe_code)]
    unsafe fn image(&self) -> Option<Arc<Image>>;
    /// The parsed URL the current request points at.
    #[allow(unsafe_code)]
    unsafe fn image_url(&self) -> Option<ServoUrl>;
    /// The pixel density selected for the current request.
    #[allow(unsafe_code)]
    unsafe fn image_density(&self) -> Option<f64>;
    /// Image and metadata of the current request in one read.
    #[allow(unsafe_code)]
    unsafe fn image_data(&self) -> (Option<Arc<Image>>, Option<ImageMetadata>);
    /// The `width` content attribute as a parsed dimension.
    fn get_width(&self) -> LengthOrPercentageOrAuto;
    /// The `height` content attribute as a parsed dimension.
    fn get_height(&self) -> LengthOrPercentageOrAuto;
}
impl LayoutHTMLImageElementHelpers for LayoutDom<HTMLImageElement> {
    #[allow(unsafe_code)]
    unsafe fn image(&self) -> Option<Arc<Image>> {
        (*self.unsafe_get())
            .current_request
            .borrow_for_layout()
            .image
            .clone()
    }
    #[allow(unsafe_code)]
    unsafe fn image_url(&self) -> Option<ServoUrl> {
        (*self.unsafe_get())
            .current_request
            .borrow_for_layout()
            .parsed_url
            .clone()
    }
    #[allow(unsafe_code)]
    unsafe fn image_data(&self) -> (Option<Arc<Image>>, Option<ImageMetadata>) {
        // Single borrow so image and metadata come from the same snapshot.
        let current_request = (*self.unsafe_get()).current_request.borrow_for_layout();
        (
            current_request.image.clone(),
            current_request.metadata.clone(),
        )
    }
    #[allow(unsafe_code)]
    unsafe fn image_density(&self) -> Option<f64> {
        (*self.unsafe_get())
            .current_request
            .borrow_for_layout()
            .current_pixel_density
            .clone()
    }
    #[allow(unsafe_code)]
    fn get_width(&self) -> LengthOrPercentageOrAuto {
        // Missing or unparseable width attribute falls back to `auto`.
        unsafe {
            (*self.upcast::<Element>().unsafe_get())
                .get_attr_for_layout(&ns!(), &local_name!("width"))
                .map(AttrValue::as_dimension)
                .cloned()
                .unwrap_or(LengthOrPercentageOrAuto::Auto)
        }
    }
    #[allow(unsafe_code)]
    fn get_height(&self) -> LengthOrPercentageOrAuto {
        // Missing or unparseable height attribute falls back to `auto`.
        unsafe {
            (*self.upcast::<Element>().unsafe_get())
                .get_attr_for_layout(&ns!(), &local_name!("height"))
                .map(AttrValue::as_dimension)
                .cloned()
                .unwrap_or(LengthOrPercentageOrAuto::Auto)
        }
    }
}
//https://html.spec.whatwg.org/multipage/#parse-a-sizes-attribute
/// Parses a `sizes` attribute value into a `SourceSizeList` using a
/// throwaway author-origin CSS parser context.
pub fn parse_a_sizes_attribute(value: DOMString) -> SourceSizeList {
    let dummy_url = ServoUrl::parse("about:blank").unwrap();
    let context = ParserContext::new(
        Origin::Author,
        &dummy_url,
        Some(CssRuleType::Style),
        // FIXME(emilio): why ::empty() instead of ::DEFAULT? Also, what do
        // browsers do regarding quirks-mode in a media list?
        ParsingMode::empty(),
        QuirksMode::NoQuirks,
        None,
        None,
    );
    let mut css_input = ParserInput::new(&value);
    SourceSizeList::parse(&context, &mut Parser::new(&mut css_input))
}
impl HTMLImageElementMethods for HTMLImageElement {
    // https://html.spec.whatwg.org/multipage/#dom-img-alt
    make_getter!(Alt, "alt");
    // https://html.spec.whatwg.org/multipage/#dom-img-alt
    make_setter!(SetAlt, "alt");
    // https://html.spec.whatwg.org/multipage/#dom-img-src
    make_url_getter!(Src, "src");
    // https://html.spec.whatwg.org/multipage/#dom-img-src
    make_url_setter!(SetSrc, "src");
    // https://html.spec.whatwg.org/multipage/#dom-img-srcset
    make_url_getter!(Srcset, "srcset");
    // https://html.spec.whatwg.org/multipage/#dom-img-srcset
    make_url_setter!(SetSrcset, "srcset");
    // https://html.spec.whatwg.org/multipage/#dom-img-crossOrigin
    fn GetCrossOrigin(&self) -> Option<DOMString> {
        reflect_cross_origin_attribute(self.upcast::<Element>())
    }
    // https://html.spec.whatwg.org/multipage/#dom-img-crossOrigin
    fn SetCrossOrigin(&self, value: Option<DOMString>) {
        set_cross_origin_attribute(self.upcast::<Element>(), value);
    }
    // https://html.spec.whatwg.org/multipage/#dom-img-usemap
    make_getter!(UseMap, "usemap");
    // https://html.spec.whatwg.org/multipage/#dom-img-usemap
    make_setter!(SetUseMap, "usemap");
    // https://html.spec.whatwg.org/multipage/#dom-img-ismap
    make_bool_getter!(IsMap, "ismap");
    // https://html.spec.whatwg.org/multipage/#dom-img-ismap
    make_bool_setter!(SetIsMap, "ismap");
    // https://html.spec.whatwg.org/multipage/#dom-img-width
    // Rendered width when the element has a layout box, otherwise the
    // image's natural width.
    fn Width(&self) -> u32 {
        let node = self.upcast::<Node>();
        match node.bounding_content_box() {
            Some(rect) => rect.size.width.to_px() as u32,
            None => self.NaturalWidth(),
        }
    }
    // https://html.spec.whatwg.org/multipage/#dom-img-width
    fn SetWidth(&self, value: u32) {
        image_dimension_setter(self.upcast(), local_name!("width"), value);
    }
    // https://html.spec.whatwg.org/multipage/#dom-img-height
    fn Height(&self) -> u32 {
        let node = self.upcast::<Node>();
        match node.bounding_content_box() {
            Some(rect) => rect.size.height.to_px() as u32,
            None => self.NaturalHeight(),
        }
    }
    // https://html.spec.whatwg.org/multipage/#dom-img-height
    fn SetHeight(&self, value: u32) {
        image_dimension_setter(self.upcast(), local_name!("height"), value);
    }
    // https://html.spec.whatwg.org/multipage/#dom-img-naturalwidth
    // Intrinsic width divided by the selected pixel density; 0 while no
    // metadata is available.
    fn NaturalWidth(&self) -> u32 {
        let request = self.current_request.borrow();
        let pixel_density = request.current_pixel_density.unwrap_or(1f64);
        match request.metadata {
            Some(ref metadata) => (metadata.width as f64 / pixel_density) as u32,
            None => 0,
        }
    }
    // https://html.spec.whatwg.org/multipage/#dom-img-naturalheight
    fn NaturalHeight(&self) -> u32 {
        let request = self.current_request.borrow();
        let pixel_density = request.current_pixel_density.unwrap_or(1f64);
        match request.metadata {
            Some(ref metadata) => (metadata.height as f64 / pixel_density) as u32,
            None => 0,
        }
    }
    // https://html.spec.whatwg.org/multipage/#dom-img-complete
    // True with no source attributes at all, with an empty src and no
    // srcset, or once the current request is fully loaded or broken.
    fn Complete(&self) -> bool {
        let elem = self.upcast::<Element>();
        let srcset_absent = !elem.has_attribute(&local_name!("srcset"));
        if !elem.has_attribute(&local_name!("src")) && srcset_absent {
            return true;
        }
        let src = elem.get_string_attribute(&local_name!("src"));
        if srcset_absent && src.is_empty() {
            return true;
        }
        let request = self.current_request.borrow();
        let request_state = request.state;
        match request_state {
            State::CompletelyAvailable | State::Broken => return true,
            State::PartiallyAvailable | State::Unavailable => return false,
        }
    }
    // https://html.spec.whatwg.org/multipage/#dom-img-currentsrc
    // Prefers the parsed request URL; falls back to the raw source string,
    // then to the empty string.
    fn CurrentSrc(&self) -> USVString {
        let current_request = self.current_request.borrow();
        let ref url = current_request.parsed_url;
        match *url {
            Some(ref url) => USVString(url.clone().into_string()),
            None => {
                let ref unparsed_url = current_request.source_url;
                match *unparsed_url {
                    Some(ref url) => url.clone(),
                    None => USVString("".to_owned()),
                }
            },
        }
    }
    // https://html.spec.whatwg.org/multipage/#dom-img-name
    make_getter!(Name, "name");
    // https://html.spec.whatwg.org/multipage/#dom-img-name
    make_atomic_setter!(SetName, "name");
    // https://html.spec.whatwg.org/multipage/#dom-img-align
    make_getter!(Align, "align");
    // https://html.spec.whatwg.org/multipage/#dom-img-align
    make_setter!(SetAlign, "align");
    // https://html.spec.whatwg.org/multipage/#dom-img-hspace
    make_uint_getter!(Hspace, "hspace");
    // https://html.spec.whatwg.org/multipage/#dom-img-hspace
    make_uint_setter!(SetHspace, "hspace");
    // https://html.spec.whatwg.org/multipage/#dom-img-vspace
    make_uint_getter!(Vspace, "vspace");
    // https://html.spec.whatwg.org/multipage/#dom-img-vspace
    make_uint_setter!(SetVspace, "vspace");
    // https://html.spec.whatwg.org/multipage/#dom-img-longdesc
    make_getter!(LongDesc, "longdesc");
    // https://html.spec.whatwg.org/multipage/#dom-img-longdesc
    make_setter!(SetLongDesc, "longdesc");
    // https://html.spec.whatwg.org/multipage/#dom-img-border
    make_getter!(Border, "border");
    // https://html.spec.whatwg.org/multipage/#dom-img-border
    make_setter!(SetBorder, "border");
}
impl VirtualMethods for HTMLImageElement {
    fn super_type(&self) -> Option<&dyn VirtualMethods> {
        Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods)
    }
    fn adopting_steps(&self, old_doc: &Document) {
        self.super_type().unwrap().adopting_steps(old_doc);
        // Moving documents changes the base URL and environment, so the
        // image data must be re-evaluated.
        self.update_the_image_data();
    }
    fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
        self.super_type().unwrap().attribute_mutated(attr, mutation);
        // Any attribute that feeds source selection re-runs
        // "update the image data".
        match attr.local_name() {
            &local_name!("src") |
            &local_name!("srcset") |
            &local_name!("width") |
            &local_name!("crossorigin") |
            &local_name!("sizes") => self.update_the_image_data(),
            _ => {},
        }
    }
    fn parse_plain_attribute(&self, name: &LocalName, value: DOMString) -> AttrValue {
        match name {
            &local_name!("name") => AttrValue::from_atomic(value.into()),
            &local_name!("width") | &local_name!("height") => {
                AttrValue::from_dimension(value.into())
            },
            &local_name!("hspace") | &local_name!("vspace") => AttrValue::from_u32(value.into(), 0),
            _ => self
                .super_type()
                .unwrap()
                .parse_plain_attribute(name, value),
        }
    }
    /// Image-map support: routes clicks to the matching `<area>` of the
    /// associated `<map>`, if any.
    fn handle_event(&self, event: &Event) {
        if event.type_() != atom!("click") {
            return;
        }
        let area_elements = self.areas();
        let elements = match area_elements {
            Some(x) => x,
            None => return,
        };
        // Fetch click coordinates
        let mouse_event = match event.downcast::<MouseEvent>() {
            Some(x) => x,
            None => return,
        };
        let point = Point2D::new(
            mouse_event.ClientX().to_f32().unwrap(),
            mouse_event.ClientY().to_f32().unwrap(),
        );
        let bcr = self.upcast::<Element>().GetBoundingClientRect();
        let bcr_p = Point2D::new(bcr.X() as f32, bcr.Y() as f32);
        // Walk HTMLAreaElements: the first shape containing the point
        // (in document order) receives the activation.
        for element in elements {
            let shape = element.get_shape_from_coords();
            let shp = match shape {
                Some(x) => x.absolute_coords(bcr_p),
                None => return,
            };
            if shp.hit_test(&point) {
                element.activation_behavior(event, self.upcast());
                return;
            }
        }
    }
    fn bind_to_tree(&self, context: &BindContext) {
        if let Some(ref s) = self.super_type() {
            s.bind_to_tree(context);
        }
        let document = document_from_node(self);
        if context.tree_connected {
            // Track this element so the document can notify it of
            // environment changes.
            document.register_responsive_image(self);
        }
        // The element is inserted into a picture parent element
        // https://html.spec.whatwg.org/multipage/#relevant-mutations
        if let Some(parent) = self.upcast::<Node>().GetParentElement() {
            if parent.is::<HTMLPictureElement>() {
                self.update_the_image_data();
            }
        }
    }
    fn unbind_from_tree(&self, context: &UnbindContext) {
        self.super_type().unwrap().unbind_from_tree(context);
        let document = document_from_node(self);
        document.unregister_responsive_image(self);
        // The element is removed from a picture parent element
        // https://html.spec.whatwg.org/multipage/#relevant-mutations
        if context.parent.is::<HTMLPictureElement>() {
            self.update_the_image_data();
        }
    }
}
impl FormControl for HTMLImageElement {
    // The owning form (if any) is tracked in an explicit cell on the element.
    fn form_owner(&self) -> Option<DomRoot<HTMLFormElement>> {
        self.form_owner.get()
    }

    fn set_form_owner(&self, form: Option<&HTMLFormElement>) {
        self.form_owner.set(form);
    }

    fn to_element<'a>(&'a self) -> &'a Element {
        self.upcast::<Element>()
    }

    // Images are not in the "listed" category of form-associated elements.
    fn is_listed(&self) -> bool {
        false
    }
}
impl ImageCacheListener for HTMLImageElement {
    // Generation counter for cache responses — presumably used to discard
    // responses for superseded requests; see the inherent impl to confirm.
    fn generation_id(&self) -> u32 {
        self.generation.get()
    }

    // Delegates to the inherent method of the same name.
    fn process_image_response(&self, response: ImageResponse) {
        self.process_image_response(response);
    }
}
/// Shared setter for the `width`/`height` IDL attributes: clamps the value
/// and stores it as a dimension-typed content attribute on `element`.
fn image_dimension_setter(element: &Element, attr: LocalName, value: u32) {
    // The IDL type is unsigned long, but the reflected content attribute is
    // parsed as a dimension for rendering, so out-of-range values collapse
    // to zero instead of being stored verbatim.
    let clamped = if value > UNSIGNED_LONG_MAX { 0 } else { value };

    // Additionally guard the conversion to app units against i32 overflow;
    // see <https://github.com/servo/app_units/issues/22>.
    let px = if clamped > (i32::MAX / AU_PER_PX) as u32 {
        0
    } else {
        clamped
    };

    let dimension = LengthOrPercentageOrAuto::Length(Au::from_px(px as i32));
    element.set_attribute(&attr, AttrValue::Dimension(clamped.to_string(), dimension));
}
/// Collect sequence of code points: splits `s` at the first code point that
/// does not satisfy `predicate`, returning `(matching_prefix, rest)`. If
/// every code point matches, the whole string is returned with an empty
/// remainder.
pub fn collect_sequence_characters<F>(s: &str, predicate: F) -> (&str, &str)
where
    F: Fn(&char) -> bool,
{
    // `char_indices` yields *byte* offsets, which is what `&str` slicing
    // requires. The previous implementation used `chars().enumerate()`,
    // whose character counts are not valid slice indices for non-ASCII
    // input (wrong split point, or a panic on a non-boundary offset).
    for (i, ch) in s.char_indices() {
        if !predicate(&ch) {
            return (&s[..i], &s[i..]);
        }
    }
    (s, "")
}
/// Parse an `srcset` attribute - https://html.spec.whatwg.org/multipage/#parsing-a-srcset-attribute.
///
/// Returns the list of image candidates (URL plus optional width `w` /
/// density `x` descriptors); candidates with malformed descriptors are
/// dropped.
pub fn parse_a_srcset_attribute(input: &str) -> Vec<ImageSource> {
    // How far into `input` parsing has advanced; drives the outer loop over
    // comma-separated candidates.
    // NOTE(review): this counter mixes character counts (`chars().count()`)
    // with the byte length from `input.len()` in the loop condition, which
    // is only consistent for ASCII input — confirm against the spec.
    let mut url_len = 0;
    let mut candidates: Vec<ImageSource> = vec![];
    while url_len < input.len() {
        let position = &input[url_len..];
        // Skip any leading whitespace and commas before the next URL.
        let (spaces, position) =
            collect_sequence_characters(position, |c| *c == ',' || char::is_whitespace(*c));
        // add the length of the url that we parse to advance the start index
        let space_len = spaces.char_indices().count();
        url_len += space_len;
        if position.is_empty() {
            return candidates;
        }
        // The URL runs up to the next whitespace character.
        let (url, spaces) = collect_sequence_characters(position, |c| !char::is_whitespace(*c));
        // add the counts of urls that we parse to advance the start index
        url_len += url.chars().count();
        // Trailing commas terminate the candidate and are not part of the URL.
        let comma_count = url.chars().rev().take_while(|c| *c == ',').count();
        let url: String = url
            .chars()
            .take(url.chars().count() - comma_count)
            .collect();
        // add 1 to start index, for the comma
        url_len += comma_count + 1;
        // Consume whitespace between the URL and its descriptor list.
        let (space, position) = collect_sequence_characters(spaces, |c| char::is_whitespace(*c));
        let space_len = space.len();
        url_len += space_len;
        // --- Tokenize the descriptor list with the spec's state machine. ---
        let mut descriptors = Vec::new();
        let mut current_descriptor = String::new();
        let mut state = ParseState::InDescriptor;
        let mut char_stream = position.chars().enumerate();
        // One character of push-back, used by the AfterDescriptor state.
        let mut buffered: Option<(usize, char)> = None;
        loop {
            let next_char = buffered.take().or_else(|| char_stream.next());
            if next_char.is_some() {
                url_len += 1;
            }
            match state {
                ParseState::InDescriptor => match next_char {
                    Some((_, ' ')) => {
                        // Whitespace ends the current descriptor (if any).
                        if !current_descriptor.is_empty() {
                            descriptors.push(current_descriptor.clone());
                            current_descriptor = String::new();
                            state = ParseState::AfterDescriptor;
                        }
                        continue;
                    },
                    Some((_, ',')) => {
                        // A comma ends the whole descriptor list.
                        if !current_descriptor.is_empty() {
                            descriptors.push(current_descriptor.clone());
                        }
                        break;
                    },
                    Some((_, c @ '(')) => {
                        // Parenthesized content is consumed verbatim.
                        current_descriptor.push(c);
                        state = ParseState::InParens;
                        continue;
                    },
                    Some((_, c)) => {
                        current_descriptor.push(c);
                    },
                    None => {
                        if !current_descriptor.is_empty() {
                            descriptors.push(current_descriptor.clone());
                        }
                        break;
                    },
                },
                ParseState::InParens => match next_char {
                    Some((_, c @ ')')) => {
                        current_descriptor.push(c);
                        state = ParseState::InDescriptor;
                        continue;
                    },
                    Some((_, c)) => {
                        current_descriptor.push(c);
                        continue;
                    },
                    None => {
                        if !current_descriptor.is_empty() {
                            descriptors.push(current_descriptor.clone());
                        }
                        break;
                    },
                },
                ParseState::AfterDescriptor => match next_char {
                    Some((_, ' ')) => {
                        state = ParseState::AfterDescriptor;
                        continue;
                    },
                    Some((idx, c)) => {
                        // Non-space character: push it back and start a new
                        // descriptor.
                        state = ParseState::InDescriptor;
                        buffered = Some((idx, c));
                        continue;
                    },
                    None => {
                        if !current_descriptor.is_empty() {
                            descriptors.push(current_descriptor.clone());
                        }
                        break;
                    },
                },
            }
        }
        // --- Interpret the collected descriptors for this candidate. ---
        let mut error = false;
        let mut width: Option<u32> = None;
        let mut density: Option<f64> = None;
        let mut future_compat_h: Option<u32> = None;
        for descriptor in descriptors {
            // Split into the numeric prefix and the trailing unit letter.
            let (digits, remaining) =
                collect_sequence_characters(&descriptor, |c| is_ascii_digit(c) || *c == '.');
            let valid_non_negative_integer = parse_unsigned_integer(digits.chars());
            let has_w = remaining == "w";
            let valid_floating_point = parse_double(digits);
            let has_x = remaining == "x";
            let has_h = remaining == "h";
            if valid_non_negative_integer.is_ok() && has_w {
                let result = valid_non_negative_integer;
                error = result.is_err();
                // Duplicate width, or width alongside density, is an error.
                if width.is_some() || density.is_some() {
                    error = true;
                }
                if let Ok(w) = result {
                    width = Some(w);
                }
            } else if valid_floating_point.is_ok() && has_x {
                let result = valid_floating_point;
                error = result.is_err();
                if width.is_some() || density.is_some() || future_compat_h.is_some() {
                    error = true;
                }
                if let Ok(x) = result {
                    density = Some(x);
                }
            } else if valid_non_negative_integer.is_ok() && has_h {
                // 'h' descriptors are parsed for future compatibility; a
                // candidate using one must also carry a width (checked below).
                let result = valid_non_negative_integer;
                error = result.is_err();
                if density.is_some() || future_compat_h.is_some() {
                    error = true;
                }
                if let Ok(h) = result {
                    future_compat_h = Some(h);
                }
            } else {
                error = true;
            }
        }
        if future_compat_h.is_some() && width.is_none() {
            error = true;
        }
        // Only keep the candidate if every descriptor parsed cleanly.
        if !error {
            let descriptor = Descriptor {
                wid: width,
                den: density,
            };
            let image_source = ImageSource {
                url: url,
                descriptor: descriptor,
            };
            candidates.push(image_source);
        }
    }
    candidates
}
} |
<|file_name|>leftnav.client.controller.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module(ApplicationConfiguration.applicationModuleName).controller('LeftNavController',
function(Authentication, $mdSidenav, Menus, $log) {
this.authentication = Authentication;
this.isCollapsed = false;
this.menu = Menus.getMenu('sidenav');
this.selected = '';
this.isSelected = function(item) {
return this.selected === item;
};
this.selectItem = function(item) {
this.selected = item;
};
//this.toggleCollapsibleMenu = function() {
// this.isCollapsed = !this.isCollapsed;
//};
//
//// Collapsing the menu after navigation
//this.$on('$stateChangeSuccess', function() {
// this.isCollapsed = false;
//});
//
//this.toggleMenu = function() {
// $mdSidenav('left').toggle();
//};
//
//$mdSidenav('lefty').open();<|fim▁hole|> console.log($mdSidenav('left').isOpen());
//$mdSidenav('left').close()
// .then(function(){
// $log.debug('close LEFT is done');
// });
};
}
);<|fim▁end|> | this.toggleCollapsibleMenu = function() {
$mdSidenav('left').toggle(); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#
#
# Copyright 2011,2013 Luis Ariel Vega Soliz, Uremix (http://www.uremix.org) and contributors.<|fim▁hole|># UADH is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# UADH is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with UADH. If not, see <http://www.gnu.org/licenses/>.
#
#
'''
Created on 08/09/2012
@author: Luis Ariel Vega Soliz ([email protected])
@contact: Uremix Team (http://uremix.org)
'''<|fim▁end|> | #
#
# This file is part of UADH (Uremix App Developer Helper).
# |
<|file_name|>result_httpd.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# coding:utf-8
from __future__ import print_function
'PackagesHTTPD - stream folder content as .tar over http'
__author__ = 'Mathias Gumz <[email protected]>'
__license__ = 'MPL2'
__version__ = ''
import sys
import os, os.path
import zipfile, tarfile
from StringIO import StringIO
import cgi
try:
from http.server import SimpleHTTPRequestHandler, HTTPServer
except ImportError: # assume py2
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
class PackagesHTTPD(SimpleHTTPRequestHandler):
'''
httpd-server to stream the contents of a given folder as
/packages.tar if /packages.tar is accessed. otherwise
it acts just like SimpleHTTPRequestHandler
'''
def do_GET(self):
    '''
    /packages.tar - serve the contents of the folder referenced in
                    self.server.packages as a streamed .tar file
    /packages/*   - serve the files of the folder referenced in
                    self.server.packages (chrooting into it)
    /*            - serve the files of the folder referenced in
                    self.server.chroot
    '''
    # Special-case the tar download; every other path (including
    # /packages/*, which translate_path() rebases) is handled by the
    # stock file server.
    if self.path == '/packages.tar':
        self._serve_folder_as_tar(self.server.packages)
        return
    SimpleHTTPRequestHandler.do_GET(self)
<|fim▁hole|> list = os.listdir(path)
except os.error:
self.send_error(404, "No permission to list directory")
return None
if path == self.server.chroot:
list.append("packages/")
list.append("packages.tar")
list.sort(lambda a, b: cmp(a.lower(), b.lower()))
f = StringIO()
f.write("<title>Directory listing for %s</title>\n" % self.path)
f.write("<h2>Directory listing for %s</h2>\n" % self.path)
f.write("<hr>\n<ul>\n")
for name in list:
fullname = os.path.join(path, name)
displayname = linkname = name = cgi.escape(name)
# Append / for directories or @ for symbolic links
if os.path.isdir(fullname):
displayname = name + "/"
linkname = name + "/"
if os.path.islink(fullname):
displayname = name + "@"
# Note: a link to a directory displays with @ and links with /
f.write('<li><a href="%s">%s</a>\n' % (linkname, displayname))
f.write("</ul>\n<hr>\n")
f.seek(0)
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
return f
def translate_path(self, path):
    '''
    translates 'path' (the path-part of an URI) to a file-system based
    path.

    we assume self.server.chroot to be the standard chroot-folder. if
    the user tries to access /packages, the self.server.packages folder
    is used as the chroot
    '''
    chroot = self.server.chroot
    if path.find('/packages/') == 0:
        chroot = self.server.packages
        _, path = path.split('/packages/', 1)
    if not os.path.isabs(chroot):
        chroot = os.path.abspath(chroot)
    # The base-class implementation resolves `path` relative to the
    # current working directory; strip that prefix so the remainder can
    # be rebased onto the selected chroot.
    # NOTE(review): this assumes os.getcwd() occurs in the translated
    # path — verify behavior for symlinked working directories.
    result = SimpleHTTPRequestHandler.translate_path(self, path)
    _, result = result.split(os.getcwd(), 1)
    if len(result) > 0 and result[0] == '/':
        result = result[1:]
    result = os.path.join(chroot, result)
    return result
def _serve_folder_as_tar(self, folder):
    # Stream `folder` to the client as an uncompressed tar archive rooted
    # at 'packages'. Mode 'w|' writes sequentially to the socket without
    # building the archive in memory first.
    # NOTE(review): the tarfile is opened on self.wfile *before* the
    # status line and headers are sent — confirm no archive bytes are
    # emitted until add() runs, otherwise the response is corrupted.
    tfile = tarfile.open(name='packages.tar', mode='w|', fileobj=self.wfile)
    self.send_response(200)
    self.send_header('Content-type', 'application/x-tar')
    self.end_headers()
    tfile.add(folder, arcname='packages')
    tfile.close()
def _serve_zip_entry(self, name):
    """Serve a single member of self.server.zipfile, or a 404 if absent.

    The previous implementation opened the member and then fell off the
    end of the function on the success path: no response was ever sent
    (leaving the client hanging) and the member handle was never closed.
    """
    try:
        entry = self.server.zipfile.open(name, 'r')
    except KeyError:
        # ZipFile.open raises KeyError for unknown member names.
        self.send_response(404)
        self.end_headers()
        return
    try:
        data = entry.read()
        self.send_response(200)
        # Member type is unknown; a generic binary type is the safe default.
        self.send_header('Content-type', 'application/octet-stream')
        self.send_header('Content-length', str(len(data)))
        self.end_headers()
        self.wfile.write(data)
    finally:
        entry.close()
@staticmethod
def _create_zipfile(zname, zdir):
    """Create an uncompressed (ZIP_STORED) zip archive `zname` containing
    every file found under `zdir` (stored under the paths os.walk yields).
    """
    # The context manager finalizes and closes the archive even when
    # os.walk()/write() raises; the original leaked the handle on error.
    with zipfile.ZipFile(zname, 'w', zipfile.ZIP_STORED, True) as zfile:
        for root, dirs, files in os.walk(zdir):
            for f in files:
                zfile.write(os.path.join(root, f))
if __name__ == '__main__':
    def main():
        # Usage: result_httpd.py <port> <chroot> <packages_chroot>
        if len(sys.argv) < 4:
            print('usage: %s <port> <chroot> <packages_chroot>' % __file__)
            return
        port, chroot, packages_chroot = int(sys.argv[1]), sys.argv[2], sys.argv[3]
        server_class = HTTPServer
        httpd = server_class(('', port), PackagesHTTPD)
        # The request handler reads these two attributes off the server.
        httpd.chroot = chroot
        httpd.packages = packages_chroot
        try:
            httpd.serve_forever()
        except KeyboardInterrupt:
            # Ctrl-C: fall through to an orderly shutdown.
            pass
        httpd.server_close()
    main()
try: |
<|file_name|>RecipeClasspathHandler.java<|end_file_name|><|fim▁begin|>/**
* Copyright Intellectual Reserve, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gedcomx.build.enunciate;
import org.codehaus.enunciate.main.ClasspathHandler;
import org.codehaus.enunciate.main.ClasspathResource;
import org.codehaus.enunciate.main.Enunciate;
import org.gedcomx.test.Recipe;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
/**
* @author Ryan Heaton
*/
public class RecipeClasspathHandler implements ClasspathHandler {
private final Enunciate enunciate;
private final List<Recipe> recipes = new ArrayList<Recipe>();
private final Unmarshaller unmarshaller;
/**
 * @param enunciate the Enunciate mainline, used for error reporting.
 * @throws RuntimeException if the JAXB unmarshaller for {@link Recipe}
 *         cannot be created (fatal: the handler cannot work without it).
 */
public RecipeClasspathHandler(Enunciate enunciate) {
    this.enunciate = enunciate;
    try {
        unmarshaller = JAXBContext.newInstance(Recipe.class).createUnmarshaller();
    }
    catch (JAXBException e) {
        throw new RuntimeException(e);
    }
}
/**
 * @return the recipes collected so far by the classpath scan.
 */
public List<Recipe> getRecipes() {
    return recipes;
}
@Override
public void startPathEntry(File pathEntry) {
    // No per-path-entry setup needed; recipes are handled per resource.
}
/**
 * Unmarshals any classpath resource whose path ends in ".recipe.xml"
 * into a {@link Recipe}; malformed recipes are reported through
 * Enunciate and skipped rather than aborting the scan.
 */
@Override
public void handleResource(ClasspathResource resource) {
    if (resource.getPath().endsWith(".recipe.xml")) {
        try {
            this.recipes.add((Recipe) unmarshaller.unmarshal(resource.read()));
        }
        catch (Exception e) {
            this.enunciate.error("Unable to unmarshal recipe %s: %s.", resource.getPath(), e.getMessage());
        }
    }
}
@Override
public boolean endPathEntry(File pathEntry) {
    // NOTE(review): the meaning of the return value is defined by the
    // ClasspathHandler interface — confirm before relying on this note.
    return false;
}
<|fim▁hole|><|fim▁end|> | } |
<|file_name|>select_path.rs<|end_file_name|><|fim▁begin|>use http::Method;
use matches::assert_matches;
use ruma_common::api::{
error::IntoHttpError,
select_path,
MatrixVersion::{V1_0, V1_1, V1_2},
Metadata,
};
// Shared endpoint metadata: declares unstable, r0 and stable paths so each
// test can tweak `added`/`deprecated`/`removed` via struct update syntax.
const BASE: Metadata = Metadata {
    description: "",
    method: Method::GET,
    name: "test_endpoint",
    unstable_path: Some("/unstable/path"),
    r0_path: Some("/r0/path"),
    stable_path: Some("/stable/path"),
    rate_limited: false,
    authentication: ruma_common::api::AuthScheme::None,
    added: None,
    deprecated: None,
    removed: None,
};

// Marker strings passed as the formatted path arguments, so assertions can
// tell which of the three path variants was selected.
const U: &str = "u";
const S: &str = "s";
const R: &str = "r";
// TODO add test that can hook into tracing and verify the deprecation warning is emitted
#[test]
fn select_stable() {
    // Endpoint stabilized in 1.1 and the client supports 1.1: the stable
    // path is selected.
    let meta = Metadata { added: Some(V1_1), ..BASE };
    let res = select_path(&[V1_0, V1_1], &meta, None, None, Some(format_args!("{}", S)))
        .unwrap()
        .to_string();
    assert_eq!(res, S);
}
#[test]
fn select_unstable() {
    // No stability information on the endpoint: fall back to the unstable path.
    let meta = BASE;
    let res =
        select_path(&[V1_0], &meta, Some(format_args!("{}", U)), None, None).unwrap().to_string();
    assert_eq!(res, U);
}
#[test]
fn select_r0() {
    // Endpoint added in 1.0 while only 1.0 is supported: the legacy r0
    // path is chosen over the stable one.
    let meta = Metadata { added: Some(V1_0), ..BASE };
    let res =
        select_path(&[V1_0], &meta, None, Some(format_args!("{}", R)), Some(format_args!("{}", S)))
            .unwrap()
            .to_string();
    assert_eq!(res, R);
}
#[test]
fn select_removed_err() {
    // The only supported version (1.2) is the version the endpoint was
    // removed in: selection must fail with EndpointRemoved.
    let meta = Metadata { added: Some(V1_0), deprecated: Some(V1_1), removed: Some(V1_2), ..BASE };
    let res = select_path(
        &[V1_2],
        &meta,
        Some(format_args!("{}", U)),
        Some(format_args!("{}", R)),
        Some(format_args!("{}", S)),
    )
    .unwrap_err();
    assert_matches!(res, IntoHttpError::EndpointRemoved(V1_2));
}
#[test]
fn partially_removed_but_stable() {
let meta = Metadata { added: Some(V1_0), deprecated: Some(V1_1), removed: Some(V1_2), ..BASE };<|fim▁hole|> let res =
select_path(&[V1_1], &meta, None, Some(format_args!("{}", R)), Some(format_args!("{}", S)))
.unwrap()
.to_string();
assert_eq!(res, S);
}
#[test]
fn no_unstable() {
let meta = Metadata { added: Some(V1_1), ..BASE };
let res =
select_path(&[V1_0], &meta, None, Some(format_args!("{}", R)), Some(format_args!("{}", S)))
.unwrap_err();
assert_matches!(res, IntoHttpError::NoUnstablePath);
}<|fim▁end|> | |
<|file_name|>test_suspenders.py<|end_file_name|><|fim▁begin|>import pytest
from functools import partial
from bluesky.preprocessors import suspend_wrapper
from bluesky.suspenders import (SuspendBoolHigh,
SuspendBoolLow,
SuspendFloor,
SuspendCeil,
SuspendWhenOutsideBand,
SuspendInBand,
SuspendOutBand)
from bluesky.tests.utils import MsgCollector
from bluesky import Msg
import time as ttime
from bluesky.run_engine import RunEngineInterrupted
import threading
import time
from .utils import _fabricate_asycio_event
@pytest.mark.parametrize(
'klass,sc_args,start_val,fail_val,resume_val,wait_time',
[(SuspendBoolHigh, (), 0, 1, 0, .2),
(SuspendBoolLow, (), 1, 0, 1, .2),
(SuspendFloor, (.5,), 1, 0, 1, .2),
(SuspendCeil, (.5,), 0, 1, 0, .2),
(SuspendWhenOutsideBand, (.5, 1.5), 1, 0, 1, .2),
((SuspendInBand, True), (.5, 1.5), 1, 0, 1, .2), # renamed to WhenOutsideBand
((SuspendOutBand, True), (.5, 1.5), 0, 1, 0, .2)]) # deprecated
def test_suspender(klass, sc_args, start_val, fail_val,
resume_val, wait_time, RE, hw):
sig = hw.bool_sig
try:
klass, deprecated = klass
except TypeError:
deprecated = False
if deprecated:
with pytest.warns(UserWarning):
my_suspender = klass(sig,
*sc_args, sleep=wait_time)
else:
my_suspender = klass(sig,
*sc_args, sleep=wait_time)
my_suspender.install(RE)
def putter(val):
sig.put(val)
# make sure we start at good value!
putter(start_val)
# dumb scan
scan = [Msg('checkpoint'), Msg('sleep', None, .2)]
RE(scan)
# paranoid
assert RE.state == 'idle'
start = ttime.time()
# queue up fail and resume conditions
threading.Timer(.1, putter, (fail_val,)).start()
threading.Timer(.5, putter, (resume_val,)).start()
# start the scan
RE(scan)
stop = ttime.time()
# assert we waited at least 2 seconds + the settle time
delta = stop - start
print(delta)
assert delta > .5 + wait_time + .2
def test_pretripped(RE, hw):
    'Tests if suspender is tripped before __call__'
    sig = hw.bool_sig
    scan = [Msg('checkpoint')]
    msg_lst = []
    # Trip the suspender *before* the RunEngine is invoked.
    sig.put(1)

    def accum(msg):
        msg_lst.append(msg)

    susp = SuspendBoolHigh(sig)
    RE.install_suspender(susp)
    # Release the suspender after 1 s so the run can proceed.
    threading.Timer(1, sig.put, (0,)).start()
    RE.msg_hook = accum
    RE(scan)
    # The engine must first wait for the suspender, then emit the plan.
    assert len(msg_lst) == 2
    assert ['wait_for', 'checkpoint'] == [m[0] for m in msg_lst]
@pytest.mark.parametrize('pre_plan,post_plan,expected_list',
[([Msg('null')], None,
['checkpoint', 'sleep', 'rewindable', 'null',
'wait_for', 'resume', 'rewindable', 'sleep']),
(None, [Msg('null')],
['checkpoint', 'sleep', 'rewindable',
'wait_for', 'resume', 'null', 'rewindable',
'sleep']),
([Msg('null')], [Msg('null')],
['checkpoint', 'sleep', 'rewindable', 'null',
'wait_for', 'resume', 'null', 'rewindable',
'sleep']),
(lambda: [Msg('null')], lambda: [Msg('null')],
['checkpoint', 'sleep', 'rewindable', 'null',
'wait_for', 'resume', 'null', 'rewindable',
'sleep'])])
def test_pre_suspend_plan(RE, pre_plan, post_plan, expected_list, hw):
sig = hw.bool_sig
scan = [Msg('checkpoint'), Msg('sleep', None, .2)]
msg_lst = []
sig.put(0)
def accum(msg):
msg_lst.append(msg)
susp = SuspendBoolHigh(sig, pre_plan=pre_plan,
post_plan=post_plan)
RE.install_suspender(susp)
threading.Timer(.1, sig.put, (1,)).start()
threading.Timer(1, sig.put, (0,)).start()
RE.msg_hook = accum
RE(scan)
assert len(msg_lst) == len(expected_list)
assert expected_list == [m[0] for m in msg_lst]
RE.remove_suspender(susp)
RE(scan)
assert susp.RE is None
RE.install_suspender(susp)
RE.clear_suspenders()
assert susp.RE is None
assert not RE.suspenders
def test_pause_from_suspend(RE, hw):
'Tests what happens when a pause is requested from a suspended state'
sig = hw.bool_sig
scan = [Msg('checkpoint')]
msg_lst = []
sig.put(1)
def accum(msg):
msg_lst.append(msg)
susp = SuspendBoolHigh(sig)
RE.install_suspender(susp)
threading.Timer(1, RE.request_pause).start()
threading.Timer(2, sig.put, (0,)).start()
RE.msg_hook = accum
with pytest.raises(RunEngineInterrupted):
RE(scan)
assert [m[0] for m in msg_lst] == ['wait_for']
RE.resume()
assert ['wait_for', 'wait_for', 'checkpoint'] == [m[0] for m in msg_lst]
def test_deferred_pause_from_suspend(RE, hw):
'Tests what happens when a soft pause is requested from a suspended state'
sig = hw.bool_sig
scan = [Msg('checkpoint'), Msg('null')]
msg_lst = []
sig.put(1)
<|fim▁hole|> susp = SuspendBoolHigh(sig)
RE.install_suspender(susp)
threading.Timer(1, RE.request_pause, (True,)).start()
threading.Timer(4, sig.put, (0,)).start()
RE.msg_hook = accum
with pytest.raises(RunEngineInterrupted):
RE(scan)
assert [m[0] for m in msg_lst] == ['wait_for', 'checkpoint']
RE.resume()
assert ['wait_for', 'checkpoint', 'null'] == [m[0] for m in msg_lst]
def test_unresumable_suspend_fail(RE):
    'Tests what happens when a soft pause is requested from a suspended state'
    # 'clear_checkpoint' makes the plan unresumable, so a suspension request
    # must interrupt the run instead of rewinding it.
    scan = [Msg('clear_checkpoint'), Msg('sleep', None, 2)]
    m_coll = MsgCollector()
    RE.msg_hook = m_coll
    ev = _fabricate_asycio_event(RE.loop)
    threading.Timer(.1, partial(RE.request_suspend, fut=ev.wait)).start()
    threading.Timer(1, ev.set).start()
    start = time.time()
    with pytest.raises(RunEngineInterrupted):
        RE(scan)
    stop = time.time()
    # Interrupted promptly: after the suspend request fired (0.1 s) but
    # before either the 1 s event or the 2 s sleep would have completed.
    assert .1 < stop - start < 1
def test_suspender_plans(RE, hw):
'Tests that the suspenders can be installed via Msg'
sig = hw.bool_sig
my_suspender = SuspendBoolHigh(sig, sleep=0.2)
def putter(val):
sig.put(val)
putter(0)
# Do the messages work?
RE([Msg('install_suspender', None, my_suspender)])
assert my_suspender in RE.suspenders
RE([Msg('remove_suspender', None, my_suspender)])
assert my_suspender not in RE.suspenders
# Can we call both in a plan?
RE([Msg('install_suspender', None, my_suspender),
Msg('remove_suspender', None, my_suspender)])
scan = [Msg('checkpoint'), Msg('sleep', None, .2)]
# No suspend scan: does the wrapper error out?
start = ttime.time()
RE(suspend_wrapper(scan, my_suspender))
stop = ttime.time()
delta = stop - start
assert delta < .9
# Suspend scan
start = ttime.time()
threading.Timer(.1, putter, (1,)).start()
threading.Timer(.5, putter, (0,)).start()
RE(suspend_wrapper(scan, my_suspender))
stop = ttime.time()
delta = stop - start
assert delta > .9
# Did we clean up?
start = ttime.time()
threading.Timer(.1, putter, (1,)).start()
threading.Timer(.5, putter, (0,)).start()
RE(scan)
stop = ttime.time()
delta = stop - start
assert delta < .9<|fim▁end|> | def accum(msg):
print(msg)
msg_lst.append(msg)
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#<|fim▁hole|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing Images and Snapshots.
"""
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import api
from horizon import exceptions
from horizon import tables
from horizon import tabs
from .images.tables import ImagesTable
from .snapshots.tables import SnapshotsTable
from .volume_snapshots.tables import VolumeSnapshotsTable
from .volume_snapshots.tabs import SnapshotDetailTabs
LOG = logging.getLogger(__name__)
class IndexView(tables.MultiTableView):
    """Lists images, instance snapshots and volume snapshots side by side."""
    table_classes = (ImagesTable, SnapshotsTable, VolumeSnapshotsTable)
    template_name = 'project/images_and_snapshots/index.html'

    def has_more_data(self, table):
        # The get_*_data methods stash a per-table pagination flag on self.
        return getattr(self, "_more_%s" % table.name, False)

    def get_images_data(self):
        """Fetch images, excluding kernel/ramdisk images and snapshots."""
        marker = self.request.GET.get(ImagesTable._meta.pagination_param, None)
        try:
            # FIXME(gabriel): The paging is going to be strange here due to
            # our filtering after the fact.
            (all_images,
             self._more_images) = api.image_list_detailed(self.request,
                                                          marker=marker)
            images = [im for im in all_images
                      if im.container_format not in ['aki', 'ari'] and
                      im.properties.get("image_type", '') != "snapshot"]
        except Exception:
            # A bare ``except:`` would also trap SystemExit and
            # KeyboardInterrupt; only application errors should be handled.
            images = []
            exceptions.handle(self.request, _("Unable to retrieve images."))
        return images

    def get_snapshots_data(self):
        """Fetch the paginated list of instance snapshots."""
        req = self.request
        marker = req.GET.get(SnapshotsTable._meta.pagination_param, None)
        try:
            snaps, self._more_snapshots = api.snapshot_list_detailed(req,
                                                                     marker=marker)
        except Exception:
            snaps = []
            exceptions.handle(req, _("Unable to retrieve snapshots."))
        return snaps

    def get_volume_snapshots_data(self):
        """Fetch all volume snapshots (this listing is not paginated)."""
        try:
            snapshots = api.volume_snapshot_list(self.request)
        except Exception:
            snapshots = []
            exceptions.handle(self.request, _("Unable to retrieve "
                                              "volume snapshots."))
        return snapshots
class DetailView(tabs.TabView):
    """Tabbed detail page for a single volume snapshot."""
    tab_group_class = SnapshotDetailTabs
    template_name = 'project/images_and_snapshots/snapshots/detail.html'
# Copyright 2012 OpenStack LLC |
<|file_name|>update.test.js<|end_file_name|><|fim▁begin|>'use strict';
const Support = require('../support');
const DataTypes = require('../../../lib/data-types');
const chai = require('chai');
const sinon = require('sinon');
const expect = chai.expect;
const current = Support.sequelize;
const _ = require('lodash');
describe(Support.getTestDialectTeaser('Model'), () => {
describe('update', () => {
beforeEach(async function() {
this.Account = this.sequelize.define('Account', {
ownerId: {
type: DataTypes.INTEGER,
allowNull: false,
field: 'owner_id'
},
name: {
type: DataTypes.STRING
}
});
await this.Account.sync({ force: true });
});
it('should only update the passed fields', async function() {
const account = await this.Account
.create({ ownerId: 2 });
await this.Account.update({
name: Math.random().toString()
}, {
where: {
id: account.get('id')
}
});
});
describe('skips update query', () => {
it('if no data to update', async function() {
const spy = sinon.spy();
await this.Account.create({ ownerId: 3 });
const result = await this.Account.update({
unknownField: 'haha'
}, {
where: {
ownerId: 3
},
logging: spy
});
expect(result[0]).to.equal(0);
expect(spy.called, 'Update query was issued when no data to update').to.be.false;
});
it('skips when timestamps disabled', async function() {
const Model = this.sequelize.define('Model', {
ownerId: {
type: DataTypes.INTEGER,
allowNull: false,
field: 'owner_id'
},
name: {
type: DataTypes.STRING
}
}, {
timestamps: false
});
const spy = sinon.spy();
await Model.sync({ force: true });
await Model.create({ ownerId: 3 });
const result = await Model.update({
unknownField: 'haha'
}, {
where: {
ownerId: 3
},
logging: spy
});
expect(result[0]).to.equal(0);
expect(spy.called, 'Update query was issued when no data to update').to.be.false;
});
});
it('changed should be false after reload', async function() {
const account0 = await this.Account.create({ ownerId: 2, name: 'foo' });
account0.name = 'bar';
expect(account0.changed()[0]).to.equal('name');
const account = await account0.reload();
expect(account.changed()).to.equal(false);
});
it('should ignore undefined values without throwing not null validation', async function() {
const ownerId = 2;
const account0 = await this.Account.create({
ownerId,
name: Math.random().toString()
});
await this.Account.update({
name: Math.random().toString(),
ownerId: undefined
}, {
where: {
id: account0.get('id')
}
});
const account = await this.Account.findOne();
expect(account.ownerId).to.be.equal(ownerId);
});
if (_.get(current.dialect.supports, 'returnValues.returning')) {
it('should return the updated record', async function() {
const account = await this.Account.create({ ownerId: 2 });<|fim▁hole|> id: account.get('id')
},
returning: true
});
const firstAcc = accounts[0];
expect(firstAcc.ownerId).to.be.equal(2);
expect(firstAcc.name).to.be.equal('FooBar');
});
}
if (current.dialect.supports['LIMIT ON UPDATE']) {
it('should only update one row', async function() {
await this.Account.create({
ownerId: 2,
name: 'Account Name 1'
});
await this.Account.create({
ownerId: 2,
name: 'Account Name 2'
});
await this.Account.create({
ownerId: 2,
name: 'Account Name 3'
});
const options = {
where: {
ownerId: 2
},
limit: 1
};
const account = await this.Account.update({ name: 'New Name' }, options);
expect(account[0]).to.equal(1);
});
}
});
});<|fim▁end|> |
const [, accounts] = await this.Account.update({ name: 'FooBar' }, {
where: { |
<|file_name|>datetime.rs<|end_file_name|><|fim▁begin|>//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015-2020 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use chrono::naive::NaiveDateTime;
use toml_query::delete::TomlValueDeleteExt;
use toml_query::insert::TomlValueInsertExt;
use toml_query::read::TomlValueReadTypeExt;
use toml::Value;
use libimagstore::store::Entry;
use libimagerror::errors::Error as EM;
use anyhow::Error;
use anyhow::Result;
use anyhow::Context;
use crate::range::DateTimeRange;
/// Header-backed date and date-range accessors for store entries.
pub trait EntryDate {
    // Single date value (stored under `datetime.value`).
    fn delete_date(&mut self) -> Result<()>;
    fn read_date(&self) -> Result<NaiveDateTime>;
    fn set_date(&mut self, d: NaiveDateTime) -> Result<Option<Result<NaiveDateTime>>>;

    // Date range (stored under `datetime.range.start` / `datetime.range.end`).
    fn delete_date_range(&mut self) -> Result<()>;
    fn read_date_range(&self) -> Result<DateTimeRange>;
    fn set_date_range(&mut self, start: NaiveDateTime, end: NaiveDateTime) -> Result<Option<Result<DateTimeRange>>>;
}
// TOML header paths for the single date value and the two range endpoints,
// plus the format string used when serializing dates into the header.
const DATE_HEADER_LOCATION : &str = "datetime.value";
const DATE_RANGE_START_HEADER_LOCATION : &str = "datetime.range.start";
const DATE_RANGE_END_HEADER_LOCATION : &str = "datetime.range.end";
const DATE_FMT : &str = "%Y-%m-%dT%H:%M:%S";
impl EntryDate for Entry {
fn delete_date(&mut self) -> Result<()> {
self.get_header_mut()
.delete(&DATE_HEADER_LOCATION)<|fim▁hole|> .context("Delete date error")
.map_err(Error::from)
}
// Reads the `datetime.value` header string and parses it via
// NaiveDateTime's FromStr implementation.
fn read_date(&self) -> Result<NaiveDateTime> {
    self.get_header()
        .read_string(&DATE_HEADER_LOCATION)
        .context("Error while reading date")?
        .ok_or_else(|| anyhow!("Error reading date"))?
        .parse::<NaiveDateTime>()
        .context("Datetime parse error")
        .map_err(Error::from)
}
/// Set a Date for this entry
///
/// # Return value
///
/// This function returns funny things, I know. But I find it more attractive to be explicit
/// what failed when here, instead of beeing nice to the user here.
///
/// So here's a list how things are returned:
///
/// - Err(_) if the inserting failed
/// - Ok(None) if the inserting succeeded and _did not replace an existing value_.
/// - Ok(Some(Ok(_))) if the inserting succeeded, but replaced an existing value which then got
/// parsed into a NaiveDateTime object
/// - Ok(Some(Err(_))) if the inserting succeeded, but replaced an existing value which then
/// got parsed into a NaiveDateTime object, where the parsing failed for some reason.
///
fn set_date(&mut self, d: NaiveDateTime) -> Result<Option<Result<NaiveDateTime>>> {
    // Serialize with the module-wide DATE_FMT before storing.
    let date = d.format(&DATE_FMT).to_string();

    self.get_header_mut()
        .insert(&DATE_HEADER_LOCATION, Value::String(date))
        .context(anyhow!("Failed to insert header '{}' in '{}'",
                         DATE_HEADER_LOCATION,
                         self.get_location()))
        .map_err(Error::from)
        // A replaced previous value is handed back re-parsed, so the caller
        // can inspect what was overwritten (see doc comment above).
        .map(|opt| opt.map(|stri| {
            stri.as_str()
                .ok_or_else(|| Error::from(EM::EntryHeaderTypeError))?
                .parse::<NaiveDateTime>()
                .context("Datetime parse error")
                .map_err(Error::from)
        }))
        .context("Error setting date")
        .map_err(Error::from)
}
/// Deletes the date range
///
/// # Warning
///
/// First deletes the start, then the end. If the first operation fails, this might leave the
/// header in an inconsistent state.
///
fn delete_date_range(&mut self) -> Result<()> {
    // Delete start first, then end; see the doc comment above for the
    // consistency caveat when the first delete fails.
    self.get_header_mut()
        .delete(&DATE_RANGE_START_HEADER_LOCATION)
        .context("Delete Datetime range error")?;

    self.get_header_mut()
        .delete(&DATE_RANGE_END_HEADER_LOCATION)
        .context("Delete Datetime range error")?;

    Ok(())
}
fn read_date_range(&self) -> Result<DateTimeRange> {
let start = self
.get_header()
.read_string(&DATE_RANGE_START_HEADER_LOCATION)
.context("Error while reading Datetime range")?
.ok_or_else(|| anyhow!("Error reading date"))
.and_then(str_to_ndt)?;
let end = self
.get_header()
.read_string(&DATE_RANGE_START_HEADER_LOCATION)
.context("Error reading Datetime range")?
.ok_or_else(|| anyhow!("Error reading date"))
.and_then(str_to_ndt)?;
DateTimeRange::new(start, end)
.context("Datetime Range error")
.map_err(Error::from)
}
/// Set the date range
///
/// # Warning
///
/// This first sets the start, then the end. If the first operation fails, this might leave the
/// header in an inconsistent state.
///
fn set_date_range(&mut self, start: NaiveDateTime, end: NaiveDateTime)
    -> Result<Option<Result<DateTimeRange>>>
{
    // Serialize both endpoints with the canonical header format.
    let start = start.format(&DATE_FMT).to_string();
    let end = end.format(&DATE_FMT).to_string();

    // Each insert may return a previously stored value; try to parse the
    // old value back into a NaiveDateTime.
    let opt_old_start = self
        .get_header_mut()
        .insert(&DATE_RANGE_START_HEADER_LOCATION, Value::String(start))
        .map(|opt| opt.as_ref().map(val_to_ndt))
        .context("Error setting Datetime range")?;

    let opt_old_end = self
        .get_header_mut()
        .insert(&DATE_RANGE_END_HEADER_LOCATION, Value::String(end))
        .map(|opt| opt.as_ref().map(val_to_ndt))
        .context("Error setting Datetime range")?;

    // A replaced range is only reported when *both* old endpoints existed
    // and parsed; a parse failure on either side is surfaced as Err, and
    // anything else collapses to Ok(None).
    match (opt_old_start, opt_old_end) {
        (Some(Ok(old_start)), Some(Ok(old_end))) => {
            let dr = DateTimeRange::new(old_start, old_end)
                .context("Error processing Datetime range")
                .map_err(Error::from);

            Ok(Some(dr))
        },

        (Some(Err(e)), _) => Err(e),
        (_, Some(Err(e))) => Err(e),
        _ => {
            Ok(None)
        },
    }
}
}
#[inline]
/// Parse a header string into a `NaiveDateTime`, attaching parse context.
fn str_to_ndt(v: String) -> Result<NaiveDateTime> {
    let parsed = v
        .parse::<NaiveDateTime>()
        .context("Error parsing Datetime")?;
    Ok(parsed)
}
#[inline]
/// Convert a TOML `Value` into a `NaiveDateTime`.
///
/// Fails with `EntryHeaderTypeError` when the value is not a string, or with
/// a parse error when the string is not a valid datetime.
fn val_to_ndt(v: &Value) -> Result<NaiveDateTime> {
    v.as_str()
        .ok_or_else(|| Error::from(EM::EntryHeaderTypeError))?
        .parse::<NaiveDateTime>()
        .context("Datetime parsing error")
        .map_err(Error::from)
}
#[cfg(test)]
mod tests {
use std::path::PathBuf;
use super::*;
use libimagstore::store::Store;
use chrono::naive::NaiveDateTime;
use chrono::naive::NaiveDate;
use chrono::naive::NaiveTime;
use toml_query::read::TomlValueReadExt;
pub fn get_store() -> Store {
Store::new_inmemory(PathBuf::from("/"), &None).unwrap()
}
#[test]
fn test_set_date() {
let store = get_store();
#[allow(clippy::zero_prefixed_literal)]
let date = {
let date = NaiveDate::from_ymd(2000, 01, 02);
let time = NaiveTime::from_hms(03, 04, 05);
NaiveDateTime::new(date, time)
};
let mut entry = store.create(PathBuf::from("test")).unwrap();
let res = entry.set_date(date);
assert!(res.is_ok(), format!("Error: {:?}", res));
let res = res.unwrap();
assert!(res.is_none()); // There shouldn't be an existing value
// Check whether the header is set correctly
let hdr_field = entry.get_header().read(&DATE_HEADER_LOCATION);
assert!(hdr_field.is_ok());
let hdr_field = hdr_field.unwrap();
assert!(hdr_field.is_some());
let hdr_field = hdr_field.unwrap();
match *hdr_field {
Value::String(ref s) => assert_eq!("2000-01-02T03:04:05", s),
_ => panic!("Wrong header type"),
}
}
#[test]
#[allow(clippy::zero_prefixed_literal)]
fn test_read_date() {
use chrono::Datelike;
use chrono::Timelike;
let store = get_store();
let date = {
let date = NaiveDate::from_ymd(2000, 01, 02);
let time = NaiveTime::from_hms(03, 04, 05);
NaiveDateTime::new(date, time)
};
let mut entry = store.create(PathBuf::from("test")).unwrap();
let res = entry.set_date(date);
assert!(res.is_ok(), format!("Expected Ok(_), got: {:?}", res));
let res = res.unwrap();
assert!(res.is_none()); // There shouldn't be an existing value
// same as the test above ...
let d = entry.read_date();
assert!(d.is_ok(), format!("Expected Ok(_), got: {:?}", d));
let d = d.unwrap();
assert_eq!(d.date().year() , 2000);
assert_eq!(d.date().month() , 01);
assert_eq!(d.date().day() , 02);
assert_eq!(d.time().hour() , 03);
assert_eq!(d.time().minute() , 04);
assert_eq!(d.time().second() , 05);
}
#[test]
fn test_delete_date() {
let store = get_store();
#[allow(clippy::zero_prefixed_literal)]
let date = {
let date = NaiveDate::from_ymd(2000, 01, 02);
let time = NaiveTime::from_hms(03, 04, 05);
NaiveDateTime::new(date, time)
};
let mut entry = store.create(PathBuf::from("test")).unwrap();
let res = entry.set_date(date);
assert!(res.is_ok(), format!("Expected Ok(_), got: {:?}", res));
let res = res.unwrap();
assert!(res.is_none()); // There shouldn't be an existing value
assert!(entry.delete_date().is_ok());
let hdr_field = entry.get_header().read(&DATE_HEADER_LOCATION);
assert!(hdr_field.is_ok());
let hdr_field = hdr_field.unwrap();
assert!(hdr_field.is_none());
}
}<|fim▁end|> | .map(|_| ()) |
<|file_name|>alloc.rs<|end_file_name|><|fim▁begin|>use std::mem;
/// Allocate uninitialized memory for `num` values of `T`, returning a raw
/// pointer. Must be released with `deallocate` using the same `num`.
pub fn allocate<T>(num: usize) -> *mut T {
    let mut vec = Vec::<T>::with_capacity(num);
    // Use as_mut_ptr() on a mut binding instead of casting the const pointer
    // from as_ptr() to *mut T.
    let rptr = vec.as_mut_ptr();
    mem::forget(vec); // leak the buffer; ownership passes to the caller
    rptr
}

/// Release memory obtained from `allocate`.
///
/// Safety: `tofree` must come from `allocate::<T>(num)` with the same `num`.
/// The reconstructed Vec has length 0, so no element destructors run; only
/// the buffer itself is freed.
pub fn deallocate<T>(tofree: *mut T, num: usize) {
    unsafe {
        drop(Vec::from_raw_parts(tofree, 0, num));
    }
}
<|file_name|>ContextSettingsWidget.py<|end_file_name|><|fim▁begin|>from rezgui.qt import QtGui
from rezgui.util import create_pane
from rezgui.mixins.ContextViewMixin import ContextViewMixin
from rezgui.models.ContextModel import ContextModel
from rez.config import config
from rez.vendor import yaml
from rez.vendor.yaml.error import YAMLError
from rez.vendor.schema.schema import Schema, SchemaError, Or, And, Use
from functools import partial
class ContextSettingsWidget(QtGui.QWidget, ContextViewMixin):
    """Editable YAML view of a context's settings (packages path, implicit
    packages, package filter)."""

    # Human-readable headers rendered as comments above each key in the
    # YAML editor.
    titles = {
        "packages_path": "Search path for Rez packages",
        "implicit_packages": "Packages that are implicitly added to the request",
        "package_filter": "Package exclusion/inclusion rules"
    }

    # Validation schema for the editable settings. `package_filter`
    # normalizes None -> [] and a single dict -> [dict].
    schema_dict = {
        "packages_path": [basestring],
        "implicit_packages": [basestring],
        "package_filter": Or(And(None, Use(lambda x: [])),
                             And(dict, Use(lambda x: [x])),
                             [dict])
    }
def __init__(self, context_model=None, attributes=None, parent=None):
"""
Args:
attributes (list of str): Select only certain settings to expose. If
None, all settings are exposed.
"""
super(ContextSettingsWidget, self).__init__(parent)
ContextViewMixin.__init__(self, context_model)
self.schema_keys = set(self.schema_dict.iterkeys())
if attributes:
self.schema_keys &= set(attributes)
assert self.schema_keys
schema_dict = dict((k, v) for k, v in self.schema_dict.iteritems()
if k in self.schema_keys)
self.schema = Schema(schema_dict)
self.edit = QtGui.QTextEdit()
self.edit.setStyleSheet("font: 12pt 'Courier'")
self.default_btn = QtGui.QPushButton("Set To Defaults")
self.discard_btn = QtGui.QPushButton("Discard Changes...")
self.apply_btn = QtGui.QPushButton("Apply")
self.discard_btn.setEnabled(False)
self.apply_btn.setEnabled(False)
btn_pane = create_pane([None, self.default_btn, self.discard_btn,
self.apply_btn], True)
layout = QtGui.QVBoxLayout()
layout.addWidget(self.edit)
layout.addWidget(btn_pane)
self.setLayout(layout)
self.apply_btn.clicked.connect(self.apply_changes)
self.default_btn.clicked.connect(self.set_defaults)
self.discard_btn.clicked.connect(partial(self.discard_changes, True))
self.edit.textChanged.connect(self._settingsChanged)
self._update_text()
def _contextChanged(self, flags=0):<|fim▁hole|>
    def apply_changes(self):
        """Parse the edited YAML, validate it, and push it into the context model."""
        def _content_error(title, text):
            # Offer to discard the invalid edits instead of blocking the user.
            ret = QtGui.QMessageBox.warning(self, title, text,
                                            QtGui.QMessageBox.Discard,
                                            QtGui.QMessageBox.Cancel)
            if ret == QtGui.QMessageBox.Discard:
                self.discard_changes()

        # load new content
        try:
            txt = self.edit.toPlainText()
            data = yaml.load(str(txt))
        except YAMLError as e:
            _content_error("Invalid syntax", str(e))
            return

        # check against schema
        if self.schema:
            try:
                data = self.schema.validate(data)
            except SchemaError as e:
                _content_error("Settings validation failure", str(e))
                return

        # apply to context model
        # NOTE(review): implicit_packages is shown in the editor but is never
        # written back to the model here -- confirm that is intentional.
        self.context_model.set_packages_path(data["packages_path"])
        self.context_model.set_package_filter(data["package_filter"])
        self._update_text()
    def discard_changes(self, prompt=False):
        """Revert the editor text to the model's current settings.

        Args:
            prompt (bool): If True, ask the user to confirm before discarding.
        """
        if prompt:
            ret = QtGui.QMessageBox.warning(
                self,
                "The context settings have been modified.",
                "Your changes will be lost. Are you sure?",
                QtGui.QMessageBox.Ok,
                QtGui.QMessageBox.Cancel)
            if ret != QtGui.QMessageBox.Ok:
                return
        self._update_text()
def set_defaults(self):
packages_path = config.packages_path
implicits = [str(x) for x in config.implicit_packages]
package_filter = config.package_filter
data = {"packages_path": packages_path,
"implicit_packages": implicits,
"package_filter": package_filter}
data = dict((k, v) for k, v in data.iteritems()
if k in self.schema_keys)
self._set_text(data)
self.discard_btn.setEnabled(True)
self.apply_btn.setEnabled(True)
def _update_text(self):
model = self.context_model
implicits = [str(x) for x in model.implicit_packages]
data = {"packages_path": model.packages_path,
"implicit_packages": implicits,
"package_filter": model.package_filter}
data = dict((k, v) for k, v in data.iteritems()
if k in self.schema_keys)
self._set_text(data)
self.discard_btn.setEnabled(False)
self.apply_btn.setEnabled(False)
def _set_text(self, data):
lines = []
for key, value in data.iteritems():
lines.append('')
txt = yaml.dump({key: value}, default_flow_style=False)
title = self.titles.get(key)
if title:
lines.append("# %s" % title)
lines.append(txt.rstrip())
txt = '\n'.join(lines) + '\n'
txt = txt.lstrip()
self.edit.setPlainText(txt)
    def _settingsChanged(self):
        # Any edit makes the buffer dirty: enable both Discard and Apply.
        self.discard_btn.setEnabled(True)
        self.apply_btn.setEnabled(True)
# Copyright 2013-2016 Allan Johns.
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.<|fim▁end|> | if not (flags & ContextModel.CONTEXT_CHANGED):
return
self._update_text() |
<|file_name|>wordsquare.py<|end_file_name|><|fim▁begin|>from csp import *
from collections import Counter
class WordSquare:
"""
Find a word square of the given size from the given dictionary.
"""
alphabet = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')
def __init__(self, wordsfile):
"""
Constructor.
Arguments:
wordsfile -- the path to a text file of valid words for this
word square, with one word on a line
"""
self.words = (str.upper(w.rstrip()) for w in open(wordsfile) if str.isalpha(w.rstrip()))
def csp(self, size, diag=False):
return WordSquareCSP(self, size, diag)
class WordSquareCSP(ConstraintSatisfactionProblem):
    def __init__(self, wordsquare, size, diag):
        """
        Constructor.

        Arguments:
        wordsquare -- the word square associated with the CSP
        size -- the length of the words in the square
        diag -- True if the CSP has a diagonal constraint, otherwise
                False
        """
        ConstraintSatisfactionProblem.__init__(self)
        self.is_disjoint_constraints = True
        self.size = size
        self.letters_count = Counter()
        # create a map: indexOf -> map(letter -> wordlist)
        lettermap = [{letter: set() for letter in wordsquare.alphabet} for i in range(size)]
        # filter out words whose length != size
        for word in (word for word in wordsquare.words if len(word) == size):
            for index in range(len(word)):
                lettermap[index][word[index]].add(word)
                self.letters_count.update(word[index])
        # create a variable for each (row,col) pair in the word square
        self.variables = {(i, j): WordSquareVariable(self, (i, j)) for i in range(size) for j in range(size)}
        # create a constraint for each row and for each col (and the diagonal if requested)
        self.constraints = set()
        # NOTE(review): lettermap is stored on the WordSquareConstraint *class*,
        # so two concurrently-built CSP instances would clobber each other --
        # confirm single-instance use is intended.
        WordSquareConstraint.lettermap = lettermap
        for i in range(size):
            self.constraints.add(WordSquareConstraint({self.variables[(i, col)] for col in range(size)}))
            self.constraints.add(WordSquareConstraint({self.variables[(row, i)] for row in range(size)}))
        if diag:
            self.constraints.add(WordSquareConstraint({self.variables[(i, i)] for i in range(size)}))
def __str__(self):
L = list(' ' * (self.size * self.size))
for var_name in self.variables:
if self.variables[var_name].value:
L[var_name[0] * self.size + var_name[1]] = self.variables[var_name].value
M = list()
for i in range(0, len(L), self.size):
M.extend(L[i:i + self.size] + ["\n"])
return ''.join(M)
class WordSquareVariable(BaseVariable):
    """
    A variable in the word square CSP.

    Public instance variables:
    csp -- a reference to the CSP wrapping this variable
    name -- a (row, column) tuple identifying this variable referencing
            the letter at (row, column) in the word square
    domain -- this variable's domain of legal values at this stage in
              the problem
    value -- the letter assigned to this variable, or None
    constraints -- a set of constraints covering this variable
    """

    def __init__(self, csp, name):
        BaseVariable.__init__(self, csp, name)
        # Start with the full alphabet as a fresh (mutable) copy.
        self.domain = list(WordSquare.alphabet)

    def ordered_domain(self):
        """
        Returns:
        This variable's domain as a list of values, sorted by most common
        to least common.
        """
        frequency = self.csp.letters_count
        return sorted(self.domain, key=lambda letter: frequency[letter], reverse=True)

    def find_constraint(self, other_var):
        """
        Find the constraint that covers both `self` and `other_var`, or
        None if there is none. The nature of the word square implies at
        most one constraint covers any two given variables.
        """
        shared = (c for c in self.constraints if c in other_var.constraints)
        return next(shared, None)
class WordSquareConstraint(BaseConstraint):
    """
    A constraint in the word square CSP. The constraint is of the form
    [V_0 = d_0, V_1 = d_1, ..., V_n = d_n] and is satisfied if there's a
    word W such that W[0] = d_0, W[1] = d_1, ..., W[n] = d_n.

    Public instance variables:
    variables -- a list of variables this constraint covers, in order from
                 top to bottom or left to right

    Public class variables:
    lettermap -- a map: string index i -> map: letter -> words whose
                 i-th character is letter

    Unpublished instance variables:
    indices -- a map: variable v -> index i such that `self.variables[i] is v`
    """

    def __init__(self, variables):
        """
        Constructor.

        Arguments:
        variables -- a set of variables this constraint covers
        """
        # NOTE(review): WordSquareVariable.get_name is not defined in this
        # file -- presumably inherited from BaseVariable; confirm.
        BaseConstraint.__init__(self, sorted(iter(variables), key=WordSquareVariable.get_name))
        self.indices = {self.variables[i].name: i for i in range(len(self.variables))}

    def is_satisfiable(self, variable, assignment):
        """
        Is the constraint, including variables already assigned values,
        satisfiable with the given assignment `variable.value = assignment`?

        Arguments:
        variable -- the variable we're assigning to
        assignment -- the value we're assigning to the variable

        Returns:
        A list of words W such that for all indices i in self.variables,
        W[i] is in self.variables[i].domain AND `W[i] = assignment` if
        `self.variables[i] is variable`.
        """
        # Start from all words with `assignment` at this variable's index,
        # then filter by every other covered variable's remaining domain.
        words = self.lettermap[self.indices[variable.name]][assignment]
        for other_var in self.variables:
            if other_var is not variable:
                words = [w for w in words if w[self.indices[other_var.name]] in other_var.domain]
        return words

    def __repr__(self):
        return "[Constraint] %s" % [var.name for var in self.variables]
if __name__ == '__main__':
    # Demo: solve a 5x5 word square (with the diagonal constraint enabled)
    # from the bundled word list.
    wordsquare = WordSquare('resources/words.txt')
    puzzle = wordsquare.csp(5, True)
    print(puzzle.solve())
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// @generated by Thrift for src/module.thrift
// This file is probably not the place you want to edit!
#![recursion_limit = "100000000"]
#![allow(non_camel_case_types, non_snake_case, non_upper_case_globals, unused_crate_dependencies)]
include!("impl_my_things.rs");
include!("my/other/thing.rs");
pub use self::errors::*;
pub use self::types::*;
/// Thrift type definitions for `module`.
pub mod types {
#![allow(clippy::redundant_closure)]
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MyStruct {
pub foo: ::std::primitive::i32,
pub bar: ::std::string::String,
// This field forces `..Default::default()` when instantiating this
// struct, to make code future-proof against new fields added later to
// the definition in Thrift. If you don't want this, add the annotation
// `(rust.exhaustive)` to the Thrift struct to eliminate this field.
#[doc(hidden)]
pub _dot_dot_Default_default: self::dot_dot::OtherFields,
}
impl ::std::default::Default for self::MyStruct {
fn default() -> Self {
Self {
foo: ::std::default::Default::default(),
bar: ::std::default::Default::default(),
_dot_dot_Default_default: self::dot_dot::OtherFields(()),
}
}<|fim▁hole|> impl ::std::fmt::Debug for self::MyStruct {
fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
formatter
.debug_struct("MyStruct")
.field("foo", &self.foo)
.field("bar", &self.bar)
.finish()
}
}
unsafe impl ::std::marker::Send for self::MyStruct {}
unsafe impl ::std::marker::Sync for self::MyStruct {}
impl ::fbthrift::GetTType for self::MyStruct {
const TTYPE: ::fbthrift::TType = ::fbthrift::TType::Struct;
}
impl<P> ::fbthrift::Serialize<P> for self::MyStruct
where
P: ::fbthrift::ProtocolWriter,
{
fn write(&self, p: &mut P) {
p.write_struct_begin("MyStruct");
p.write_field_begin("foo", ::fbthrift::TType::I32, 1);
::fbthrift::Serialize::write(&self.foo, p);
p.write_field_end();
p.write_field_begin("bar", ::fbthrift::TType::String, 2);
::fbthrift::Serialize::write(&self.bar, p);
p.write_field_end();
p.write_field_stop();
p.write_struct_end();
}
}
impl<P> ::fbthrift::Deserialize<P> for self::MyStruct
where
P: ::fbthrift::ProtocolReader,
{
fn read(p: &mut P) -> ::anyhow::Result<Self> {
static FIELDS: &[::fbthrift::Field] = &[
::fbthrift::Field::new("bar", ::fbthrift::TType::String, 2),
::fbthrift::Field::new("foo", ::fbthrift::TType::I32, 1),
];
let mut field_foo = ::std::option::Option::None;
let mut field_bar = ::std::option::Option::None;
let _ = p.read_struct_begin(|_| ())?;
loop {
let (_, fty, fid) = p.read_field_begin(|_| (), FIELDS)?;
match (fty, fid as ::std::primitive::i32) {
(::fbthrift::TType::Stop, _) => break,
(::fbthrift::TType::I32, 1) => field_foo = ::std::option::Option::Some(::fbthrift::Deserialize::read(p)?),
(::fbthrift::TType::String, 2) => field_bar = ::std::option::Option::Some(::fbthrift::Deserialize::read(p)?),
(fty, _) => p.skip(fty)?,
}
p.read_field_end()?;
}
p.read_struct_end()?;
::std::result::Result::Ok(Self {
foo: field_foo.unwrap_or_default(),
bar: field_bar.unwrap_or_default(),
_dot_dot_Default_default: self::dot_dot::OtherFields(()),
})
}
}
mod dot_dot {
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct OtherFields(pub(crate) ());
#[allow(dead_code)] // if serde isn't being used
pub(super) fn default_for_serde_deserialize() -> OtherFields {
OtherFields(())
}
}
}
/// Error return types.
pub mod errors {
}<|fim▁end|> | }
|
<|file_name|>oneOfArray.ts<|end_file_name|><|fim▁begin|>/*
The MIT License
Copyright (c) 2017-2019 EclipseSource Munich
https://github.com/eclipsesource/jsonforms
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
import { registerExamples } from '../register';
export const schema = {
$schema: 'http://json-schema.org/draft-07/schema#',
definitions: {
address: {
type: 'object',
properties: {
street_address: { type: 'string' },
city: { type: 'string' },
state: { type: 'string' }
},
required: ['street_address', 'city', 'state']
},
user: {
type: 'object',
properties: {
name: { type: 'string' },
mail: { type: 'string' }
},
required: ['name', 'mail']
}
},
type: 'object',
properties: {
name: { type: 'string' },
addressOrUsers: {
type: 'array',
items: {
oneOf: [
{ $ref: '#/definitions/address' },
{ $ref: '#/definitions/user' }
]<|fim▁hole|>
/** UI schema: a single vertical layout with one control bound to the oneOf array. */
export const uischema = {
  type: 'VerticalLayout',
  elements: [
    {
      type: 'Control',
      scope: '#/properties/addressOrUsers'
    }
  ]
};
/** Sample data: one address entry and one user entry in the oneOf array. */
const data = {
  name: 'test',
  addressOrUsers: [
    {
      street_address: '1600 Pennsylvania Avenue NW',
      city: 'Washington',
      state: 'DC'
    },
    {
      name: 'User',
      mail: '[email protected]'
    }
  ]
};
// Register this example under the "oneOf (in array)" label in the examples app.
registerExamples([
  {
    name: 'oneOfArray',
    label: 'oneOf (in array)',
    data,
    schema,
    uischema
  }
]);
}
}
}; |
<|file_name|>cohort-search.component.spec.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react';
import { MemoryRouter } from 'react-router';
import { mount } from 'enzyme';
import { CohortBuilderApi, CriteriaType, Domain } from 'generated/fetch';
import { registerApiClient } from 'app/services/swagger-fetch-clients';
import {
currentCohortCriteriaStore,
currentCohortSearchContextStore,
currentWorkspaceStore,
} from 'app/utils/navigation';
import { waitOneTickAndUpdate } from 'testing/react-test-helpers';
import {
CohortBuilderServiceStub,
CriteriaStubVariables,
} from 'testing/stubs/cohort-builder-service-stub';
import { workspaceDataStub } from 'testing/stubs/workspaces';
import { CohortSearch } from './cohort-search.component';
// Minimal search-context fixtures: one non-Person domain (Condition) and one
// Person domain (which renders the Demographics view instead of CriteriaSearch).
const searchContextStubs = [
  {
    domain: Domain.CONDITION,
    item: {
      searchParameters: [],
    },
  },
  {
    domain: Domain.PERSON,
    item: {
      searchParameters: [],
    },
    type: CriteriaType.ETHNICITY,
  },
];
describe('CohortSearch', () => {
  // Mount inside a MemoryRouter since CohortSearch relies on routing context.
  const component = () => {
    return mount(
      <MemoryRouter>
        <CohortSearch setUnsavedChanges={() => {}} />
      </MemoryRouter>
    );
  };

  beforeEach(() => {
    // Fresh workspace + stubbed cohort-builder API client for every test.
    currentWorkspaceStore.next(workspaceDataStub);
    registerApiClient(CohortBuilderApi, new CohortBuilderServiceStub());
  });

  it('should render', () => {
    currentCohortSearchContextStore.next(searchContextStubs[0]);
    const wrapper = component();
    expect(wrapper).toBeTruthy();
  });

  it('should render CriteriaSearch component for any domain except Person', () => {
    currentCohortSearchContextStore.next(searchContextStubs[0]);
    const wrapper = component();
    expect(wrapper.find('[id="criteria-search-container"]').length).toBe(1);
    expect(wrapper.find('[data-test-id="demographics"]').length).toBe(0);
  });

  it('should render Demographics component for Person domain', () => {
    currentCohortSearchContextStore.next(searchContextStubs[1]);
    const wrapper = component();
    expect(wrapper.find('[id="criteria-search-container"]').length).toBe(0);
    expect(wrapper.find('[data-test-id="demographics"]').length).toBe(1);
  });

  it('should show warning modal for unsaved demographics selections', async () => {
    currentCohortSearchContextStore.next(searchContextStubs[1]);
    const wrapper = component();
    expect(
      wrapper.find('[data-test-id="cohort-search-unsaved-message"]').length
    ).toBe(0);
    // Simulate an unsaved criteria selection, then navigate back.
    const selection = {
      ...CriteriaStubVariables[1],
      parameterId: 'test param id',
    };
    currentCohortCriteriaStore.next([selection]);
    await waitOneTickAndUpdate(wrapper);
    wrapper.find('[data-test-id="cohort-search-back-arrow"]').simulate('click');
    expect(
      wrapper.find('[data-test-id="cohort-search-unsaved-message"]').length
    ).toBeGreaterThan(0);
  });
});
<|file_name|>emails.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.core.mail import EmailMultiAlternatives
from django.template import Context, Template
from django.template.loader import get_template
from helpers import ClientRouter, MailAssetsHelper, strip_accents
class UserMail:
    """
    This class is responsible for firing emails for Users and Nonprofits
    """
    from_email = 'Atados <[email protected]>'

    def __init__(self, user):
        # `user` may be a Volunteer/Nonprofit wrapper object or a bare User.
        self.whole_user = user  # This is the Nonprofit or Volunteer object
        self.user = user.user if not type(user).__name__ == 'User' else user  # This is the User object
        # Shared template context: static asset URLs used by every email.
        self.global_context = {
            "assets": {
                "check": "https://s3.amazonaws.com/atados-us/images/check.png",
                "iconFacebook": "https://s3.amazonaws.com/atados-us/images/icon-fb.png",
                "iconInstagram": "https://s3.amazonaws.com/atados-us/images/icon-insta.png",
                "logoAtadosSmall": "https://s3.amazonaws.com/atados-us/images/logo.small.png",
                "logoAtadosSmall2": "https://s3.amazonaws.com/atados-us/images/mandala.png"
            }
        }

    def sendEmail(self, template_name, subject, context, user_email=None):
        """Render email/<template_name>.{txt,html} and send to the user (or
        `user_email` override). Returns True when the message was sent."""
        text_content = get_template('email/{}.txt'.format(template_name)).render(context)
        html_content = get_template('email/{}.html'.format(template_name)).render(context)
        msg = EmailMultiAlternatives(subject, text_content, self.from_email,
                                     [user_email if user_email else self.user.email])
        # BUG FIX: text_content is already the message body (sent as
        # text/plain); attaching it again as a "text/plain" alternative
        # duplicated the plain part in the MIME payload.
        msg.attach_alternative(html_content, "text/html")
        return msg.send() > 0

    def make_context(self, data):
        """Merge `data` over a copy of the shared asset context."""
        context_data = self.global_context.copy()
        context_data.update(data)
        return Context(context_data)

    def sendSignupConfirmation(self, site, token):
        """Ask the user to verify their email address."""
        return self.sendEmail('emailVerification', 'Confirme seu email do Atados.',
                              self.make_context({'token': token, 'site': site}))
class VolunteerMail(UserMail):
    """
    This class contains all emails sent to volunteers
    """

    def sendSignup(self):
        """
        Email A/B from ruler
        Sent when volunteer completes registration
        """
        return self.sendEmail('volunteerSignup', 'Eba! Seu cadastro foi feito com sucesso', self.make_context({}))

    def sendFacebookSignup(self): # pass by now
        """
        Sent when volunteer completes registration from Facebook
        """
        return self.sendEmail('volunteerFacebookSignup', 'Seja bem vindo ao Atados! \o/', self.make_context({}))

    def sendAppliesToProject(self, project):
        """
        Email for ruler C
        Sent when volunteer applies to project
        """
        return self.sendEmail('volunteerAppliesToProject', u'Você se inscreveu em uma vaga :)', self.make_context({'project': project}))

    def askActInteractionConfirmation(self, project, volunteer):
        """
        Email for ruler D
        Sent when volunteer applies to project
        """
        # Confirm / refute links for the routine-monitoring form.
        confirm_url = ClientRouter.mail_routine_monitoring_build_form_url(True, volunteer.user.email, project.nonprofit.name, "")
        refute_url = ClientRouter.mail_routine_monitoring_build_form_url(False, volunteer.user.email, project.nonprofit.name, "")
        return self.sendEmail('askActInteractionConfirmation', u'Acompanhamento de Rotina:)',
            self.make_context({
                'project': project,
                'confirm_url': confirm_url,
                'refute_url': refute_url
            })
        )

    def sendAskAboutProjectExperience(self, apply):
        """
        Post-project feedback request sent to the volunteer.
        """
        subject = u"Como foi sua experiência com a Atados!"
        feedback_form_url = ClientRouter.mail_ask_about_project_experience_url('volunteer', apply)
        return self.sendEmail('volunteerAskAboutProjectExperience', subject, self.make_context({
            'project_name': apply.project.name,
            'feedback_form_url': feedback_form_url,
        }), apply.volunteer.user.email)
#+ def sendAfterApply4Weeks(self): # new ruler
#+ """
#+ """
#+ context = Context({'user': self.user.name})
#+ return self.sendEmail('volunteerAfterApply4Weeks', '~ ~ ~ ~ ~', context)
#+ def send3DaysBeforePontual(self): # new ruler
#+ """
#+ """
#+ context = Context({'user': self.user.name})
#+ return self.sendEmail('volunteer3DaysBeforePontual', '~ ~ ~ ~ ~', context)
class NonprofitMail(UserMail):
    """
    This class contains all emails sent to nonprofits
    """

    def sendSignup(self):
        """
        Email 1 from ruler
        Sent after the nonprofit submits its registration.
        """
        return self.sendEmail('nonprofitSignup', 'Recebemos seu cadastro :)', self.make_context({
            'review_profile_url': ClientRouter.edit_nonprofit_url(self.user.slug)
        }))

    def sendApproved(self):
        """
        Email 2 from ruler
        Sent when the nonprofit profile is approved.
        """
        return self.sendEmail('nonprofitApproved', 'Agora você tem um perfil no Atados', self.make_context({
            'new_act_url': ClientRouter.new_act_url()
        }))
def sendProjectPostingSuccessful(self, project):
"""
Email *NEW*
"""
return self.sendEmail('projectPostingSuccessful', 'Vaga criada com sucesso!', self.make_context({<|fim▁hole|> edit_nonprofit_act_url(self, act_slug)
    def sendProjectApproved(self, project):
        """
        Email 3 from ruler
        Sent when the nonprofit's act (project) is approved and published.
        """
        return self.sendEmail('projectApproved', 'Publicamos a sua vaga de voluntariado', self.make_context({
            'project': project,
            'act_url': ClientRouter.view_act_url(project.slug)
        }))

    def sendGetsNotifiedAboutApply(self, apply, message):
        """
        Email 4 from ruler
        Notifies the nonprofit that a volunteer applied to its project.
        """
        try:
            subject = u'Novo voluntário para o {}'.format(apply.project.name)
        except UnicodeEncodeError:
            # Fall back to an accent-stripped project name when formatting
            # raises (presumably a Python 2 str/unicode mix -- confirm).
            subject = u'Novo voluntário para o {}'.format(strip_accents(apply.project.name))
        return self.sendEmail('nonprofitGetsNotifiedAboutApply', subject, self.make_context({
            'apply': apply,
            'volunteer_message': message,
            'answer_volunteer_url': ClientRouter.view_volunteer_url(apply.volunteer.user.slug)
        }), apply.project.email)

    def sendAskAboutProjectExperience(self, project):
        """
        Post-project feedback request sent to the nonprofit.
        """
        subject = u"Nos conta como foi sua experiência com a Atados!"
        act_url = ClientRouter.edit_project_url(project.slug)
        feedback_form_url = ClientRouter.mail_ask_about_project_experience_url('nonprofit', project)
        return self.sendEmail('nonprofitAskAboutProjectExperience', subject, self.make_context({
            'project_name': project.name,
            'feedback_form_url': feedback_form_url,
            'act_url': act_url,
        }), project.email)
#+ def send1MonthInactive(self):
#+ """
#+ """
#+ return self.sendEmail('nonprofit1MonthInactive', '~ ~ ~ ~ ~', self.make_context({
#+ 'name': self.user.name
#+ }))
#+ def sendPontual(self):
#+ """
#+ """
#+ return self.sendEmail('nonprofitPontual', '~ ~ ~ ~ ~', self.make_context({
#+ 'name': self.user.name
#+ }))
#+ def sendRecorrente(self):
#+ """
#+ """
#+ return self.sendEmail('nonprofitRecorrente', '~ ~ ~ ~ ~', self.make_context({
#+ 'name': self.user.name
#+ }))<|fim▁end|> | 'project': project,
'edit_project_url': ClientRouter.edit_project_url(project.slug)
}))
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use serialize::json::{Json, ParserError};
use url::Url;
use std::collections::HashMap;
use std::io::IoError;
use std::local_data::Ref;
#[cfg(not(teepee))]
pub use self::http::Client;
#[cfg(teepee)]
pub use self::teepee::Client;
mod http;
mod teepee;
macro_rules! params {
{$($key:expr: $val:expr,)+} => (
{
use std::collections::HashMap;
let mut params: HashMap<String, String> = HashMap::new();
$(
params.insert($key.into_string(), $val.to_string());
)+
params
}
);
}
pub static USER_AGENT: &'static str = "rawr v0.1 (github.com/cybergeek94/rawr)";
local_data_key!(_modhash: String)
pub type JsonError = ParserError;
pub type JsonResult<T> = Result<T, JsonError>;
/// Minimal JSON-over-HTTP client interface used by the API layer; backed by
/// either the `http` or `teepee` implementation selected above.
pub trait JsonClient {
    /// Make a GET request, returning a Json response. The GET parameters should be in the passed URL.
    /// Implementers should update the local modhash by using `set_modhash()`
    fn get(&self, url: &Url) -> JsonResult<Json>;

    /// Make a POST request, returning the JSON response
    fn post(&self, url: &Url, params: HashMap<String, String>) -> JsonResult<Json>;

    /// Make a POST request, including the value of `set_modhash` as the `X-Modhash` header
    /// and the session cookie
    fn post_modhash(&self, url: &Url, params: HashMap<String, String>, session: &str) -> JsonResult<Json>;
}
pub fn set_modhash(modhash: &str) {
_modhash.replace(Some(modhash.into_string()));
}
pub fn get_modhash() -> Option<Ref<String>> {
_modhash.get()
}
pub fn has_modhash() -> bool {
_modhash.get().is_some()
}
/// Map a std::io::IoError to a serialize::json::IoError (ParserError variant)
pub fn err_io_to_json_io(err: IoError) -> ParserError {
super::serialize::json::IoError(err.kind, err.desc)
}
#[test]
fn test_params() {
let params = params!{
"hello": "goodbye",
"yes": "no",
};
drop(params);
}<|fim▁end|> | #![macro_escape]
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from qdec_partial import get_ip_name
from qdec_partial import QDEC |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
if sys.version_info < (3,):
import ConfigParser
else:
import configparser as ConfigParser
class Config:
def __init__(self):
self.interval = 30
self.access_key_id = None
self.access_key_secret = None
self.domain_name = None
self.sub_domain_name = None
self.record_type = "A"
self.region_id = "cn-hangzhou"
self.configParser = ConfigParser.ConfigParser()
def loadConfig(self, file):
if not self.configParser.read(file):
print "config file not exist"
return False
try:
self.interval = self.configParser.getint("CONFIG", "interval")
self.access_key_id = self.configParser.get("CONFIG", "access_key_id")<|fim▁hole|> self.region_id = self.configParser.get("CONFIG", "region_id")
if not self.interval:
self.interval = 30
if not self.record_type:
self.record_type = "A"
if not self.region_id:
self.region_id = "cn-hangzhou"
except Exception, e:
print "invalid config: {0}".format(e.message)
return False
if not self.access_key_id or not self.access_key_secret or not self.domain_name or not self.sub_domain_name:
print "invalid config"
return False
return True<|fim▁end|> | self.access_key_secret = self.configParser.get("CONFIG", "access_key_secret")
self.domain_name = self.configParser.get("CONFIG", "domain_name")
self.sub_domain_name = self.configParser.get("CONFIG", "sub_domain_name")
self.record_type = self.configParser.get("CONFIG", "record_type") |
<|file_name|>tests_person_api.py<|end_file_name|><|fim▁begin|>__author__ = 'sweemeng'
from rest_framework import status
from popit.signals.handlers import *
from popit.models import *
from django.conf import settings
import json
import logging
from popit.tests.base_testcase import BasePopitTestCase
from popit.tests.base_testcase import BasePopitAPITestCase
from popit.serializers.minimized import MinPersonSerializer
# TODO: Test multilingual behavior. To make behavior clear
# TODO: Need new fixtures
class PersonSerializerTestCase(BasePopitTestCase):
def test_fetch_non_empty_field_person_serializer(self):
person = Person.objects.untranslated().get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
serializer = PersonSerializer(person, language='en')
data = serializer.data
self.assertEqual(data["name"], "John")
def test_fetch_empty_field_person_serializer(self):
person = Person.objects.untranslated().get(id='ab1a5788e5bae955c048748fa6af0e97')
serializer = PersonSerializer(person, language='en')
data = serializer.data
self.assertEqual(data["given_name"], "")
def test_fetch_not_empty_relation_person_serializer(self):
person = Person.objects.untranslated().get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
serializer = PersonSerializer(person, language='en')
data = serializer.data
self.assertTrue(data["other_names"])
def test_fetch_empty_relation_person_serializer(self):
person = Person.objects.untranslated().get(id='078541c9-9081-4082-b28f-29cbb64440cb')
serializer = PersonSerializer(person, language='en')
data = serializer.data
self.assertFalse(data["other_names"])
def test_create_person_with_all_field_serializer(self):
person_data = {
"name": "joe",
"family_name": "doe",
"given_name": "joe jambul",
"additional_name": "not john doe",
"gender": "unknown",
"summary": "person unit test api",
"honorific_prefix": "Chief",
"honorific_suffix": "of the fake people league",
"biography": "He does not exists!!!!",
"birth_date": "1950-01-01",
"death_data": "2000-01-01",
"email": "[email protected]",
"contact_details":[
{
"type":"twitter",
"value": "sinarproject",
}
],
"links":[
{
"url":"http://sinarproject.org",
}
],
"identifiers":[
{
"identifier": "9089098098",
"scheme": "rakyat",
}
],
"other_names":[
{
"name":"Jane",
"family_name":"Jambul",
"start_date": "1950-01-01",
"end_date": "2010-01-01",
}
]
}
person_serial = PersonSerializer(data=person_data, language='en')
person_serial.is_valid()
self.assertEqual(person_serial.errors, {})
person_serial.save()
person = Person.objects.language("en").get(name="joe")
self.assertEqual(person.given_name, "joe jambul")
def test_update_person_serializer(self):
person_data = {
"given_name": "jerry jambul",
}
person = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
person_serializer = PersonSerializer(person, data=person_data, partial=True, language='en')
person_serializer.is_valid()
self.assertEqual(person_serializer.errors, {})
person_serializer.save()
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
self.assertEqual(person_.given_name, "jerry jambul")
def test_create_links_person_serializers(self):
person_data = {
"links": [
{
"url": "http://twitter.com/sweemeng",
}
]
}
person = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
person_serializer = PersonSerializer(person, data=person_data, partial=True, language='en')
person_serializer.is_valid()
self.assertEqual(person_serializer.errors, {})
person_serializer.save()
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
url = person_.links.language("en").get(url="http://twitter.com/sweemeng")
self.assertEqual(url.url, "http://twitter.com/sweemeng")
def test_update_links_person_serializers(self):
# links id a4ffa24a9ef3cbcb8cfaa178c9329367
person_data = {
"id":"ab1a5788e5bae955c048748fa6af0e97",
"links":[
{
"id": "a4ffa24a9ef3cbcb8cfaa178c9329367",
"note": "just a random repo"
}
]
}
person = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
person_serializer = PersonSerializer(person, data=person_data, partial=True, language="en")
person_serializer.is_valid()
self.assertEqual(person_serializer.errors, {})
person_serializer.save()
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
url = person_.links.language("en").get(id="a4ffa24a9ef3cbcb8cfaa178c9329367")
self.assertEqual(url.note, "just a random repo")
def test_update_create_nested_links_persons_serializer(self):
person_data = {
"id":"ab1a5788e5bae955c048748fa6af0e97",
"contact_details":[
{
"id": "a66cb422-eec3-4861-bae1-a64ae5dbde61",
"links": [{
"url": "http://facebook.com",
}]
}
],
}
person = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
person_serializer = PersonSerializer(person, data=person_data, partial=True, language='en')
person_serializer.is_valid()
self.assertEqual(person_serializer.errors, {})
person_serializer.save()
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
# There should be only 1 links in that contact
contact = person_.contact_details.language('en').get(id='a66cb422-eec3-4861-bae1-a64ae5dbde61')
links = contact.links.language('en').filter(url="http://sinarproject.org")
self.assertEqual(links[0].url, "http://sinarproject.org")
def test_update_update_nested_links_person_serializer(self):
person_data = {
"id":"8497ba86-7485-42d2-9596-2ab14520f1f4",
"identifiers":[
{
"id": "af7c01b5-1c4f-4c08-9174-3de5ff270bdb",
"links": [{
"id": "9c9a2093-c3eb-4b51-b869-0d3b4ab281fd",
"note": "this is just a test note",
}]
}
],
}
person = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
person_serializer = PersonSerializer(person, data=person_data, partial=True, language='en')
person_serializer.is_valid()
self.assertEqual(person_serializer.errors, {})
person_serializer.save()
person_ = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
identifier = person_.identifiers.language('en').get(id="af7c01b5-1c4f-4c08-9174-3de5ff270bdb")
link = identifier.links.language('en').get(id="9c9a2093-c3eb-4b51-b869-0d3b4ab281fd")
self.assertEqual(link.note, "this is just a test note")
def test_create_identifier_person_serializer(self):
person_data = {
"identifiers": [
{
"scheme": "IC",
"identifier": "129031309",
}
]
}
person = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
person_serializer = PersonSerializer(person, data=person_data, partial=True, language='en')
person_serializer.is_valid()
self.assertEqual(person_serializer.errors, {})
person_serializer.save()
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
identifier = person_.identifiers.language('en').get(identifier="129031309")
self.assertEqual(identifier.scheme, "IC")
def test_update_identifier_person_serializer(self):
person_data = {
"identifiers": [
{
"id": "34b59cb9-607a-43c7-9d13-dfe258790ebf",
"identifier": "53110322",
}
]
}
person = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
person_serializer = PersonSerializer(person, data=person_data, partial=True, language="en")
person_serializer.is_valid()
self.assertEqual(person_serializer.errors, {})
person_serializer.save()
person_ = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
identifier = person_.identifiers.language('en').get(id="34b59cb9-607a-43c7-9d13-dfe258790ebf")
self.assertEqual(identifier.identifier, '53110322')
def test_create_contact_person_serializer(self):
person_data = {
"contact_details": [
{
"type":"twitter",
"value": "sinarproject",
}
]
}
person = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
person_serializer = PersonSerializer(person, data=person_data, partial=True, language='en')
person_serializer.is_valid()
self.assertEqual(person_serializer.errors, {})
person_serializer.save()
person_ = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
contact = person_.contact_details.language('en').get(type="twitter")
self.assertEqual(contact.value, "sinarproject")
def test_update_contact_person_serializer(self):
person_data = {
"contact_details": [
{
"id": "a66cb422-eec3-4861-bae1-a64ae5dbde61",
"value": "0123421222",
}
]
}
person = Person.objects.untranslated().get(id='ab1a5788e5bae955c048748fa6af0e97')
person_serializer = PersonSerializer(person, data=person_data, partial=True, language="en")
person_serializer.is_valid()
self.assertEqual(person_serializer.errors, {})
person_serializer.save()
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
contact = person_.contact_details.language('en').get(id="a66cb422-eec3-4861-bae1-a64ae5dbde61")
self.assertEqual(contact.value, "0123421222")
def test_create_other_name_person_serializer(self):
person_data = {
"other_names": [
{
"name": "jane",
"family_name": "jambul",
"given_name": "test person",
"start_date": "1950-01-01",
"end_date": "2010-01-01",
}
]
}
person = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
person_serializer = PersonSerializer(person, data=person_data, partial=True, language='en')
person_serializer.is_valid()
self.assertEqual(person_serializer.errors, {})
person_serializer.save()
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
other_name = person_.other_names.language('en').get(name="jane")
self.assertEqual(other_name.given_name, "test person")
def test_update_other_person_serializer(self):
person_data = {
"other_names": [
{
"id": "cf93e73f-91b6-4fad-bf76-0782c80297a8",
"family_name": "jambul",
}
]
}
person = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
person_serializer = PersonSerializer(person, data=person_data, partial=True, language='en')
person_serializer.is_valid()
self.assertEqual(person_serializer.errors, {})
person_serializer.save()
person_ = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
other_name = person_.other_names.language('en').get(id="cf93e73f-91b6-4fad-bf76-0782c80297a8")
self.assertEqual(other_name.family_name, "jambul")
def test_create_person_invalid_date_serializer(self):
person_data = {
"name": "joe",
"family_name": "doe",
"given_name": "joe jambul",
"additional_name": "not john doe",
"gender": "unknown",
"summary": "person unit test api",
"honorific_prefix": "Chief",
"honorific_suffix": "of the fake people league",
"biography": "He does not exists!!!!",
"birth_date": "invalid date",
"death_data": "invalid date",
"email": "[email protected]",
}
person_serial = PersonSerializer(data=person_data, language='en')
person_serial.is_valid()
self.assertNotEqual(person_serial.errors, {})
def test_update_person_translated_serializer(self):
person_data = {
"given_name": "jerry jambul",
}
person = Person.objects.language("ms").get(id='ab1a5788e5bae955c048748fa6af0e97')
person_serializer = PersonSerializer(person, data=person_data, partial=True, language='ms')
person_serializer.is_valid()
self.assertEqual(person_serializer.errors, {})
person_serializer.save()
person_ = Person.objects.language('ms').get(id='ab1a5788e5bae955c048748fa6af0e97')
self.assertEqual(person_.given_name, "jerry jambul")
def test_create_person_translated_serializer(self):
person_data = {
"name": "joe",
"family_name": "doe",
"given_name": "joe jambul",
"additional_name": "bukan john doe",
"gender": "tak tahu",
"summary": "orang ujian",
"honorific_prefix": "Datuk Seri",
"biography": "Dia Tak wujud!!!!",
"email": "[email protected]",
}
person_serial = PersonSerializer(data=person_data, language='ms')
person_serial.is_valid()
self.assertEqual(person_serial.errors, {})
person_serial.save()
person = Person.objects.language("ms").get(name="joe")
self.assertEqual(person.given_name, "joe jambul")
def test_load_translated_person_membership(self):
person = Person.objects.untranslated().get(id="078541c9-9081-4082-b28f-29cbb64440cb")
person_serializer = PersonSerializer(person, language="ms")
data = person_serializer.data
for membership in data["memberships"]:
self.assertEqual(membership["language_code"], "ms")
def test_load_translated_person_membership_organization(self):
person = Person.objects.untranslated().get(id="078541c9-9081-4082-b28f-29cbb64440cb")
person_serializer = PersonSerializer(person, language="ms")
data = person_serializer.data
for membership in data["memberships"]:
if membership["organization"]:
self.assertEqual(membership["organization"]["language_code"], "ms")
def test_fetch_person_membership_on_behalf_of_expanded(self):
person = Person.objects.untranslated().get(id="2439e472-10dc-4f9c-aa99-efddd9046b4a")
person_serializer = PersonSerializer(person, language="en")
data = person_serializer.data
self.assertEqual(data["memberships"][0]["on_behalf_of"]["id"], "3d62d9ea-0600-4f29-8ce6-f7720fd49aa3")
def test_fetch_person_minimized_serializer(self):
person = Person.objects.untranslated().get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
person_serializer = MinPersonSerializer(person)
membership_count = person.memberships.count()
self.assertTrue(len(person_serializer.data["memberships"]), membership_count)
def test_update_person_serializer_null_value(self):
person = Person.objects.untranslated().get(id="ab1a5788e5bae955c048748fa6af0e97")
data = {
"biography": None,
}
person_serializer = PersonSerializer(person, data=data, partial=True, language='en')
person_serializer.is_valid()
self.assertEqual(person_serializer.errors, {})
person_serializer.save()
person = Person.objects.language("en").get(id="ab1a5788e5bae955c048748fa6af0e97")
self.assertEqual(person.biography, None)
# We have set parameter in client into json instead of multipart form, maybe we should explicitly set it.
class PersonAPITestCase(BasePopitAPITestCase):
def test_view_person_list(self):
response = self.client.get("/en/persons/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertTrue("page" in response.data)
self.assertEqual(response.data["per_page"], settings.REST_FRAMEWORK["PAGE_SIZE"])
self.assertEqual(response.data["num_pages"], 1)
def test_view_person_detail(self):
person = Person.objects.language("en").get(id="8497ba86-7485-42d2-9596-2ab14520f1f4")
response = self.client.get("/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
data = response.data
self.assertEqual(data["result"]["name"], "John")
self.assertTrue("memberships" in response.data["result"])
def test_view_person_detail_not_exist(self):
response = self.client.get("/en/persons/not_exist/")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_create_person_unauthorized(self):
person_data = {
"name": "joe",
"family_name": "doe",
"given_name": "joe jambul",
"additional_name": "not john doe",
"gender": "unknown",
"summary": "person unit test api",
"honorific_prefix": "Chief",
"honorific_suffix": "of the fake people league",
"biography": "He does not exists!!!!",
"birth_date": "1950-01-01",
"death_data": "2000-01-01",
"email": "[email protected]",
"contact_details":[
{
"type":"twitter",
"value": "sinarproject",
}
],
"links":[
{
"url":"http://sinarproject.org",
}
],
"identifiers":[
{
"identifier": "9089098098",
"scheme": "rakyat",
}
],
"other_names":[
{
"name":"Jane",
"family_name":"Jambul",
"start_date": "1950-01-01",
"end_date": "2010-01-01",
}
]
}
response = self.client.post("/en/persons/", person_data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_person_authorized(self):
person_data = {
"name": "joe",
"family_name": "doe",
"given_name": "joe jambul",
"additional_name": "not john doe",
"gender": "unknown",
"summary": "person unit test api",
"honorific_prefix": "Chief",
"honorific_suffix": "of the fake people league",
"biography": "He does not exists!!!!",
"birth_date": "1950-01-01",
"death_data": "2000-01-01",
"email": "[email protected]",
"contact_details":[
{
"type":"twitter",
"value": "sinarproject",
}
],
"links":[
{
"url":"http://sinarproject.org",
}
],
"identifiers":[
{
"identifier": "9089098098",
"scheme": "rakyat",
}
],
"other_names":[
{
"name":"Jane",
"family_name":"Jambul",
"start_date": "1950-01-01",
"end_date": "2010-01-01",
}
]
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post("/en/persons/", person_data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
person = Person.objects.language("en").get(name="joe")
self.assertEqual(person.name, "joe")
def test_update_person_unauthorized(self):
person_data = {
"given_name": "jerry jambul",
}
response = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", person_data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_not_exist_unauthorized(self):
person_data = {
"given_name": "jerry jambul",
}
response = self.client.put("/en/persons/not_exist/", person_data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_authorized(self):
person_data = {
"given_name": "jerry jambul",
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", person_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
self.assertEqual(person_.given_name, "jerry jambul")
def test_update_person_not_exist_authorized(self):
person_data = {
"given_name": "jerry jambul",
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put("/en/persons/not_exist/", person_data)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_create_person_links_unauthorized(self):
person_data = {
"links": [
{
"url": "http://twitter.com/sweemeng",
}
]
}
response = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", person_data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_person_links_authorized(self):
person_data = {
"links": [
{
"url": "http://twitter.com/sweemeng",
}
]
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", person_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
url = person_.links.language("en").get(url="http://twitter.com/sweemeng")
self.assertEqual(url.url, "http://twitter.com/sweemeng")
def test_update_person_links_unauthorized(self):
person_data = {
"id":"ab1a5788e5bae955c048748fa6af0e97",
"links":[
{
"id": "a4ffa24a9ef3cbcb8cfaa178c9329367",
"note": "just a random repo"
}
]
}
response = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", person_data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_links_authorized(self):
person_data = {
"id":"ab1a5788e5bae955c048748fa6af0e97",
"links":[
{
"id": "a4ffa24a9ef3cbcb8cfaa178c9329367",
"note": "just a random repo"
}
]
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", person_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
url = person_.links.language("en").get(id="a4ffa24a9ef3cbcb8cfaa178c9329367")
self.assertEqual(url.note, "just a random repo")
def test_create_nested_person_links_unauthorized(self):
person_data = {
"id":"ab1a5788e5bae955c048748fa6af0e97",
"contact_details":[
{
"id": "a66cb422-eec3-4861-bae1-a64ae5dbde61",
"links": [{
"url": "http://facebook.com",
}]
}
],
}
response = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", person_data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_nested_person_links_authorized(self):
person_data = {
"id":"ab1a5788e5bae955c048748fa6af0e97",
"contact_details":[
{
"id": "a66cb422-eec3-4861-bae1-a64ae5dbde61",
"links": [{
"url": "http://facebook.com",
}]
}
],
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", person_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
# There should be only 1 links in that contact
contact = person_.contact_details.language('en').get(id='a66cb422-eec3-4861-bae1-a64ae5dbde61')
links = contact.links.language('en').all()
check = False
for i in links:
if i.url == "http://sinarproject.org":
check = True
self.assertTrue(check, "http://sinarproject.org does not exist")
def test_update_nested_person_links_unauthorized(self):
person_data = {
"id":"8497ba86-7485-42d2-9596-2ab14520f1f4",
"identifiers":[
{
"id": "af7c01b5-1c4f-4c08-9174-3de5ff270bdb",
"links": [{
"id": "9c9a2093-c3eb-4b51-b869-0d3b4ab281fd",
"note": "this is just a test note",
}]
}
],
}
response = self.client.put("/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/", person_data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_nested_person_links_authorized(self):
person_data = {
"id":"8497ba86-7485-42d2-9596-2ab14520f1f4",
"identifiers":[
{
"id": "af7c01b5-1c4f-4c08-9174-3de5ff270bdb",
"links": [{
"id": "9c9a2093-c3eb-4b51-b869-0d3b4ab281fd",
"note": "this is just a test note",
}]
}
],
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put("/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/", person_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
person_ = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
identifier = person_.identifiers.language('en').get(id="af7c01b5-1c4f-4c08-9174-3de5ff270bdb")
link = identifier.links.language('en').get(id="9c9a2093-c3eb-4b51-b869-0d3b4ab281fd")
self.assertEqual(link.note, "this is just a test note")
def test_create_other_names_unauthorized(self):
person_data = {
"other_names": [
{
"name": "jane",
"family_name": "jambul",
"given_name": "test person",
"start_date": "1950-01-01",
"end_date": "2010-01-01",
}
]
}
response = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", person_data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_other_names_authorized(self):
person_data = {
"other_names": [
{
"name": "jane",
"family_name": "jambul",
"given_name": "test person",
"start_date": "1950-01-01",
"end_date": "2010-01-01",
}
]
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", person_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
other_name = person_.other_names.language('en').get(name="jane")
self.assertEqual(other_name.given_name, "test person")
def test_update_other_names_unauthorized(self):
person_data = {
"other_names": [
{
"id": "cf93e73f-91b6-4fad-bf76-0782c80297a8",
"family_name": "jambul",
}
]
}
response = self.client.put("/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/", person_data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_other_names_authorized(self):
person_data = {
"other_names": [
{
"id": "cf93e73f-91b6-4fad-bf76-0782c80297a8",
"family_name": "jambul",
}
]
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put("/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/", person_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
person_ = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
other_name = person_.other_names.language('en').get(id="cf93e73f-91b6-4fad-bf76-0782c80297a8")
self.assertEqual(other_name.family_name, "jambul")
def test_delete_persons_unauthorized(self):
response = self.client.delete("/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/")
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_person_not_exist_unauthorized(self):
response = self.client.delete("/en/persons/not_exist/")
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_persons_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete("/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/")
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_delete_person_not_exist_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete("/en/persons/not_exist/")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_person_api_summary_more_than_255(self):
raw_data = """
{
"result":
{
"proxy_image": "https://sinar-malaysia.popit.mysociety.org/image-proxy/http%3A%2F%2Fupload.wikimedia.org%2Fwikipedia%2Fcommons%2Fthumb%2F0%2F05%2FAnwar_Ibrahim.jpg%2F398px-Anwar_Ibrahim.jpg",
"image": "http://upload.wikimedia.org/wikipedia/commons/thumb/0/05/Anwar_Ibrahim.jpg/398px-Anwar_Ibrahim.jpg",
"html_url": "https://sinar-malaysia.popit.mysociety.org/persons/53630562f1eab6270da6c8ed",
"url": "https://sinar-malaysia.popit.mysociety.org/api/v0.1/persons/53630562f1eab6270da6c8ed",
"birth_date": "1947-08-10",
"death_date": null,
"id": "53630562f1eab6270da6c8ed",
"name": "Anwar Ibrahim",
"summary": "Dato' Seri Anwar Bin Ibrahim[1] (born 10 August 1947) is a Malaysian politician. He is the Leader of Opposition of Malaysia (Pakatan Rakyat), economic advisor to the state government of Selangor[2] and de facto leader of PKR (KeADILan). He served as the Deputy Prime Minister of Malaysia from 1993 to 1998 and Finance Minister from 1991 to 1998 when he was in UMNO, a major party in ruling Barisan Nasional coaltion.",
"images":
[
{
"proxy_url": "https://sinar-malaysia.popit.mysociety.org/image-proxy/http%3A%2F%2Fupload.wikimedia.org%2Fwikipedia%2Fcommons%2Fthumb%2F0%2F05%2FAnwar_Ibrahim.jpg%2F398px-Anwar_Ibrahim.jpg",
"created": "",
"url": "http://upload.wikimedia.org/wikipedia/commons/thumb/0/05/Anwar_Ibrahim.jpg/398px-Anwar_Ibrahim.jpg",
"id": "536305bef1eab6270da6c8ee"
}
],
"memberships":
[
{
"contact_details": [ ],
"links": [ ],
"images": [ ],
"url": "https://sinar-malaysia.popit.mysociety.org/api/v0.1/memberships/53630b0619ee29270d8a9e5e",
"start_date": null,
"role": "",
"post_id": null,
"person_id": "53630562f1eab6270da6c8ed",
"organization_id": "536309c319ee29270d8a9e26",
"label": null,
"id": "53630b0619ee29270d8a9e5e",
"html_url": "https://sinar-malaysia.popit.mysociety.org/memberships/53630b0619ee29270d8a9e5e",
"end_date": null,
"area_name": null,
"area_id": null
},
{
"contact_details": [ ],
"links": [ ],
"images": [ ],
"id": "53633d8319ee29270d8a9ed5",
"person_id": "53630562f1eab6270da6c8ed",
"end_date": "2013-05-05",
"start_date": "2008-08-26",
"label": null,
"post_id": "53633d1719ee29270d8a9ed4",
"role": "Opposition Leader",
"organization_id": "53633b5a19ee29270d8a9ecf",
"url": "https://sinar-malaysia.popit.mysociety.org/api/v0.1/memberships/53633d8319ee29270d8a9ed5",
"html_url": "https://sinar-malaysia.popit.mysociety.org/memberships/53633d8319ee29270d8a9ed5"
},
{
"contact_details": [ ],
"links": [ ],
"images": [ ],
"end_date": null,
"id": "5535e892aea781383fa79402",
"post_id": "545e4d5b5222837c2c05988b",
"start_date": "2013",
"role": "Parliamentary Candidate",
"organization_id": "545de8665222837c2c0586c0",
"person_id": "53630562f1eab6270da6c8ed",
"url": "https://sinar-malaysia.popit.mysociety.org/api/v0.1/memberships/5535e892aea781383fa79402",
"html_url": "https://sinar-malaysia.popit.mysociety.org/memberships/5535e892aea781383fa79402"
}
],
"links": [ ],
"contact_details": [ ],
"identifiers": [ ],
"other_names":
[
{
"name": "Dato' Seri Anwar Bin Ibrahim",
"note": "With honorifics.",
"id": "55653036561fa5421bb7bd20"
},
{
"name": "Anwar Bin Ibrahim",
"id": "55653036561fa5421bb7bd1f"
}
]
}
}
"""
data = json.loads(raw_data)
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post("/en/persons/", data["result"])
logging.warn(response.data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_person_api_invalid_date(self):
person_data = {
"name": "joe",
"family_name": "doe",
"given_name": "joe jambul",
"additional_name": "not john doe",
"gender": "unknown",
"summary": "person unit test api",
"honorific_prefix": "Chief",
"honorific_suffix": "of the fake people league",
"biography": "He does not exists!!!!",
"birth_date": "invalid date",
"death_date": "invalid date",
"email": "[email protected]",
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post("/en/persons/", person_data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertTrue("errors" in response.data)
def test_update_person_authorized_translated(self):
    """PUT against the ``ms`` endpoint must update the Malay translation
    of the person record."""
    token = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
    # Inline the single-field payload; only given_name changes.
    response = self.client.put(
        "/ms/persons/ab1a5788e5bae955c048748fa6af0e97/",
        {"given_name": "jerry jambul"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    updated = Person.objects.language('ms').get(id='ab1a5788e5bae955c048748fa6af0e97')
    self.assertEqual(updated.given_name, "jerry jambul")
def test_create_person_authorized_translated(self):
    """An authorized POST to the ``ms`` endpoint creates a person whose
    fields are stored under the Malay translation."""
    token = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
    ms_payload = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "bukan john doe",
        "gender": "tak tahu",
        "summary": "orang ujian",
        "honorific_prefix": "Datuk Seri",
        "biography": "Dia Tak wujud!!!!",
        "email": "[email protected]",
    }
    response = self.client.post("/ms/persons/", ms_payload)
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    created = Person.objects.language("ms").get(name="joe")
    self.assertEqual(created.name, "joe")
def test_create_person_othername_blank_id_authorized(self):
    """A nested other_name submitted with a blank ``id`` must be created
    and assigned a real (non-empty) id by the server.

    Fixes: the payload used the misspelled key "death_data" (silently
    ignored by the serializer) and the deprecated ``logging.warn`` alias.
    """
    person_data = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "not john doe",
        "gender": "unknown",
        "summary": "person unit test api",
        "honorific_prefix": "Chief",
        "honorific_suffix": "of the fake people league",
        "biography": "He does not exists!!!!",
        "birth_date": "1950-01-01",
        # was "death_data" (typo); the field was never actually sent
        "death_date": "2000-01-01",
        "email": "[email protected]",
        "other_names": [
            {
                "id": "",
                "name": "Jane",
                "family_name": "Jambul",
                "start_date": "1950-01-01",
                "end_date": "2010-01-01",
            }
        ]
    }
    token = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
    response = self.client.post("/en/persons/", person_data)
    logging.warning(response.data["result"]["other_names"])
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    other_names = response.data["result"]["other_names"][0]
    self.assertNotEqual(other_names["id"], "")
def test_create_person_identifier_blank_id_authorized(self):
person_data = {
"name": "joe",
"family_name": "doe",
"given_name": "joe jambul",
"additional_name": "not john doe",
"gender": "unknown",
"summary": "person unit test api",
"honorific_prefix": "Chief",
"honorific_suffix": "of the fake people league",
"biography": "He does not exists!!!!",
"birth_date": "1950-01-01",
"death_data": "2000-01-01",
"email": "[email protected]",
"identifiers":[
{
"id": "",
"identifier": "9089098098",
"scheme": "rakyat",
}
],<|fim▁hole|> self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post("/en/persons/", person_data)
logging.warn(response.data["result"]["other_names"])
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
identifiers = response.data["result"]["identifiers"][0]
self.assertNotEqual(identifiers["id"], "")
def test_create_person_contact_details_blank_id_authorized(self):
    """A nested contact_detail submitted with a blank ``id`` must be
    created and assigned a real (non-empty) id by the server.

    Fixes: the debug log referenced "other_names" (copy-paste from the
    other_names test) instead of "contact_details"; the payload used the
    misspelled key "death_data"; ``logging.warn`` is deprecated.
    """
    person_data = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "not john doe",
        "gender": "unknown",
        "summary": "person unit test api",
        "honorific_prefix": "Chief",
        "honorific_suffix": "of the fake people league",
        "biography": "He does not exists!!!!",
        "birth_date": "1950-01-01",
        # was "death_data" (typo); the field was never actually sent
        "death_date": "2000-01-01",
        "email": "[email protected]",
        "contact_details": [
            {
                "id": "",
                "type": "twitter",
                "value": "sinarproject",
            }
        ],
    }
    token = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
    response = self.client.post("/en/persons/", person_data)
    logging.warning(response.data["result"]["contact_details"])
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    contact_details = response.data["result"]["contact_details"][0]
    self.assertNotEqual(contact_details["id"], "")
def test_create_person_links_blank_id_authorized(self):
    """A nested link submitted with a blank ``id`` must be created and
    assigned a real (non-empty) id by the server.

    Fixes: the debug log referenced "other_names" (copy-paste) instead
    of "links"; the payload used the misspelled key "death_data";
    ``logging.warn`` is deprecated.
    """
    person_data = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "not john doe",
        "gender": "unknown",
        "summary": "person unit test api",
        "honorific_prefix": "Chief",
        "honorific_suffix": "of the fake people league",
        "biography": "He does not exists!!!!",
        "birth_date": "1950-01-01",
        # was "death_data" (typo); the field was never actually sent
        "death_date": "2000-01-01",
        "email": "[email protected]",
        "links": [
            {
                "id": "",
                "url": "http://sinarproject.org",
            }
        ],
    }
    token = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
    response = self.client.post("/en/persons/", person_data)
    logging.warning(response.data["result"]["links"])
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    links = response.data["result"]["links"][0]
    self.assertNotEqual(links["id"], "")
def test_create_person_with_all_field_blank_id_serializer(self):
    """PersonSerializer must accept a payload carrying every nested
    collection plus a blank top-level ``id`` and save without errors.

    Fix: the payload previously used the misspelled key "death_data",
    so death_date was never exercised despite the "all_field" intent.
    """
    person_data = {
        "id": "",
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "not john doe",
        "gender": "unknown",
        "summary": "person unit test api",
        "honorific_prefix": "Chief",
        "honorific_suffix": "of the fake people league",
        "biography": "He does not exists!!!!",
        "birth_date": "1950-01-01",
        # was "death_data" (typo); the field was never actually sent
        "death_date": "2000-01-01",
        "email": "[email protected]",
        "contact_details": [
            {
                "type": "twitter",
                "value": "sinarproject",
            }
        ],
        "links": [
            {
                "url": "http://sinarproject.org",
            }
        ],
        "identifiers": [
            {
                "identifier": "9089098098",
                "scheme": "rakyat",
            }
        ],
        "other_names": [
            {
                "name": "Jane",
                "family_name": "Jambul",
                "start_date": "1950-01-01",
                "end_date": "2010-01-01",
            }
        ]
    }
    person_serial = PersonSerializer(data=person_data, language='en')
    person_serial.is_valid()
    self.assertEqual(person_serial.errors, {})
    person_serial.save()
    person = Person.objects.language("en").get(name="joe")
    self.assertEqual(person.given_name, "joe jambul")
def test_create_person_with_all_field_birthdate_deathdate_blank_serializer(self):
    """PersonSerializer must accept blank ("") birth_date AND death_date.

    Fix: the payload previously used the misspelled key "death_data", so
    a blank death_date was never actually exercised despite the test
    name.  # NOTE(review): assumes the serializer accepts "" for
    death_date exactly as it does for birth_date - confirm.
    """
    person_data = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "not john doe",
        "gender": "unknown",
        "summary": "person unit test api",
        "honorific_prefix": "Chief",
        "honorific_suffix": "of the fake people league",
        "biography": "He does not exists!!!!",
        "birth_date": "",
        # was "death_data" (typo); the blank value was silently ignored
        "death_date": "",
        "email": "[email protected]",
        "contact_details": [
            {
                "type": "twitter",
                "value": "sinarproject",
            }
        ],
        "links": [
            {
                "url": "http://sinarproject.org",
            }
        ],
        "identifiers": [
            {
                "identifier": "9089098098",
                "scheme": "rakyat",
            }
        ],
        "other_names": [
            {
                "name": "Jane",
                "family_name": "Jambul",
                "start_date": "1950-01-01",
                "end_date": "2010-01-01",
            }
        ]
    }
    person_serial = PersonSerializer(data=person_data, language='en')
    person_serial.is_valid()
    self.assertEqual(person_serial.errors, {})
    person_serial.save()
    person = Person.objects.language("en").get(name="joe")
    self.assertEqual(person.given_name, "joe jambul")
def test_minify_person_api(self):
    """A minified person response must still carry every membership."""
    url = "/en/persons/ab1a5788e5bae955c048748fa6af0e97"
    response = self.client.get(url, {"minify": "True"})
    expected_count = Person.objects.get(id="ab1a5788e5bae955c048748fa6af0e97").memberships.count()
    self.assertEqual(len(response.data["result"]["memberships"]), expected_count)
token = Token.objects.get(user__username="admin") |
<|file_name|>ElementCreator.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.actions;
import com.intellij.CommonBundle;
import com.intellij.history.LocalHistory;
import com.intellij.history.LocalHistoryAction;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.application.WriteActionAware;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.UndoConfirmationPolicy;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.SmartPointerManager;
import com.intellij.psi.SmartPsiElementPointer;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
* @author peter
*/
/**
 * Template for actions that create new PSI elements from a user-supplied
 * name: subclasses implement the concrete {@link #create} step and the
 * command name; this class runs the step as an undoable command and
 * reports failures in an error dialog.
 */
public abstract class ElementCreator implements WriteActionAware {
  private static final Logger LOG = Logger.getInstance(ElementCreator.class);
  private final Project myProject;
  // Title of the error dialog shown when creation fails.
  private final @NlsContexts.DialogTitle String myErrorTitle;

  protected ElementCreator(Project project, @NotNull @NlsContexts.DialogTitle String errorTitle) {
    myProject = project;
    myErrorTitle = errorTitle;
  }

  /** Performs the actual element creation for {@code newName}; may throw on failure. */
  protected abstract PsiElement @NotNull [] create(@NotNull String newName) throws Exception;

  /** Command name recorded in undo history / local history for creating {@code newName}. */
  @NlsContexts.Command
  @NotNull
  protected abstract String getActionName(@NotNull String newName);
<|fim▁hole|> Messages.showMessageDialog(myProject, IdeBundle.message("error.name.should.be.specified"), CommonBundle.getErrorTitle(),
Messages.getErrorIcon());
return PsiElement.EMPTY_ARRAY;
}
Ref<List<SmartPsiElementPointer<?>>> createdElements = Ref.create();
Exception exception = executeCommand(getActionName(inputString), () -> {
PsiElement[] psiElements = create(inputString);
SmartPointerManager manager = SmartPointerManager.getInstance(myProject);
createdElements.set(ContainerUtil.map(psiElements, manager::createSmartPsiElementPointer));
});
if (exception != null) {
handleException(exception);
return PsiElement.EMPTY_ARRAY;
}
return ContainerUtil.mapNotNull(createdElements.get(), SmartPsiElementPointer::getElement).toArray(PsiElement.EMPTY_ARRAY);
}
/**
 * Runs {@code invokeCreate} as a single undoable command wrapped in a
 * local-history action, inside a write action when
 * {@link #startInWriteAction()} says so.
 *
 * @return the exception thrown by {@code invokeCreate}, or {@code null} on success
 */
@Nullable
private Exception executeCommand(@NotNull @NlsContexts.Command String commandName, @NotNull ThrowableRunnable<? extends Exception> invokeCreate) {
  // Single-element array smuggles the exception out of the lambda,
  // since captured locals must be effectively final.
  final Exception[] exception = new Exception[1];
  CommandProcessor.getInstance().executeCommand(myProject, () -> {
    LocalHistoryAction action = LocalHistory.getInstance().startAction(commandName);
    try {
      if (startInWriteAction()) {
        WriteAction.run(invokeCreate);
      }
      else {
        invokeCreate.run();
      }
    }
    catch (Exception ex) {
      exception[0] = ex;
    }
    finally {
      // Always close the local-history action, even on failure.
      action.finish();
    }
  }, commandName, null, UndoConfirmationPolicy.REQUEST_CONFIRMATION);
  return exception[0];
}
/** Logs the failure and shows its cleaned-up message in an error dialog. */
private void handleException(Exception t) {
  LOG.info(t);
  Messages.showMessageDialog(myProject, getErrorMessage(t), myErrorTitle, Messages.getErrorIcon());
}
/**
 * Extracts a user-presentable message from {@code t}: the filtered
 * exception message when present, otherwise the throwable's own
 * string form.
 */
public static @NlsContexts.DialogMessage String getErrorMessage(Throwable t) {
  String message = CreateElementActionBase.filterMessage(t.getMessage());
  return StringUtil.isEmpty(message) ? t.toString() : message;
}
}<|fim▁end|> | public @NotNull PsiElement @NotNull [] tryCreate(@NotNull final String inputString) {
if (inputString.isEmpty()) { |
<|file_name|>dts_renderer_spec.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import mapHelpers from 'convert-source-map';
import MagicString from 'magic-string';
import {encode} from 'sourcemap-codec';
import ts from 'typescript';
import {absoluteFrom, getFileSystem} from '../../../src/ngtsc/file_system';
import {runInEachFileSystem, TestFile} from '../../../src/ngtsc/file_system/testing';
import {Reexport} from '../../../src/ngtsc/imports';
import {MockLogger} from '../../../src/ngtsc/logging/testing';
import {loadTestFiles} from '../../../src/ngtsc/testing';
import {Import, ImportManager} from '../../../src/ngtsc/translator';
import {DecorationAnalyzer} from '../../src/analysis/decoration_analyzer';
import {ModuleWithProvidersAnalyzer, ModuleWithProvidersInfo} from '../../src/analysis/module_with_providers_analyzer';
import {NgccReferencesRegistry} from '../../src/analysis/ngcc_references_registry';
import {ExportInfo, PrivateDeclarationsAnalyzer} from '../../src/analysis/private_declarations_analyzer';
import {CompiledClass} from '../../src/analysis/types';
import {Esm2015ReflectionHost} from '../../src/host/esm2015_host';
import {DtsRenderer} from '../../src/rendering/dts_renderer';
import {RedundantDecoratorMap, RenderingFormatter} from '../../src/rendering/rendering_formatter';
import {getRootFiles, makeTestEntryPointBundle} from '../helpers/utils';
/**
 * Stub RenderingFormatter that, instead of doing real rendering work,
 * prepends a distinctive marker comment for each operation. The tests
 * below assert on these exact marker strings (including the original
 * "MODUlE" casing), so the strings must not be edited.
 */
class TestRenderingFormatter implements RenderingFormatter {
  addImports(output: MagicString, imports: Import[], sf: ts.SourceFile) {
    output.prepend('\n// ADD IMPORTS\n');
  }
  addExports(output: MagicString, baseEntryPointPath: string, exports: ExportInfo[]) {
    output.prepend('\n// ADD EXPORTS\n');
  }
  addDirectExports(output: MagicString, exports: Reexport[]) {
    output.prepend('\n// ADD DIRECT EXPORTS\n');
  }
  addConstants(output: MagicString, constants: string, file: ts.SourceFile): void {
    output.prepend('\n// ADD CONSTANTS\n');
  }
  addDefinitions(output: MagicString, compiledClass: CompiledClass, definitions: string) {
    output.prepend('\n// ADD DEFINITIONS\n');
  }
  addAdjacentStatements(output: MagicString, compiledClass: CompiledClass, statements: string) {
    output.prepend('\n// ADD ADJACENT STATEMENTS\n');
  }
  removeDecorators(output: MagicString, decoratorsToRemove: RedundantDecoratorMap) {
    output.prepend('\n// REMOVE DECORATORS\n');
  }
  addModuleWithProvidersParams(
      output: MagicString, moduleWithProviders: ModuleWithProvidersInfo[],
      importManager: ImportManager): void {
    output.prepend('\n// ADD MODUlE WITH PROVIDERS PARAMS\n');
  }
  // Statement printing is irrelevant to these tests.
  printStatement(): string {
    return 'IGNORED';
  }
}
/**
 * Builds a DtsRenderer plus every analysis it consumes, over an
 * in-memory file system seeded with the given source/dts/mapping files.
 * All TestRenderingFormatter methods are wrapped in jasmine spies so
 * tests can assert which rendering steps ran.
 */
function createTestRenderer(
    packageName: string, files: TestFile[], dtsFiles?: TestFile[], mappingFiles?: TestFile[]) {
  const logger = new MockLogger();
  loadTestFiles(files);
  if (dtsFiles) {
    loadTestFiles(dtsFiles);
  }
  if (mappingFiles) {
    loadTestFiles(mappingFiles);
  }
  const fs = getFileSystem();
  // '@angular/core' itself is compiled slightly differently (isCore).
  const isCore = packageName === '@angular/core';
  const bundle = makeTestEntryPointBundle(
      'test-package', 'esm2015', isCore, getRootFiles(files), dtsFiles && getRootFiles(dtsFiles));
  const host = new Esm2015ReflectionHost(logger, isCore, bundle.src, bundle.dts);
  const referencesRegistry = new NgccReferencesRegistry(host);
  // Run the three analyses the renderer needs as input.
  const decorationAnalyses =
      new DecorationAnalyzer(fs, bundle, host, referencesRegistry).analyzeProgram();
  const moduleWithProvidersAnalyses =
      new ModuleWithProvidersAnalyzer(
          host, bundle.src.program.getTypeChecker(), referencesRegistry, true)
          .analyzeProgram(bundle.src.program);
  const privateDeclarationsAnalyses =
      new PrivateDeclarationsAnalyzer(host, referencesRegistry).analyzeProgram(bundle.src.program);
  const testFormatter = new TestRenderingFormatter();
  spyOn(testFormatter, 'addExports').and.callThrough();
  spyOn(testFormatter, 'addImports').and.callThrough();
  spyOn(testFormatter, 'addDefinitions').and.callThrough();
  spyOn(testFormatter, 'addAdjacentStatements').and.callThrough();
  spyOn(testFormatter, 'addConstants').and.callThrough();
  spyOn(testFormatter, 'removeDecorators').and.callThrough();
  spyOn(testFormatter, 'addModuleWithProvidersParams').and.callThrough();
  spyOn(testFormatter, 'printStatement').and.callThrough();
  const renderer = new DtsRenderer(testFormatter, fs, logger, host, bundle);
  return {
    renderer,
    testFormatter,
    decorationAnalyses,
    moduleWithProvidersAnalyses,
    privateDeclarationsAnalyses,
    bundle
  };
}
runInEachFileSystem(() => {
describe('DtsRenderer', () => {
let _: typeof absoluteFrom;
let INPUT_PROGRAM: TestFile;
let INPUT_DTS_PROGRAM: TestFile;
beforeEach(() => {
_ = absoluteFrom;
INPUT_PROGRAM = {
name: _('/node_modules/test-package/src/file.js'),
contents:
`import { Directive } from '@angular/core';\nexport class A {\n foo(x) {\n return x;\n }\n}\nA.decorators = [\n { type: Directive, args: [{ selector: '[a]' }] }\n];\n`
};
INPUT_DTS_PROGRAM = {
name: _('/node_modules/test-package/typings/file.d.ts'),
contents: `export declare class A {\nfoo(x: number): number;\n}\n`
};
});
it('should render extract types into typings files', () => {
const {
renderer,
decorationAnalyses,
privateDeclarationsAnalyses,
moduleWithProvidersAnalyses
} = createTestRenderer('test-package', [INPUT_PROGRAM], [INPUT_DTS_PROGRAM]);
const result = renderer.renderProgram(
decorationAnalyses, privateDeclarationsAnalyses, moduleWithProvidersAnalyses);
const typingsFile =
result.find(f => f.path === _('/node_modules/test-package/typings/file.d.ts'))!;
expect(typingsFile.contents)
.toContain(
'foo(x: number): number;\n static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<A, never>;\n static ɵdir: ɵngcc0.ɵɵDirectiveDeclaration');
});
it('should render imports into typings files', () => {
const {
renderer,
decorationAnalyses,
privateDeclarationsAnalyses,
moduleWithProvidersAnalyses
} = createTestRenderer('test-package', [INPUT_PROGRAM], [INPUT_DTS_PROGRAM]);
const result = renderer.renderProgram(
decorationAnalyses, privateDeclarationsAnalyses, moduleWithProvidersAnalyses);
const typingsFile =
result.find(f => f.path === _('/node_modules/test-package/typings/file.d.ts'))!;
expect(typingsFile.contents).toContain(`\n// ADD IMPORTS\n`);
});
it('should render exports into typings files', () => {
const {
renderer,
decorationAnalyses,
privateDeclarationsAnalyses,
moduleWithProvidersAnalyses
} = createTestRenderer('test-package', [INPUT_PROGRAM], [INPUT_DTS_PROGRAM]);
// Add a mock export to trigger export rendering
privateDeclarationsAnalyses.push({
identifier: 'ComponentB',
from: _('/node_modules/test-package/src/file.js'),
dtsFrom: _('/typings/b.d.ts')
});
const result = renderer.renderProgram(
decorationAnalyses, privateDeclarationsAnalyses, moduleWithProvidersAnalyses);
const typingsFile =
result.find(f => f.path === _('/node_modules/test-package/typings/file.d.ts'))!;
expect(typingsFile.contents).toContain(`\n// ADD EXPORTS\n`);
});
it('should render ModuleWithProviders type params', () => {
const {
renderer,
decorationAnalyses,
privateDeclarationsAnalyses,
moduleWithProvidersAnalyses
} = createTestRenderer('test-package', [INPUT_PROGRAM], [INPUT_DTS_PROGRAM]);
const result = renderer.renderProgram(
decorationAnalyses, privateDeclarationsAnalyses, moduleWithProvidersAnalyses);
const typingsFile =
result.find(f => f.path === _('/node_modules/test-package/typings/file.d.ts'))!;
expect(typingsFile.contents).toContain(`\n// ADD MODUlE WITH PROVIDERS PARAMS\n`);
});
it('should render an external source map for files whose original file does not have a source map',
() => {
const {
renderer,
decorationAnalyses,
privateDeclarationsAnalyses,
moduleWithProvidersAnalyses
} = createTestRenderer('test-package', [INPUT_PROGRAM], [INPUT_DTS_PROGRAM]);
const result = renderer.renderProgram(
decorationAnalyses, privateDeclarationsAnalyses, moduleWithProvidersAnalyses);
const typingsFile =
result.find(f => f.path === _('/node_modules/test-package/typings/file.d.ts'))!;
expect(typingsFile.contents).toContain('//# sourceMappingURL=file.d.ts.map');
});
it('should render an internal source map for files whose original file has an internal source map',
() => {
const sourceMap = mapHelpers.fromObject({
'version': 3,
'file': 'file.d.ts',
'sources': ['file.d.ts'],
'names': [],
'mappings': encode([[]]),
'sourcesContent': [INPUT_DTS_PROGRAM.contents],
});<|fim▁hole|> privateDeclarationsAnalyses,
moduleWithProvidersAnalyses
} = createTestRenderer('test-package', [INPUT_PROGRAM], [INPUT_DTS_PROGRAM]);
const result = renderer.renderProgram(
decorationAnalyses, privateDeclarationsAnalyses, moduleWithProvidersAnalyses);
const typingsFile =
result.find(f => f.path === _('/node_modules/test-package/typings/file.d.ts'))!;
expect(typingsFile.contents).toContain('//# sourceMappingURL=data:application/json');
});
});
});<|fim▁end|> | INPUT_DTS_PROGRAM.contents += sourceMap.toComment();
const {
renderer,
decorationAnalyses, |
<|file_name|>amd.js<|end_file_name|><|fim▁begin|>define(function () { 'use strict';
({
get foo () {
console.log( 'effect' );
return {};
}
}).foo.bar;
({
get foo () {
return {};
}
}).foo.bar.baz;
({
get foo () {
console.log( 'effect' );
return () => {};
}
}).foo();
({
get foo () {
return () => console.log( 'effect' );
}
}).foo();
({
get foo () {
console.log( 'effect' );
return () => () => {};
}
}).foo()();<|fim▁hole|> }).foo()();
});<|fim▁end|> | ({
get foo () {
return () => () => console.log( 'effect' );
} |
<|file_name|>Badge.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
import { SxProps } from '@mui/system';
import { OverridableStringUnion } from '@mui/types';
import { ExtendBadgeUnstyledTypeMap, BadgeUnstyledTypeMap } from '@mui/base/BadgeUnstyled';
import { Theme } from '../styles';
import { OverridableComponent, OverrideProps } from '../OverridableComponent';
export interface BadgePropsVariantOverrides {}
export interface BadgePropsColorOverrides {}
export type BadgeTypeMap<
D extends React.ElementType = 'span',
P = {},
> = ExtendBadgeUnstyledTypeMap<{
props: P & {
/**
* Override or extend the styles applied to the component.
*/
classes?: BadgeUnstyledTypeMap['props']['classes'] & {
/** Styles applied to the badge `span` element if `color="primary"`. */
colorPrimary?: string;
/** Styles applied to the badge `span` element if `color="secondary"`. */
colorSecondary?: string;
/** Styles applied to the badge `span` element if `color="error"`. */
colorError?: string;
/** Styles applied to the badge `span` element if `color="info"`. */
colorInfo?: string;
/** Styles applied to the badge `span` element if `color="success"`. */
colorSuccess?: string;
/** Styles applied to the badge `span` element if `color="warning"`. */
colorWarning?: string;
/** Class name applied to the badge `span` element if `anchorOrigin={{ 'top', 'right' }} overlap="rectangular"`. */
anchorOriginTopRightRectangular?: string;
/** Class name applied to the badge `span` element if `anchorOrigin={{ 'bottom', 'right' }} overlap="rectangular"`. */
anchorOriginBottomRightRectangular?: string;
/** Class name applied to the badge `span` element if `anchorOrigin={{ 'top', 'left' }} overlap="rectangular"`. */
anchorOriginTopLeftRectangular?: string;
/** Class name applied to the badge `span` element if `anchorOrigin={{ 'bottom', 'left' }} overlap="rectangular"`. */
anchorOriginBottomLeftRectangular?: string;
/** Class name applied to the badge `span` element if `anchorOrigin={{ 'top', 'right' }} overlap="circular"`. */
anchorOriginTopRightCircular?: string;
/** Class name applied to the badge `span` element if `anchorOrigin={{ 'bottom', 'right' }} overlap="circular"`. */
anchorOriginBottomRightCircular?: string;
/** Class name applied to the badge `span` element if `anchorOrigin={{ 'top', 'left' }} overlap="circular"`. */
anchorOriginTopLeftCircular?: string;
/** Class name applied to the badge `span` element if `anchorOrigin={{ 'bottom', 'left' }} overlap="circular"`. */
anchorOriginBottomLeftCircular?: string;
/** Class name applied to the badge `span` element if `overlap="rectangular"`. */
overlapRectangular?: string;
/** Class name applied to the badge `span` element if `overlap="circular"`. */
overlapCircular?: string;
};
/**
* The color of the component. It supports those theme colors that make sense for this component.
* @default 'default'
*/
color?: OverridableStringUnion<
'primary' | 'secondary' | 'default' | 'error' | 'info' | 'success' | 'warning',
BadgePropsColorOverrides
>;
/**
* Wrapped shape the badge should overlap.
* @default 'rectangular'
*/
overlap?: 'rectangular' | 'circular';
/**
* The system prop that allows defining system overrides as well as additional CSS styles.
*/
sx?: SxProps<Theme>;
/**
* The variant to use.
* @default 'standard'
*/
variant?: OverridableStringUnion<'standard' | 'dot', BadgePropsVariantOverrides>;
};
defaultComponent: D;
}>;
// Prop types for the two slots exposed through `componentsProps`.
type BadgeRootProps = NonNullable<BadgeTypeMap['props']['componentsProps']>['root'];
type BadgeBadgeProps = NonNullable<BadgeTypeMap['props']['componentsProps']>['badge'];
// NOTE(review): exported styled building block - presumably for advanced
// composition/theming; confirm intended public use.
export const BadgeRoot: React.FC<BadgeRootProps>;
export const BadgeMark: React.FC<BadgeBadgeProps>;<|fim▁hole|> * Demos:
*
* - [Avatars](https://mui.com/components/avatars/)
* - [Badges](https://mui.com/components/badges/)
*
* API:
*
* - [Badge API](https://mui.com/api/badge/)
* - inherits [BadgeUnstyled API](https://mui.com/api/badge-unstyled/)
*/
declare const Badge: OverridableComponent<BadgeTypeMap>;
export type BadgeClasses = Record<BadgeClassKey, string>;
export const badgeClasses: BadgeClasses;
export type BadgeProps<
D extends React.ElementType = BadgeTypeMap['defaultComponent'],
P = {},
> = OverrideProps<BadgeTypeMap<D, P>, D>;
export default Badge;<|fim▁end|> |
export type BadgeClassKey = keyof NonNullable<BadgeTypeMap['props']['classes']>;
/**
* |
<|file_name|>watch_directory.py<|end_file_name|><|fim▁begin|>"""Working example of the ReadDirectoryChanges API which will
track changes made to a directory. Can either be run from a
command-line, with a comma-separated list of paths to watch,
or used as a module, either via the watch_path generator or
via the Watcher threads, one thread per path.
Examples:
watch_directory.py c:/temp,r:/images
or:
import watch_directory
for file_type, filename, action in watch_directory.watch_path ("c:/temp"):
print filename, action
or:
import watch_directory
import Queue
file_changes = Queue.Queue ()
for pathname in ["c:/temp", "r:/goldent/temp"]:
watch_directory.Watcher (pathname, file_changes)
while 1:
file_type, filename, action = file_changes.get ()
print file_type, filename, action
"""<|fim▁hole|>from datetime import datetime
import Queue
import threading
import time
import win32file
import win32con
from winsys import fs
# Maps the FILE_ACTION_* codes returned by ReadDirectoryChangesW to
# human-readable labels; unknown codes fall back to "Unknown" at use site.
ACTIONS = {
    1 : "Created",
    2 : "Deleted",
    3 : "Updated",
    4 : "Renamed to something",
    5 : "Renamed from something"
}
def watch_path (path_to_watch, include_subdirectories=False):
    """Generator that blocks on ReadDirectoryChangesW and yields one
    (file_type, full_filename, action) tuple per change under
    path_to_watch. file_type is 'file', 'folder' or '<deleted>'.
    """
    FILE_LIST_DIRECTORY = 0x0001
    # FILE_FLAG_BACKUP_SEMANTICS is required to open a directory handle.
    hDir = win32file.CreateFile (
        path_to_watch,
        FILE_LIST_DIRECTORY,
        win32con.FILE_SHARE_READ | win32con.FILE_SHARE_WRITE,
        None,
        win32con.OPEN_EXISTING,
        win32con.FILE_FLAG_BACKUP_SEMANTICS,
        None
    )
    while True:
        # Blocking call; 1024-byte result buffer.
        # NOTE(review): a burst of changes larger than the buffer may be
        # dropped by the API - confirm whether that matters for callers.
        results = win32file.ReadDirectoryChangesW (
            hDir,
            1024,
            include_subdirectories,
            win32con.FILE_NOTIFY_CHANGE_SIZE | win32con.FILE_NOTIFY_CHANGE_FILE_NAME,
            None,
            None
        )
        for action, file in results:
            full_filename = os.path.join (path_to_watch, file)
            # Classify after the fact; a deleted path no longer exists.
            if not os.path.exists (full_filename):
                file_type = "<deleted>"
            elif os.path.isdir (full_filename):
                file_type = 'folder'
            else:
                file_type = 'file'
            yield (file_type, full_filename, ACTIONS.get (action, "Unknown"))
class Watcher (threading.Thread):
    """Daemon thread that feeds (file_type, filename, action) tuples from
    one watched path (subdirectories included) into a shared queue.

    The thread starts itself from __init__.
    """
    def __init__ (self, path_to_watch, results_queue, **kwds):
        threading.Thread.__init__ (self, **kwds)
        # Daemon thread: watch_path never returns, so don't block exit.
        self.setDaemon (1)
        self.path_to_watch = path_to_watch
        self.results_queue = results_queue
        self.start ()
    def run (self):
        # Runs forever; True => watch subdirectories as well.
        for result in watch_path (self.path_to_watch, True):
            self.results_queue.put (result)
def sizer (requests_queue, results_queue):
    """Worker loop: for each pathname pulled from requests_queue, put
    (pathname, total size of all files beneath it) on results_queue.
    A None request is the shutdown sentinel (see stop_sizers).
    """
    while True:
        request = requests_queue.get ()
        if request is None:
            print "Stopping..."
            break
        else:
            # winsys fs.flat walks every file below the requested path.
            results_queue.put ((request, sum (f.size for f in fs.flat (request))))
def stop_sizers (sizer_requests, n_sizers):
    """Shut down the sizer worker pool by queueing one ``None`` sentinel
    per worker; each sizer thread exits its loop on receiving one."""
    sentinels = [None] * n_sizers
    for sentinel in sentinels:
        sizer_requests.put (sentinel)
if __name__ == '__main__':
"""If run from the command line, use the thread-based
routine to watch the current directory (default) or
a list of directories specified on the command-line
separated by commas, eg
watch_directory.py c:/temp,c:/
"""
PATH_TO_WATCH = "."
N_SIZERS = 5
try:
path_to_watch = sys.argv[1] or PATH_TO_WATCH
except IndexError:
path_to_watch = PATH_TO_WATCH
path_to_watch = os.path.abspath (path_to_watch)
sizer_requests = Queue.Queue ()
sizer_results = Queue.Queue ()
sizers = [threading.Thread (target=sizer, args=(sizer_requests, sizer_results)) for _ in range (N_SIZERS)]
for sizer in sizers:
sizer.start ()
try:
last_updated = {}
print "Watching %s at %s" % (path_to_watch, time.asctime ())
files_changed = Queue.Queue ()
Watcher (path_to_watch, files_changed)
while True:
top_level_dirs = set ()
while True:
try:
file_type, filename, action = files_changed.get_nowait ()
top_level_dirs.add (filename[len (path_to_watch):].split (os.sep)[1])
except Queue.Empty:
break
for dir in top_level_dirs:
if (datetime.now () - last_updated.get (dir, datetime.min)).seconds > 60:
print "Requesting size of", os.path.abspath (dir)
sizer_requests.put (os.path.abspath (dir))
while True:
try:
dir, size = sizer_results.get_nowait ()
except Queue.Empty:
break
else:
print dir, "=>", size
time.sleep (10)
finally:
stop_sizers (sizer_requests, len (sizers))<|fim▁end|> | from __future__ import generators
import os, sys
import atexit
|
<|file_name|>Raw.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# +---------------------------------------------------------------------------+
# | 01001110 01100101 01110100 01111010 01101111 01100010 |
# | |
# | Netzob : Inferring communication protocols |
# +---------------------------------------------------------------------------+
# | Copyright (C) 2011-2014 Georges Bossert and Frédéric Guihéry |
# | This program is free software: you can redistribute it and/or modify |
# | it under the terms of the GNU General Public License as published by |
# | the Free Software Foundation, either version 3 of the License, or |
# | (at your option) any later version. |
# | |
# | This program is distributed in the hope that it will be useful, |
# | but WITHOUT ANY WARRANTY; without even the implied warranty of |
# | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
# | GNU General Public License for more details. |
# | |
# | You should have received a copy of the GNU General Public License |
# | along with this program. If not, see <http://www.gnu.org/licenses/>. |
# +---------------------------------------------------------------------------+
# | @url : http://www.netzob.org |
# | @contact : [email protected] |
# | @sponsors : Amossys, http://www.amossys.fr |
# | Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/ |
# +---------------------------------------------------------------------------+
# +---------------------------------------------------------------------------+
# | File contributors : |
# | - Georges Bossert <georges.bossert (a) supelec.fr> |
# | - Frédéric Guihéry <frederic.guihery (a) amossys.fr> |
# +---------------------------------------------------------------------------+
# +---------------------------------------------------------------------------+
# | Standard library imports |
# +---------------------------------------------------------------------------+
import random
import os
from bitarray import bitarray
# +---------------------------------------------------------------------------+
# | Related third party imports |
# +---------------------------------------------------------------------------+
# +---------------------------------------------------------------------------+
# | Local application imports |
# +---------------------------------------------------------------------------+
from netzob.Common.Models.Types.AbstractType import AbstractType
class Raw(AbstractType):
"""Raw netzob data type expressed in bytes.
For instance, we can use this type to parse any raw field of 2 bytes:
>>> from netzob.all import *
>>> f = Field(Raw(nbBytes=2))
or with a specific value (default is little endianness)
>>> f = Field(Raw('\x01\x02\x03'))
>>> print f.domain.dataType
Raw='\\x01\\x02\\x03' ((0, 24))
>>> f.domain.dataType.endianness = AbstractType.ENDIAN_BIG
>>> print f.domain.dataType
Raw='\\x01\\x02\\x03' ((0, 24))
"""
def __init__(self, value=None, nbBytes=None, unitSize=AbstractType.defaultUnitSize(), endianness=AbstractType.defaultEndianness(), sign=AbstractType.defaultSign()):
if value is not None and not isinstance(value, bitarray):
from netzob.Common.Models.Types.TypeConverter import TypeConverter
from netzob.Common.Models.Types.BitArray import BitArray
value = TypeConverter.convert(value, Raw, BitArray)
nbBits = self._convertNbBytesinNbBits(nbBytes)
super(Raw, self).__init__(self.__class__.__name__, value, nbBits, unitSize=unitSize, endianness=endianness, sign=sign)
def __str__(self):
if self.value is not None:
from netzob.Common.Models.Types.TypeConverter import TypeConverter
from netzob.Common.Models.Types.BitArray import BitArray
from netzob.Common.Models.Types.HexaString import HexaString
return "{0}={1} ({2})".format(self.typeName, repr(TypeConverter.convert(self.value, BitArray, Raw)), self.size)
else:
return "{0}={1} ({2})".format(self.typeName, self.value, self.size)
def __repr__(self):
if self.value is not None:
from netzob.Common.Models.Types.TypeConverter import TypeConverter
from netzob.Common.Models.Types.BitArray import BitArray
return str(TypeConverter.convert(self.value, BitArray, self.__class__))
else:
return str(self.value)
def _convertNbBytesinNbBits(self, nbBytes):
nbMinBit = None<|fim▁hole|> if isinstance(nbBytes, int):
nbMinBit = nbBytes * 8
nbMaxBit = nbMinBit
else:
if nbBytes[0] is not None:
nbMinBit = nbBytes[0] * 8
if nbBytes[1] is not None:
nbMaxBit = nbBytes[1] * 8
return (nbMinBit, nbMaxBit)
def generate(self, generationStrategy=None):
"""Generates a random Raw that respects the requested size.
>>> from netzob.all import *
>>> a = Raw(nbBytes=(10))
>>> gen = a.generate()
>>> print len(gen)
80
>>> from netzob.all import *
>>> a = Raw(nbBytes=(10, 20))
>>> gen = a.generate()
>>> print 10<=len(gen) and 20<=len(gen)
True
"""
from netzob.Common.Models.Types.TypeConverter import TypeConverter
from netzob.Common.Models.Types.BitArray import BitArray
minSize, maxSize = self.size
if maxSize is None:
maxSize = AbstractType.MAXIMUM_GENERATED_DATA_SIZE
if minSize is None:
minSize = 0
generatedSize = random.randint(minSize, maxSize)
return TypeConverter.convert(os.urandom(generatedSize / 8), Raw, BitArray)
@staticmethod
def decode(data, unitSize=AbstractType.defaultUnitSize(), endianness=AbstractType.defaultEndianness(), sign=AbstractType.defaultSign()):
return data
@staticmethod
def encode(data, unitSize=AbstractType.defaultUnitSize(), endianness=AbstractType.defaultEndianness(), sign=AbstractType.defaultSign()):
return data
@staticmethod
def canParse(data):
"""Computes if specified data can be parsed as raw which is always the case if the data is at least 1 length and aligned on a byte.
>>> from netzob.all import *
>>> Raw.canParse(TypeConverter.convert("hello netzob", ASCII, BitArray))
True
The ascii table is defined from 0 to 127:
>>> Raw.canParse(TypeConverter.convert(128, Decimal, BitArray, src_sign=AbstractType.SIGN_UNSIGNED))
True
:param data: the data to check
:type data: python raw
:return: True if data can be parsed as a Raw which is always the case (if len(data)>0)
:rtype: bool
:raise: TypeError if the data is None
"""
if data is None:
raise TypeError("data cannot be None")
if len(data) == 0:
return False
if len(data) % 8 != 0:
return False
return True<|fim▁end|> | nbMaxBit = None
if nbBytes is not None: |
<|file_name|>RecognizeCleanerML.py<|end_file_name|><|fim▁begin|># vim: ts=4:sw=4:expandtab
# BleachBit
# Copyright (C) 2008-2017 Andrew Ziem
# https://www.bleachbit.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Check local CleanerML files as a security measure
"""
from __future__ import absolute_import, print_function
from bleachbit import _, _p
import bleachbit
from bleachbit.CleanerML import list_cleanerml_files
from bleachbit.Options import options
import hashlib<|fim▁hole|>
logger = logging.getLogger(__name__)
KNOWN = 1
CHANGED = 2
NEW = 3
def cleaner_change_dialog(changes, parent):
"""Present a dialog regarding the change of cleaner definitions"""
def toggled(cell, path, model):
"""Callback for clicking the checkbox"""
__iter = model.get_iter_from_string(path)
value = not model.get_value(__iter, 0)
model.set(__iter, 0, value)
import pygtk
pygtk.require('2.0')
import gtk
dialog = gtk.Dialog(title=_("Security warning"),
parent=parent,
flags=gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
dialog.set_default_size(600, 500)
# create warning
warnbox = gtk.HBox()
image = gtk.Image()
image.set_from_stock(gtk.STOCK_DIALOG_WARNING, gtk.ICON_SIZE_DIALOG)
warnbox.pack_start(image, False)
# TRANSLATORS: Cleaner definitions are XML data files that define
# which files will be cleaned.
label = gtk.Label(
_("These cleaner definitions are new or have changed. Malicious definitions can damage your system. If you do not trust these changes, delete the files or quit."))
label.set_line_wrap(True)
warnbox.pack_start(label, True)
dialog.vbox.pack_start(warnbox, False)
# create tree view
import gobject
liststore = gtk.ListStore(gobject.TYPE_BOOLEAN, gobject.TYPE_STRING)
treeview = gtk.TreeView(model=liststore)
renderer0 = gtk.CellRendererToggle()
renderer0.set_property('activatable', True)
renderer0.connect('toggled', toggled, liststore)
# TRANSLATORS: This is the column label (header) in the tree view for the
# security dialog
treeview.append_column(
gtk.TreeViewColumn(_p('column_label', 'Delete'), renderer0, active=0))
renderer1 = gtk.CellRendererText()
# TRANSLATORS: This is the column label (header) in the tree view for the
# security dialog
treeview.append_column(
gtk.TreeViewColumn(_p('column_label', 'Filename'), renderer1, text=1))
# populate tree view
for change in changes:
liststore.append([False, change[0]])
# populate dialog with widgets
scrolled_window = gtk.ScrolledWindow()
scrolled_window.add_with_viewport(treeview)
dialog.vbox.pack_start(scrolled_window)
dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)
dialog.add_button(gtk.STOCK_QUIT, gtk.RESPONSE_CLOSE)
# run dialog
dialog.show_all()
while True:
if gtk.RESPONSE_ACCEPT != dialog.run():
sys.exit(0)
delete = []
for row in liststore:
b = row[0]
path = row[1]
if b:
delete.append(path)
if 0 == len(delete):
# no files selected to delete
break
import GuiBasic
if not GuiBasic.delete_confirmation_dialog(parent, mention_preview=False):
# confirmation not accepted, so do not delete files
continue
for path in delete:
logger.info("deleting unrecognized CleanerML '%s'", path)
os.remove(path)
break
dialog.destroy()
def hashdigest(string):
"""Return hex digest of hash for a string"""
# hashlib requires Python 2.5
return hashlib.sha512(string).hexdigest()
class RecognizeCleanerML:
"""Check local CleanerML files as a security measure"""
def __init__(self, parent_window=None):
self.parent_window = parent_window
try:
self.salt = options.get('hashsalt')
except bleachbit.NoOptionError:
self.salt = hashdigest(str(random.random()))
options.set('hashsalt', self.salt)
self.__scan()
def __recognized(self, pathname):
"""Is pathname recognized?"""
with open(pathname) as f:
body = f.read()
new_hash = hashdigest(self.salt + body)
try:
known_hash = options.get_hashpath(pathname)
except bleachbit.NoOptionError:
return NEW, new_hash
if new_hash == known_hash:
return KNOWN, new_hash
return CHANGED, new_hash
def __scan(self):
"""Look for files and act accordingly"""
changes = []
for pathname in sorted(list_cleanerml_files(local_only=True)):
pathname = os.path.abspath(pathname)
(status, myhash) = self.__recognized(pathname)
if NEW == status or CHANGED == status:
changes.append([pathname, status, myhash])
if len(changes) > 0:
cleaner_change_dialog(changes, self.parent_window)
for change in changes:
pathname = change[0]
myhash = change[2]
logger.info("remembering CleanerML file '%s'", pathname)
if os.path.exists(pathname):
options.set_hashpath(pathname, myhash)<|fim▁end|> | import logging
import os
import random
import sys |
<|file_name|>pages.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url
from wagtail.admin.views import page_privacy, pages
app_name = 'wagtailadmin_pages'
urlpatterns = [
url(r'^add/(\w+)/(\w+)/(\d+)/$', pages.create, name='add'),
url(r'^add/(\w+)/(\w+)/(\d+)/preview/$', pages.PreviewOnCreate.as_view(), name='preview_on_add'),
url(r'^usage/(\w+)/(\w+)/$', pages.content_type_use, name='type_use'),
url(r'^(\d+)/edit/$', pages.edit, name='edit'),
url(r'^(\d+)/edit/preview/$', pages.PreviewOnEdit.as_view(), name='preview_on_edit'),
url(r'^(\d+)/view_draft/$', pages.view_draft, name='view_draft'),
url(r'^(\d+)/add_subpage/$', pages.add_subpage, name='add_subpage'),
url(r'^(\d+)/delete/$', pages.delete, name='delete'),
url(r'^(\d+)/unpublish/$', pages.unpublish, name='unpublish'),<|fim▁hole|> url(r'^(\d+)/move/(\d+)/$', pages.move_choose_destination, name='move_choose_destination'),
url(r'^(\d+)/move/(\d+)/confirm/$', pages.move_confirm, name='move_confirm'),
url(r'^(\d+)/set_position/$', pages.set_page_position, name='set_page_position'),
url(r'^(\d+)/copy/$', pages.copy, name='copy'),
url(r'^moderation/(\d+)/approve/$', pages.approve_moderation, name='approve_moderation'),
url(r'^moderation/(\d+)/reject/$', pages.reject_moderation, name='reject_moderation'),
url(r'^moderation/(\d+)/preview/$', pages.preview_for_moderation, name='preview_for_moderation'),
url(r'^(\d+)/privacy/$', page_privacy.set_privacy, name='set_privacy'),
url(r'^(\d+)/lock/$', pages.lock, name='lock'),
url(r'^(\d+)/unlock/$', pages.unlock, name='unlock'),
url(r'^(\d+)/revisions/$', pages.revisions_index, name='revisions_index'),
url(r'^(\d+)/revisions/(\d+)/view/$', pages.revisions_view, name='revisions_view'),
url(r'^(\d+)/revisions/(\d+)/revert/$', pages.revisions_revert, name='revisions_revert'),
url(r'^(\d+)/revisions/(\d+)/unschedule/$', pages.revisions_unschedule, name='revisions_unschedule'),
url(r'^(\d+)/revisions/compare/(live|earliest|\d+)\.\.\.(live|latest|\d+)/$', pages.revisions_compare, name='revisions_compare'),
]<|fim▁end|> |
url(r'^search/$', pages.search, name='search'),
url(r'^(\d+)/move/$', pages.move_choose_destination, name='move'), |
<|file_name|>webglframebuffer.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl
use canvas_traits::CanvasMsg;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::WebGLFramebufferBinding;
use dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLRenderingContextConstants as constants;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::reflect_dom_object;
use dom::webglobject::WebGLObject;
use dom::webglrenderbuffer::WebGLRenderbuffer;
use dom::webgltexture::WebGLTexture;
use dom::window::Window;
use dom_struct::dom_struct;
use ipc_channel::ipc::IpcSender;
use std::cell::Cell;
use webrender_api;
use webrender_api::{WebGLCommand, WebGLFramebufferBindingRequest, WebGLFramebufferId, WebGLResult, WebGLError};
#[must_root]
#[derive(JSTraceable, Clone, HeapSizeOf)]
enum WebGLFramebufferAttachment {
Renderbuffer(JS<WebGLRenderbuffer>),
Texture { texture: JS<WebGLTexture>, level: i32 },
}
#[dom_struct]
pub struct WebGLFramebuffer {
webgl_object: WebGLObject,
id: WebGLFramebufferId,
/// target can only be gl::FRAMEBUFFER at the moment
target: Cell<Option<u32>>,
is_deleted: Cell<bool>,
size: Cell<Option<(i32, i32)>>,
status: Cell<u32>,
#[ignore_heap_size_of = "Defined in ipc-channel"]
renderer: IpcSender<CanvasMsg>,
// The attachment points for textures and renderbuffers on this
// FBO.
color: DOMRefCell<Option<WebGLFramebufferAttachment>>,
depth: DOMRefCell<Option<WebGLFramebufferAttachment>>,
stencil: DOMRefCell<Option<WebGLFramebufferAttachment>>,
depthstencil: DOMRefCell<Option<WebGLFramebufferAttachment>>,
}
impl WebGLFramebuffer {
fn new_inherited(renderer: IpcSender<CanvasMsg>,
id: WebGLFramebufferId)
-> WebGLFramebuffer {
WebGLFramebuffer {
webgl_object: WebGLObject::new_inherited(),
id: id,
target: Cell::new(None),
is_deleted: Cell::new(false),
renderer: renderer,
size: Cell::new(None),
status: Cell::new(constants::FRAMEBUFFER_UNSUPPORTED),
color: DOMRefCell::new(None),
depth: DOMRefCell::new(None),
stencil: DOMRefCell::new(None),
depthstencil: DOMRefCell::new(None),
}
}
pub fn maybe_new(window: &Window, renderer: IpcSender<CanvasMsg>)
-> Option<Root<WebGLFramebuffer>> {
let (sender, receiver) = webrender_api::channel::msg_channel().unwrap();
renderer.send(CanvasMsg::WebGL(WebGLCommand::CreateFramebuffer(sender))).unwrap();
let result = receiver.recv().unwrap();
result.map(|fb_id| WebGLFramebuffer::new(window, renderer, fb_id))
}
pub fn new(window: &Window,
renderer: IpcSender<CanvasMsg>,
id: WebGLFramebufferId)
-> Root<WebGLFramebuffer> {
reflect_dom_object(box WebGLFramebuffer::new_inherited(renderer, id),
window,
WebGLFramebufferBinding::Wrap)
}
}
impl WebGLFramebuffer {
pub fn id(&self) -> WebGLFramebufferId {
self.id
}
pub fn bind(&self, target: u32) {
// Update the framebuffer status on binding. It may have
// changed if its attachments were resized or deleted while
// we've been unbound.
self.update_status();
self.target.set(Some(target));
let cmd = WebGLCommand::BindFramebuffer(target, WebGLFramebufferBindingRequest::Explicit(self.id));
self.renderer.send(CanvasMsg::WebGL(cmd)).unwrap();
}
pub fn delete(&self) {
if !self.is_deleted.get() {
self.is_deleted.set(true);
let _ = self.renderer.send(CanvasMsg::WebGL(WebGLCommand::DeleteFramebuffer(self.id)));
}
}
pub fn is_deleted(&self) -> bool {
self.is_deleted.get()
}
pub fn size(&self) -> Option<(i32, i32)> {
self.size.get()
}
fn update_status(&self) {
let c = self.color.borrow();
let z = self.depth.borrow();
let s = self.stencil.borrow();
let zs = self.depthstencil.borrow();
let has_c = c.is_some();
let has_z = z.is_some();
let has_s = s.is_some();
let has_zs = zs.is_some();
let attachments = [&*c, &*z, &*s, &*zs];
// From the WebGL spec, 6.6 ("Framebuffer Object Attachments"):
//
// "In the WebGL API, it is an error to concurrently attach
// renderbuffers to the following combinations of
// attachment points:
//
// DEPTH_ATTACHMENT + DEPTH_STENCIL_ATTACHMENT
// STENCIL_ATTACHMENT + DEPTH_STENCIL_ATTACHMENT
// DEPTH_ATTACHMENT + STENCIL_ATTACHMENT
//
// If any of the constraints above are violated, then:
//
// checkFramebufferStatus must return FRAMEBUFFER_UNSUPPORTED."
if (has_zs && (has_z || has_s)) ||
(has_z && has_s) {
self.status.set(constants::FRAMEBUFFER_UNSUPPORTED);
return;
}
let mut fb_size = None;
for attachment in &attachments {
// Get the size of this attachment.
let size = match **attachment {
Some(WebGLFramebufferAttachment::Renderbuffer(ref att_rb)) => {
att_rb.size()
}
Some(WebGLFramebufferAttachment::Texture { texture: ref att_tex, level } ) => {
let info = att_tex.image_info_at_face(0, level as u32);
Some((info.width() as i32, info.height() as i32))
}
None => None,
};
// Make sure that, if we've found any other attachment,
// that the size matches.
if size.is_some() {
if fb_size.is_some() && size != fb_size {
self.status.set(constants::FRAMEBUFFER_INCOMPLETE_DIMENSIONS);
return;
} else {
fb_size = size;
}
}
}
self.size.set(fb_size);
if has_c || has_z || has_zs || has_s {
self.status.set(constants::FRAMEBUFFER_COMPLETE);
} else {
self.status.set(constants::FRAMEBUFFER_UNSUPPORTED);
}
}
pub fn check_status(&self) -> u32 {
return self.status.get();
}
pub fn renderbuffer(&self, attachment: u32, rb: Option<&WebGLRenderbuffer>) -> WebGLResult<()> {
let binding = match attachment {
constants::COLOR_ATTACHMENT0 => &self.color,
constants::DEPTH_ATTACHMENT => &self.depth,
constants::STENCIL_ATTACHMENT => &self.stencil,
constants::DEPTH_STENCIL_ATTACHMENT => &self.depthstencil,
_ => return Err(WebGLError::InvalidEnum),
};
let rb_id = match rb {
Some(rb) => {
*binding.borrow_mut() = Some(WebGLFramebufferAttachment::Renderbuffer(JS::from_ref(rb)));
Some(rb.id())
}
_ => {
*binding.borrow_mut() = None;
None
}
};
self.renderer.send(CanvasMsg::WebGL(WebGLCommand::FramebufferRenderbuffer(constants::FRAMEBUFFER,
attachment,
constants::RENDERBUFFER,
rb_id))).unwrap();
self.update_status();
Ok(())
}
pub fn texture2d(&self, attachment: u32, textarget: u32, texture: Option<&WebGLTexture>,
level: i32) -> WebGLResult<()> {
let binding = match attachment {
constants::COLOR_ATTACHMENT0 => &self.color,
constants::DEPTH_ATTACHMENT => &self.depth,
constants::STENCIL_ATTACHMENT => &self.stencil,
constants::DEPTH_STENCIL_ATTACHMENT => &self.depthstencil,
_ => return Err(WebGLError::InvalidEnum),
};
let tex_id = match texture {
// Note, from the GLES 2.0.25 spec, page 113:
// "If texture is zero, then textarget and level are ignored."
Some(texture) => {
// From the GLES 2.0.25 spec, page 113:
//
// "level specifies the mipmap level of the texture image<|fim▁hole|> }
// "If texture is not zero, then texture must either
// name an existing texture object with an target of
// textarget, or texture must name an existing cube
// map texture and textarget must be one of:
// TEXTURE_CUBE_MAP_POSITIVE_X,
// TEXTURE_CUBE_MAP_POSITIVE_Y,
// TEXTURE_CUBE_MAP_POSITIVE_Z,
// TEXTURE_CUBE_MAP_NEGATIVE_X,
// TEXTURE_CUBE_MAP_NEGATIVE_Y, or
// TEXTURE_CUBE_MAP_NEGATIVE_Z. Otherwise,
// INVALID_OPERATION is generated."
let is_cube = match textarget {
constants::TEXTURE_2D => false,
constants::TEXTURE_CUBE_MAP_POSITIVE_X => true,
constants::TEXTURE_CUBE_MAP_POSITIVE_Y => true,
constants::TEXTURE_CUBE_MAP_POSITIVE_Z => true,
constants::TEXTURE_CUBE_MAP_NEGATIVE_X => true,
constants::TEXTURE_CUBE_MAP_NEGATIVE_Y => true,
constants::TEXTURE_CUBE_MAP_NEGATIVE_Z => true,
_ => return Err(WebGLError::InvalidEnum),
};
match texture.target() {
Some(constants::TEXTURE_CUBE_MAP) if is_cube => {}
Some(_) if !is_cube => {}
_ => return Err(WebGLError::InvalidOperation),
}
*binding.borrow_mut() = Some(WebGLFramebufferAttachment::Texture {
texture: JS::from_ref(texture),
level: level }
);
Some(texture.id())
}
_ => {
*binding.borrow_mut() = None;
None
}
};
self.renderer.send(CanvasMsg::WebGL(WebGLCommand::FramebufferTexture2D(constants::FRAMEBUFFER,
attachment,
textarget,
tex_id,
level))).unwrap();
self.update_status();
Ok(())
}
fn with_matching_renderbuffers<F>(&self, rb: &WebGLRenderbuffer, mut closure: F)
where F: FnMut(&DOMRefCell<Option<WebGLFramebufferAttachment>>)
{
let attachments = [&self.color,
&self.depth,
&self.stencil,
&self.depthstencil];
for attachment in &attachments {
let matched = {
match *attachment.borrow() {
Some(WebGLFramebufferAttachment::Renderbuffer(ref att_rb))
if rb.id() == att_rb.id() => true,
_ => false,
}
};
if matched {
closure(attachment);
}
}
}
fn with_matching_textures<F>(&self, texture: &WebGLTexture, mut closure: F)
where F: FnMut(&DOMRefCell<Option<WebGLFramebufferAttachment>>)
{
let attachments = [&self.color,
&self.depth,
&self.stencil,
&self.depthstencil];
for attachment in &attachments {
let matched = {
match *attachment.borrow() {
Some(WebGLFramebufferAttachment::Texture { texture: ref att_texture, .. })
if texture.id() == att_texture.id() => true,
_ => false,
}
};
if matched {
closure(attachment);
}
}
}
pub fn detach_renderbuffer(&self, rb: &WebGLRenderbuffer) {
self.with_matching_renderbuffers(rb, |att| {
*att.borrow_mut() = None;
self.update_status();
});
}
pub fn detach_texture(&self, texture: &WebGLTexture) {
self.with_matching_textures(texture, |att| {
*att.borrow_mut() = None;
self.update_status();
});
}
pub fn invalidate_renderbuffer(&self, rb: &WebGLRenderbuffer) {
self.with_matching_renderbuffers(rb, |_att| {
self.update_status();
});
}
pub fn invalidate_texture(&self, texture: &WebGLTexture) {
self.with_matching_textures(texture, |_att| {
self.update_status();
});
}
pub fn target(&self) -> Option<u32> {
self.target.get()
}
}
impl Drop for WebGLFramebuffer {
fn drop(&mut self) {
self.delete();
}
}<|fim▁end|> | // to be attached to the framebuffer and must be
// 0. Otherwise, INVALID_VALUE is generated."
if level != 0 {
return Err(WebGLError::InvalidValue); |
<|file_name|>mount.rs<|end_file_name|><|fim▁begin|>// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use base::prelude::*;
use core::{mem};
use core::ops::{BitOr, Not, BitAnd};
use cty::{c_ulong, MS_RDONLY, MS_NOSUID, MS_NODEV, MS_NOEXEC, MS_SYNCHRONOUS,
MS_REMOUNT, MS_MANDLOCK, MS_DIRSYNC, MS_NOATIME, MS_NODIRATIME,
MS_BIND, MS_MOVE, MS_REC, MS_SILENT, MS_POSIXACL, MS_UNBINDABLE, MS_LAZYTIME,
MS_PRIVATE, MS_SLAVE, MS_SHARED, MS_STRICTATIME, PATH_MAX};
use fmt::{Debug, Write};
use syscall::{self};
use rmo::{ToRmo};
use str_one::{CStr};
use str_two::{CString};
use {rmo_cstr, Pool};
/// Mounts a filesystem.
///
/// [argument, src]
/// The file that will be mounted.
///
/// [argument, dst]
/// The point at which it will be mounted.
///
/// [argument, ty]
/// The type of the filesystem.
///
/// [argument, flags]
/// The flags to be used to mount the filesystem.
///
/// [argument, data]
/// Filesystem dependent data.
///
/// = Remarks
///
/// :flags: link:lrs::fs::flags
///
/// See {flags} for pre-defined mount flags.
///
/// = Examples
///
/// The following example bind-mounts a directory `a` read-only at the path `b`. Both
/// paths must exist in the current working directory and the example must be executed as
/// root.
///
/// ----
/// mount("a", "b", "", MOUNT_READ_ONLY | MOUNT_BIND, "").unwrap();
/// ----
///
/// The example in link:lrs::fs::unmount[unmount] shows how to perform the unmount
/// operation.
///
/// = See also
///
/// * link:man:mount(2)
/// * {flags}
pub fn mount<P, Q, R, S>(src: P, dst: Q, ty: R, flags: MountFlags, data: S) -> Result
where P: for<'a> ToRmo<Pool<'a>, CStr, CString<Pool<'a>>>,
Q: for<'a> ToRmo<Pool<'a>, CStr, CString<Pool<'a>>>,
R: for<'a> ToRmo<Pool<'a>, CStr, CString<Pool<'a>>>,
S: for<'a> ToRmo<Pool<'a>, CStr, CString<Pool<'a>>>,
{
let mut buf1: [d8; PATH_MAX] = unsafe { mem::uninit() };
let mut buf2: [d8; PATH_MAX] = unsafe { mem::uninit() };
let mut buf3: [d8; 256] = unsafe { mem::uninit() };
let mut buf4: [d8; 256] = unsafe { mem::uninit() };
let src = try!(rmo_cstr(&src, &mut buf1));
let dst = try!(rmo_cstr(&dst, &mut buf2));
let ty = try!(rmo_cstr(&ty, &mut buf3));
let data = try!(rmo_cstr(&data, &mut buf4));
rv!(syscall::mount(&src, &dst, &ty, flags.0, &data))
}
/// Flags used when mounting a filesystem.
///<|fim▁hole|>/// :flags: link:lrs::fs::flags
///
/// See {flags} for pre-defined mount flags.
///
/// = See also
///
/// * flags
pub struct MountFlags(c_ulong);
impl BitOr for MountFlags {
type Output = MountFlags;
fn bitor(self, other: MountFlags) -> MountFlags {
MountFlags(self.0 | other.0)
}
}
impl BitAnd for MountFlags {
type Output = MountFlags;
fn bitand(self, other: MountFlags) -> MountFlags {
MountFlags(self.0 & other.0)
}
}
impl Not for MountFlags {
type Output = MountFlags;
fn not(self) -> MountFlags {
MountFlags(!self.0)
}
}
pub const MOUNT_NONE: MountFlags = MountFlags(0);
macro_rules! create {
($($(#[$meta:meta])* flag $name:ident = $val:expr;)*) => {
$($(#[$meta])* pub const $name: MountFlags = MountFlags($val);)*
impl Debug for MountFlags {
fn fmt<W: Write>(&self, w: &mut W) -> Result {
let mut first = true;
$(
if self.0 & $val != 0 {
if !first { try!(w.write(b"|")); }
first = false;
try!(w.write_all(stringify!($name).as_bytes()));
}
)*
let _ = first;
Ok(())
}
}
}
}
create! {
#[doc = "Mount the filesystem read-only.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_RDONLY therein"]
flag MOUNT_READ_ONLY = MS_RDONLY;
#[doc = "Don't respect set-user-id and set-group-id flags on the filesystem.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_NOSUID therein"]
flag MOUNT_NO_SET_ID = MS_NOSUID;
#[doc = "Don't allow access to devices on this filesystem.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_NODEV therein"]
flag MOUNT_NO_DEVICE_ACCESS = MS_NODEV;
#[doc = "Don't allow execution of programs on this filesystem.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_NOEXEC therein"]
flag MOUNT_NO_EXEC = MS_NOEXEC;
#[doc = "Flush all data and meta-data changes to this filesystem to the disk \
immediately.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_SYNCHRONOUS therein"]
flag MOUNT_SYNC = MS_SYNCHRONOUS;
#[doc = "Perform a remount operation.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_REMOUNT therein"]
flag MOUNT_REMOUNT = MS_REMOUNT;
#[doc = "Allow mandatory locking on the monut point.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_MANBLOCK therein"]
flag MOUNT_MANDATORY_LOCKING = MS_MANDLOCK;
#[doc = "Make directory changes on this filesystem synchonous.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_DIRSYNC therein"]
flag MOUNT_DIR_SYNC = MS_DIRSYNC;
#[doc = "Don't update the access times of files on this filesystem.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_NOATIME therein"]
flag MOUNT_NO_ACCESS_TIME = MS_NOATIME;
#[doc = "Don't update the access times of directories on this filesystem.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_NODIRATIME therein"]
flag MOUNT_NO_DIR_ACCESS_TIME = MS_NODIRATIME;
#[doc = "Perform a bind operation.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_BIND therein"]
flag MOUNT_BIND = MS_BIND;
#[doc = "Atomically move a mount to another mount point.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_MOVE therein"]
flag MOUNT_MOVE = MS_MOVE;
#[doc = "Not documented."]
flag MOUNT_REC = MS_REC;
#[doc = "Omit certain warning messages from the kernel log.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_SILENT therein"]
flag MOUNT_SILENT = MS_SILENT;
#[doc = "Not documented."]
flag MOUNT_POSIX_ACL = MS_POSIXACL;
#[doc = "Not documented."]
flag MOUNT_UNBINDABLE = MS_UNBINDABLE;
#[doc = "Not documented."]
flag MOUNT_PRIVATE = MS_PRIVATE;
#[doc = "Not documented."]
flag MOUNT_SLAVE = MS_SLAVE;
#[doc = "Not documented."]
flag MOUNT_SHARED = MS_SHARED;
#[doc = "Perform an access time update after every access.\n"]
#[doc = "= See also"]
#[doc = "* link:man:mount(2) and MS_STRICTATIME therein"]
flag MOUNT_STRICT_ACCESS_TIME = MS_STRICTATIME;
#[doc = "Maintain changes to access/modification/status-change times in memory and \
only update the inodes under special circumstances.\n"]
#[doc = "= Remarks"]
#[doc = ":lazy: link:man:mount(2)"]
#[doc = "See the {lazy}[manual page] and MS_LAZYTIME therein for the details.\n"]
#[doc = "== Kernel versions"]
#[doc = "The required kernel version is 4.0.\n"]
#[doc = "= See also"]
#[doc = "* {lazy} and MS_LAZYTIME therein"]
flag MOUNT_LAZY_TIME = MS_LAZYTIME;
}
impl MountFlags {
/// Sets a flag.
///
/// [argument, flag]
/// The flag to be set.
pub fn set(&mut self, flag: MountFlags) {
self.0 |= flag.0
}
/// Clears a flag.
///
/// [argument, flag]
/// The flag to be cleared.
pub fn unset(&mut self, flag: MountFlags) {
self.0 &= !flag.0
}
/// Returns whether a flag is set.
///
/// [argument, flag]
/// The flag to be checked.
pub fn is_set(&self, flag: MountFlags) -> bool {
self.0 & flag.0 != 0
}
}<|fim▁end|> | /// = Remarks
/// |
<|file_name|>PDMDevHlp.cpp<|end_file_name|><|fim▁begin|>/* $Id: PDMDevHlp.cpp $ */
/** @file
* PDM - Pluggable Device and Driver Manager, Device Helpers.
*/
/*
* Copyright (C) 2006-2015 Oracle Corporation
*
* This file is part of VirtualBox Open Source Edition (OSE), as
* available from http://www.virtualbox.org. This file is free software;
* you can redistribute it and/or modify it under the terms of the GNU
* General Public License (GPL) as published by the Free Software
* Foundation, in version 2 as it comes in the "COPYING" file of the
* VirtualBox OSE distribution. VirtualBox OSE is distributed in the
* hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
*/
/*******************************************************************************
* Header Files *
*******************************************************************************/
#define LOG_GROUP LOG_GROUP_PDM_DEVICE
#include "PDMInternal.h"
#include <VBox/vmm/pdm.h>
#include <VBox/vmm/mm.h>
#include <VBox/vmm/hm.h>
#include <VBox/vmm/pgm.h>
#include <VBox/vmm/iom.h>
#ifdef VBOX_WITH_REM
# include <VBox/vmm/rem.h>
#endif
#include <VBox/vmm/dbgf.h>
#include <VBox/vmm/vmapi.h>
#include <VBox/vmm/vm.h>
#include <VBox/vmm/uvm.h>
#include <VBox/vmm/vmm.h>
#include <VBox/version.h>
#include <VBox/log.h>
#include <VBox/err.h>
#include <iprt/asm.h>
#include <iprt/assert.h>
#include <iprt/ctype.h>
#include <iprt/string.h>
#include <iprt/thread.h>
#include "dtrace/VBoxVMM.h"
#include "PDMInline.h"
/*******************************************************************************
* Defined Constants And Macros *
*******************************************************************************/
/** @def PDM_DEVHLP_DEADLOCK_DETECTION
* Define this to enable the deadlock detection when accessing physical memory.
*/
#if /*defined(DEBUG_bird) ||*/ defined(DOXYGEN_RUNNING)
# define PDM_DEVHLP_DEADLOCK_DETECTION /**< @todo enable DevHlp deadlock detection! */
#endif
/**
* Wrapper around PDMR3LdrGetSymbolRCLazy.
*/
DECLINLINE(int) pdmR3DevGetSymbolRCLazy(PPDMDEVINS pDevIns, const char *pszSymbol, PRTRCPTR ppvValue)
{
PVM pVM = pDevIns->Internal.s.pVMR3;
if (HMIsEnabled(pVM))
{
*ppvValue = NIL_RTRCPTR;
return VINF_SUCCESS;
}
return PDMR3LdrGetSymbolRCLazy(pVM,
pDevIns->Internal.s.pDevR3->pReg->szRCMod,
pDevIns->Internal.s.pDevR3->pszRCSearchPath,
pszSymbol, ppvValue);
}
/**
* Wrapper around PDMR3LdrGetSymbolR0Lazy.
*/
DECLINLINE(int) pdmR3DevGetSymbolR0Lazy(PPDMDEVINS pDevIns, const char *pszSymbol, PRTR0PTR ppvValue)
{
return PDMR3LdrGetSymbolR0Lazy(pDevIns->Internal.s.pVMR3,
pDevIns->Internal.s.pDevR3->pReg->szR0Mod,
pDevIns->Internal.s.pDevR3->pszR0SearchPath,
pszSymbol, ppvValue);
}
/** @name R3 DevHlp
* @{
*/
/** @interface_method_impl{PDMDEVHLPR3,pfnIOPortRegister} */
static DECLCALLBACK(int) pdmR3DevHlp_IOPortRegister(PPDMDEVINS pDevIns, RTIOPORT Port, RTIOPORT cPorts, RTHCPTR pvUser, PFNIOMIOPORTOUT pfnOut, PFNIOMIOPORTIN pfnIn,
PFNIOMIOPORTOUTSTRING pfnOutStr, PFNIOMIOPORTINSTRING pfnInStr, const char *pszDesc)
{
PDMDEV_ASSERT_DEVINS(pDevIns);
LogFlow(("pdmR3DevHlp_IOPortRegister: caller='%s'/%d: Port=%#x cPorts=%#x pvUser=%p pfnOut=%p pfnIn=%p pfnOutStr=%p pfnInStr=%p p32_tszDesc=%p:{%s}\n", pDevIns->pReg->szName, pDevIns->iInstance,
Port, cPorts, pvUser, pfnOut, pfnIn, pfnOutStr, pfnInStr, pszDesc, pszDesc));
VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
#if 0 /** @todo needs a real string cache for this */
if (pDevIns->iInstance > 0)
{
char *pszDesc2 = MMR3HeapAPrintf(pVM, MM_TAG_PDM_DEVICE_DESC, "%s [%u]", pszDesc, pDevIns->iInstance);
if (pszDesc2)
pszDesc = pszDesc2;
}
#endif
int rc = IOMR3IOPortRegisterR3(pDevIns->Internal.s.pVMR3, pDevIns, Port, cPorts, pvUser,
pfnOut, pfnIn, pfnOutStr, pfnInStr, pszDesc);
LogFlow(("pdmR3DevHlp_IOPortRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnIOPortRegisterRC}
 *
 * Registers raw-mode context I/O port handlers by *symbol name*; the names
 * are resolved in the device's RC module before being handed to IOM.
 * Skipped silently (VINF_SUCCESS) when HM is active, since no RC exists then.
 */
static DECLCALLBACK(int) pdmR3DevHlp_IOPortRegisterRC(PPDMDEVINS pDevIns, RTIOPORT Port, RTIOPORT cPorts, RTRCPTR pvUser,
                                                      const char *pszOut, const char *pszIn,
                                                      const char *pszOutStr, const char *pszInStr, const char *pszDesc)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_IOPortRegisterRC: caller='%s'/%d: Port=%#x cPorts=%#x pvUser=%p pszOut=%p:{%s} pszIn=%p:{%s} pszOutStr=%p:{%s} pszInStr=%p:{%s} pszDesc=%p:{%s}\n", pDevIns->pReg->szName, pDevIns->iInstance,
             Port, cPorts, pvUser, pszOut, pszOut, pszIn, pszIn, pszOutStr, pszOutStr, pszInStr, pszInStr, pszDesc, pszDesc));
    /*
     * Resolve the functions (any of them can be NULL).
     * Resolution stops at the first failure; rc carries the first error.
     */
    int rc = VINF_SUCCESS;
    if (   pDevIns->pReg->szRCMod[0]
        && (pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_RC)
        && !HMIsEnabled(pVM))
    {
        RTRCPTR RCPtrIn = NIL_RTRCPTR;
        if (pszIn)
        {
            rc = pdmR3DevGetSymbolRCLazy(pDevIns, pszIn, &RCPtrIn);
            AssertMsgRC(rc, ("Failed to resolve %s.%s (pszIn)\n", pDevIns->pReg->szRCMod, pszIn));
        }
        RTRCPTR RCPtrOut = NIL_RTRCPTR;
        if (pszOut && RT_SUCCESS(rc))
        {
            rc = pdmR3DevGetSymbolRCLazy(pDevIns, pszOut, &RCPtrOut);
            AssertMsgRC(rc, ("Failed to resolve %s.%s (pszOut)\n", pDevIns->pReg->szRCMod, pszOut));
        }
        RTRCPTR RCPtrInStr = NIL_RTRCPTR;
        if (pszInStr && RT_SUCCESS(rc))
        {
            rc = pdmR3DevGetSymbolRCLazy(pDevIns, pszInStr, &RCPtrInStr);
            AssertMsgRC(rc, ("Failed to resolve %s.%s (pszInStr)\n", pDevIns->pReg->szRCMod, pszInStr));
        }
        RTRCPTR RCPtrOutStr = NIL_RTRCPTR;
        if (pszOutStr && RT_SUCCESS(rc))
        {
            rc = pdmR3DevGetSymbolRCLazy(pDevIns, pszOutStr, &RCPtrOutStr);
            AssertMsgRC(rc, ("Failed to resolve %s.%s (pszOutStr)\n", pDevIns->pReg->szRCMod, pszOutStr));
        }
        if (RT_SUCCESS(rc))
        {
#if 0 /** @todo needs a real string cache for this */
            if (pDevIns->iInstance > 0)
            {
                char *pszDesc2 = MMR3HeapAPrintf(pVM, MM_TAG_PDM_DEVICE_DESC, "%s [%u]", pszDesc, pDevIns->iInstance);
                if (pszDesc2)
                    pszDesc = pszDesc2;
            }
#endif
            /* All symbols resolved; register the RC range with IOM. */
            rc = IOMR3IOPortRegisterRC(pVM, pDevIns, Port, cPorts, pvUser, RCPtrOut, RCPtrIn, RCPtrOutStr, RCPtrInStr, pszDesc);
        }
    }
    else if (!HMIsEnabled(pVM))
    {
        /* Raw-mode is in use but this device has no RC module - caller bug. */
        AssertMsgFailed(("No RC module for this driver!\n"));
        rc = VERR_INVALID_PARAMETER;
    }
    LogFlow(("pdmR3DevHlp_IOPortRegisterRC: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnIOPortRegisterR0}
 *
 * Registers ring-0 I/O port handlers by *symbol name*; the names are
 * resolved in the device's R0 module before being handed to IOM.
 */
static DECLCALLBACK(int) pdmR3DevHlp_IOPortRegisterR0(PPDMDEVINS pDevIns, RTIOPORT Port, RTIOPORT cPorts, RTR0PTR pvUser,
                                                      const char *pszOut, const char *pszIn,
                                                      const char *pszOutStr, const char *pszInStr, const char *pszDesc)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    LogFlow(("pdmR3DevHlp_IOPortRegisterR0: caller='%s'/%d: Port=%#x cPorts=%#x pvUser=%p pszOut=%p:{%s} pszIn=%p:{%s} pszOutStr=%p:{%s} pszInStr=%p:{%s} pszDesc=%p:{%s}\n", pDevIns->pReg->szName, pDevIns->iInstance,
             Port, cPorts, pvUser, pszOut, pszOut, pszIn, pszIn, pszOutStr, pszOutStr, pszInStr, pszInStr, pszDesc, pszDesc));
    /*
     * Resolve the functions (any of them can be NULL).
     * Resolution stops at the first failure; rc carries the first error.
     */
    int rc = VINF_SUCCESS;
    if (   pDevIns->pReg->szR0Mod[0]
        && (pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_R0))
    {
        R0PTRTYPE(PFNIOMIOPORTIN) pfnR0PtrIn = 0;
        if (pszIn)
        {
            rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pszIn, &pfnR0PtrIn);
            AssertMsgRC(rc, ("Failed to resolve %s.%s (pszIn)\n", pDevIns->pReg->szR0Mod, pszIn));
        }
        R0PTRTYPE(PFNIOMIOPORTOUT) pfnR0PtrOut = 0;
        if (pszOut && RT_SUCCESS(rc))
        {
            rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pszOut, &pfnR0PtrOut);
            AssertMsgRC(rc, ("Failed to resolve %s.%s (pszOut)\n", pDevIns->pReg->szR0Mod, pszOut));
        }
        R0PTRTYPE(PFNIOMIOPORTINSTRING) pfnR0PtrInStr = 0;
        if (pszInStr && RT_SUCCESS(rc))
        {
            rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pszInStr, &pfnR0PtrInStr);
            AssertMsgRC(rc, ("Failed to resolve %s.%s (pszInStr)\n", pDevIns->pReg->szR0Mod, pszInStr));
        }
        R0PTRTYPE(PFNIOMIOPORTOUTSTRING) pfnR0PtrOutStr = 0;
        if (pszOutStr && RT_SUCCESS(rc))
        {
            rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pszOutStr, &pfnR0PtrOutStr);
            AssertMsgRC(rc, ("Failed to resolve %s.%s (pszOutStr)\n", pDevIns->pReg->szR0Mod, pszOutStr));
        }
        if (RT_SUCCESS(rc))
        {
#if 0 /** @todo needs a real string cache for this */
            if (pDevIns->iInstance > 0)
            {
                char *pszDesc2 = MMR3HeapAPrintf(pVM, MM_TAG_PDM_DEVICE_DESC, "%s [%u]", pszDesc, pDevIns->iInstance);
                if (pszDesc2)
                    pszDesc = pszDesc2;
            }
#endif
            /* All symbols resolved; register the R0 range with IOM. */
            rc = IOMR3IOPortRegisterR0(pDevIns->Internal.s.pVMR3, pDevIns, Port, cPorts, pvUser, pfnR0PtrOut, pfnR0PtrIn, pfnR0PtrOutStr, pfnR0PtrInStr, pszDesc);
        }
    }
    else
    {
        /* Device asked for R0 registration without declaring an R0 module. */
        AssertMsgFailed(("No R0 module for this driver!\n"))
;
        rc = VERR_INVALID_PARAMETER;
    }
    LogFlow(("pdmR3DevHlp_IOPortRegisterR0: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnIOPortDeregister}
 *
 * Removes the I/O port range [Port, Port + cPorts) previously registered
 * by this device instance.
 */
static DECLCALLBACK(int) pdmR3DevHlp_IOPortDeregister(PPDMDEVINS pDevIns, RTIOPORT Port, RTIOPORT cPorts)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    LogFlow(("pdmR3DevHlp_IOPortDeregister: caller='%s'/%d: Port=%#x cPorts=%#x\n", pDevIns->pReg->szName, pDevIns->iInstance, Port, cPorts));

    /* IOM owns the registration tables; let it do the removal. */
    int rcIom = IOMR3IOPortDeregister(pDevIns->Internal.s.pVMR3, pDevIns, Port, cPorts);

    LogFlow(("pdmR3DevHlp_IOPortDeregister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rcIom));
    return rcIom;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnMMIORegister}
 *
 * Registers a ring-3 MMIO range with IOM.  For secondary device instances
 * the description is suffixed with " [N]" so ranges are distinguishable.
 */
static DECLCALLBACK(int) pdmR3DevHlp_MMIORegister(PPDMDEVINS pDevIns, RTGCPHYS GCPhysStart, uint32_t cbRange, RTHCPTR pvUser,
                                                  PFNIOMMMIOWRITE pfnWrite, PFNIOMMMIOREAD pfnRead, PFNIOMMMIOFILL pfnFill,
                                                  uint32_t fFlags, const char *pszDesc)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    /* Fixed: fFlags and pszDesc were passed in the wrong order for the
       'fFlags=%#x pszDesc=%p:{%s}' conversions below. */
    LogFlow(("pdmR3DevHlp_MMIORegister: caller='%s'/%d: GCPhysStart=%RGp cbRange=%#x pvUser=%p pfnWrite=%p pfnRead=%p pfnFill=%p fFlags=%#x pszDesc=%p:{%s}\n",
             pDevIns->pReg->szName, pDevIns->iInstance, GCPhysStart, cbRange, pvUser, pfnWrite, pfnRead, pfnFill, fFlags, pszDesc, pszDesc));

    /* Mangle the description for instance > 0 so it is unique per instance. */
    if (pDevIns->iInstance > 0)
    {
        char *pszDesc2 = MMR3HeapAPrintf(pVM, MM_TAG_PDM_DEVICE_DESC, "%s [%u]", pszDesc, pDevIns->iInstance);
        if (pszDesc2)
            pszDesc = pszDesc2;
    }

    int rc = IOMR3MmioRegisterR3(pVM, pDevIns, GCPhysStart, cbRange, pvUser,
                                 pfnWrite, pfnRead, pfnFill, fFlags, pszDesc);

    LogFlow(("pdmR3DevHlp_MMIORegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnMMIORegisterRC}
 *
 * Registers raw-mode MMIO handlers by *symbol name*, resolving them in the
 * device's RC module.  Skipped silently (VINF_SUCCESS) when HM is active.
 */
static DECLCALLBACK(int) pdmR3DevHlp_MMIORegisterRC(PPDMDEVINS pDevIns, RTGCPHYS GCPhysStart, uint32_t cbRange, RTRCPTR pvUser,
                                                    const char *pszWrite, const char *pszRead, const char *pszFill)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_MMIORegisterRC: caller='%s'/%d: GCPhysStart=%RGp cbRange=%#x pvUser=%p pszWrite=%p:{%s} pszRead=%p:{%s} pszFill=%p:{%s}\n",
             pDevIns->pReg->szName, pDevIns->iInstance, GCPhysStart, cbRange, pvUser, pszWrite, pszWrite, pszRead, pszRead, pszFill, pszFill));
    /*
     * Resolve the functions.
     * Not all functions have to be present; leave it to IOM to enforce this.
     * rc/rc2/rc3 track the three lookups independently; the first failure
     * (in write/read/fill order) becomes the returned status.
     */
    int rc = VINF_SUCCESS;
    if (   pDevIns->pReg->szRCMod[0]
        && (pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_RC)
        && !HMIsEnabled(pVM))
    {
        RTRCPTR RCPtrWrite = NIL_RTRCPTR;
        if (pszWrite)
            rc = pdmR3DevGetSymbolRCLazy(pDevIns, pszWrite, &RCPtrWrite);
        RTRCPTR RCPtrRead = NIL_RTRCPTR;
        int rc2 = VINF_SUCCESS;
        if (pszRead)
            rc2 = pdmR3DevGetSymbolRCLazy(pDevIns, pszRead, &RCPtrRead);
        RTRCPTR RCPtrFill = NIL_RTRCPTR;
        int rc3 = VINF_SUCCESS;
        if (pszFill)
            rc3 = pdmR3DevGetSymbolRCLazy(pDevIns, pszFill, &RCPtrFill);
        if (RT_SUCCESS(rc) && RT_SUCCESS(rc2) && RT_SUCCESS(rc3))
            rc = IOMR3MmioRegisterRC(pVM, pDevIns, GCPhysStart, cbRange, pvUser, RCPtrWrite, RCPtrRead, RCPtrFill);
        else
        {
            /* At least one lookup failed; assert each and fold the first
               failure into rc. */
            AssertMsgRC(rc,  ("Failed to resolve %s.%s (pszWrite)\n", pDevIns->pReg->szRCMod, pszWrite));
            AssertMsgRC(rc2, ("Failed to resolve %s.%s (pszRead)\n",  pDevIns->pReg->szRCMod, pszRead));
            AssertMsgRC(rc3, ("Failed to resolve %s.%s (pszFill)\n",  pDevIns->pReg->szRCMod, pszFill));
            if (RT_FAILURE(rc2) && RT_SUCCESS(rc))
                rc = rc2;
            if (RT_FAILURE(rc3) && RT_SUCCESS(rc))
                rc = rc3;
        }
    }
    else if (!HMIsEnabled(pVM))
    {
        /* Raw-mode is in use but this device has no RC module - caller bug. */
        AssertMsgFailed(("No RC module for this driver!\n"));
        rc = VERR_INVALID_PARAMETER;
    }
    LogFlow(("pdmR3DevHlp_MMIORegisterRC: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnMMIORegisterR0}
 *
 * Registers ring-0 MMIO handlers by *symbol name*, resolving them in the
 * device's R0 module.
 */
static DECLCALLBACK(int) pdmR3DevHlp_MMIORegisterR0(PPDMDEVINS pDevIns, RTGCPHYS GCPhysStart, uint32_t cbRange, RTR0PTR pvUser,
                                                    const char *pszWrite, const char *pszRead, const char *pszFill)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    /* Fixed log prefix: it said 'MMIORegisterHC' while this is the R0 variant. */
    LogFlow(("pdmR3DevHlp_MMIORegisterR0: caller='%s'/%d: GCPhysStart=%RGp cbRange=%#x pvUser=%p pszWrite=%p:{%s} pszRead=%p:{%s} pszFill=%p:{%s}\n",
             pDevIns->pReg->szName, pDevIns->iInstance, GCPhysStart, cbRange, pvUser, pszWrite, pszWrite, pszRead, pszRead, pszFill, pszFill));
    /*
     * Resolve the functions.
     * Not all functions have to be present; leave it to IOM to enforce this.
     */
    int rc = VINF_SUCCESS;
    if (   pDevIns->pReg->szR0Mod[0]
        && (pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_R0))
    {
        R0PTRTYPE(PFNIOMMMIOWRITE) pfnR0PtrWrite = 0;
        if (pszWrite)
            rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pszWrite, &pfnR0PtrWrite);
        R0PTRTYPE(PFNIOMMMIOREAD) pfnR0PtrRead = 0;
        int rc2 = VINF_SUCCESS;
        if (pszRead)
            rc2 = pdmR3DevGetSymbolR0Lazy(pDevIns, pszRead, &pfnR0PtrRead);
        R0PTRTYPE(PFNIOMMMIOFILL) pfnR0PtrFill = 0;
        int rc3 = VINF_SUCCESS;
        if (pszFill)
            rc3 = pdmR3DevGetSymbolR0Lazy(pDevIns, pszFill, &pfnR0PtrFill);
        if (RT_SUCCESS(rc) && RT_SUCCESS(rc2) && RT_SUCCESS(rc3))
            rc = IOMR3MmioRegisterR0(pDevIns->Internal.s.pVMR3, pDevIns, GCPhysStart, cbRange, pvUser, pfnR0PtrWrite, pfnR0PtrRead, pfnR0PtrFill);
        else
        {
            /* At least one lookup failed; assert each and fold the first
               failure (write/read/fill order) into rc. */
            AssertMsgRC(rc,  ("Failed to resolve %s.%s (pszWrite)\n", pDevIns->pReg->szR0Mod, pszWrite));
            AssertMsgRC(rc2, ("Failed to resolve %s.%s (pszRead)\n",  pDevIns->pReg->szR0Mod, pszRead));
            AssertMsgRC(rc3, ("Failed to resolve %s.%s (pszFill)\n",  pDevIns->pReg->szR0Mod, pszFill));
            if (RT_FAILURE(rc2) && RT_SUCCESS(rc))
                rc = rc2;
            if (RT_FAILURE(rc3) && RT_SUCCESS(rc))
                rc = rc3;
        }
    }
    else
    {
        /* Device asked for R0 registration without declaring an R0 module. */
        AssertMsgFailed(("No R0 module for this driver!\n"));
        rc = VERR_INVALID_PARAMETER;
    }
    LogFlow(("pdmR3DevHlp_MMIORegisterR0: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnMMIODeregister}
 *
 * Removes the MMIO range starting at GCPhysStart previously registered by
 * this device instance.
 */
static DECLCALLBACK(int) pdmR3DevHlp_MMIODeregister(PPDMDEVINS pDevIns, RTGCPHYS GCPhysStart, uint32_t cbRange)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    LogFlow(("pdmR3DevHlp_MMIODeregister: caller='%s'/%d: GCPhysStart=%RGp cbRange=%#x\n", pDevIns->pReg->szName, pDevIns->iInstance, GCPhysStart, cbRange));

    /* IOM owns the MMIO registration tables; let it do the removal. */
    int rcIom = IOMR3MmioDeregister(pDevIns->Internal.s.pVMR3, pDevIns, GCPhysStart, cbRange);

    LogFlow(("pdmR3DevHlp_MMIODeregister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rcIom));
    return rcIom;
}
/**
 * @copydoc PDMDEVHLPR3::pfnMMIO2Register
 *
 * Allocates and registers an MMIO2 (RAM-backed MMIO) region with PGM;
 * on success *ppv points at the ring-3 mapping of the backing pages.
 */
static DECLCALLBACK(int) pdmR3DevHlp_MMIO2Register(PPDMDEVINS pDevIns, uint32_t iRegion, RTGCPHYS cb, uint32_t fFlags, void **ppv, const char *pszDesc)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    LogFlow(("pdmR3DevHlp_MMIO2Register: caller='%s'/%d: iRegion=%#x cb=%#RGp fFlags=%RX32 ppv=%p pszDescp=%p:{%s}\n",
             pDevIns->pReg->szName, pDevIns->iInstance, iRegion, cb, fFlags, ppv, pszDesc, pszDesc));
    /** @todo PGMR3PhysMMIO2Register mangles the description, move it here and
     *        use a real string cache. */
    int rc = PGMR3PhysMMIO2Register(pDevIns->Internal.s.pVMR3, pDevIns, iRegion, cb, fFlags, ppv, pszDesc);
    LogFlow(("pdmR3DevHlp_MMIO2Register: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/**
 * @copydoc PDMDEVHLPR3::pfnMMIO2Deregister
 *
 * Deregisters an MMIO2 region; UINT32_MAX means "all regions of this device".
 */
static DECLCALLBACK(int) pdmR3DevHlp_MMIO2Deregister(PPDMDEVINS pDevIns, uint32_t iRegion)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    LogFlow(("pdmR3DevHlp_MMIO2Deregister: caller='%s'/%d: iRegion=%#x\n", pDevIns->pReg->szName, pDevIns->iInstance, iRegion));

    /* Valid region indexes are 0..255, or the UINT32_MAX wildcard. */
    AssertReturn(iRegion <= UINT8_MAX || iRegion == UINT32_MAX, VERR_INVALID_PARAMETER);

    int rcPgm = PGMR3PhysMMIO2Deregister(pDevIns->Internal.s.pVMR3, pDevIns, iRegion);

    LogFlow(("pdmR3DevHlp_MMIO2Deregister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rcPgm));
    return rcPgm;
}
/**
 * @copydoc PDMDEVHLPR3::pfnMMIO2Map
 *
 * Maps a previously registered MMIO2 region at guest-physical address GCPhys.
 */
static DECLCALLBACK(int) pdmR3DevHlp_MMIO2Map(PPDMDEVINS pDevIns, uint32_t iRegion, RTGCPHYS GCPhys)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM const pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_MMIO2Map: caller='%s'/%d: iRegion=%#x GCPhys=%#RGp\n", pDevIns->pReg->szName, pDevIns->iInstance, iRegion, GCPhys));

    /* PGM performs the actual mapping. */
    int rcMap = PGMR3PhysMMIO2Map(pVM, pDevIns, iRegion, GCPhys);

    LogFlow(("pdmR3DevHlp_MMIO2Map: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rcMap));
    return rcMap;
}
/**
 * @copydoc PDMDEVHLPR3::pfnMMIO2Unmap
 *
 * Unmaps an MMIO2 region from the guest-physical address it was mapped at.
 */
static DECLCALLBACK(int) pdmR3DevHlp_MMIO2Unmap(PPDMDEVINS pDevIns, uint32_t iRegion, RTGCPHYS GCPhys)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM const pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_MMIO2Unmap: caller='%s'/%d: iRegion=%#x GCPhys=%#RGp\n", pDevIns->pReg->szName, pDevIns->iInstance, iRegion, GCPhys));

    /* PGM performs the actual unmapping. */
    int rcUnmap = PGMR3PhysMMIO2Unmap(pVM, pDevIns, iRegion, GCPhys);

    LogFlow(("pdmR3DevHlp_MMIO2Unmap: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rcUnmap));
    return rcUnmap;
}
/**
 * @copydoc PDMDEVHLPR3::pfnMMHyperMapMMIO2
 *
 * Maps (part of) an MMIO2 region into the hypervisor area; on success
 * *pRCPtr receives the raw-mode context address.
 */
static DECLCALLBACK(int) pdmR3DevHlp_MMHyperMapMMIO2(PPDMDEVINS pDevIns, uint32_t iRegion, RTGCPHYS off, RTGCPHYS cb,
                                                     const char *pszDesc, PRTRCPTR pRCPtr)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_MMHyperMapMMIO2: caller='%s'/%d: iRegion=%#x off=%RGp cb=%RGp pszDesc=%p:{%s} pRCPtr=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, iRegion, off, cb, pszDesc, pszDesc, pRCPtr));
    /* Mangle the description for instance > 0 so it is unique per instance. */
    if (pDevIns->iInstance > 0)
    {
        char *pszDesc2 = MMR3HeapAPrintf(pVM, MM_TAG_PDM_DEVICE_DESC, "%s [%u]", pszDesc, pDevIns->iInstance);
        if (pszDesc2)
            pszDesc = pszDesc2;
    }
    int rc = MMR3HyperMapMMIO2(pVM, pDevIns, iRegion, off, cb, pszDesc, pRCPtr);
    LogFlow(("pdmR3DevHlp_MMHyperMapMMIO2: caller='%s'/%d: returns %Rrc *pRCPtr=%RRv\n", pDevIns->pReg->szName, pDevIns->iInstance, rc, *pRCPtr));
    return rc;
}
/**
 * @copydoc PDMDEVHLPR3::pfnMMIO2MapKernel
 *
 * Maps (part of) an MMIO2 region into kernel (ring-0) space; on success
 * *pR0Ptr receives the ring-0 address.
 */
static DECLCALLBACK(int) pdmR3DevHlp_MMIO2MapKernel(PPDMDEVINS pDevIns, uint32_t iRegion, RTGCPHYS off, RTGCPHYS cb,
                                                    const char *pszDesc, PRTR0PTR pR0Ptr)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_MMIO2MapKernel: caller='%s'/%d: iRegion=%#x off=%RGp cb=%RGp pszDesc=%p:{%s} pR0Ptr=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, iRegion, off, cb, pszDesc, pszDesc, pR0Ptr));
    /* Mangle the description for instance > 0 so it is unique per instance. */
    if (pDevIns->iInstance > 0)
    {
        char *pszDesc2 = MMR3HeapAPrintf(pVM, MM_TAG_PDM_DEVICE_DESC, "%s [%u]", pszDesc, pDevIns->iInstance);
        if (pszDesc2)
            pszDesc = pszDesc2;
    }
    int rc = PGMR3PhysMMIO2MapKernel(pVM, pDevIns, iRegion, off, cb, pszDesc, pR0Ptr);
    LogFlow(("pdmR3DevHlp_MMIO2MapKernel: caller='%s'/%d: returns %Rrc *pR0Ptr=%RHv\n", pDevIns->pReg->szName, pDevIns->iInstance, rc, *pR0Ptr));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnROMRegister}
 *
 * Registers a ROM range backed by the pvBinary/cbBinary image with PGM.
 */
static DECLCALLBACK(int) pdmR3DevHlp_ROMRegister(PPDMDEVINS pDevIns, RTGCPHYS GCPhysStart, uint32_t cbRange,
                                                 const void *pvBinary, uint32_t cbBinary, uint32_t fFlags, const char *pszDesc)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM const pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_ROMRegister: caller='%s'/%d: GCPhysStart=%RGp cbRange=%#x pvBinary=%p cbBinary=%#x fFlags=%#RX32 pszDesc=%p:{%s}\n",
             pDevIns->pReg->szName, pDevIns->iInstance, GCPhysStart, cbRange, pvBinary, cbBinary, fFlags, pszDesc, pszDesc));

    /** @todo can we mangle pszDesc? */
    int rcPgm = PGMR3PhysRomRegister(pVM, pDevIns, GCPhysStart, cbRange, pvBinary, cbBinary, fFlags, pszDesc);

    LogFlow(("pdmR3DevHlp_ROMRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rcPgm));
    return rcPgm;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnROMProtectShadow}
 *
 * Changes the protection mode of a shadowed ROM range via PGM.
 */
static DECLCALLBACK(int) pdmR3DevHlp_ROMProtectShadow(PPDMDEVINS pDevIns, RTGCPHYS GCPhysStart, uint32_t cbRange, PGMROMPROT enmProt)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    LogFlow(("pdmR3DevHlp_ROMProtectShadow: caller='%s'/%d: GCPhysStart=%RGp cbRange=%#x enmProt=%d\n",
             pDevIns->pReg->szName, pDevIns->iInstance, GCPhysStart, cbRange, enmProt));

    int rcPgm = PGMR3PhysRomProtect(pDevIns->Internal.s.pVMR3, GCPhysStart, cbRange, enmProt);

    LogFlow(("pdmR3DevHlp_ROMProtectShadow: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rcPgm));
    return rcPgm;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnSSMRegister}
 *
 * Registers the device's saved-state callbacks with SSM under the device
 * name and instance number.
 */
static DECLCALLBACK(int) pdmR3DevHlp_SSMRegister(PPDMDEVINS pDevIns, uint32_t uVersion, size_t cbGuess, const char *pszBefore,
                                                 PFNSSMDEVLIVEPREP pfnLivePrep, PFNSSMDEVLIVEEXEC pfnLiveExec, PFNSSMDEVLIVEVOTE pfnLiveVote,
                                                 PFNSSMDEVSAVEPREP pfnSavePrep, PFNSSMDEVSAVEEXEC pfnSaveExec, PFNSSMDEVSAVEDONE pfnSaveDone,
                                                 PFNSSMDEVLOADPREP pfnLoadPrep, PFNSSMDEVLOADEXEC pfnLoadExec, PFNSSMDEVLOADDONE pfnLoadDone)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    /* Fixed log format: 'uVersion=#x' was missing the '%' (shifting every
       following conversion), and 'pszLoadPrep' should read 'pfnLoadPrep'. */
    LogFlow(("pdmR3DevHlp_SSMRegister: caller='%s'/%d: uVersion=%#x cbGuess=%#x pszBefore=%p:{%s}\n"
             "    pfnLivePrep=%p pfnLiveExec=%p pfnLiveVote=%p pfnSavePrep=%p pfnSaveExec=%p pfnSaveDone=%p pfnLoadPrep=%p pfnLoadExec=%p pfnLoadDone=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, uVersion, cbGuess, pszBefore, pszBefore,
             pfnLivePrep, pfnLiveExec, pfnLiveVote,
             pfnSavePrep, pfnSaveExec, pfnSaveDone,
             pfnLoadPrep, pfnLoadExec, pfnLoadDone));
    int rc = SSMR3RegisterDevice(pDevIns->Internal.s.pVMR3, pDevIns, pDevIns->pReg->szName, pDevIns->iInstance,
                                 uVersion, cbGuess, pszBefore,
                                 pfnLivePrep, pfnLiveExec, pfnLiveVote,
                                 pfnSavePrep, pfnSaveExec, pfnSaveDone,
                                 pfnLoadPrep, pfnLoadExec, pfnLoadDone);
    LogFlow(("pdmR3DevHlp_SSMRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnTMTimerCreate}
 *
 * Creates a device timer on the given TM clock; on success *ppTimer holds
 * the ring-3 timer handle.
 */
static DECLCALLBACK(int) pdmR3DevHlp_TMTimerCreate(PPDMDEVINS pDevIns, TMCLOCK enmClock, PFNTMTIMERDEV pfnCallback, void *pvUser, uint32_t fFlags, const char *pszDesc, PPTMTIMERR3 ppTimer)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_TMTimerCreate: caller='%s'/%d: enmClock=%d pfnCallback=%p pvUser=%p fFlags=%#x pszDesc=%p:{%s} ppTimer=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, enmClock, pfnCallback, pvUser, fFlags, pszDesc, pszDesc, ppTimer));
    /* Mangle the description for instance > 0 so it is unique per instance. */
    if (pDevIns->iInstance > 0) /** @todo use a string cache here later. */
    {
        char *pszDesc2 = MMR3HeapAPrintf(pVM, MM_TAG_PDM_DEVICE_DESC, "%s [%u]", pszDesc, pDevIns->iInstance);
        if (pszDesc2)
            pszDesc = pszDesc2;
    }
    int rc = TMR3TimerCreateDevice(pVM, pDevIns, enmClock, pfnCallback, pvUser, fFlags, pszDesc, ppTimer);
    LogFlow(("pdmR3DevHlp_TMTimerCreate: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnTMUtcNow}
 *
 * Fills *pTime with the current (virtual) UTC time and returns pTime.
 */
static DECLCALLBACK(PRTTIMESPEC) pdmR3DevHlp_TMUtcNow(PPDMDEVINS pDevIns, PRTTIMESPEC pTime)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    LogFlow(("pdmR3DevHlp_TMUtcNow: caller='%s'/%d: pTime=%p\n", pDevIns->pReg->szName, pDevIns->iInstance, pTime));

    /* TM fills in the timespec and hands the same pointer back. */
    pTime = TMR3UtcNow(pDevIns->Internal.s.pVMR3, pTime);

    LogFlow(("pdmR3DevHlp_TMUtcNow: caller='%s'/%d: returns %RU64\n", pDevIns->pReg->szName, pDevIns->iInstance, RTTimeSpecGetNano(pTime)));
    return pTime;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnTMTimeVirtGet} */
static DECLCALLBACK(uint64_t) pdmR3DevHlp_TMTimeVirtGet(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    /* Fixed entry log: the format only consumed '%s' while both the device
       name and iInstance were passed; now logs both like the sibling helpers. */
    LogFlow(("pdmR3DevHlp_TMTimeVirtGet: caller='%s'/%d\n",
             pDevIns->pReg->szName, pDevIns->iInstance));
    /* NOTE(review): despite the 'Virt' name this reads the virtual *sync*
       clock (TMVirtualSyncGet) - behaviour kept as-is. */
    uint64_t u64Time = TMVirtualSyncGet(pDevIns->Internal.s.pVMR3);
    LogFlow(("pdmR3DevHlp_TMTimeVirtGet: caller='%s'/%d: returns %RU64\n", pDevIns->pReg->szName, pDevIns->iInstance, u64Time));
    return u64Time;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnTMTimeVirtGetFreq} */
static DECLCALLBACK(uint64_t) pdmR3DevHlp_TMTimeVirtGetFreq(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    /* Fixed entry log: the format only consumed '%s' while both the device
       name and iInstance were passed; now logs both like the sibling helpers. */
    LogFlow(("pdmR3DevHlp_TMTimeVirtGetFreq: caller='%s'/%d\n",
             pDevIns->pReg->szName, pDevIns->iInstance));
    uint64_t u64Freq = TMVirtualGetFreq(pDevIns->Internal.s.pVMR3);
    LogFlow(("pdmR3DevHlp_TMTimeVirtGetFreq: caller='%s'/%d: returns %RU64\n", pDevIns->pReg->szName, pDevIns->iInstance, u64Freq));
    return u64Freq;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnTMTimeVirtGetNano} */
static DECLCALLBACK(uint64_t) pdmR3DevHlp_TMTimeVirtGetNano(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    /* Fixed entry log: the format only consumed '%s' while both the device
       name and iInstance were passed; now logs both like the sibling helpers. */
    LogFlow(("pdmR3DevHlp_TMTimeVirtGetNano: caller='%s'/%d\n",
             pDevIns->pReg->szName, pDevIns->iInstance));
    /* Read the virtual clock and convert the raw value to nanoseconds. */
    uint64_t u64Time = TMVirtualGet(pDevIns->Internal.s.pVMR3);
    uint64_t u64Nano = TMVirtualToNano(pDevIns->Internal.s.pVMR3, u64Time);
    LogFlow(("pdmR3DevHlp_TMTimeVirtGetNano: caller='%s'/%d: returns %RU64\n", pDevIns->pReg->szName, pDevIns->iInstance, u64Nano));
    return u64Nano;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnGetSupDrvSession}
 *
 * Returns the support driver session associated with the VM.
 */
static DECLCALLBACK(PSUPDRVSESSION) pdmR3DevHlp_GetSupDrvSession(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    /* Fixed entry log: the format only consumed '%s' while both the device
       name and iInstance were passed; now logs both like the sibling helpers. */
    LogFlow(("pdmR3DevHlp_GetSupDrvSession: caller='%s'/%d\n",
             pDevIns->pReg->szName, pDevIns->iInstance));
    PSUPDRVSESSION pSession = pDevIns->Internal.s.pVMR3->pSession;
    LogFlow(("pdmR3DevHlp_GetSupDrvSession: caller='%s'/%d: returns %#p\n", pDevIns->pReg->szName, pDevIns->iInstance, pSession));
    return pSession;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPhysRead}
 *
 * Reads cbRead bytes of guest physical memory at GCPhys into pvBuf.
 * EMT callers go through PGMPhysRead; other threads use the 'External'
 * variant.
 */
static DECLCALLBACK(int) pdmR3DevHlp_PhysRead(PPDMDEVINS pDevIns, RTGCPHYS GCPhys, void *pvBuf, size_t cbRead)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    LogFlow(("pdmR3DevHlp_PhysRead: caller='%s'/%d: GCPhys=%RGp pvBuf=%p cbRead=%#x\n",
             pDevIns->pReg->szName, pDevIns->iInstance, GCPhys, pvBuf, cbRead));
#if defined(VBOX_STRICT) && defined(PDM_DEVHLP_DEADLOCK_DETECTION)
    /* Strict builds: a non-EMT caller must not own any PDM critical sections
       while touching guest physical memory (deadlock detection). */
    if (!VM_IS_EMT(pVM))
    {
        char szNames[128];
        uint32_t cLocks = PDMR3CritSectCountOwned(pVM, szNames, sizeof(szNames));
        AssertMsg(cLocks == 0, ("cLocks=%u %s\n", cLocks, szNames));
    }
#endif
    VBOXSTRICTRC rcStrict;
    if (VM_IS_EMT(pVM))
        rcStrict = PGMPhysRead(pVM, GCPhys, pvBuf, cbRead, PGMACCESSORIGIN_DEVICE);
    else
        rcStrict = PGMR3PhysReadExternal(pVM, GCPhys, pvBuf, cbRead, PGMACCESSORIGIN_DEVICE);
    AssertMsg(rcStrict == VINF_SUCCESS, ("%Rrc\n", VBOXSTRICTRC_VAL(rcStrict))); /** @todo track down the users for this bugger. */
    Log(("pdmR3DevHlp_PhysRead: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, VBOXSTRICTRC_VAL(rcStrict) ));
    return VBOXSTRICTRC_VAL(rcStrict);
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPhysWrite}
 *
 * Writes cbWrite bytes from pvBuf to guest physical memory at GCPhys.
 * EMT callers go through PGMPhysWrite; other threads use the 'External'
 * variant.
 */
static DECLCALLBACK(int) pdmR3DevHlp_PhysWrite(PPDMDEVINS pDevIns, RTGCPHYS GCPhys, const void *pvBuf, size_t cbWrite)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    LogFlow(("pdmR3DevHlp_PhysWrite: caller='%s'/%d: GCPhys=%RGp pvBuf=%p cbWrite=%#x\n",
             pDevIns->pReg->szName, pDevIns->iInstance, GCPhys, pvBuf, cbWrite));
#if defined(VBOX_STRICT) && defined(PDM_DEVHLP_DEADLOCK_DETECTION)
    /* Strict builds: a non-EMT caller must not own any PDM critical sections
       while touching guest physical memory (deadlock detection). */
    if (!VM_IS_EMT(pVM))
    {
        char szNames[128];
        uint32_t cLocks = PDMR3CritSectCountOwned(pVM, szNames, sizeof(szNames));
        AssertMsg(cLocks == 0, ("cLocks=%u %s\n", cLocks, szNames));
    }
#endif
    VBOXSTRICTRC rcStrict;
    if (VM_IS_EMT(pVM))
        rcStrict = PGMPhysWrite(pVM, GCPhys, pvBuf, cbWrite, PGMACCESSORIGIN_DEVICE);
    else
        rcStrict = PGMR3PhysWriteExternal(pVM, GCPhys, pvBuf, cbWrite, PGMACCESSORIGIN_DEVICE);
    AssertMsg(rcStrict == VINF_SUCCESS, ("%Rrc\n", VBOXSTRICTRC_VAL(rcStrict))); /** @todo track down the users for this bugger. */
    Log(("pdmR3DevHlp_PhysWrite: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, VBOXSTRICTRC_VAL(rcStrict) ));
    return VBOXSTRICTRC_VAL(rcStrict);
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPhysGCPhys2CCPtr}
 *
 * Maps a guest physical page for read/write access; the mapping is pinned
 * via *pLock until released with pfnPhysReleasePageMappingLock.
 * fFlags must currently be zero (reserved).
 */
static DECLCALLBACK(int) pdmR3DevHlp_PhysGCPhys2CCPtr(PPDMDEVINS pDevIns, RTGCPHYS GCPhys, uint32_t fFlags, void **ppv, PPGMPAGEMAPLOCK pLock)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    LogFlow(("pdmR3DevHlp_PhysGCPhys2CCPtr: caller='%s'/%d: GCPhys=%RGp fFlags=%#x ppv=%p pLock=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, GCPhys, fFlags, ppv, pLock));
    AssertReturn(!fFlags, VERR_INVALID_PARAMETER);
#if defined(VBOX_STRICT) && defined(PDM_DEVHLP_DEADLOCK_DETECTION)
    /* Strict builds: a non-EMT caller must not own any PDM critical sections
       here (deadlock detection). */
    if (!VM_IS_EMT(pVM))
    {
        char szNames[128];
        uint32_t cLocks = PDMR3CritSectCountOwned(pVM, szNames, sizeof(szNames));
        AssertMsg(cLocks == 0, ("cLocks=%u %s\n", cLocks, szNames));
    }
#endif
    int rc = PGMR3PhysGCPhys2CCPtrExternal(pVM, GCPhys, ppv, pLock);
    Log(("pdmR3DevHlp_PhysGCPhys2CCPtr: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPhysGCPhys2CCPtrReadOnly}
 *
 * Read-only variant of pfnPhysGCPhys2CCPtr; see that helper for the
 * locking contract.  fFlags must currently be zero (reserved).
 */
static DECLCALLBACK(int) pdmR3DevHlp_PhysGCPhys2CCPtrReadOnly(PPDMDEVINS pDevIns, RTGCPHYS GCPhys, uint32_t fFlags, const void **ppv, PPGMPAGEMAPLOCK pLock)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    LogFlow(("pdmR3DevHlp_PhysGCPhys2CCPtrReadOnly: caller='%s'/%d: GCPhys=%RGp fFlags=%#x ppv=%p pLock=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, GCPhys, fFlags, ppv, pLock));
    AssertReturn(!fFlags, VERR_INVALID_PARAMETER);
#if defined(VBOX_STRICT) && defined(PDM_DEVHLP_DEADLOCK_DETECTION)
    /* Strict builds: a non-EMT caller must not own any PDM critical sections
       here (deadlock detection). */
    if (!VM_IS_EMT(pVM))
    {
        char szNames[128];
        uint32_t cLocks = PDMR3CritSectCountOwned(pVM, szNames, sizeof(szNames));
        AssertMsg(cLocks == 0, ("cLocks=%u %s\n", cLocks, szNames));
    }
#endif
    int rc = PGMR3PhysGCPhys2CCPtrReadOnlyExternal(pVM, GCPhys, ppv, pLock);
    Log(("pdmR3DevHlp_PhysGCPhys2CCPtrReadOnly: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPhysReleasePageMappingLock}
 *
 * Releases a page mapping lock taken by one of the GCPhys2CCPtr helpers.
 */
static DECLCALLBACK(void) pdmR3DevHlp_PhysReleasePageMappingLock(PPDMDEVINS pDevIns, PPGMPAGEMAPLOCK pLock)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM const pVM = pDevIns->Internal.s.pVMR3;
    LogFlow(("pdmR3DevHlp_PhysReleasePageMappingLock: caller='%s'/%d: pLock=%p\n", pDevIns->pReg->szName, pDevIns->iInstance, pLock));

    PGMPhysReleasePageMappingLock(pVM, pLock);

    Log(("pdmR3DevHlp_PhysReleasePageMappingLock: caller='%s'/%d: returns void\n", pDevIns->pReg->szName, pDevIns->iInstance));
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPhysReadGCVirt}
 *
 * Reads cb bytes from guest *virtual* address GCVirtSrc into pvDst using
 * the calling EMT's CPU context for the address translation.
 */
static DECLCALLBACK(int) pdmR3DevHlp_PhysReadGCVirt(PPDMDEVINS pDevIns, void *pvDst, RTGCPTR GCVirtSrc, size_t cb)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_PhysReadGCVirt: caller='%s'/%d: pvDst=%p GCVirt=%RGv cb=%#x\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pvDst, GCVirtSrc, cb));
    /* Needs a CPU context for the virtual->physical translation. */
    PVMCPU pVCpu = VMMGetCpu(pVM);
    if (!pVCpu)
        return VERR_ACCESS_DENIED;
#if defined(VBOX_STRICT) && defined(PDM_DEVHLP_DEADLOCK_DETECTION)
    /** @todo SMP. */
#endif
    int rc = PGMPhysSimpleReadGCPtr(pVCpu, pvDst, GCVirtSrc, cb);
    LogFlow(("pdmR3DevHlp_PhysReadGCVirt: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPhysWriteGCVirt}
 *
 * Writes cb bytes from pvSrc to guest *virtual* address GCVirtDst using
 * the calling EMT's CPU context for the address translation.
 */
static DECLCALLBACK(int) pdmR3DevHlp_PhysWriteGCVirt(PPDMDEVINS pDevIns, RTGCPTR GCVirtDst, const void *pvSrc, size_t cb)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_PhysWriteGCVirt: caller='%s'/%d: GCVirtDst=%RGv pvSrc=%p cb=%#x\n",
             pDevIns->pReg->szName, pDevIns->iInstance, GCVirtDst, pvSrc, cb));
    /* Needs a CPU context for the virtual->physical translation. */
    PVMCPU pVCpu = VMMGetCpu(pVM);
    if (!pVCpu)
        return VERR_ACCESS_DENIED;
#if defined(VBOX_STRICT) && defined(PDM_DEVHLP_DEADLOCK_DETECTION)
    /** @todo SMP. */
#endif
    int rc = PGMPhysSimpleWriteGCPtr(pVCpu, GCVirtDst, pvSrc, cb);
    LogFlow(("pdmR3DevHlp_PhysWriteGCVirt: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPhysGCPtr2GCPhys}
 *
 * Converts a guest virtual address to a guest physical address using the
 * calling EMT's CPU context; the result is stored in *pGCPhys.
 */
static DECLCALLBACK(int) pdmR3DevHlp_PhysGCPtr2GCPhys(PPDMDEVINS pDevIns, RTGCPTR GCPtr, PRTGCPHYS pGCPhys)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_PhysGCPtr2GCPhys: caller='%s'/%d: GCPtr=%RGv pGCPhys=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, GCPtr, pGCPhys));
    /* Needs a CPU context for the translation. */
    PVMCPU pVCpu = VMMGetCpu(pVM);
    if (!pVCpu)
        return VERR_ACCESS_DENIED;
#if defined(VBOX_STRICT) && defined(PDM_DEVHLP_DEADLOCK_DETECTION)
    /** @todo SMP. */
#endif
    int rc = PGMPhysGCPtr2GCPhys(pVCpu, GCPtr, pGCPhys);
    LogFlow(("pdmR3DevHlp_PhysGCPtr2GCPhys: caller='%s'/%d: returns %Rrc *pGCPhys=%RGp\n", pDevIns->pReg->szName, pDevIns->iInstance, rc, *pGCPhys));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnMMHeapAlloc}
 *
 * Allocates cb bytes from the per-VM heap, tagged as device-user memory.
 */
static DECLCALLBACK(void *) pdmR3DevHlp_MMHeapAlloc(PPDMDEVINS pDevIns, size_t cb)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    LogFlow(("pdmR3DevHlp_MMHeapAlloc: caller='%s'/%d: cb=%#x\n", pDevIns->pReg->szName, pDevIns->iInstance, cb));

    void *pvMem = MMR3HeapAlloc(pDevIns->Internal.s.pVMR3, MM_TAG_PDM_DEVICE_USER, cb);

    LogFlow(("pdmR3DevHlp_MMHeapAlloc: caller='%s'/%d: returns %p\n", pDevIns->pReg->szName, pDevIns->iInstance, pvMem));
    return pvMem;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnMMHeapAllocZ}
 *
 * Allocates cb zero-initialised bytes from the per-VM heap, tagged as
 * device-user memory.
 */
static DECLCALLBACK(void *) pdmR3DevHlp_MMHeapAllocZ(PPDMDEVINS pDevIns, size_t cb)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    LogFlow(("pdmR3DevHlp_MMHeapAllocZ: caller='%s'/%d: cb=%#x\n", pDevIns->pReg->szName, pDevIns->iInstance, cb));

    void *pvMem = MMR3HeapAllocZ(pDevIns->Internal.s.pVMR3, MM_TAG_PDM_DEVICE_USER, cb);

    LogFlow(("pdmR3DevHlp_MMHeapAllocZ: caller='%s'/%d: returns %p\n", pDevIns->pReg->szName, pDevIns->iInstance, pvMem));
    return pvMem;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnMMHeapFree}
 *
 * Frees memory previously allocated with pfnMMHeapAlloc/pfnMMHeapAllocZ.
 */
static DECLCALLBACK(void) pdmR3DevHlp_MMHeapFree(PPDMDEVINS pDevIns, void *pv)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    LogFlow(("pdmR3DevHlp_MMHeapFree: caller='%s'/%d: pv=%p\n", pDevIns->pReg->szName, pDevIns->iInstance, pv));
    MMR3HeapFree(pv);
    /* Fixed exit log tag: it said 'pdmR3DevHlp_MMHeapAlloc' (copy/paste). */
    LogFlow(("pdmR3DevHlp_MMHeapFree: caller='%s'/%d: returns void\n", pDevIns->pReg->szName, pDevIns->iInstance));
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMState}
 *
 * Queries the current VM state.
 */
static DECLCALLBACK(VMSTATE) pdmR3DevHlp_VMState(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);

    VMSTATE const enmState = VMR3GetState(pDevIns->Internal.s.pVMR3);

    LogFlow(("pdmR3DevHlp_VMState: caller='%s'/%d: returns %d (%s)\n", pDevIns->pReg->szName, pDevIns->iInstance,
             enmState, VMR3GetStateName(enmState)));
    return enmState;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMTeleportedAndNotFullyResumedYet}
 *
 * Returns whether the VM was teleported here and has not fully resumed yet.
 */
static DECLCALLBACK(bool) pdmR3DevHlp_VMTeleportedAndNotFullyResumedYet(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    bool fRc = VMR3TeleportedAndNotFullyResumedYet(pDevIns->Internal.s.pVMR3);
    /* Fixed log tag: it said 'pdmR3DevHlp_VMState' (copy/paste). */
    LogFlow(("pdmR3DevHlp_VMTeleportedAndNotFullyResumedYet: caller='%s'/%d: returns %RTbool\n", pDevIns->pReg->szName, pDevIns->iInstance,
             fRc));
    return fRc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMSetError}
 *
 * Records a VM error with source position and printf-style message, then
 * passes @a rc straight back to the caller.
 */
static DECLCALLBACK(int) pdmR3DevHlp_VMSetError(PPDMDEVINS pDevIns, int rc, RT_SRC_POS_DECL, const char *pszFormat, ...)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    va_list args;
    va_start(args, pszFormat);
    /* VMSetErrorV is expected to echo rc back; assert that in strict builds. */
    int rc2 = VMSetErrorV(pDevIns->Internal.s.pVMR3, rc, RT_SRC_POS_ARGS, pszFormat, args); Assert(rc2 == rc); NOREF(rc2);
    va_end(args);
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMSetErrorV}
 *
 * va_list variant of pfnVMSetError; passes @a rc straight back.
 */
static DECLCALLBACK(int) pdmR3DevHlp_VMSetErrorV(PPDMDEVINS pDevIns, int rc, RT_SRC_POS_DECL, const char *pszFormat, va_list va)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    /* VMSetErrorV is expected to echo rc back; assert that in strict builds. */
    int rc2 = VMSetErrorV(pDevIns->Internal.s.pVMR3, rc, RT_SRC_POS_ARGS, pszFormat, va); Assert(rc2 == rc); NOREF(rc2);
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMSetRuntimeError}
 *
 * Raises a runtime error (printf-style message) identified by @a pszErrorId
 * and returns the VMM status code.
 */
static DECLCALLBACK(int) pdmR3DevHlp_VMSetRuntimeError(PPDMDEVINS pDevIns, uint32_t fFlags, const char *pszErrorId, const char *pszFormat, ...)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    va_list args;
    va_start(args, pszFormat);
    int rc = VMSetRuntimeErrorV(pDevIns->Internal.s.pVMR3, fFlags, pszErrorId, pszFormat, args);
    va_end(args);
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMSetRuntimeErrorV}
 *
 * va_list variant of pfnVMSetRuntimeError; a thin pass-through.
 */
static DECLCALLBACK(int) pdmR3DevHlp_VMSetRuntimeErrorV(PPDMDEVINS pDevIns, uint32_t fFlags, const char *pszErrorId, const char *pszFormat, va_list va)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    return VMSetRuntimeErrorV(pDevIns->Internal.s.pVMR3, fFlags, pszErrorId, pszFormat, va);
}
/** @interface_method_impl{PDMDEVHLPR3,pfnDBGFStopV}
 *
 * Raises a DBGF "device stop" event with source position and a formatted
 * message.  If no debugger is attached (VERR_DBGF_NOT_ATTACHED) the event is
 * silently dropped and success is returned, so devices can call this
 * unconditionally.
 */
static DECLCALLBACK(int) pdmR3DevHlp_DBGFStopV(PPDMDEVINS pDevIns, const char *pszFile, unsigned iLine, const char *pszFunction, const char *pszFormat, va_list args)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
#ifdef LOG_ENABLED
    /* Must va_copy before logging: the logger consumes the list, and @a args
       is still needed for DBGFR3EventSrcV below. */
    va_list va2;
    va_copy(va2, args);
    LogFlow(("pdmR3DevHlp_DBGFStopV: caller='%s'/%d: pszFile=%p:{%s} iLine=%d pszFunction=%p:{%s} pszFormat=%p:{%s} (%N)\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pszFile, pszFile, iLine, pszFunction, pszFunction, pszFormat, pszFormat, pszFormat, &va2));
    va_end(va2);
#endif

    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    int rc = DBGFR3EventSrcV(pVM, DBGFEVENT_DEV_STOP, pszFile, iLine, pszFunction, pszFormat, args);
    if (rc == VERR_DBGF_NOT_ATTACHED)
        rc = VINF_SUCCESS; /* no debugger: not an error from the device's point of view */

    LogFlow(("pdmR3DevHlp_DBGFStopV: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnDBGFInfoRegister}
 *
 * Registers a debugger info handler for this device instance with DBGF.
 */
static DECLCALLBACK(int) pdmR3DevHlp_DBGFInfoRegister(PPDMDEVINS pDevIns, const char *pszName, const char *pszDesc, PFNDBGFHANDLERDEV pfnHandler)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    LogFlow(("pdmR3DevHlp_DBGFInfoRegister: caller='%s'/%d: pszName=%p:{%s} pszDesc=%p:{%s} pfnHandler=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pszName, pszName, pszDesc, pszDesc, pfnHandler));

    PVM const pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    int const rc = DBGFR3InfoRegisterDevice(pVM, pszName, pszDesc, pfnHandler, pDevIns);

    LogFlow(("pdmR3DevHlp_DBGFInfoRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnDBGFRegRegister}
 *
 * Registers a set of device registers with the DBGF register facility.
 */
static DECLCALLBACK(int) pdmR3DevHlp_DBGFRegRegister(PPDMDEVINS pDevIns, PCDBGFREGDESC paRegisters)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    LogFlow(("pdmR3DevHlp_DBGFRegRegister: caller='%s'/%d: paRegisters=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, paRegisters));

    PVM const pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    int const rc = DBGFR3RegRegisterDevice(pVM, paRegisters, pDevIns, pDevIns->pReg->szName, pDevIns->iInstance);

    LogFlow(("pdmR3DevHlp_DBGFRegRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnDBGFTraceBuf}
 *
 * Returns the VM's ring-3 trace buffer handle.
 */
static DECLCALLBACK(RTTRACEBUF) pdmR3DevHlp_DBGFTraceBuf(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    RTTRACEBUF const hBuf = pDevIns->Internal.s.pVMR3->hTraceBufR3;
    LogFlow(("pdmR3DevHlp_DBGFTraceBuf: caller='%s'/%d: returns %p\n", pDevIns->pReg->szName, pDevIns->iInstance, hBuf));
    return hBuf;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnSTAMRegister}
 *
 * Registers a statistics sample via the STAM_REG convenience macro.
 */
static DECLCALLBACK(void) pdmR3DevHlp_STAMRegister(PPDMDEVINS pDevIns, void *pvSample, STAMTYPE enmType, const char *pszName, STAMUNIT enmUnit, const char *pszDesc)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM const pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);

    STAM_REG(pVM, pvSample, enmType, pszName, enmUnit, pszDesc);
    NOREF(pVM); /* pVM unused when statistics are compiled out */
}
/** @interface_method_impl{PDMDEVHLPR3,pfnSTAMRegisterF}
 *
 * Registers a statistics sample with a printf-style name.
 */
static DECLCALLBACK(void) pdmR3DevHlp_STAMRegisterF(PPDMDEVINS pDevIns, void *pvSample, STAMTYPE enmType, STAMVISIBILITY enmVisibility,
                                                    STAMUNIT enmUnit, const char *pszDesc, const char *pszName, ...)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM const pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);

    va_list va;
    va_start(va, pszName);
    int const rc = STAMR3RegisterV(pVM, pvSample, enmType, enmVisibility, enmUnit, pszDesc, pszName, va);
    va_end(va);

    AssertRC(rc);
    NOREF(pVM); /* pVM unused when statistics are compiled out */
}
/** @interface_method_impl{PDMDEVHLPR3,pfnSTAMRegisterV}
 *
 * va_list variant of pfnSTAMRegisterF.
 */
static DECLCALLBACK(void) pdmR3DevHlp_STAMRegisterV(PPDMDEVINS pDevIns, void *pvSample, STAMTYPE enmType, STAMVISIBILITY enmVisibility,
                                                    STAMUNIT enmUnit, const char *pszDesc, const char *pszName, va_list args)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM const pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);

    int const rc = STAMR3RegisterV(pVM, pvSample, enmType, enmVisibility, enmUnit, pszDesc, pszName, args);
    AssertRC(rc);
    NOREF(pVM); /* pVM unused when statistics are compiled out */
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPCIRegister}
 *
 * Registers @a pPciDev as the PCI device for this PDM device instance.
 * Selects the PCI bus from the "PCIBusNo" config key (default 0), honours
 * optional "PCIDeviceNo"/"PCIFunctionNo" placement, and calls the bus'
 * registration callback under the PDM lock.  Only one PCI device per PDM
 * device is currently supported.
 */
static DECLCALLBACK(int) pdmR3DevHlp_PCIRegister(PPDMDEVINS pDevIns, PPCIDEVICE pPciDev)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_PCIRegister: caller='%s'/%d: pPciDev=%p:{.config={%#.256Rhxs}\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pPciDev, pPciDev->config));

    /*
     * Validate input.
     */
    if (!pPciDev)
    {
        Assert(pPciDev);
        LogFlow(("pdmR3DevHlp_PCIRegister: caller='%s'/%d: returns %Rrc (pPciDev)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /* config[0..1] is the little-endian vendor ID; zero means the caller
       forgot to initialize the config space. */
    if (!pPciDev->config[0] && !pPciDev->config[1])
    {
        Assert(pPciDev->config[0] || pPciDev->config[1]);
        LogFlow(("pdmR3DevHlp_PCIRegister: caller='%s'/%d: returns %Rrc (vendor)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (pDevIns->Internal.s.pPciDeviceR3)
    {
        /** @todo the PCI device vs. PDM device design is a bit flawed if we have to
         * support a PDM device with multiple PCI devices. This might become a problem
         * when upgrading the chipset for instance because of multiple functions in some
         * devices...
         */
        AssertMsgFailed(("Only one PCI device per device is currently implemented!\n"));
        return VERR_PDM_ONE_PCI_FUNCTION_PER_DEVICE;
    }

    /*
     * Choose the PCI bus for the device.
     *
     * This is simple. If the device was configured for a particular bus, the PCIBusNo
     * configuration value will be set. If not the default bus is 0.
     */
    int rc;
    PPDMPCIBUS pBus = pDevIns->Internal.s.pPciBusR3;
    if (!pBus)
    {
        uint8_t u8Bus;
        rc = CFGMR3QueryU8Def(pDevIns->Internal.s.pCfgHandle, "PCIBusNo", &u8Bus, 0);
        AssertLogRelMsgRCReturn(rc, ("Configuration error: PCIBusNo query failed with rc=%Rrc (%s/%d)\n",
                                     rc, pDevIns->pReg->szName, pDevIns->iInstance), rc);
        AssertLogRelMsgReturn(u8Bus < RT_ELEMENTS(pVM->pdm.s.aPciBuses),
                              ("Configuration error: PCIBusNo=%d, max is %d. (%s/%d)\n", u8Bus,
                               RT_ELEMENTS(pVM->pdm.s.aPciBuses), pDevIns->pReg->szName, pDevIns->iInstance),
                              VERR_PDM_NO_PCI_BUS);
        pBus = pDevIns->Internal.s.pPciBusR3 = &pVM->pdm.s.aPciBuses[u8Bus];
    }
    if (pBus->pDevInsR3)
    {
        /* Mirror the bus pointer into R0/RC address spaces only when the
           device registration says those contexts are used. */
        if (pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_R0)
            pDevIns->Internal.s.pPciBusR0 = MMHyperR3ToR0(pVM, pDevIns->Internal.s.pPciBusR3);
        else
            pDevIns->Internal.s.pPciBusR0 = NIL_RTR0PTR;

        if (pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_RC)
            pDevIns->Internal.s.pPciBusRC = MMHyperR3ToRC(pVM, pDevIns->Internal.s.pPciBusR3);
        else
            pDevIns->Internal.s.pPciBusRC = NIL_RTRCPTR;

        /*
         * Check the configuration for PCI device and function assignment.
         */
        int iDev = -1; /* -1 lets the bus auto-assign a device/function number */
        uint8_t u8Device;
        rc = CFGMR3QueryU8(pDevIns->Internal.s.pCfgHandle, "PCIDeviceNo", &u8Device);
        if (RT_SUCCESS(rc))
        {
            AssertMsgReturn(u8Device <= 31,
                            ("Configuration error: PCIDeviceNo=%d, max is 31. (%s/%d)\n",
                             u8Device, pDevIns->pReg->szName, pDevIns->iInstance),
                            VERR_PDM_BAD_PCI_CONFIG);

            uint8_t u8Function;
            rc = CFGMR3QueryU8(pDevIns->Internal.s.pCfgHandle, "PCIFunctionNo", &u8Function);
            AssertMsgRCReturn(rc, ("Configuration error: PCIDeviceNo, but PCIFunctionNo query failed with rc=%Rrc (%s/%d)\n",
                                   rc, pDevIns->pReg->szName, pDevIns->iInstance),
                              rc);
            AssertMsgReturn(u8Function <= 7,
                            ("Configuration error: PCIFunctionNo=%d, max is 7. (%s/%d)\n",
                             u8Function, pDevIns->pReg->szName, pDevIns->iInstance),
                            VERR_PDM_BAD_PCI_CONFIG);
            iDev = (u8Device << 3) | u8Function; /* devfn encoding: dev in bits 3-7, fn in bits 0-2 */
        }
        else if (rc != VERR_CFGM_VALUE_NOT_FOUND)
        {
            AssertMsgFailed(("Configuration error: PCIDeviceNo query failed with rc=%Rrc (%s/%d)\n",
                             rc, pDevIns->pReg->szName, pDevIns->iInstance));
            return rc;
        }

        /*
         * Call the pci bus device to do the actual registration.
         */
        pdmLock(pVM);
        rc = pBus->pfnRegisterR3(pBus->pDevInsR3, pPciDev, pDevIns->pReg->szName, iDev);
        pdmUnlock(pVM);
        if (RT_SUCCESS(rc))
        {
            /* Link the PCI device back to its PDM owner and publish R0/RC views. */
            pPciDev->pDevIns = pDevIns;

            pDevIns->Internal.s.pPciDeviceR3 = pPciDev;
            if (pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_R0)
                pDevIns->Internal.s.pPciDeviceR0 = MMHyperR3ToR0(pVM, pPciDev);
            else
                pDevIns->Internal.s.pPciDeviceR0 = NIL_RTR0PTR;

            if (pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_RC)
                pDevIns->Internal.s.pPciDeviceRC = MMHyperR3ToRC(pVM, pPciDev);
            else
                pDevIns->Internal.s.pPciDeviceRC = NIL_RTRCPTR;

            Log(("PDM: Registered device '%s'/%d as PCI device %d on bus %d\n",
                 pDevIns->pReg->szName, pDevIns->iInstance, pPciDev->devfn, pDevIns->Internal.s.pPciBusR3->iBus));
        }
    }
    else
    {
        AssertLogRelMsgFailed(("Configuration error: No PCI bus available. This could be related to init order too!\n"));
        rc = VERR_PDM_NO_PCI_BUS;
    }

    LogFlow(("pdmR3DevHlp_PCIRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPCIIORegionRegister}
 *
 * Registers a PCI I/O or MMIO region (BAR) for the device's PCI device.
 * Validates the region index/size, rounds MMIO sizes to page and power-of-two
 * boundaries, and forwards to the PCI bus' region-registration callback under
 * the PDM lock.  Requires a prior successful pfnPCIRegister.
 */
static DECLCALLBACK(int) pdmR3DevHlp_PCIIORegionRegister(PPDMDEVINS pDevIns, int iRegion, uint32_t cbRegion, PCIADDRESSSPACE enmType, PFNPCIIOREGIONMAP pfnCallback)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_PCIIORegionRegister: caller='%s'/%d: iRegion=%d cbRegion=%#x enmType=%d pfnCallback=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, iRegion, cbRegion, enmType, pfnCallback));

    /*
     * Validate input.
     */
    if (iRegion < 0 || iRegion >= PCI_NUM_REGIONS)
    {
        Assert(iRegion >= 0 && iRegion < PCI_NUM_REGIONS);
        LogFlow(("pdmR3DevHlp_PCIIORegionRegister: caller='%s'/%d: returns %Rrc (iRegion)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /* Cast to int: enmType may carry the BAR64 flag OR'ed in, producing values
       outside the plain enum range. */
    switch ((int)enmType)
    {
        case PCI_ADDRESS_SPACE_IO:
            /*
             * Sanity check: don't allow to register more than 32K of the PCI I/O space.
             */
            AssertMsgReturn(cbRegion <= _32K,
                            ("caller='%s'/%d: %#x\n", pDevIns->pReg->szName, pDevIns->iInstance, cbRegion),
                            VERR_INVALID_PARAMETER);
            break;

        case PCI_ADDRESS_SPACE_MEM:
        case PCI_ADDRESS_SPACE_MEM_PREFETCH:
        case PCI_ADDRESS_SPACE_MEM | PCI_ADDRESS_SPACE_BAR64:
        case PCI_ADDRESS_SPACE_MEM_PREFETCH | PCI_ADDRESS_SPACE_BAR64:
            /*
             * Sanity check: don't allow to register more than 512MB of the PCI MMIO space for
             * now. If this limit is increased beyond 2GB, adapt the aligned check below as well!
             */
            AssertMsgReturn(cbRegion <= 512 * _1M,
                            ("caller='%s'/%d: %#x\n", pDevIns->pReg->szName, pDevIns->iInstance, cbRegion),
                            VERR_INVALID_PARAMETER);
            break;

        default:
            AssertMsgFailed(("enmType=%#x is unknown\n", enmType));
            LogFlow(("pdmR3DevHlp_PCIIORegionRegister: caller='%s'/%d: returns %Rrc (enmType)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
            return VERR_INVALID_PARAMETER;
    }
    if (!pfnCallback)
    {
        Assert(pfnCallback);
        LogFlow(("pdmR3DevHlp_PCIIORegionRegister: caller='%s'/%d: returns %Rrc (callback)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /* BAR layout may not change while the guest is executing. */
    AssertRelease(VMR3GetState(pVM) != VMSTATE_RUNNING);

    /*
     * Must have a PCI device registered!
     */
    int rc;
    PPCIDEVICE pPciDev = pDevIns->Internal.s.pPciDeviceR3;
    if (pPciDev)
    {
        /*
         * We're currently restricted to page aligned MMIO regions.
         */
        if (   ((enmType & ~(PCI_ADDRESS_SPACE_BAR64 | PCI_ADDRESS_SPACE_MEM_PREFETCH)) == PCI_ADDRESS_SPACE_MEM)
            && cbRegion != RT_ALIGN_32(cbRegion, PAGE_SIZE))
        {
            Log(("pdmR3DevHlp_PCIIORegionRegister: caller='%s'/%d: aligning cbRegion %#x -> %#x\n",
                 pDevIns->pReg->szName, pDevIns->iInstance, cbRegion, RT_ALIGN_32(cbRegion, PAGE_SIZE)));
            cbRegion = RT_ALIGN_32(cbRegion, PAGE_SIZE);
        }

        /*
         * For registering PCI MMIO memory or PCI I/O memory, the size of the region must be a power of 2!
         */
        int iLastSet = ASMBitLastSetU32(cbRegion);
        Assert(iLastSet > 0);
        uint32_t cbRegionAligned = RT_BIT_32(iLastSet - 1); /* largest power of 2 <= cbRegion */
        if (cbRegion > cbRegionAligned)
            cbRegion = cbRegionAligned * 2; /* round up */

        PPDMPCIBUS pBus = pDevIns->Internal.s.pPciBusR3;
        Assert(pBus);
        pdmLock(pVM);
        rc = pBus->pfnIORegionRegisterR3(pBus->pDevInsR3, pPciDev, iRegion, cbRegion, enmType, pfnCallback);
        pdmUnlock(pVM);
    }
    else
    {
        AssertMsgFailed(("No PCI device registered!\n"));
        rc = VERR_PDM_NOT_PCI_DEVICE;
    }

    LogFlow(("pdmR3DevHlp_PCIIORegionRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPCISetConfigCallbacks}
 *
 * Installs custom PCI config-space read/write callbacks on @a pPciDev
 * (defaulting to the device's own registered PCI device when NULL), and
 * optionally returns the previous callbacks via the ppfn*Old parameters.
 * Must not be called while the VM is running.
 */
static DECLCALLBACK(void) pdmR3DevHlp_PCISetConfigCallbacks(PPDMDEVINS pDevIns, PPCIDEVICE pPciDev, PFNPCICONFIGREAD pfnRead, PPFNPCICONFIGREAD ppfnReadOld,
                                                            PFNPCICONFIGWRITE pfnWrite, PPFNPCICONFIGWRITE ppfnWriteOld)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_PCISetConfigCallbacks: caller='%s'/%d: pPciDev=%p pfnRead=%p ppfnReadOld=%p pfnWrite=%p ppfnWriteOld=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pPciDev, pfnRead, ppfnReadOld, pfnWrite, ppfnWriteOld));

    /*
     * Validate input and resolve defaults.
     */
    AssertPtr(pfnRead);
    AssertPtr(pfnWrite);
    AssertPtrNull(ppfnReadOld);
    AssertPtrNull(ppfnWriteOld);
    AssertPtrNull(pPciDev);

    if (!pPciDev)
        pPciDev = pDevIns->Internal.s.pPciDeviceR3; /* default to this device's PCI device */
    AssertReleaseMsg(pPciDev, ("You must register your device first!\n"));
    PPDMPCIBUS pBus = pDevIns->Internal.s.pPciBusR3;
    AssertRelease(pBus);
    AssertRelease(VMR3GetState(pVM) != VMSTATE_RUNNING);

    /*
     * Do the job.
     */
    pdmLock(pVM); /* the bus keeps the old callbacks; swap must be atomic wrt. config accesses */
    pBus->pfnSetConfigCallbacksR3(pBus->pDevInsR3, pPciDev, pfnRead, ppfnReadOld, pfnWrite, ppfnWriteOld);
    pdmUnlock(pVM);

    LogFlow(("pdmR3DevHlp_PCISetConfigCallbacks: caller='%s'/%d: returns void\n", pDevIns->pReg->szName, pDevIns->iInstance));
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPCIPhysRead}
 *
 * Performs a guest-physical read on behalf of the device's PCI function.
 * Unless PDM_DO_NOT_RESPECT_PCI_BM_BIT is defined, the read is refused when
 * the PCI command register's bus-master bit is clear, mirroring real
 * hardware behaviour for DMA.
 */
static DECLCALLBACK(int) pdmR3DevHlp_PCIPhysRead(PPDMDEVINS pDevIns, RTGCPHYS GCPhys, void *pvBuf, size_t cbRead)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);

#ifndef PDM_DO_NOT_RESPECT_PCI_BM_BIT
    /*
     * Just check the busmaster setting here and forward the request to the generic read helper.
     */
    PPCIDEVICE pPciDev = pDevIns->Internal.s.pPciDeviceR3;
    AssertReleaseMsg(pPciDev, ("No PCI device registered!\n"));

    if (!PCIDevIsBusmaster(pPciDev))
    {
        Log(("pdmR3DevHlp_PCIPhysRead: caller='%s'/%d: returns %Rrc - Not bus master! GCPhys=%RGp cbRead=%#zx\n",
             pDevIns->pReg->szName, pDevIns->iInstance, VERR_PDM_NOT_PCI_BUS_MASTER, GCPhys, cbRead));
        return VERR_PDM_NOT_PCI_BUS_MASTER;
    }
#endif

    return pDevIns->pHlpR3->pfnPhysRead(pDevIns, GCPhys, pvBuf, cbRead);
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPCIPhysWrite}
 *
 * (Fixed doxygen tag: this implements pfnPCIPhysWrite, not pfnPCIPhysRead.)
 * Performs a guest-physical write on behalf of the device's PCI function.
 * Unless PDM_DO_NOT_RESPECT_PCI_BM_BIT is defined, the write is refused when
 * the PCI command register's bus-master bit is clear.
 */
static DECLCALLBACK(int) pdmR3DevHlp_PCIPhysWrite(PPDMDEVINS pDevIns, RTGCPHYS GCPhys, const void *pvBuf, size_t cbWrite)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);

#ifndef PDM_DO_NOT_RESPECT_PCI_BM_BIT
    /*
     * Just check the busmaster setting here and forward the request to the generic read helper.
     */
    PPCIDEVICE pPciDev = pDevIns->Internal.s.pPciDeviceR3;
    AssertReleaseMsg(pPciDev, ("No PCI device registered!\n"));

    if (!PCIDevIsBusmaster(pPciDev))
    {
        Log(("pdmR3DevHlp_PCIPhysWrite: caller='%s'/%d: returns %Rrc - Not bus master! GCPhys=%RGp cbWrite=%#zx\n",
             pDevIns->pReg->szName, pDevIns->iInstance, VERR_PDM_NOT_PCI_BUS_MASTER, GCPhys, cbWrite));
        return VERR_PDM_NOT_PCI_BUS_MASTER;
    }
#endif

    return pDevIns->pHlpR3->pfnPhysWrite(pDevIns, GCPhys, pvBuf, cbWrite);
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPCISetIrq}
 *
 * Raises/lowers the device's PCI INTx line via the PCI bus, tagging IRQ
 * transitions for tracing.  A new tag is allocated on the rising edge and
 * reused for the matching falling edge (uLastIrqTag).
 */
static DECLCALLBACK(void) pdmR3DevHlp_PCISetIrq(PPDMDEVINS pDevIns, int iIrq, int iLevel)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    LogFlow(("pdmR3DevHlp_PCISetIrq: caller='%s'/%d: iIrq=%d iLevel=%d\n", pDevIns->pReg->szName, pDevIns->iInstance, iIrq, iLevel));

    /*
     * Validate input.
     */
    Assert(iIrq == 0); /* only INTA (a single PCI function) is supported here */
    Assert((uint32_t)iLevel <= PDM_IRQ_LEVEL_FLIP_FLOP);

    /*
     * Must have a PCI device registered!
     */
    PPCIDEVICE pPciDev = pDevIns->Internal.s.pPciDeviceR3;
    if (pPciDev)
    {
        PPDMPCIBUS pBus = pDevIns->Internal.s.pPciBusR3; /** @todo the bus should be associated with the PCI device not the PDM device. */
        Assert(pBus);
        PVM pVM = pDevIns->Internal.s.pVMR3;

        pdmLock(pVM);
        uint32_t uTagSrc;
        if (iLevel & PDM_IRQ_LEVEL_HIGH)
        {
            /* Rising edge (or flip-flop): allocate a fresh trace tag. */
            pDevIns->Internal.s.uLastIrqTag = uTagSrc = pdmCalcIrqTag(pVM, pDevIns->idTracing);
            if (iLevel == PDM_IRQ_LEVEL_HIGH)
                VBOXVMM_PDM_IRQ_HIGH(VMMGetCpu(pVM), RT_LOWORD(uTagSrc), RT_HIWORD(uTagSrc));
            else
                VBOXVMM_PDM_IRQ_HILO(VMMGetCpu(pVM), RT_LOWORD(uTagSrc), RT_HIWORD(uTagSrc));
        }
        else
            uTagSrc = pDevIns->Internal.s.uLastIrqTag; /* falling edge reuses the tag from the rise */

        pBus->pfnSetIrqR3(pBus->pDevInsR3, pPciDev, iIrq, iLevel, uTagSrc);

        if (iLevel == PDM_IRQ_LEVEL_LOW)
            VBOXVMM_PDM_IRQ_LOW(VMMGetCpu(pVM), RT_LOWORD(uTagSrc), RT_HIWORD(uTagSrc));
        pdmUnlock(pVM);
    }
    else
        AssertReleaseMsgFailed(("No PCI device registered!\n"));

    LogFlow(("pdmR3DevHlp_PCISetIrq: caller='%s'/%d: returns void\n", pDevIns->pReg->szName, pDevIns->iInstance));
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPCISetIrqNoWait}
 *
 * In ring-3 there is no queuing to wait on, so the "no wait" variant simply
 * forwards to the synchronous pdmR3DevHlp_PCISetIrq.
 */
static DECLCALLBACK(void) pdmR3DevHlp_PCISetIrqNoWait(PPDMDEVINS pDevIns, int iIrq, int iLevel)
{
    pdmR3DevHlp_PCISetIrq(pDevIns, iIrq, iLevel);
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPCIRegisterMsi}
 *
 * Registers MSI/MSI-X support for the device's PCI function with the PCI
 * bus.  Returns VERR_NOT_IMPLEMENTED if the bus provides no MSI callback.
 */
static DECLCALLBACK(int) pdmR3DevHlp_PCIRegisterMsi(PPDMDEVINS pDevIns, PPDMMSIREG pMsiReg)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    LogFlow(("pdmR3DevHlp_PCIRegisterMsi: caller='%s'/%d: %d MSI vectors %d MSI-X vectors\n", pDevIns->pReg->szName, pDevIns->iInstance, pMsiReg->cMsiVectors,pMsiReg->cMsixVectors ));
    int rc = VINF_SUCCESS;

    /*
     * Must have a PCI device registered!
     */
    PPCIDEVICE pPciDev = pDevIns->Internal.s.pPciDeviceR3;
    if (pPciDev)
    {
        PPDMPCIBUS pBus = pDevIns->Internal.s.pPciBusR3; /** @todo the bus should be associated with the PCI device not the PDM device. */
        Assert(pBus);
        PVM pVM = pDevIns->Internal.s.pVMR3;

        pdmLock(pVM);
        if (pBus->pfnRegisterMsiR3)
            rc = pBus->pfnRegisterMsiR3(pBus->pDevInsR3, pPciDev, pMsiReg);
        else
            rc = VERR_NOT_IMPLEMENTED; /* bus does not support MSI */
        pdmUnlock(pVM);
    }
    else
        AssertReleaseMsgFailed(("No PCI device registered!\n"));

    /* Fixed: the exit log previously said 'pdmR3DevHlp_PCISetIrq ... returns
       void' (copy-paste error) although this function returns a status code. */
    LogFlow(("pdmR3DevHlp_PCIRegisterMsi: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnISASetIrq}
 *
 * Raises/lowers an ISA IRQ line (0..15) via PDMIsaSetIrq, tagging IRQ
 * transitions for tracing just like the PCI variant: a fresh tag on the
 * rising edge, the remembered tag on the falling edge.
 */
static DECLCALLBACK(void) pdmR3DevHlp_ISASetIrq(PPDMDEVINS pDevIns, int iIrq, int iLevel)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    LogFlow(("pdmR3DevHlp_ISASetIrq: caller='%s'/%d: iIrq=%d iLevel=%d\n", pDevIns->pReg->szName, pDevIns->iInstance, iIrq, iLevel));

    /*
     * Validate input.
     */
    Assert(iIrq < 16); /* legacy ISA interrupt lines only */
    Assert((uint32_t)iLevel <= PDM_IRQ_LEVEL_FLIP_FLOP);
    PVM pVM = pDevIns->Internal.s.pVMR3;

    /*
     * Do the job.
     */
    pdmLock(pVM);
    uint32_t uTagSrc;
    if (iLevel & PDM_IRQ_LEVEL_HIGH)
    {
        /* Rising edge (or flip-flop): allocate a fresh trace tag. */
        pDevIns->Internal.s.uLastIrqTag = uTagSrc = pdmCalcIrqTag(pVM, pDevIns->idTracing);
        if (iLevel == PDM_IRQ_LEVEL_HIGH)
            VBOXVMM_PDM_IRQ_HIGH(VMMGetCpu(pVM), RT_LOWORD(uTagSrc), RT_HIWORD(uTagSrc));
        else
            VBOXVMM_PDM_IRQ_HILO(VMMGetCpu(pVM), RT_LOWORD(uTagSrc), RT_HIWORD(uTagSrc));
    }
    else
        uTagSrc = pDevIns->Internal.s.uLastIrqTag; /* falling edge reuses the tag from the rise */

    PDMIsaSetIrq(pVM, iIrq, iLevel, uTagSrc);  /* (The API takes the lock recursively.) */

    if (iLevel == PDM_IRQ_LEVEL_LOW)
        VBOXVMM_PDM_IRQ_LOW(VMMGetCpu(pVM), RT_LOWORD(uTagSrc), RT_HIWORD(uTagSrc));
    pdmUnlock(pVM);

    LogFlow(("pdmR3DevHlp_ISASetIrq: caller='%s'/%d: returns void\n", pDevIns->pReg->szName, pDevIns->iInstance));
}
/** @interface_method_impl{PDMDEVHLPR3,pfnISASetIrqNoWait}
 *
 * In ring-3 there is no queuing to wait on, so the "no wait" variant simply
 * forwards to the synchronous pdmR3DevHlp_ISASetIrq.
 */
static DECLCALLBACK(void) pdmR3DevHlp_ISASetIrqNoWait(PPDMDEVINS pDevIns, int iIrq, int iLevel)
{
    pdmR3DevHlp_ISASetIrq(pDevIns, iIrq, iLevel);
}
/** @interface_method_impl{PDMDEVHLPR3,pfnDriverAttach}
 *
 * Attaches the driver chain configured under "LUN#<iLun>" to the given LUN,
 * creating the LUN record on first use.  Returns VERR_PDM_NO_ATTACHED_DRIVER
 * when no driver is configured, VERR_PDM_DRIVER_ALREADY_ATTACHED when the
 * LUN already has a driver on top.
 */
static DECLCALLBACK(int) pdmR3DevHlp_DriverAttach(PPDMDEVINS pDevIns, uint32_t iLun, PPDMIBASE pBaseInterface, PPDMIBASE *ppBaseInterface, const char *pszDesc)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_DriverAttach: caller='%s'/%d: iLun=%d pBaseInterface=%p ppBaseInterface=%p pszDesc=%p:{%s}\n",
             pDevIns->pReg->szName, pDevIns->iInstance, iLun, pBaseInterface, ppBaseInterface, pszDesc, pszDesc));

    /*
     * Lookup the LUN, it might already be registered.
     */
    PPDMLUN pLunPrev = NULL;
    PPDMLUN pLun = pDevIns->Internal.s.pLunsR3;
    for (; pLun; pLunPrev = pLun, pLun = pLun->pNext)
        if (pLun->iLun == iLun)
            break;

    /*
     * Create the LUN if it wasn't found, else check if driver is already attached to it.
     */
    if (!pLun)
    {
        if (    !pBaseInterface
            ||  !pszDesc
            ||  !*pszDesc)
        {
            Assert(pBaseInterface);
            /* Fixed: was 'Assert(pszDesc || *pszDesc)', which dereferences a
               NULL pszDesc in strict builds; both non-NULL and non-empty are
               required here. */
            Assert(pszDesc && *pszDesc);
            return VERR_INVALID_PARAMETER;
        }

        pLun = (PPDMLUN)MMR3HeapAlloc(pVM, MM_TAG_PDM_LUN, sizeof(*pLun));
        if (!pLun)
            return VERR_NO_MEMORY;

        pLun->iLun      = iLun;
        pLun->pNext     = pLunPrev ? pLunPrev->pNext : NULL;
        pLun->pTop      = NULL;
        pLun->pBottom   = NULL;
        pLun->pDevIns   = pDevIns;
        pLun->pUsbIns   = NULL;
        pLun->pszDesc   = pszDesc; /* NB: caller's string is referenced, not copied — must stay valid */
        pLun->pBase     = pBaseInterface;
        if (!pLunPrev)
            pDevIns->Internal.s.pLunsR3 = pLun;
        else
            pLunPrev->pNext = pLun;
        Log(("pdmR3DevHlp_DriverAttach: Registered LUN#%d '%s' with device '%s'/%d.\n",
             iLun, pszDesc, pDevIns->pReg->szName, pDevIns->iInstance));
    }
    else if (pLun->pTop)
    {
        AssertMsgFailed(("Already attached! The device should keep track of such things!\n"));
        LogFlow(("pdmR3DevHlp_DriverAttach: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_PDM_DRIVER_ALREADY_ATTACHED));
        return VERR_PDM_DRIVER_ALREADY_ATTACHED;
    }
    Assert(pLun->pBase == pBaseInterface);

    /*
     * Get the attached driver configuration.
     */
    int rc;
    PCFGMNODE pNode = CFGMR3GetChildF(pDevIns->Internal.s.pCfgHandle, "LUN#%u", iLun);
    if (pNode)
        rc = pdmR3DrvInstantiate(pVM, pNode, pBaseInterface, NULL /*pDrvAbove*/, pLun, ppBaseInterface);
    else
        rc = VERR_PDM_NO_ATTACHED_DRIVER;

    LogFlow(("pdmR3DevHlp_DriverAttach: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnQueueCreate}
 *
 * Creates a PDM queue owned by this device instance.  For instance numbers
 * above 0 the queue name gets a "_<instance>" suffix (allocated on the MM
 * heap; presumably freed with the VM — confirm against MMR3HeapAPrintf
 * lifetime rules) so each instance gets a unique queue name.
 */
static DECLCALLBACK(int) pdmR3DevHlp_QueueCreate(PPDMDEVINS pDevIns, size_t cbItem, uint32_t cItems, uint32_t cMilliesInterval,
                                                 PFNPDMQUEUEDEV pfnCallback, bool fGCEnabled, const char *pszName, PPDMQUEUE *ppQueue)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    LogFlow(("pdmR3DevHlp_QueueCreate: caller='%s'/%d: cbItem=%#x cItems=%#x cMilliesInterval=%u pfnCallback=%p fGCEnabled=%RTbool pszName=%p:{%s} ppQueue=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, cbItem, cItems, cMilliesInterval, pfnCallback, fGCEnabled, pszName, pszName, ppQueue));
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);

    if (pDevIns->iInstance > 0)
    {
        /* Replaces the caller's pointer with the heap-allocated unique name. */
        pszName = MMR3HeapAPrintf(pVM, MM_TAG_PDM_DEVICE_DESC, "%s_%u", pszName, pDevIns->iInstance);
        AssertLogRelReturn(pszName, VERR_NO_MEMORY);
    }

    int rc = PDMR3QueueCreateDevice(pVM, pDevIns, cbItem, cItems, cMilliesInterval, pfnCallback, fGCEnabled, pszName, ppQueue);

    LogFlow(("pdmR3DevHlp_QueueCreate: caller='%s'/%d: returns %Rrc *ppQueue=%p\n", pDevIns->pReg->szName, pDevIns->iInstance, rc, *ppQueue));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnCritSectInit}
 *
 * Initializes a device-owned critical section with a formatted name.
 */
static DECLCALLBACK(int) pdmR3DevHlp_CritSectInit(PPDMDEVINS pDevIns, PPDMCRITSECT pCritSect, RT_SRC_POS_DECL,
                                                  const char *pszNameFmt, va_list va)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    LogFlow(("pdmR3DevHlp_CritSectInit: caller='%s'/%d: pCritSect=%p pszNameFmt=%p:{%s}\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pCritSect, pszNameFmt, pszNameFmt));

    PVM const pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    int const rc = pdmR3CritSectInitDevice(pVM, pDevIns, pCritSect, RT_SRC_POS_ARGS, pszNameFmt, va);

    LogFlow(("pdmR3DevHlp_CritSectInit: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnCritSectGetNop}
 *
 * Returns the VM's ring-3 NOP (no-operation) critical section.
 */
static DECLCALLBACK(PPDMCRITSECT) pdmR3DevHlp_CritSectGetNop(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM const pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);

    PPDMCRITSECT const pNop = PDMR3CritSectGetNop(pVM);
    LogFlow(("pdmR3DevHlp_CritSectGetNop: caller='%s'/%d: return %p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pNop));
    return pNop;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnCritSectGetNopR0}
 *
 * Returns the ring-0 mapping of the VM's NOP critical section.
 */
static DECLCALLBACK(R0PTRTYPE(PPDMCRITSECT)) pdmR3DevHlp_CritSectGetNopR0(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM const pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);

    R0PTRTYPE(PPDMCRITSECT) const pNop = PDMR3CritSectGetNopR0(pVM);
    LogFlow(("pdmR3DevHlp_CritSectGetNopR0: caller='%s'/%d: return %RHv\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pNop));
    return pNop;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnCritSectGetNopRC}
 *
 * Returns the raw-mode context mapping of the VM's NOP critical section.
 */
static DECLCALLBACK(RCPTRTYPE(PPDMCRITSECT)) pdmR3DevHlp_CritSectGetNopRC(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM const pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);

    RCPTRTYPE(PPDMCRITSECT) const pNop = PDMR3CritSectGetNopRC(pVM);
    LogFlow(("pdmR3DevHlp_CritSectGetNopRC: caller='%s'/%d: return %RRv\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pNop));
    return pNop;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnSetDeviceCritSect}
 *
 * Replaces the automatically created default critical section of the device
 * with a caller-supplied one, destroying and freeing the old section.  Only
 * valid during VM creation and only for the automatic default section.
 */
static DECLCALLBACK(int) pdmR3DevHlp_SetDeviceCritSect(PPDMDEVINS pDevIns, PPDMCRITSECT pCritSect)
{
    /*
     * Validate input.
     *
     * Note! We only allow the automatically created default critical section
     *       to be replaced by this API.
     */
    PDMDEV_ASSERT_DEVINS(pDevIns);
    AssertPtrReturn(pCritSect, VERR_INVALID_POINTER);
    LogFlow(("pdmR3DevHlp_SetDeviceCritSect: caller='%s'/%d: pCritSect=%p (%s)\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pCritSect, pCritSect->s.pszName));
    AssertReturn(PDMCritSectIsInitialized(pCritSect), VERR_INVALID_PARAMETER);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    AssertReturn(pCritSect->s.pVMR3 == pVM, VERR_INVALID_PARAMETER);

    VM_ASSERT_EMT(pVM);
    VM_ASSERT_STATE_RETURN(pVM, VMSTATE_CREATING, VERR_WRONG_ORDER);

    AssertReturn(pDevIns->pCritSectRoR3, VERR_PDM_DEV_IPE_1);
    AssertReturn(pDevIns->pCritSectRoR3->s.fAutomaticDefaultCritsect, VERR_WRONG_ORDER);
    AssertReturn(!pDevIns->pCritSectRoR3->s.fUsedByTimerOrSimilar, VERR_WRONG_ORDER);
    AssertReturn(pDevIns->pCritSectRoR3 != pCritSect, VERR_INVALID_PARAMETER);

    /*
     * Replace the critical section and destroy the automatic default section.
     */
    PPDMCRITSECT pOldCritSect = pDevIns->pCritSectRoR3;
    pDevIns->pCritSectRoR3 = pCritSect;
    if (pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_R0)
        pDevIns->pCritSectRoR0 = MMHyperCCToR0(pVM, pDevIns->pCritSectRoR3);
    else
        Assert(pDevIns->pCritSectRoR0 == NIL_RTR0PTR); /* Fixed: was compared against NIL_RTRCPTR (raw-mode nil) for a ring-0 pointer. */

    if (pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_RC)
        pDevIns->pCritSectRoRC = MMHyperCCToRC(pVM, pDevIns->pCritSectRoR3);
    else
        Assert(pDevIns->pCritSectRoRC == NIL_RTRCPTR);

    PDMR3CritSectDelete(pOldCritSect);
    /* Hyper-mapped sections (RC/R0 capable devices) were allocated from the
       hyper heap, plain R3 ones from the MM R3 heap. */
    if (pDevIns->pReg->fFlags & (PDM_DEVREG_FLAGS_RC | PDM_DEVREG_FLAGS_R0))
        MMHyperFree(pVM, pOldCritSect);
    else
        MMR3HeapFree(pOldCritSect);

    LogFlow(("pdmR3DevHlp_SetDeviceCritSect: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, VINF_SUCCESS));
    return VINF_SUCCESS;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnThreadCreate}
 *
 * Creates a PDM thread owned by this device instance.
 */
static DECLCALLBACK(int) pdmR3DevHlp_ThreadCreate(PPDMDEVINS pDevIns, PPPDMTHREAD ppThread, void *pvUser, PFNPDMTHREADDEV pfnThread,
                                                  PFNPDMTHREADWAKEUPDEV pfnWakeup, size_t cbStack, RTTHREADTYPE enmType, const char *pszName)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    LogFlow(("pdmR3DevHlp_ThreadCreate: caller='%s'/%d: ppThread=%p pvUser=%p pfnThread=%p pfnWakeup=%p cbStack=%#zx enmType=%d pszName=%p:{%s}\n",
             pDevIns->pReg->szName, pDevIns->iInstance, ppThread, pvUser, pfnThread, pfnWakeup, cbStack, enmType, pszName, pszName));

    int rc = pdmR3ThreadCreateDevice(pDevIns->Internal.s.pVMR3, pDevIns, ppThread, pvUser, pfnThread, pfnWakeup, cbStack, enmType, pszName);

    /* NOTE(review): this log dereferences *ppThread even when rc indicates
       failure — presumably pdmR3ThreadCreateDevice always initializes it;
       confirm. */
    LogFlow(("pdmR3DevHlp_ThreadCreate: caller='%s'/%d: returns %Rrc *ppThread=%RTthrd\n", pDevIns->pReg->szName, pDevIns->iInstance,
             rc, *ppThread));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnSetAsyncNotification}
 *
 * Arms an asynchronous completion callback for a pending suspend/reset/
 * power-off notification.  Only valid on EMT(0), only once per pending
 * operation, and only while the VM is in one of the transitional states
 * listed below.
 */
static DECLCALLBACK(int) pdmR3DevHlp_SetAsyncNotification(PPDMDEVINS pDevIns, PFNPDMDEVASYNCNOTIFY pfnAsyncNotify)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT0(pDevIns->Internal.s.pVMR3);
    LogFlow(("pdmR3DevHlp_SetAsyncNotification: caller='%s'/%d: pfnAsyncNotify=%p\n", pDevIns->pReg->szName, pDevIns->iInstance, pfnAsyncNotify));

    int rc = VINF_SUCCESS;
    AssertStmt(pfnAsyncNotify, rc = VERR_INVALID_PARAMETER);
    AssertStmt(!pDevIns->Internal.s.pfnAsyncNotify, rc = VERR_WRONG_ORDER); /* one callback per pending operation */
    AssertStmt(pDevIns->Internal.s.fIntFlags & (PDMDEVINSINT_FLAGS_SUSPENDED | PDMDEVINSINT_FLAGS_RESET), rc = VERR_WRONG_ORDER);
    VMSTATE enmVMState = VMR3GetState(pDevIns->Internal.s.pVMR3);
    AssertStmt(   enmVMState == VMSTATE_SUSPENDING
               || enmVMState == VMSTATE_SUSPENDING_EXT_LS
               || enmVMState == VMSTATE_SUSPENDING_LS
               || enmVMState == VMSTATE_RESETTING
               || enmVMState == VMSTATE_RESETTING_LS
               || enmVMState == VMSTATE_POWERING_OFF
               || enmVMState == VMSTATE_POWERING_OFF_LS,
               rc = VERR_INVALID_STATE);

    if (RT_SUCCESS(rc))
        pDevIns->Internal.s.pfnAsyncNotify = pfnAsyncNotify;

    LogFlow(("pdmR3DevHlp_SetAsyncNotification: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnAsyncNotificationCompleted}
 *
 * Signals that an async suspend/reset/power-off notification has completed;
 * wakes up the PDM notification logic if the VM is still in a transitional
 * state, otherwise just logs the (stale) call.
 */
static DECLCALLBACK(void) pdmR3DevHlp_AsyncNotificationCompleted(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM const pVM = pDevIns->Internal.s.pVMR3;

    VMSTATE const enmVMState = VMR3GetState(pVM);
    switch (enmVMState)
    {
        case VMSTATE_SUSPENDING:
        case VMSTATE_SUSPENDING_EXT_LS:
        case VMSTATE_SUSPENDING_LS:
        case VMSTATE_RESETTING:
        case VMSTATE_RESETTING_LS:
        case VMSTATE_POWERING_OFF:
        case VMSTATE_POWERING_OFF_LS:
            LogFlow(("pdmR3DevHlp_AsyncNotificationCompleted: caller='%s'/%d:\n", pDevIns->pReg->szName, pDevIns->iInstance));
            VMR3AsyncPdmNotificationWakeupU(pVM->pUVM);
            break;

        default:
            /* Transition already over; nothing to wake up. */
            LogFlow(("pdmR3DevHlp_AsyncNotificationCompleted: caller='%s'/%d: enmVMState=%d\n", pDevIns->pReg->szName, pDevIns->iInstance, enmVMState));
            break;
    }
}
/** @interface_method_impl{PDMDEVHLPR3,pfnRTCRegister}
 *
 * Registers this device as the VM's (single) RTC device and hands back the
 * RTC helper table through @a ppRtcHlp.
 */
static DECLCALLBACK(int) pdmR3DevHlp_RTCRegister(PPDMDEVINS pDevIns, PCPDMRTCREG pRtcReg, PCPDMRTCHLP *ppRtcHlp)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    /* Fixed: the entry log passed pRtcReg->pfnWrite twice, so the pfnRead
       field was never actually printed. */
    LogFlow(("pdmR3DevHlp_RTCRegister: caller='%s'/%d: pRtcReg=%p:{.u32Version=%#x, .pfnWrite=%p, .pfnRead=%p} ppRtcHlp=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pRtcReg, pRtcReg->u32Version, pRtcReg->pfnWrite,
             pRtcReg->pfnRead, ppRtcHlp));

    /*
     * Validate input.
     */
    if (pRtcReg->u32Version != PDM_RTCREG_VERSION)
    {
        AssertMsgFailed(("u32Version=%#x expected %#x\n", pRtcReg->u32Version,
                         PDM_RTCREG_VERSION));
        LogFlow(("pdmR3DevHlp_RTCRegister: caller='%s'/%d: returns %Rrc (version)\n",
                 pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (    !pRtcReg->pfnWrite
        ||  !pRtcReg->pfnRead)
    {
        Assert(pRtcReg->pfnWrite);
        Assert(pRtcReg->pfnRead);
        LogFlow(("pdmR3DevHlp_RTCRegister: caller='%s'/%d: returns %Rrc (callbacks)\n",
                 pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }

    if (!ppRtcHlp)
    {
        Assert(ppRtcHlp);
        LogFlow(("pdmR3DevHlp_RTCRegister: caller='%s'/%d: returns %Rrc (ppRtcHlp)\n",
                 pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }

    /*
     * Only one RTC device.  (Comment fixed: previously said "DMA device".)
     */
    PVM pVM = pDevIns->Internal.s.pVMR3;
    if (pVM->pdm.s.pRtc)
    {
        AssertMsgFailed(("Only one RTC device is supported!\n"));
        LogFlow(("pdmR3DevHlp_RTCRegister: caller='%s'/%d: returns %Rrc\n",
                 pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }

    /*
     * Allocate and initialize the RTC registration structure.
     */
    int rc = VINF_SUCCESS;
    PPDMRTC pRtc = (PPDMRTC)MMR3HeapAlloc(pDevIns->Internal.s.pVMR3, MM_TAG_PDM_DEVICE, sizeof(*pRtc));
    if (pRtc)
    {
        pRtc->pDevIns   = pDevIns;
        pRtc->Reg       = *pRtcReg;
        pVM->pdm.s.pRtc = pRtc;

        /* set the helper pointer. */
        *ppRtcHlp = &g_pdmR3DevRtcHlp;
        Log(("PDM: Registered RTC device '%s'/%d pDevIns=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pDevIns));
    }
    else
        rc = VERR_NO_MEMORY;

    LogFlow(("pdmR3DevHlp_RTCRegister: caller='%s'/%d: returns %Rrc\n",
             pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnDMARegister}
 *
 * Registers a transfer handler for a DMA channel with the DMA controller
 * device, failing when no DMAC has been registered yet.
 */
static DECLCALLBACK(int) pdmR3DevHlp_DMARegister(PPDMDEVINS pDevIns, unsigned uChannel, PFNDMATRANSFERHANDLER pfnTransferHandler, void *pvUser)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_DMARegister: caller='%s'/%d: uChannel=%d pfnTransferHandler=%p pvUser=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, uChannel, pfnTransferHandler, pvUser));

    /* Forward the registration to the DMA controller if one is present. */
    int rc = VINF_SUCCESS;
    if (!pVM->pdm.s.pDmac)
    {
        AssertMsgFailed(("Configuration error: No DMAC controller available. This could be related to init order too!\n"));
        rc = VERR_PDM_NO_DMAC_INSTANCE;
    }
    else
        pVM->pdm.s.pDmac->Reg.pfnRegister(pVM->pdm.s.pDmac->pDevIns, uChannel, pfnTransferHandler, pvUser);

    LogFlow(("pdmR3DevHlp_DMARegister: caller='%s'/%d: returns %Rrc\n",
             pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnDMAReadMemory}
 *
 * Reads a block of memory through the DMA controller for the given channel,
 * optionally reporting the number of bytes transferred via pcbRead.
 */
static DECLCALLBACK(int) pdmR3DevHlp_DMAReadMemory(PPDMDEVINS pDevIns, unsigned uChannel, void *pvBuffer, uint32_t off, uint32_t cbBlock, uint32_t *pcbRead)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_DMAReadMemory: caller='%s'/%d: uChannel=%d pvBuffer=%p off=%#x cbBlock=%#x pcbRead=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, uChannel, pvBuffer, off, cbBlock, pcbRead));

    /* Delegate to the DMA controller; error out when none is registered. */
    int rc = VINF_SUCCESS;
    if (!pVM->pdm.s.pDmac)
    {
        AssertMsgFailed(("Configuration error: No DMAC controller available. This could be related to init order too!\n"));
        rc = VERR_PDM_NO_DMAC_INSTANCE;
    }
    else
    {
        uint32_t const cbXferred = pVM->pdm.s.pDmac->Reg.pfnReadMemory(pVM->pdm.s.pDmac->pDevIns, uChannel,
                                                                       pvBuffer, off, cbBlock);
        if (pcbRead)
            *pcbRead = cbXferred;
    }

    LogFlow(("pdmR3DevHlp_DMAReadMemory: caller='%s'/%d: returns %Rrc\n",
             pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnDMAWriteMemory}
 *
 * Writes a block of memory through the DMA controller for the given channel,
 * optionally reporting the number of bytes transferred via pcbWritten.
 */
static DECLCALLBACK(int) pdmR3DevHlp_DMAWriteMemory(PPDMDEVINS pDevIns, unsigned uChannel, const void *pvBuffer, uint32_t off, uint32_t cbBlock, uint32_t *pcbWritten)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_DMAWriteMemory: caller='%s'/%d: uChannel=%d pvBuffer=%p off=%#x cbBlock=%#x pcbWritten=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, uChannel, pvBuffer, off, cbBlock, pcbWritten));

    /* Delegate to the DMA controller; error out when none is registered. */
    int rc = VINF_SUCCESS;
    if (!pVM->pdm.s.pDmac)
    {
        AssertMsgFailed(("Configuration error: No DMAC controller available. This could be related to init order too!\n"));
        rc = VERR_PDM_NO_DMAC_INSTANCE;
    }
    else
    {
        uint32_t const cbXferred = pVM->pdm.s.pDmac->Reg.pfnWriteMemory(pVM->pdm.s.pDmac->pDevIns, uChannel,
                                                                        pvBuffer, off, cbBlock);
        if (pcbWritten)
            *pcbWritten = cbXferred;
    }

    LogFlow(("pdmR3DevHlp_DMAWriteMemory: caller='%s'/%d: returns %Rrc\n",
             pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnDMASetDREQ}
 *
 * Sets the DREQ (DMA request) line level for a channel on the DMA controller.
 */
static DECLCALLBACK(int) pdmR3DevHlp_DMASetDREQ(PPDMDEVINS pDevIns, unsigned uChannel, unsigned uLevel)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_DMASetDREQ: caller='%s'/%d: uChannel=%d uLevel=%d\n",
             pDevIns->pReg->szName, pDevIns->iInstance, uChannel, uLevel));

    /* Forward to the DMA controller if one is present. */
    int rc = VINF_SUCCESS;
    if (!pVM->pdm.s.pDmac)
    {
        AssertMsgFailed(("Configuration error: No DMAC controller available. This could be related to init order too!\n"));
        rc = VERR_PDM_NO_DMAC_INSTANCE;
    }
    else
        pVM->pdm.s.pDmac->Reg.pfnSetDREQ(pVM->pdm.s.pDmac->pDevIns, uChannel, uLevel);

    LogFlow(("pdmR3DevHlp_DMASetDREQ: caller='%s'/%d: returns %Rrc\n",
             pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnDMAGetChannelMode}
 *
 * Queries the mode bits of a DMA channel from the DMA controller; yields an
 * illegal mode value when no controller has been registered.
 */
static DECLCALLBACK(uint8_t) pdmR3DevHlp_DMAGetChannelMode(PPDMDEVINS pDevIns, unsigned uChannel)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_DMAGetChannelMode: caller='%s'/%d: uChannel=%d\n",
             pDevIns->pReg->szName, pDevIns->iInstance, uChannel));

    uint8_t bMode;
    if (!pVM->pdm.s.pDmac)
    {
        AssertMsgFailed(("Configuration error: No DMAC controller available. This could be related to init order too!\n"));
        bMode = 3 << 2 /* illegal mode type */;
    }
    else
        bMode = pVM->pdm.s.pDmac->Reg.pfnGetChannelMode(pVM->pdm.s.pDmac->pDevIns, uChannel);

    LogFlow(("pdmR3DevHlp_DMAGetChannelMode: caller='%s'/%d: returns %#04x\n",
             pDevIns->pReg->szName, pDevIns->iInstance, bMode));
    return bMode;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnDMASchedule}
 *
 * Requests that pending DMA transfers are run: raises the VM_FF_PDM_DMA
 * force-action flag and pokes the execution loops so they notice it.
 */
static DECLCALLBACK(void) pdmR3DevHlp_DMASchedule(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_DMASchedule: caller='%s'/%d: VM_FF_PDM_DMA %d -> 1\n",
             pDevIns->pReg->szName, pDevIns->iInstance, VM_FF_IS_SET(pVM, VM_FF_PDM_DMA)));

    /* Scheduling without a DMAC is almost certainly a config/init-order bug,
       but the flag is raised regardless (strict builds assert). */
    AssertMsg(pVM->pdm.s.pDmac, ("Configuration error: No DMAC controller available. This could be related to init order too!\n"));
    VM_FF_SET(pVM, VM_FF_PDM_DMA);
#ifdef VBOX_WITH_REM
    /* The recompiler keeps its own notion of pending DMA; tell it too. */
    REMR3NotifyDmaPending(pVM);
#endif
    /* Wake up any EMTs waiting on force-action flags. */
    VMR3NotifyGlobalFFU(pVM->pUVM, VMNOTIFYFF_FLAGS_DONE_REM);
}
/** @interface_method_impl{PDMDEVHLPR3,pfnCMOSWrite}
 *
 * Writes one CMOS/RTC register through the registered RTC device, taking the
 * RTC device's critical section around the callback.
 */
static DECLCALLBACK(int) pdmR3DevHlp_CMOSWrite(PPDMDEVINS pDevIns, unsigned iReg, uint8_t u8Value)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_CMOSWrite: caller='%s'/%d: iReg=%#04x u8Value=%#04x\n",
             pDevIns->pReg->szName, pDevIns->iInstance, iReg, u8Value));

    int rc;
    if (!pVM->pdm.s.pRtc)
        rc = VERR_PDM_NO_RTC_INSTANCE;
    else
    {
        PPDMDEVINS pRtcDevIns = pVM->pdm.s.pRtc->pDevIns;
        rc = PDMCritSectEnter(pRtcDevIns->pCritSectRoR3, VERR_IGNORED);
        if (RT_SUCCESS(rc))
        {
            rc = pVM->pdm.s.pRtc->Reg.pfnWrite(pRtcDevIns, iReg, u8Value);
            PDMCritSectLeave(pRtcDevIns->pCritSectRoR3);
        }
    }

    LogFlow(("pdmR3DevHlp_CMOSWrite: caller='%s'/%d: return %Rrc\n",
             pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnCMOSRead}
 *
 * Reads one CMOS/RTC register through the registered RTC device, taking the
 * RTC device's critical section around the callback.
 *
 * Fixed: both LogFlow statements previously said "pdmR3DevHlp_CMOSWrite",
 * which made flow logs of reads indistinguishable from writes.
 */
static DECLCALLBACK(int) pdmR3DevHlp_CMOSRead(PPDMDEVINS pDevIns, unsigned iReg, uint8_t *pu8Value)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_CMOSRead: caller='%s'/%d: iReg=%#04x pu8Value=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, iReg, pu8Value));

    int rc;
    if (pVM->pdm.s.pRtc)
    {
        PPDMDEVINS pDevInsRtc = pVM->pdm.s.pRtc->pDevIns;
        rc = PDMCritSectEnter(pDevInsRtc->pCritSectRoR3, VERR_IGNORED);
        if (RT_SUCCESS(rc))
        {
            rc = pVM->pdm.s.pRtc->Reg.pfnRead(pDevInsRtc, iReg, pu8Value);
            PDMCritSectLeave(pDevInsRtc->pCritSectRoR3);
        }
    }
    else
        rc = VERR_PDM_NO_RTC_INSTANCE;

    LogFlow(("pdmR3DevHlp_CMOSRead: caller='%s'/%d: return %Rrc\n",
             pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnAssertEMT}
 *
 * Asserts that the caller runs on the emulation thread; triggers an assertion
 * (with device name/instance in the message) and returns false otherwise.
 */
static DECLCALLBACK(bool) pdmR3DevHlp_AssertEMT(PPDMDEVINS pDevIns, const char *pszFile, unsigned iLine, const char *pszFunction)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    if (!VM_IS_EMT(pDevIns->Internal.s.pVMR3))
    {
        char szMsg[100];
        RTStrPrintf(szMsg, sizeof(szMsg), "AssertEMT '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance);
        RTAssertMsg1Weak(szMsg, iLine, pszFile, pszFunction);
        AssertBreakpoint();
        return false;
    }
    return true;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnAssertOther}
 *
 * Asserts that the caller does NOT run on the emulation thread; triggers an
 * assertion (with device name/instance in the message) and returns false
 * otherwise.
 */
static DECLCALLBACK(bool) pdmR3DevHlp_AssertOther(PPDMDEVINS pDevIns, const char *pszFile, unsigned iLine, const char *pszFunction)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    if (VM_IS_EMT(pDevIns->Internal.s.pVMR3))
    {
        char szMsg[100];
        RTStrPrintf(szMsg, sizeof(szMsg), "AssertOther '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance);
        RTAssertMsg1Weak(szMsg, iLine, pszFile, pszFunction);
        AssertBreakpoint();
        return false;
    }
    return true;
}
/** @interface_method_impl{PDMDEVHLP,pfnLdrGetRCInterfaceSymbols}
 *
 * Resolves a list of raw-mode context symbols into an interface structure,
 * after validating that the symbol prefix is namespaced to this device.
 */
static DECLCALLBACK(int) pdmR3DevHlp_LdrGetRCInterfaceSymbols(PPDMDEVINS pDevIns, void *pvInterface, size_t cbInterface,
                                                              const char *pszSymPrefix, const char *pszSymList)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    /* Each %p:{%s} pair is deliberately fed the same pointer twice: once for
       the address, once for the string contents. */
    LogFlow(("pdmR3DevHlp_PDMLdrGetRCInterfaceSymbols: caller='%s'/%d: pvInterface=%p cbInterface=%zu pszSymPrefix=%p:{%s} pszSymList=%p:{%s}\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pvInterface, cbInterface, pszSymPrefix, pszSymPrefix, pszSymList, pszSymList));

    /* The prefix must be "dev" followed (case-insensitively, anywhere) by the
       device name, so symbol lookups stay namespaced per device. */
    int rc;
    if (   strncmp(pszSymPrefix, "dev", 3) == 0
        && RTStrIStr(pszSymPrefix + 3, pDevIns->pReg->szName) != NULL)
    {
        if (pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_RC)
            rc = PDMR3LdrGetInterfaceSymbols(pDevIns->Internal.s.pVMR3,
                                             pvInterface, cbInterface,
                                             pDevIns->pReg->szRCMod, pDevIns->Internal.s.pDevR3->pszRCSearchPath,
                                             pszSymPrefix, pszSymList,
                                             false /*fRing0OrRC*/);
        else
        {
            /* NOTE(review): messages below say "driver" although this is the
               device helper - looks inherited from the driver variant; confirm. */
            AssertMsgFailed(("Not a raw-mode enabled driver\n"));
            rc = VERR_PERMISSION_DENIED;
        }
    }
    else
    {
        AssertMsgFailed(("Invalid prefix '%s' for '%s'; must start with 'dev' and contain the driver name!\n",
                         pszSymPrefix, pDevIns->pReg->szName));
        rc = VERR_INVALID_NAME;
    }
    LogFlow(("pdmR3DevHlp_PDMLdrGetRCInterfaceSymbols: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName,
             pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLP,pfnLdrGetR0InterfaceSymbols}
 *
 * Resolves a list of ring-0 context symbols into an interface structure,
 * after validating that the symbol prefix is namespaced to this device.
 * Ring-0 twin of pdmR3DevHlp_LdrGetRCInterfaceSymbols.
 */
static DECLCALLBACK(int) pdmR3DevHlp_LdrGetR0InterfaceSymbols(PPDMDEVINS pDevIns, void *pvInterface, size_t cbInterface,
                                                              const char *pszSymPrefix, const char *pszSymList)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    /* Each %p:{%s} pair is deliberately fed the same pointer twice: once for
       the address, once for the string contents. */
    LogFlow(("pdmR3DevHlp_PDMLdrGetR0InterfaceSymbols: caller='%s'/%d: pvInterface=%p cbInterface=%zu pszSymPrefix=%p:{%s} pszSymList=%p:{%s}\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pvInterface, cbInterface, pszSymPrefix, pszSymPrefix, pszSymList, pszSymList));

    /* The prefix must be "dev" followed (case-insensitively, anywhere) by the
       device name, so symbol lookups stay namespaced per device. */
    int rc;
    if (   strncmp(pszSymPrefix, "dev", 3) == 0
        && RTStrIStr(pszSymPrefix + 3, pDevIns->pReg->szName) != NULL)
    {
        if (pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_R0)
            rc = PDMR3LdrGetInterfaceSymbols(pDevIns->Internal.s.pVMR3,
                                             pvInterface, cbInterface,
                                             pDevIns->pReg->szR0Mod, pDevIns->Internal.s.pDevR3->pszR0SearchPath,
                                             pszSymPrefix, pszSymList,
                                             true /*fRing0OrRC*/);
        else
        {
            /* NOTE(review): messages below say "driver" although this is the
               device helper - looks inherited from the driver variant; confirm. */
            AssertMsgFailed(("Not a ring-0 enabled driver\n"));
            rc = VERR_PERMISSION_DENIED;
        }
    }
    else
    {
        AssertMsgFailed(("Invalid prefix '%s' for '%s'; must start with 'dev' and contain the driver name!\n",
                         pszSymPrefix, pDevIns->pReg->szName));
        rc = VERR_INVALID_NAME;
    }
    LogFlow(("pdmR3DevHlp_PDMLdrGetR0InterfaceSymbols: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName,
             pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLP,pfnCallR0}
 *
 * Looks up the device's ring-0 request handler ("devR0<Name>ReqHandler",
 * first letter of the name upper-cased) and invokes it through VMMR0.
 *
 * Fixed: removed the dead store 'pfnReqHandlerR0 = NIL_RTR0PTR' in the
 * symbol-lookup failure path - the local is never read afterwards.
 */
static DECLCALLBACK(int) pdmR3DevHlp_CallR0(PPDMDEVINS pDevIns, uint32_t uOperation, uint64_t u64Arg)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_CallR0: caller='%s'/%d: uOperation=%#x u64Arg=%#RX64\n",
             pDevIns->pReg->szName, pDevIns->iInstance, uOperation, u64Arg));

    /*
     * Resolve the ring-0 entry point.  There is not need to remember this like
     * we do for drivers since this is mainly for construction time hacks and
     * other things that aren't performance critical.
     */
    int rc;
    if (pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_R0)
    {
        /* Compose "devR0<Name>ReqHandler" and upper-case the name's first letter. */
        char szSymbol[ sizeof("devR0") + sizeof(pDevIns->pReg->szName) + sizeof("ReqHandler")];
        strcat(strcat(strcpy(szSymbol, "devR0"), pDevIns->pReg->szName), "ReqHandler");
        szSymbol[sizeof("devR0") - 1] = RT_C_TO_UPPER(szSymbol[sizeof("devR0") - 1]);

        PFNPDMDRVREQHANDLERR0 pfnReqHandlerR0;
        rc = pdmR3DevGetSymbolR0Lazy(pDevIns, szSymbol, &pfnReqHandlerR0);
        if (RT_SUCCESS(rc))
        {
            /*
             * Make the ring-0 call.
             */
            PDMDEVICECALLREQHANDLERREQ Req;
            Req.Hdr.u32Magic    = SUPVMMR0REQHDR_MAGIC;
            Req.Hdr.cbReq       = sizeof(Req);
            Req.pDevInsR0       = PDMDEVINS_2_R0PTR(pDevIns);
            Req.pfnReqHandlerR0 = pfnReqHandlerR0;
            Req.uOperation      = uOperation;
            Req.u32Alignment    = 0;
            Req.u64Arg          = u64Arg;
            rc = SUPR3CallVMMR0Ex(pVM->pVMR0, NIL_VMCPUID, VMMR0_DO_PDM_DEVICE_CALL_REQ_HANDLER, 0, &Req.Hdr);
        }
    }
    else
        rc = VERR_ACCESS_DENIED;

    LogFlow(("pdmR3DevHlp_CallR0: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName,
             pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLP,pfnVMGetSuspendReason}
 *
 * Queries the VM layer for the reason of the most recent suspend.
 */
static DECLCALLBACK(VMSUSPENDREASON) pdmR3DevHlp_VMGetSuspendReason(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);

    VMSUSPENDREASON const enmRet = VMR3GetSuspendReason(pVM->pUVM);
    LogFlow(("pdmR3DevHlp_VMGetSuspendReason: caller='%s'/%d: returns %d\n",
             pDevIns->pReg->szName, pDevIns->iInstance, enmRet));
    return enmRet;
}
/** @interface_method_impl{PDMDEVHLP,pfnVMGetResumeReason}
 *
 * Queries the VM layer for the reason of the most recent resume.
 */
static DECLCALLBACK(VMRESUMEREASON) pdmR3DevHlp_VMGetResumeReason(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);

    VMRESUMEREASON const enmRet = VMR3GetResumeReason(pVM->pUVM);
    LogFlow(("pdmR3DevHlp_VMGetResumeReason: caller='%s'/%d: returns %d\n",
             pDevIns->pReg->szName, pDevIns->iInstance, enmRet));
    return enmRet;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnGetUVM}
 *
 * Returns the user-mode VM handle (PUVM) of the VM this device belongs to.
 *
 * Fixed: the LogFlow statement used to print the shared VM pointer (pVMR3)
 * while the function returns pUVM; log the value actually returned.
 */
static DECLCALLBACK(PUVM) pdmR3DevHlp_GetUVM(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    LogFlow(("pdmR3DevHlp_GetUVM: caller='%s'/%d: returns %p\n", pDevIns->pReg->szName, pDevIns->iInstance, pDevIns->Internal.s.pVMR3->pUVM));
    return pDevIns->Internal.s.pVMR3->pUVM;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnGetVM}
 *
 * Returns the shared VM handle (PVM) of the VM this device belongs to.
 */
static DECLCALLBACK(PVM) pdmR3DevHlp_GetVM(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM const pVMRet = pDevIns->Internal.s.pVMR3;
    LogFlow(("pdmR3DevHlp_GetVM: caller='%s'/%d: returns %p\n", pDevIns->pReg->szName, pDevIns->iInstance, pVMRet));
    return pVMRet;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnGetVMCPU}
 *
 * Returns the VMCPU structure of the calling EMT (must be on an EMT).
 */
static DECLCALLBACK(PVMCPU) pdmR3DevHlp_GetVMCPU(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM const pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_GetVMCPU: caller='%s'/%d for CPU %u\n", pDevIns->pReg->szName, pDevIns->iInstance, VMMGetCpuId(pVM)));
    return VMMGetCpu(pVM);
}
/** @interface_method_impl{PDMDEVHLPR3,pfnGetCurrentCpuId}
 *
 * Returns the VMCPUID of the calling thread.
 */
static DECLCALLBACK(VMCPUID) pdmR3DevHlp_GetCurrentCpuId(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VMCPUID const idCpuRet = VMMGetCpuId(pDevIns->Internal.s.pVMR3);
    LogFlow(("pdmR3DevHlp_GetCurrentCpuId: caller='%s'/%d for CPU %u\n", pDevIns->pReg->szName, pDevIns->iInstance, idCpuRet));
    return idCpuRet;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPCIBusRegister}
 *
 * Registers a PCI bus device with PDM: validates the registration record,
 * claims a free slot in pVM->pdm.s.aPciBuses, lazily resolves the optional
 * RC/R0 set-IRQ entry points, copies the R3 callbacks and hands back the
 * PCI helper table.
 */
static DECLCALLBACK(int) pdmR3DevHlp_PCIBusRegister(PPDMDEVINS pDevIns, PPDMPCIBUSREG pPciBusReg, PCPDMPCIHLPR3 *ppPciHlpR3)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    /* Each %p:{%s} pair is fed the same string pointer twice (address + contents). */
    LogFlow(("pdmR3DevHlp_PCIBusRegister: caller='%s'/%d: pPciBusReg=%p:{.u32Version=%#x, .pfnRegisterR3=%p, .pfnIORegionRegisterR3=%p, "
             ".pfnSetIrqR3=%p, .pfnFakePCIBIOSR3=%p, .pszSetIrqRC=%p:{%s}, .pszSetIrqR0=%p:{%s}} ppPciHlpR3=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pPciBusReg, pPciBusReg->u32Version, pPciBusReg->pfnRegisterR3,
             pPciBusReg->pfnIORegionRegisterR3, pPciBusReg->pfnSetIrqR3, pPciBusReg->pfnFakePCIBIOSR3,
             pPciBusReg->pszSetIrqRC, pPciBusReg->pszSetIrqRC, pPciBusReg->pszSetIrqR0, pPciBusReg->pszSetIrqR0, ppPciHlpR3));

    /*
     * Validate the structure.
     */
    if (pPciBusReg->u32Version != PDM_PCIBUSREG_VERSION)
    {
        AssertMsgFailed(("u32Version=%#x expected %#x\n", pPciBusReg->u32Version, PDM_PCIBUSREG_VERSION));
        LogFlow(("pdmR3DevHlp_PCIRegister: caller='%s'/%d: returns %Rrc (version)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /* pfnFakePCIBIOSR3 is only mandatory for the first bus registered (it does
       the BIOS work for all buses). */
    if (    !pPciBusReg->pfnRegisterR3
        ||  !pPciBusReg->pfnIORegionRegisterR3
        ||  !pPciBusReg->pfnSetIrqR3
        ||  (!pPciBusReg->pfnFakePCIBIOSR3 && !pVM->pdm.s.aPciBuses[0].pDevInsR3)) /* Only the first bus needs to do the BIOS work. */
    {
        Assert(pPciBusReg->pfnRegisterR3);
        Assert(pPciBusReg->pfnIORegionRegisterR3);
        Assert(pPciBusReg->pfnSetIrqR3);
        Assert(pPciBusReg->pfnFakePCIBIOSR3);
        LogFlow(("pdmR3DevHlp_PCIBusRegister: caller='%s'/%d: returns %Rrc (R3 callbacks)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /* RC/R0 set-IRQ entry points are optional, but when given must be valid
       symbol-name strings. */
    if (    pPciBusReg->pszSetIrqRC
        &&  !VALID_PTR(pPciBusReg->pszSetIrqRC))
    {
        Assert(VALID_PTR(pPciBusReg->pszSetIrqRC));
        LogFlow(("pdmR3DevHlp_PCIBusRegister: caller='%s'/%d: returns %Rrc (GC callbacks)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (    pPciBusReg->pszSetIrqR0
        &&  !VALID_PTR(pPciBusReg->pszSetIrqR0))
    {
        Assert(VALID_PTR(pPciBusReg->pszSetIrqR0));
        LogFlow(("pdmR3DevHlp_PCIBusRegister: caller='%s'/%d: returns %Rrc (GC callbacks)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (!ppPciHlpR3)
    {
        Assert(ppPciHlpR3);
        LogFlow(("pdmR3DevHlp_PCIBusRegister: caller='%s'/%d: returns %Rrc (ppPciHlpR3)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }

    /*
     * Find free PCI bus entry.
     */
    unsigned iBus = 0;
    for (iBus = 0; iBus < RT_ELEMENTS(pVM->pdm.s.aPciBuses); iBus++)
        if (!pVM->pdm.s.aPciBuses[iBus].pDevInsR3)
            break;
    if (iBus >= RT_ELEMENTS(pVM->pdm.s.aPciBuses))
    {
        AssertMsgFailed(("Too many PCI buses. Max=%u\n", RT_ELEMENTS(pVM->pdm.s.aPciBuses)));
        LogFlow(("pdmR3DevHlp_PCIBusRegister: caller='%s'/%d: returns %Rrc (pci bus)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    PPDMPCIBUS pPciBus = &pVM->pdm.s.aPciBuses[iBus];

    /*
     * Resolve and init the RC bits (lazy symbol lookup in the device's RC module).
     */
    if (pPciBusReg->pszSetIrqRC)
    {
        int rc = pdmR3DevGetSymbolRCLazy(pDevIns, pPciBusReg->pszSetIrqRC, &pPciBus->pfnSetIrqRC);
        AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pPciBusReg->pszSetIrqRC, rc));
        if (RT_FAILURE(rc))
        {
            LogFlow(("pdmR3DevHlp_PCIRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
            return rc;
        }
        pPciBus->pDevInsRC = PDMDEVINS_2_RCPTR(pDevIns);
    }
    else
    {
        pPciBus->pfnSetIrqRC = 0;
        pPciBus->pDevInsRC   = 0;
    }

    /*
     * Resolve and init the R0 bits (lazy symbol lookup in the device's R0 module).
     */
    if (pPciBusReg->pszSetIrqR0)
    {
        int rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pPciBusReg->pszSetIrqR0, &pPciBus->pfnSetIrqR0);
        AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pPciBusReg->pszSetIrqR0, rc));
        if (RT_FAILURE(rc))
        {
            LogFlow(("pdmR3DevHlp_PCIRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
            return rc;
        }
        pPciBus->pDevInsR0 = PDMDEVINS_2_R0PTR(pDevIns);
    }
    else
    {
        pPciBus->pfnSetIrqR0 = 0;
        pPciBus->pDevInsR0   = 0;
    }

    /*
     * Init the R3 bits.  (pfnRegisterMsiR3 and pfnSetConfigCallbacksR3 are
     * copied without validation above - optional callbacks.)
     */
    pPciBus->iBus                    = iBus;
    pPciBus->pDevInsR3               = pDevIns;
    pPciBus->pfnRegisterR3           = pPciBusReg->pfnRegisterR3;
    pPciBus->pfnRegisterMsiR3        = pPciBusReg->pfnRegisterMsiR3;
    pPciBus->pfnIORegionRegisterR3   = pPciBusReg->pfnIORegionRegisterR3;
    pPciBus->pfnSetConfigCallbacksR3 = pPciBusReg->pfnSetConfigCallbacksR3;
    pPciBus->pfnSetIrqR3             = pPciBusReg->pfnSetIrqR3;
    pPciBus->pfnFakePCIBIOSR3        = pPciBusReg->pfnFakePCIBIOSR3;

    Log(("PDM: Registered PCI bus device '%s'/%d pDevIns=%p\n", pDevIns->pReg->szName, pDevIns->iInstance, pDevIns));

    /* set the helper pointer and return. */
    *ppPciHlpR3 = &g_pdmR3DevPciHlp;
    LogFlow(("pdmR3DevHlp_PCIBusRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, VINF_SUCCESS));
    return VINF_SUCCESS;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPICRegister}
 *
 * Registers the (single) PIC device with PDM: validates the registration
 * record, lazily resolves the optional RC/R0 entry points, stores the R3
 * callbacks and hands back the PIC helper table.
 */
static DECLCALLBACK(int) pdmR3DevHlp_PICRegister(PPDMDEVINS pDevIns, PPDMPICREG pPicReg, PCPDMPICHLPR3 *ppPicHlpR3)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    /* Each %p:{%s} pair is fed the same string pointer twice (address + contents). */
    LogFlow(("pdmR3DevHlp_PICRegister: caller='%s'/%d: pPicReg=%p:{.u32Version=%#x, .pfnSetIrqR3=%p, .pfnGetInterruptR3=%p, .pszGetIrqRC=%p:{%s}, .pszGetInterruptRC=%p:{%s}, .pszGetIrqR0=%p:{%s}, .pszGetInterruptR0=%p:{%s} } ppPicHlpR3=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pPicReg, pPicReg->u32Version, pPicReg->pfnSetIrqR3, pPicReg->pfnGetInterruptR3,
             pPicReg->pszSetIrqRC, pPicReg->pszSetIrqRC, pPicReg->pszGetInterruptRC, pPicReg->pszGetInterruptRC,
             pPicReg->pszSetIrqR0, pPicReg->pszSetIrqR0, pPicReg->pszGetInterruptR0, pPicReg->pszGetInterruptR0,
             ppPicHlpR3));

    /*
     * Validate input.
     */
    if (pPicReg->u32Version != PDM_PICREG_VERSION)
    {
        AssertMsgFailed(("u32Version=%#x expected %#x\n", pPicReg->u32Version, PDM_PICREG_VERSION));
        LogFlow(("pdmR3DevHlp_PICRegister: caller='%s'/%d: returns %Rrc (version)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /* The R3 callbacks are mandatory. */
    if (    !pPicReg->pfnSetIrqR3
        ||  !pPicReg->pfnGetInterruptR3)
    {
        Assert(pPicReg->pfnSetIrqR3);
        Assert(pPicReg->pfnGetInterruptR3);
        LogFlow(("pdmR3DevHlp_PICRegister: caller='%s'/%d: returns %Rrc (R3 callbacks)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /* RC symbol names: either both present and valid, or both absent. */
    if (    (   pPicReg->pszSetIrqRC
             || pPicReg->pszGetInterruptRC)
        &&  (   !VALID_PTR(pPicReg->pszSetIrqRC)
             || !VALID_PTR(pPicReg->pszGetInterruptRC))
       )
    {
        Assert(VALID_PTR(pPicReg->pszSetIrqRC));
        Assert(VALID_PTR(pPicReg->pszGetInterruptRC));
        LogFlow(("pdmR3DevHlp_PICRegister: caller='%s'/%d: returns %Rrc (RC callbacks)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /* RC/R0 symbols require the device to be RC/R0 enabled respectively. */
    if (    pPicReg->pszSetIrqRC
        &&  !(pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_RC))
    {
        Assert(pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_RC);
        LogFlow(("pdmR3DevHlp_PICRegister: caller='%s'/%d: returns %Rrc (RC flag)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (    pPicReg->pszSetIrqR0
        &&  !(pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_R0))
    {
        Assert(pDevIns->pReg->fFlags & PDM_DEVREG_FLAGS_R0);
        LogFlow(("pdmR3DevHlp_PICRegister: caller='%s'/%d: returns %Rrc (R0 flag)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (!ppPicHlpR3)
    {
        Assert(ppPicHlpR3);
        LogFlow(("pdmR3DevHlp_PICRegister: caller='%s'/%d: returns %Rrc (ppPicHlpR3)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }

    /*
     * Only one PIC device.
     */
    PVM pVM = pDevIns->Internal.s.pVMR3;
    if (pVM->pdm.s.Pic.pDevInsR3)
    {
        AssertMsgFailed(("Only one pic device is supported!\n"));
        LogFlow(("pdmR3DevHlp_PICRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }

    /*
     * RC stuff - lazily resolve both entry points in the device's RC module.
     */
    if (pPicReg->pszSetIrqRC)
    {
        int rc = pdmR3DevGetSymbolRCLazy(pDevIns, pPicReg->pszSetIrqRC, &pVM->pdm.s.Pic.pfnSetIrqRC);
        AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pPicReg->pszSetIrqRC, rc));
        if (RT_SUCCESS(rc))
        {
            rc = pdmR3DevGetSymbolRCLazy(pDevIns, pPicReg->pszGetInterruptRC, &pVM->pdm.s.Pic.pfnGetInterruptRC);
            AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pPicReg->pszGetInterruptRC, rc));
        }
        if (RT_FAILURE(rc))
        {
            LogFlow(("pdmR3DevHlp_PICRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
            return rc;
        }
        pVM->pdm.s.Pic.pDevInsRC = PDMDEVINS_2_RCPTR(pDevIns);
    }
    else
    {
        pVM->pdm.s.Pic.pDevInsRC = 0;
        pVM->pdm.s.Pic.pfnSetIrqRC = 0;
        pVM->pdm.s.Pic.pfnGetInterruptRC = 0;
    }

    /*
     * R0 stuff - lazily resolve both entry points in the device's R0 module.
     */
    if (pPicReg->pszSetIrqR0)
    {
        int rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pPicReg->pszSetIrqR0, &pVM->pdm.s.Pic.pfnSetIrqR0);
        AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pPicReg->pszSetIrqR0, rc));
        if (RT_SUCCESS(rc))
        {
            rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pPicReg->pszGetInterruptR0, &pVM->pdm.s.Pic.pfnGetInterruptR0);
            AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pPicReg->pszGetInterruptR0, rc));
        }
        if (RT_FAILURE(rc))
        {
            LogFlow(("pdmR3DevHlp_PICRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
            return rc;
        }
        pVM->pdm.s.Pic.pDevInsR0 = PDMDEVINS_2_R0PTR(pDevIns);
        Assert(pVM->pdm.s.Pic.pDevInsR0);
    }
    else
    {
        pVM->pdm.s.Pic.pfnSetIrqR0 = 0;
        pVM->pdm.s.Pic.pfnGetInterruptR0 = 0;
        pVM->pdm.s.Pic.pDevInsR0 = 0;
    }

    /*
     * R3 stuff.
     */
    pVM->pdm.s.Pic.pDevInsR3 = pDevIns;
    pVM->pdm.s.Pic.pfnSetIrqR3 = pPicReg->pfnSetIrqR3;
    pVM->pdm.s.Pic.pfnGetInterruptR3 = pPicReg->pfnGetInterruptR3;
    Log(("PDM: Registered PIC device '%s'/%d pDevIns=%p\n", pDevIns->pReg->szName, pDevIns->iInstance, pDevIns));

    /* set the helper pointer and return. */
    *ppPicHlpR3 = &g_pdmR3DevPicHlp;
    LogFlow(("pdmR3DevHlp_PICRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, VINF_SUCCESS));
    return VINF_SUCCESS;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnAPICRegister} */
static DECLCALLBACK(int) pdmR3DevHlp_APICRegister(PPDMDEVINS pDevIns, PPDMAPICREG pApicReg, PCPDMAPICHLPR3 *ppApicHlpR3)
{
PDMDEV_ASSERT_DEVINS(pDevIns);
VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
LogFlow(("pdmR3DevHlp_APICRegister: caller='%s'/%d: pApicReg=%p:{.u32Version=%#x, .pfnGetInterruptR3=%p, .pfnSetBaseR3=%p, .pfnGetBaseR3=%p, "
".pfnSetTPRR3=%p, .pfnGetTPRR3=%p, .pfnWriteMSR3=%p, .pfnReadMSR3=%p, .pfnBusDeliverR3=%p, .pfnLocalInterruptR3=%p .pfnGetTimerFreqR3=%p, pszGetInterruptRC=%p:{%s}, pszSetBaseRC=%p:{%s}, pszGetBaseRC=%p:{%s}, "
".pszSetTPRRC=%p:{%s}, .pszGetTPRRC=%p:{%s}, .pszWriteMSRRC=%p:{%s}, .pszReadMSRRC=%p:{%s}, .pszBusDeliverRC=%p:{%s}, .pszLocalInterruptRC=%p:{%s}, .pszGetTimerFreqRC=%p:{%s}} ppApicHlpR3=%p\n",
pDevIns->pReg->szName, pDevIns->iInstance, pApicReg, pApicReg->u32Version, pApicReg->pfnGetInterruptR3, pApicReg->pfnSetBaseR3,
pApicReg->pfnGetBaseR3, pApicReg->pfnSetTPRR3, pApicReg->pfnGetTPRR3, pApicReg->pfnWriteMSRR3, pApicReg->pfnReadMSRR3, pApicReg->pfnBusDeliverR3, pApicReg->pfnLocalInterruptR3, pApicReg->pfnGetTimerFreqR3, pApicReg->pszGetInterruptRC,
pApicReg->pszGetInterruptRC, pApicReg->pszSetBaseRC, pApicReg->pszSetBaseRC, pApicReg->pszGetBaseRC, pApicReg->pszGetBaseRC,
pApicReg->pszSetTPRRC, pApicReg->pszSetTPRRC, pApicReg->pszGetTPRRC, pApicReg->pszGetTPRRC, pApicReg->pszWriteMSRRC, pApicReg->pszWriteMSRRC, pApicReg->pszReadMSRRC, pApicReg->pszReadMSRRC, pApicReg->pszBusDeliverRC,
pApicReg->pszBusDeliverRC, pApicReg->pszLocalInterruptRC, pApicReg->pszLocalInterruptRC, pApicReg->pszGetTimerFreqRC, pApicReg->pszGetTimerFreqRC, ppApicHlpR3));
/*
* Validate input.
*/
if (pApicReg->u32Version != PDM_APICREG_VERSION)
{
AssertMsgFailed(("u32Version=%#x expected %#x\n", pApicReg->u32Version, PDM_APICREG_VERSION));
LogFlow(("pdmR3DevHlp_APICRegister: caller='%s'/%d: returns %Rrc (version)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
return VERR_INVALID_PARAMETER;
}
if ( !pApicReg->pfnGetInterruptR3
|| !pApicReg->pfnHasPendingIrqR3
|| !pApicReg->pfnSetBaseR3
|| !pApicReg->pfnGetBaseR3
|| !pApicReg->pfnSetTPRR3
|| !pApicReg->pfnGetTPRR3
|| !pApicReg->pfnWriteMSRR3
|| !pApicReg->pfnReadMSRR3
|| !pApicReg->pfnBusDeliverR3
|| !pApicReg->pfnLocalInterruptR3
|| !pApicReg->pfnGetTimerFreqR3)
{
Assert(pApicReg->pfnGetInterruptR3);
Assert(pApicReg->pfnHasPendingIrqR3);
Assert(pApicReg->pfnSetBaseR3);
Assert(pApicReg->pfnGetBaseR3);
Assert(pApicReg->pfnSetTPRR3);
Assert(pApicReg->pfnGetTPRR3);
Assert(pApicReg->pfnWriteMSRR3);
Assert(pApicReg->pfnReadMSRR3);
Assert(pApicReg->pfnBusDeliverR3);
Assert(pApicReg->pfnLocalInterruptR3);
Assert(pApicReg->pfnGetTimerFreqR3);
LogFlow(("pdmR3DevHlp_APICRegister: caller='%s'/%d: returns %Rrc (R3 callbacks)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
return VERR_INVALID_PARAMETER;
}
if ( ( pApicReg->pszGetInterruptRC
|| pApicReg->pszHasPendingIrqRC
|| pApicReg->pszSetBaseRC
|| pApicReg->pszGetBaseRC
|| pApicReg->pszSetTPRRC
|| pApicReg->pszGetTPRRC
|| pApicReg->pszWriteMSRRC
|| pApicReg->pszReadMSRRC
|| pApicReg->pszBusDeliverRC
|| pApicReg->pszLocalInterruptRC
|| pApicReg->pszGetTimerFreqRC)
&& ( !VALID_PTR(pApicReg->pszGetInterruptRC)
|| !VALID_PTR(pApicReg->pszHasPendingIrqRC)
|| !VALID_PTR(pApicReg->pszSetBaseRC)
|| !VALID_PTR(pApicReg->pszGetBaseRC)
|| !VALID_PTR(pApicReg->pszSetTPRRC)
|| !VALID_PTR(pApicReg->pszGetTPRRC)
|| !VALID_PTR(pApicReg->pszWriteMSRRC)
|| !VALID_PTR(pApicReg->pszReadMSRRC)
|| !VALID_PTR(pApicReg->pszBusDeliverRC)
|| !VALID_PTR(pApicReg->pszLocalInterruptRC)
|| !VALID_PTR(pApicReg->pszGetTimerFreqRC))
)
{
Assert(VALID_PTR(pApicReg->pszGetInterruptRC));
Assert(VALID_PTR(pApicReg->pszHasPendingIrqRC));
Assert(VALID_PTR(pApicReg->pszSetBaseRC));
Assert(VALID_PTR(pApicReg->pszGetBaseRC));
Assert(VALID_PTR(pApicReg->pszSetTPRRC));
Assert(VALID_PTR(pApicReg->pszGetTPRRC));
Assert(VALID_PTR(pApicReg->pszReadMSRRC));
Assert(VALID_PTR(pApicReg->pszWriteMSRRC));
Assert(VALID_PTR(pApicReg->pszBusDeliverRC));
Assert(VALID_PTR(pApicReg->pszLocalInterruptRC));
Assert(VALID_PTR(pApicReg->pszGetTimerFreqRC));
LogFlow(("pdmR3DevHlp_APICRegister: caller='%s'/%d: returns %Rrc (RC callbacks)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
return VERR_INVALID_PARAMETER;
}
if ( ( pApicReg->pszGetInterruptR0
|| pApicReg->pszHasPendingIrqR0
|| pApicReg->pszSetBaseR0
|| pApicReg->pszGetBaseR0
|| pApicReg->pszSetTPRR0
|| pApicReg->pszGetTPRR0
|| pApicReg->pszWriteMSRR0
|| pApicReg->pszReadMSRR0
|| pApicReg->pszBusDeliverR0
|| pApicReg->pszLocalInterruptR0
|| pApicReg->pszGetTimerFreqR0)
&& ( !VALID_PTR(pApicReg->pszGetInterruptR0)
|| !VALID_PTR(pApicReg->pszHasPendingIrqR0)
|| !VALID_PTR(pApicReg->pszSetBaseR0)
|| !VALID_PTR(pApicReg->pszGetBaseR0)
|| !VALID_PTR(pApicReg->pszSetTPRR0)
|| !VALID_PTR(pApicReg->pszGetTPRR0)
|| !VALID_PTR(pApicReg->pszReadMSRR0)
|| !VALID_PTR(pApicReg->pszWriteMSRR0)
|| !VALID_PTR(pApicReg->pszBusDeliverR0)
|| !VALID_PTR(pApicReg->pszLocalInterruptR0)
|| !VALID_PTR(pApicReg->pszGetTimerFreqR0))
)
{
Assert(VALID_PTR(pApicReg->pszGetInterruptR0));
Assert(VALID_PTR(pApicReg->pszHasPendingIrqR0));
Assert(VALID_PTR(pApicReg->pszSetBaseR0));
Assert(VALID_PTR(pApicReg->pszGetBaseR0));
Assert(VALID_PTR(pApicReg->pszSetTPRR0));
Assert(VALID_PTR(pApicReg->pszGetTPRR0));
Assert(VALID_PTR(pApicReg->pszReadMSRR0));
Assert(VALID_PTR(pApicReg->pszWriteMSRR0));
Assert(VALID_PTR(pApicReg->pszBusDeliverR0));
Assert(VALID_PTR(pApicReg->pszLocalInterruptR0));
Assert(VALID_PTR(pApicReg->pszGetTimerFreqR0));
LogFlow(("pdmR3DevHlp_APICRegister: caller='%s'/%d: returns %Rrc (R0 callbacks)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
return VERR_INVALID_PARAMETER;
}
if (!ppApicHlpR3)
{
Assert(ppApicHlpR3);
LogFlow(("pdmR3DevHlp_APICRegister: caller='%s'/%d: returns %Rrc (ppApicHlpR3)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
return VERR_INVALID_PARAMETER;
}
/*
* Only one APIC device. On SMP we have single logical device covering all LAPICs,
* as they need to communicate and share state easily.
*/
PVM pVM = pDevIns->Internal.s.pVMR3;
if (pVM->pdm.s.Apic.pDevInsR3)
{
AssertMsgFailed(("Only one apic device is supported!\n"));
LogFlow(("pdmR3DevHlp_APICRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
return VERR_INVALID_PARAMETER;
}
/*
* Resolve & initialize the RC bits.
*/
if (pApicReg->pszGetInterruptRC)
{
int rc = pdmR3DevGetSymbolRCLazy(pDevIns, pApicReg->pszGetInterruptRC, &pVM->pdm.s.Apic.pfnGetInterruptRC);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pApicReg->pszGetInterruptRC, rc));
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolRCLazy(pDevIns, pApicReg->pszHasPendingIrqRC, &pVM->pdm.s.Apic.pfnHasPendingIrqRC);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pApicReg->pszHasPendingIrqRC, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolRCLazy(pDevIns, pApicReg->pszSetBaseRC, &pVM->pdm.s.Apic.pfnSetBaseRC);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pApicReg->pszSetBaseRC, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolRCLazy(pDevIns, pApicReg->pszGetBaseRC, &pVM->pdm.s.Apic.pfnGetBaseRC);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pApicReg->pszGetBaseRC, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolRCLazy(pDevIns, pApicReg->pszSetTPRRC, &pVM->pdm.s.Apic.pfnSetTPRRC);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pApicReg->pszSetTPRRC, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolRCLazy(pDevIns, pApicReg->pszGetTPRRC, &pVM->pdm.s.Apic.pfnGetTPRRC);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pApicReg->pszGetTPRRC, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolRCLazy(pDevIns, pApicReg->pszWriteMSRRC, &pVM->pdm.s.Apic.pfnWriteMSRRC);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pApicReg->pszWriteMSRRC, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolRCLazy(pDevIns, pApicReg->pszReadMSRRC, &pVM->pdm.s.Apic.pfnReadMSRRC);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pApicReg->pszReadMSRRC, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolRCLazy(pDevIns, pApicReg->pszBusDeliverRC, &pVM->pdm.s.Apic.pfnBusDeliverRC);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pApicReg->pszBusDeliverRC, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolRCLazy(pDevIns, pApicReg->pszLocalInterruptRC, &pVM->pdm.s.Apic.pfnLocalInterruptRC);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pApicReg->pszLocalInterruptRC, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolRCLazy(pDevIns, pApicReg->pszGetTimerFreqRC, &pVM->pdm.s.Apic.pfnGetTimerFreqRC);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pApicReg->pszGetTimerFreqRC, rc));
}
if (RT_FAILURE(rc))
{
LogFlow(("pdmR3DevHlp_APICRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
return rc;
}
pVM->pdm.s.Apic.pDevInsRC = PDMDEVINS_2_RCPTR(pDevIns);
}
else
{
pVM->pdm.s.Apic.pDevInsRC = 0;
pVM->pdm.s.Apic.pfnGetInterruptRC = 0;
pVM->pdm.s.Apic.pfnHasPendingIrqRC = 0;
pVM->pdm.s.Apic.pfnSetBaseRC = 0;
pVM->pdm.s.Apic.pfnGetBaseRC = 0;
pVM->pdm.s.Apic.pfnSetTPRRC = 0;
pVM->pdm.s.Apic.pfnGetTPRRC = 0;
pVM->pdm.s.Apic.pfnWriteMSRRC = 0;
pVM->pdm.s.Apic.pfnReadMSRRC = 0;
pVM->pdm.s.Apic.pfnBusDeliverRC = 0;
pVM->pdm.s.Apic.pfnLocalInterruptRC = 0;
pVM->pdm.s.Apic.pfnGetTimerFreqRC = 0;
}
/*
* Resolve & initialize the R0 bits.
*/
if (pApicReg->pszGetInterruptR0)
{
int rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pApicReg->pszGetInterruptR0, &pVM->pdm.s.Apic.pfnGetInterruptR0);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pApicReg->pszGetInterruptR0, rc));
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pApicReg->pszHasPendingIrqR0, &pVM->pdm.s.Apic.pfnHasPendingIrqR0);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pApicReg->pszHasPendingIrqR0, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pApicReg->pszSetBaseR0, &pVM->pdm.s.Apic.pfnSetBaseR0);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pApicReg->pszSetBaseR0, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pApicReg->pszGetBaseR0, &pVM->pdm.s.Apic.pfnGetBaseR0);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pApicReg->pszGetBaseR0, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pApicReg->pszSetTPRR0, &pVM->pdm.s.Apic.pfnSetTPRR0);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pApicReg->pszSetTPRR0, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pApicReg->pszGetTPRR0, &pVM->pdm.s.Apic.pfnGetTPRR0);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pApicReg->pszGetTPRR0, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pApicReg->pszWriteMSRR0, &pVM->pdm.s.Apic.pfnWriteMSRR0);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pApicReg->pszWriteMSRR0, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pApicReg->pszReadMSRR0, &pVM->pdm.s.Apic.pfnReadMSRR0);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pApicReg->pszReadMSRR0, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pApicReg->pszBusDeliverR0, &pVM->pdm.s.Apic.pfnBusDeliverR0);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pApicReg->pszBusDeliverR0, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pApicReg->pszLocalInterruptR0, &pVM->pdm.s.Apic.pfnLocalInterruptR0);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pApicReg->pszLocalInterruptR0, rc));
}
if (RT_SUCCESS(rc))
{
rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pApicReg->pszGetTimerFreqR0, &pVM->pdm.s.Apic.pfnGetTimerFreqR0);
AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pApicReg->pszGetTimerFreqR0, rc));
}
if (RT_FAILURE(rc))
{
LogFlow(("pdmR3DevHlp_APICRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
return rc;
}
pVM->pdm.s.Apic.pDevInsR0 = PDMDEVINS_2_R0PTR(pDevIns);
Assert(pVM->pdm.s.Apic.pDevInsR0);
}
else
{
pVM->pdm.s.Apic.pfnGetInterruptR0 = 0;
pVM->pdm.s.Apic.pfnHasPendingIrqR0 = 0;
pVM->pdm.s.Apic.pfnSetBaseR0 = 0;
pVM->pdm.s.Apic.pfnGetBaseR0 = 0;
pVM->pdm.s.Apic.pfnSetTPRR0 = 0;
pVM->pdm.s.Apic.pfnGetTPRR0 = 0;
pVM->pdm.s.Apic.pfnWriteMSRR0 = 0;
pVM->pdm.s.Apic.pfnReadMSRR0 = 0;
pVM->pdm.s.Apic.pfnBusDeliverR0 = 0;
pVM->pdm.s.Apic.pfnLocalInterruptR0 = 0;
pVM->pdm.s.Apic.pfnGetTimerFreqR0 = 0;
pVM->pdm.s.Apic.pDevInsR0 = 0;
}
/*
* Initialize the HC bits.
*/
pVM->pdm.s.Apic.pDevInsR3 = pDevIns;
pVM->pdm.s.Apic.pfnGetInterruptR3 = pApicReg->pfnGetInterruptR3;
pVM->pdm.s.Apic.pfnHasPendingIrqR3 = pApicReg->pfnHasPendingIrqR3;
pVM->pdm.s.Apic.pfnSetBaseR3 = pApicReg->pfnSetBaseR3;
pVM->pdm.s.Apic.pfnGetBaseR3 = pApicReg->pfnGetBaseR3;
pVM->pdm.s.Apic.pfnSetTPRR3 = pApicReg->pfnSetTPRR3;
pVM->pdm.s.Apic.pfnGetTPRR3 = pApicReg->pfnGetTPRR3;
pVM->pdm.s.Apic.pfnWriteMSRR3 = pApicReg->pfnWriteMSRR3;
pVM->pdm.s.Apic.pfnReadMSRR3 = pApicReg->pfnReadMSRR3;
pVM->pdm.s.Apic.pfnBusDeliverR3 = pApicReg->pfnBusDeliverR3;
pVM->pdm.s.Apic.pfnLocalInterruptR3 = pApicReg->pfnLocalInterruptR3;
pVM->pdm.s.Apic.pfnGetTimerFreqR3 = pApicReg->pfnGetTimerFreqR3;
Log(("PDM: Registered APIC device '%s'/%d pDevIns=%p\n", pDevIns->pReg->szName, pDevIns->iInstance, pDevIns));
/* set the helper pointer and return. */
*ppApicHlpR3 = &g_pdmR3DevApicHlp;
LogFlow(("pdmR3DevHlp_APICRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, VINF_SUCCESS));
return VINF_SUCCESS;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnIOAPICRegister}
 *
 * Registers the (single) I/O APIC device with PDM: validates the registration
 * record, resolves the optional RC/R0 symbol names to addresses, caches the
 * callback table in pVM->pdm.s.IoApic and hands back the R3 helper table.
 */
static DECLCALLBACK(int) pdmR3DevHlp_IOAPICRegister(PPDMDEVINS pDevIns, PPDMIOAPICREG pIoApicReg, PCPDMIOAPICHLPR3 *ppIoApicHlpR3)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: pIoApicReg=%p:{.u32Version=%#x, .pfnSetIrqR3=%p, .pszSetIrqRC=%p:{%s}, .pszSetIrqR0=%p:{%s}} ppIoApicHlpR3=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pIoApicReg, pIoApicReg->u32Version, pIoApicReg->pfnSetIrqR3,
             pIoApicReg->pszSetIrqRC, pIoApicReg->pszSetIrqRC, pIoApicReg->pszSetIrqR0, pIoApicReg->pszSetIrqR0, ppIoApicHlpR3));
    /*
     * Validate input.
     */
    if (pIoApicReg->u32Version != PDM_IOAPICREG_VERSION)
    {
        AssertMsgFailed(("u32Version=%#x expected %#x\n", pIoApicReg->u32Version, PDM_IOAPICREG_VERSION));
        LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc (version)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /* The R3 SetIrq and SendMsi callbacks are mandatory. */
    if (!pIoApicReg->pfnSetIrqR3 || !pIoApicReg->pfnSendMsiR3)
    {
        Assert(pIoApicReg->pfnSetIrqR3);
        LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc (R3 callbacks)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /* The RC/R0 symbol names are optional, but must be valid pointers when given. */
    if (    pIoApicReg->pszSetIrqRC
        &&  !VALID_PTR(pIoApicReg->pszSetIrqRC))
    {
        Assert(VALID_PTR(pIoApicReg->pszSetIrqRC));
        LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc (GC callbacks)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (    pIoApicReg->pszSendMsiRC
        &&  !VALID_PTR(pIoApicReg->pszSendMsiRC))
    {
        Assert(VALID_PTR(pIoApicReg->pszSendMsiRC));
        LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc (GC callbacks)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (    pIoApicReg->pszSetIrqR0
        &&  !VALID_PTR(pIoApicReg->pszSetIrqR0))
    {
        Assert(VALID_PTR(pIoApicReg->pszSetIrqR0));
        LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc (GC callbacks)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (    pIoApicReg->pszSendMsiR0
        &&  !VALID_PTR(pIoApicReg->pszSendMsiR0))
    {
        Assert(VALID_PTR(pIoApicReg->pszSendMsiR0));
        LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc (GC callbacks)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (!ppIoApicHlpR3)
    {
        Assert(ppIoApicHlpR3);
        LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc (ppApicHlp)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /*
     * The I/O APIC requires the APIC to be present (hacks++).
     * If the I/O APIC does GC stuff so must the APIC.
     */
    PVM pVM = pDevIns->Internal.s.pVMR3;
    if (!pVM->pdm.s.Apic.pDevInsR3)
    {
        AssertMsgFailed(("Configuration error / Init order error! No APIC!\n"));
        LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc (no APIC)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (    pIoApicReg->pszSetIrqRC
        &&  !pVM->pdm.s.Apic.pDevInsRC)
    {
        AssertMsgFailed(("Configuration error! APIC doesn't do GC, I/O APIC does!\n"));
        LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc (no GC APIC)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /*
     * Only one I/O APIC device.
     */
    if (pVM->pdm.s.IoApic.pDevInsR3)
    {
        AssertMsgFailed(("Only one ioapic device is supported!\n"));
        LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc (only one)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /*
     * Resolve & initialize the GC bits.
     */
    if (pIoApicReg->pszSetIrqRC)
    {
        int rc = pdmR3DevGetSymbolRCLazy(pDevIns, pIoApicReg->pszSetIrqRC, &pVM->pdm.s.IoApic.pfnSetIrqRC);
        AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pIoApicReg->pszSetIrqRC, rc));
        if (RT_FAILURE(rc))
        {
            LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
            return rc;
        }
        pVM->pdm.s.IoApic.pDevInsRC = PDMDEVINS_2_RCPTR(pDevIns);
    }
    else
    {
        pVM->pdm.s.IoApic.pDevInsRC = 0;
        pVM->pdm.s.IoApic.pfnSetIrqRC = 0;
    }
    if (pIoApicReg->pszSendMsiRC)
    {
        /* Bug fix: resolve the SendMsi symbol, not the SetIrq one (was pszSetIrqRC). */
        int rc = pdmR3DevGetSymbolRCLazy(pDevIns, pIoApicReg->pszSendMsiRC, &pVM->pdm.s.IoApic.pfnSendMsiRC);
        AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szRCMod, pIoApicReg->pszSendMsiRC, rc));
        if (RT_FAILURE(rc))
        {
            LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
            return rc;
        }
    }
    else
    {
        pVM->pdm.s.IoApic.pfnSendMsiRC = 0;
    }
    /*
     * Resolve & initialize the R0 bits.
     */
    if (pIoApicReg->pszSetIrqR0)
    {
        int rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pIoApicReg->pszSetIrqR0, &pVM->pdm.s.IoApic.pfnSetIrqR0);
        AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pIoApicReg->pszSetIrqR0, rc));
        if (RT_FAILURE(rc))
        {
            LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
            return rc;
        }
        pVM->pdm.s.IoApic.pDevInsR0 = PDMDEVINS_2_R0PTR(pDevIns);
        Assert(pVM->pdm.s.IoApic.pDevInsR0);
    }
    else
    {
        pVM->pdm.s.IoApic.pfnSetIrqR0 = 0;
        pVM->pdm.s.IoApic.pDevInsR0 = 0;
    }
    if (pIoApicReg->pszSendMsiR0)
    {
        int rc = pdmR3DevGetSymbolR0Lazy(pDevIns, pIoApicReg->pszSendMsiR0, &pVM->pdm.s.IoApic.pfnSendMsiR0);
        AssertMsgRC(rc, ("%s::%s rc=%Rrc\n", pDevIns->pReg->szR0Mod, pIoApicReg->pszSendMsiR0, rc));
        if (RT_FAILURE(rc))
        {
            LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
            return rc;
        }
    }
    else
    {
        pVM->pdm.s.IoApic.pfnSendMsiR0 = 0;
    }
    /*
     * Initialize the R3 bits.
     */
    pVM->pdm.s.IoApic.pDevInsR3 = pDevIns;
    pVM->pdm.s.IoApic.pfnSetIrqR3 = pIoApicReg->pfnSetIrqR3;
    pVM->pdm.s.IoApic.pfnSendMsiR3 = pIoApicReg->pfnSendMsiR3;
    Log(("PDM: Registered I/O APIC device '%s'/%d pDevIns=%p\n", pDevIns->pReg->szName, pDevIns->iInstance, pDevIns));
    /* set the helper pointer and return. */
    *ppIoApicHlpR3 = &g_pdmR3DevIoApicHlp;
    LogFlow(("pdmR3DevHlp_IOAPICRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, VINF_SUCCESS));
    return VINF_SUCCESS;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnHPETRegister}
 *
 * Registers an HPET device: validates the registration record version and
 * the output pointer, then hands back the shared R3 HPET helper table.
 */
static DECLCALLBACK(int) pdmR3DevHlp_HPETRegister(PPDMDEVINS pDevIns, PPDMHPETREG pHpetReg, PCPDMHPETHLPR3 *ppHpetHlpR3)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    /* Bug fix: the format string has %s/%d specifiers but no arguments were
       passed -- that is undefined behavior when flow logging is enabled. */
    LogFlow(("pdmR3DevHlp_HPETRegister: caller='%s'/%d:\n", pDevIns->pReg->szName, pDevIns->iInstance));
    /*
     * Validate input.
     */
    if (pHpetReg->u32Version != PDM_HPETREG_VERSION)
    {
        AssertMsgFailed(("u32Version=%#x expected %#x\n", pHpetReg->u32Version, PDM_HPETREG_VERSION));
        LogFlow(("pdmR3DevHlp_HPETRegister: caller='%s'/%d: returns %Rrc (version)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (!ppHpetHlpR3)
    {
        Assert(ppHpetHlpR3);
        LogFlow(("pdmR3DevHlp_HPETRegister: caller='%s'/%d: returns %Rrc (ppApicHlpR3)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /* set the helper pointer and return. */
    *ppHpetHlpR3 = &g_pdmR3DevHpetHlp;
    LogFlow(("pdmR3DevHlp_HPETRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, VINF_SUCCESS));
    return VINF_SUCCESS;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnPciRawRegister}
 *
 * Registers a raw-PCI passthrough device: validates the registration record
 * version and the output pointer, then hands back the R3 PciRaw helper table.
 */
static DECLCALLBACK(int) pdmR3DevHlp_PciRawRegister(PPDMDEVINS pDevIns, PPDMPCIRAWREG pPciRawReg, PCPDMPCIRAWHLPR3 *ppPciRawHlpR3)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    /* Bug fix: the format string has %s/%d specifiers but no arguments were
       passed -- that is undefined behavior when flow logging is enabled. */
    LogFlow(("pdmR3DevHlp_PciRawRegister: caller='%s'/%d:\n", pDevIns->pReg->szName, pDevIns->iInstance));
    /*
     * Validate input.
     */
    if (pPciRawReg->u32Version != PDM_PCIRAWREG_VERSION)
    {
        AssertMsgFailed(("u32Version=%#x expected %#x\n", pPciRawReg->u32Version, PDM_PCIRAWREG_VERSION));
        LogFlow(("pdmR3DevHlp_PciRawRegister: caller='%s'/%d: returns %Rrc (version)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (!ppPciRawHlpR3)
    {
        Assert(ppPciRawHlpR3);
        LogFlow(("pdmR3DevHlp_PciRawRegister: caller='%s'/%d: returns %Rrc (ppApicHlpR3)\n", pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /* set the helper pointer and return. */
    *ppPciRawHlpR3 = &g_pdmR3DevPciRawHlp;
    LogFlow(("pdmR3DevHlp_PciRawRegister: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, VINF_SUCCESS));
    return VINF_SUCCESS;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnDMACRegister}
 *
 * Registers the (single) DMA controller device: validates the registration
 * record and all mandatory callbacks, allocates a PDMDMAC wrapper on the MM
 * heap, copies the callback table into it, and hands back the DMAC helpers.
 */
static DECLCALLBACK(int) pdmR3DevHlp_DMACRegister(PPDMDEVINS pDevIns, PPDMDMACREG pDmacReg, PCPDMDMACHLP *ppDmacHlp)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    LogFlow(("pdmR3DevHlp_DMACRegister: caller='%s'/%d: pDmacReg=%p:{.u32Version=%#x, .pfnRun=%p, .pfnRegister=%p, .pfnReadMemory=%p, .pfnWriteMemory=%p, .pfnSetDREQ=%p, .pfnGetChannelMode=%p} ppDmacHlp=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pDmacReg, pDmacReg->u32Version, pDmacReg->pfnRun, pDmacReg->pfnRegister,
             pDmacReg->pfnReadMemory, pDmacReg->pfnWriteMemory, pDmacReg->pfnSetDREQ, pDmacReg->pfnGetChannelMode, ppDmacHlp));
    /*
     * Validate input.
     */
    if (pDmacReg->u32Version != PDM_DMACREG_VERSION)
    {
        AssertMsgFailed(("u32Version=%#x expected %#x\n", pDmacReg->u32Version,
                         PDM_DMACREG_VERSION));
        LogFlow(("pdmR3DevHlp_DMACRegister: caller='%s'/%d: returns %Rrc (version)\n",
                 pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /* All callbacks are mandatory for a DMA controller. */
    if (    !pDmacReg->pfnRun
        ||  !pDmacReg->pfnRegister
        ||  !pDmacReg->pfnReadMemory
        ||  !pDmacReg->pfnWriteMemory
        ||  !pDmacReg->pfnSetDREQ
        ||  !pDmacReg->pfnGetChannelMode)
    {
        Assert(pDmacReg->pfnRun);
        Assert(pDmacReg->pfnRegister);
        Assert(pDmacReg->pfnReadMemory);
        Assert(pDmacReg->pfnWriteMemory);
        Assert(pDmacReg->pfnSetDREQ);
        Assert(pDmacReg->pfnGetChannelMode);
        LogFlow(("pdmR3DevHlp_DMACRegister: caller='%s'/%d: returns %Rrc (callbacks)\n",
                 pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    if (!ppDmacHlp)
    {
        Assert(ppDmacHlp);
        LogFlow(("pdmR3DevHlp_DMACRegister: caller='%s'/%d: returns %Rrc (ppDmacHlp)\n",
                 pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /*
     * Only one DMA device.
     */
    PVM pVM = pDevIns->Internal.s.pVMR3;
    if (pVM->pdm.s.pDmac)
    {
        AssertMsgFailed(("Only one DMA device is supported!\n"));
        LogFlow(("pdmR3DevHlp_DMACRegister: caller='%s'/%d: returns %Rrc\n",
                 pDevIns->pReg->szName, pDevIns->iInstance, VERR_INVALID_PARAMETER));
        return VERR_INVALID_PARAMETER;
    }
    /*
     * Allocate and initialize the DMAC wrapper structure.
     * (The original comment said "pci bus structure" -- copy-paste leftover.)
     */
    int rc = VINF_SUCCESS;
    PPDMDMAC pDmac = (PPDMDMAC)MMR3HeapAlloc(pDevIns->Internal.s.pVMR3, MM_TAG_PDM_DEVICE, sizeof(*pDmac));
    if (pDmac)
    {
        pDmac->pDevIns = pDevIns;
        pDmac->Reg = *pDmacReg;      /* struct copy of the whole callback table */
        pVM->pdm.s.pDmac = pDmac;
        /* set the helper pointer. */
        *ppDmacHlp = &g_pdmR3DevDmacHlp;
        Log(("PDM: Registered DMAC device '%s'/%d pDevIns=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, pDevIns));
    }
    else
        rc = VERR_NO_MEMORY;
    LogFlow(("pdmR3DevHlp_DMACRegister: caller='%s'/%d: returns %Rrc\n",
             pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/**
 * @copydoc PDMDEVHLPR3::pfnRegisterVMMDevHeap
 *
 * Thin wrapper that forwards the heap registration to PDMR3VmmDevHeapRegister.
 */
static DECLCALLBACK(int) pdmR3DevHlp_RegisterVMMDevHeap(PPDMDEVINS pDevIns, RTGCPHYS GCPhys, RTR3PTR pvHeap, unsigned cbSize)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    return PDMR3VmmDevHeapRegister(pDevIns->Internal.s.pVMR3, GCPhys, pvHeap, cbSize);
}
/**
 * @copydoc PDMDEVHLPR3::pfnUnregisterVMMDevHeap
 *
 * Thin wrapper that forwards the heap deregistration to PDMR3VmmDevHeapUnregister.
 */
static DECLCALLBACK(int) pdmR3DevHlp_UnregisterVMMDevHeap(PPDMDEVINS pDevIns, RTGCPHYS GCPhys)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    return PDMR3VmmDevHeapUnregister(pDevIns->Internal.s.pVMR3, GCPhys);
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMReset}
 *
 * Requests a VM reset on behalf of a device.  Either schedules a real reset
 * via the VM_FF_RESET force flag, or -- when the CFGM key PDM/HaltOnReset is
 * set -- halts the VM instead of resetting it.
 */
static DECLCALLBACK(int) pdmR3DevHlp_VMReset(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_VMReset: caller='%s'/%d: VM_FF_RESET %d -> 1\n",
             pDevIns->pReg->szName, pDevIns->iInstance, VM_FF_IS_SET(pVM, VM_FF_RESET)));
    /*
     * We postpone this operation because we're likely to be inside a I/O instruction
     * and the EIP will be updated when we return.
     * We still return VINF_EM_RESET to break out of any execution loops and force FF evaluation.
     */
    bool fHaltOnReset;
    /* fHaltOnReset is only read when the query succeeded (short-circuit below),
       so it is never used uninitialized. */
    int rc = CFGMR3QueryBool(CFGMR3GetChild(CFGMR3GetRoot(pVM), "PDM"), "HaltOnReset", &fHaltOnReset);
    if (RT_SUCCESS(rc) && fHaltOnReset)
    {
        Log(("pdmR3DevHlp_VMReset: Halt On Reset!\n"));
        rc = VINF_EM_HALT;
    }
    else
    {
        VM_FF_SET(pVM, VM_FF_RESET);
        rc = VINF_EM_RESET;
    }
    LogFlow(("pdmR3DevHlp_VMReset: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMSuspend}
 *
 * Suspends the VM on behalf of a device.  On SMP configurations the suspend
 * is queued as an async EMT request (to avoid deadlocking on locks held by
 * the caller, e.g. the IOM lock) and VINF_EM_SUSPEND is returned; on UP it
 * is performed synchronously.
 */
static DECLCALLBACK(int) pdmR3DevHlp_VMSuspend(PPDMDEVINS pDevIns)
{
    int rc;
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_VMSuspend: caller='%s'/%d:\n",
             pDevIns->pReg->szName, pDevIns->iInstance));
    /** @todo Always take the SMP path - fewer code paths. */
    if (pVM->cCpus > 1)
    {
        /* We own the IOM lock here and could cause a deadlock by waiting for a VCPU that is blocking on the IOM lock. */
        rc = VMR3ReqCallNoWait(pVM, VMCPUID_ANY_QUEUE, (PFNRT)VMR3Suspend, 2, pVM->pUVM, VMSUSPENDREASON_VM);
        AssertRC(rc);
        rc = VINF_EM_SUSPEND;
    }
    else
        rc = VMR3Suspend(pVM->pUVM, VMSUSPENDREASON_VM);
    LogFlow(("pdmR3DevHlp_VMSuspend: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/**
 * Worker for pdmR3DevHlp_VMSuspendSaveAndPowerOff that is invoked via a queued
 * EMT request to avoid deadlocks.
 *
 * Sequence: suspend -> save state (via the VMM2USER callback) -> power off.
 * A failure at any step leaves the VM in the last successfully reached state
 * (e.g. suspended when the save fails).
 *
 * @returns VBox status code fit for scheduling.
 * @param   pVM     Pointer to the VM.
 * @param   pDevIns The device that triggered this action.
 */
static DECLCALLBACK(int) pdmR3DevHlp_VMSuspendSaveAndPowerOffWorker(PVM pVM,  PPDMDEVINS pDevIns)
{
    /*
     * Suspend the VM first then do the saving.
     */
    int rc = VMR3Suspend(pVM->pUVM, VMSUSPENDREASON_VM);
    if (RT_SUCCESS(rc))
    {
        PUVM pUVM = pVM->pUVM;
        /* Delegate the actual saving to the VMM2USER callback (set by the frontend). */
        rc = pUVM->pVmm2UserMethods->pfnSaveState(pVM->pUVM->pVmm2UserMethods, pUVM);
        /*
         * On success, power off the VM, on failure we'll leave it suspended.
         */
        if (RT_SUCCESS(rc))
        {
            rc = VMR3PowerOff(pVM->pUVM);
            if (RT_FAILURE(rc))
                LogRel(("%s/SSP: VMR3PowerOff failed: %Rrc\n", pDevIns->pReg->szName, rc));
        }
        else
            LogRel(("%s/SSP: pfnSaveState failed: %Rrc\n", pDevIns->pReg->szName, rc));
    }
    else
        LogRel(("%s/SSP: Suspend failed: %Rrc\n", pDevIns->pReg->szName, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMSuspendSaveAndPowerOff}
 *
 * Queues the suspend+save+power-off worker as an async EMT request (to avoid
 * deadlocks) and returns VINF_EM_SUSPEND.  Only available when the frontend
 * has installed a pfnSaveState VMM2USER callback; otherwise VERR_NOT_SUPPORTED.
 */
static DECLCALLBACK(int) pdmR3DevHlp_VMSuspendSaveAndPowerOff(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_VMSuspendSaveAndPowerOff: caller='%s'/%d:\n",
             pDevIns->pReg->szName, pDevIns->iInstance));
    int rc;
    if (   pVM->pUVM->pVmm2UserMethods
        && pVM->pUVM->pVmm2UserMethods->pfnSaveState)
    {
        rc = VMR3ReqCallNoWait(pVM, VMCPUID_ANY_QUEUE, (PFNRT)pdmR3DevHlp_VMSuspendSaveAndPowerOffWorker, 2, pVM, pDevIns);
        if (RT_SUCCESS(rc))
        {
            LogRel(("%s: Suspending, Saving and Powering Off the VM\n", pDevIns->pReg->szName));
            rc = VINF_EM_SUSPEND;
        }
    }
    else
        rc = VERR_NOT_SUPPORTED;
    LogFlow(("pdmR3DevHlp_VMSuspendSaveAndPowerOff: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMPowerOff}
 *
 * Powers off the VM on behalf of a device.  On SMP configurations the power
 * off is queued as an async EMT request (VMR3PowerOff rendezvous with the
 * other CPUs, so doing it synchronously while holding locks could deadlock)
 * and VINF_EM_OFF is returned; on UP it is performed synchronously.
 */
static DECLCALLBACK(int) pdmR3DevHlp_VMPowerOff(PPDMDEVINS pDevIns)
{
    int rc;
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_VMPowerOff: caller='%s'/%d:\n",
             pDevIns->pReg->szName, pDevIns->iInstance));
    /** @todo Always take the SMP path - fewer code paths. */
    if (pVM->cCpus > 1)
    {
        /* We might be holding locks here and could cause a deadlock since
           VMR3PowerOff rendezvous with the other CPUs. */
        rc = VMR3ReqCallNoWait(pVM, VMCPUID_ANY_QUEUE, (PFNRT)VMR3PowerOff, 1, pVM->pUVM);
        AssertRC(rc);
        /* Set the VCPU state to stopped here as well to make sure no
           inconsistency with the EM state occurs. */
        VMCPU_SET_STATE(VMMGetCpu(pVM), VMCPUSTATE_STOPPED);
        rc = VINF_EM_OFF;
    }
    else
        rc = VMR3PowerOff(pVM->pUVM);
    LogFlow(("pdmR3DevHlp_VMPowerOff: caller='%s'/%d: returns %Rrc\n", pDevIns->pReg->szName, pDevIns->iInstance, rc));
    return rc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnA20IsEnabled}
 *
 * Queries whether the A20 gate is currently enabled for the calling EMT's VCPU.
 */
static DECLCALLBACK(bool) pdmR3DevHlp_A20IsEnabled(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    bool fRc = PGMPhysIsA20Enabled(VMMGetCpu(pVM));
    LogFlow(("pdmR3DevHlp_A20IsEnabled: caller='%s'/%d: returns %d\n", pDevIns->pReg->szName, pDevIns->iInstance, fRc));
    return fRc;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnA20Set}
 *
 * Enables or disables the A20 gate for the calling EMT's VCPU.
 */
static DECLCALLBACK(void) pdmR3DevHlp_A20Set(PPDMDEVINS pDevIns, bool fEnable)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    PVM pVM = pDevIns->Internal.s.pVMR3;
    VM_ASSERT_EMT(pVM);
    LogFlow(("pdmR3DevHlp_A20Set: caller='%s'/%d: fEnable=%d\n", pDevIns->pReg->szName, pDevIns->iInstance, fEnable));
    PGMR3PhysSetA20(VMMGetCpu(pVM), fEnable);
}
/** @interface_method_impl{PDMDEVHLPR3,pfnGetCpuId}
 *
 * Returns the guest CPUID leaf (sub-leaf 0) values for the calling EMT's VCPU.
 * All four output pointers must be valid.
 */
static DECLCALLBACK(void) pdmR3DevHlp_GetCpuId(PPDMDEVINS pDevIns, uint32_t iLeaf,
                                               uint32_t *pEax, uint32_t *pEbx, uint32_t *pEcx, uint32_t *pEdx)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    VM_ASSERT_EMT(pDevIns->Internal.s.pVMR3);
    LogFlow(("pdmR3DevHlp_GetCpuId: caller='%s'/%d: iLeaf=%d pEax=%p pEbx=%p pEcx=%p pEdx=%p\n",
             pDevIns->pReg->szName, pDevIns->iInstance, iLeaf, pEax, pEbx, pEcx, pEdx));
    AssertPtr(pEax); AssertPtr(pEbx); AssertPtr(pEcx); AssertPtr(pEdx);
    CPUMGetGuestCpuId(VMMGetCpu(pDevIns->Internal.s.pVMR3), iLeaf, 0 /*iSubLeaf*/, pEax, pEbx, pEcx, pEdx);
    /* Bug fix: a stray "<|fim▁hole|>" artifact was fused onto the closing
       brace in the original, breaking the syntax; removed. */
}
/**
 * The device helper structure for trusted devices.
 *
 * NOTE: this is a positional initializer of PDMDEVHLPR3 -- the entry order
 * must match the member order of the structure declaration exactly.
 */
const PDMDEVHLPR3 g_pdmR3DevHlpTrusted =
{
    PDM_DEVHLPR3_VERSION,
    /* I/O port and MMIO registration. */
    pdmR3DevHlp_IOPortRegister,
    pdmR3DevHlp_IOPortRegisterRC,
    pdmR3DevHlp_IOPortRegisterR0,
    pdmR3DevHlp_IOPortDeregister,
    pdmR3DevHlp_MMIORegister,
    pdmR3DevHlp_MMIORegisterRC,
    pdmR3DevHlp_MMIORegisterR0,
    pdmR3DevHlp_MMIODeregister,
    pdmR3DevHlp_MMIO2Register,
    pdmR3DevHlp_MMIO2Deregister,
    pdmR3DevHlp_MMIO2Map,
    pdmR3DevHlp_MMIO2Unmap,
    pdmR3DevHlp_MMHyperMapMMIO2,
    pdmR3DevHlp_MMIO2MapKernel,
    pdmR3DevHlp_ROMRegister,
    pdmR3DevHlp_ROMProtectShadow,
    /* Saved state and timers. */
    pdmR3DevHlp_SSMRegister,
    pdmR3DevHlp_TMTimerCreate,
    pdmR3DevHlp_TMUtcNow,
    /* Guest physical/virtual memory access. */
    pdmR3DevHlp_PhysRead,
    pdmR3DevHlp_PhysWrite,
    pdmR3DevHlp_PhysGCPhys2CCPtr,
    pdmR3DevHlp_PhysGCPhys2CCPtrReadOnly,
    pdmR3DevHlp_PhysReleasePageMappingLock,
    pdmR3DevHlp_PhysReadGCVirt,
    pdmR3DevHlp_PhysWriteGCVirt,
    pdmR3DevHlp_PhysGCPtr2GCPhys,
    /* Heap. */
    pdmR3DevHlp_MMHeapAlloc,
    pdmR3DevHlp_MMHeapAllocZ,
    pdmR3DevHlp_MMHeapFree,
    /* VM state and error reporting. */
    pdmR3DevHlp_VMState,
    pdmR3DevHlp_VMTeleportedAndNotFullyResumedYet,
    pdmR3DevHlp_VMSetError,
    pdmR3DevHlp_VMSetErrorV,
    pdmR3DevHlp_VMSetRuntimeError,
    pdmR3DevHlp_VMSetRuntimeErrorV,
    /* Debugger and statistics. */
    pdmR3DevHlp_DBGFStopV,
    pdmR3DevHlp_DBGFInfoRegister,
    pdmR3DevHlp_DBGFRegRegister,
    pdmR3DevHlp_DBGFTraceBuf,
    pdmR3DevHlp_STAMRegister,
    pdmR3DevHlp_STAMRegisterF,
    pdmR3DevHlp_STAMRegisterV,
    /* PCI and ISA IRQ plumbing. */
    pdmR3DevHlp_PCIRegister,
    pdmR3DevHlp_PCIRegisterMsi,
    pdmR3DevHlp_PCIIORegionRegister,
    pdmR3DevHlp_PCISetConfigCallbacks,
    pdmR3DevHlp_PCIPhysRead,
    pdmR3DevHlp_PCIPhysWrite,
    pdmR3DevHlp_PCISetIrq,
    pdmR3DevHlp_PCISetIrqNoWait,
    pdmR3DevHlp_ISASetIrq,
    pdmR3DevHlp_ISASetIrqNoWait,
    /* Drivers, queues, critsects and threads. */
    pdmR3DevHlp_DriverAttach,
    pdmR3DevHlp_QueueCreate,
    pdmR3DevHlp_CritSectInit,
    pdmR3DevHlp_CritSectGetNop,
    pdmR3DevHlp_CritSectGetNopR0,
    pdmR3DevHlp_CritSectGetNopRC,
    pdmR3DevHlp_SetDeviceCritSect,
    pdmR3DevHlp_ThreadCreate,
    pdmR3DevHlp_SetAsyncNotification,
    pdmR3DevHlp_AsyncNotificationCompleted,
    /* Core chipset device registration. */
    pdmR3DevHlp_RTCRegister,
    pdmR3DevHlp_PCIBusRegister,
    pdmR3DevHlp_PICRegister,
    pdmR3DevHlp_APICRegister,
    pdmR3DevHlp_IOAPICRegister,
    pdmR3DevHlp_HPETRegister,
    pdmR3DevHlp_PciRawRegister,
    pdmR3DevHlp_DMACRegister,
    /* DMA and CMOS. */
    pdmR3DevHlp_DMARegister,
    pdmR3DevHlp_DMAReadMemory,
    pdmR3DevHlp_DMAWriteMemory,
    pdmR3DevHlp_DMASetDREQ,
    pdmR3DevHlp_DMAGetChannelMode,
    pdmR3DevHlp_DMASchedule,
    pdmR3DevHlp_CMOSWrite,
    pdmR3DevHlp_CMOSRead,
    pdmR3DevHlp_AssertEMT,
    pdmR3DevHlp_AssertOther,
    pdmR3DevHlp_LdrGetRCInterfaceSymbols,
    pdmR3DevHlp_LdrGetR0InterfaceSymbols,
    pdmR3DevHlp_CallR0,
    pdmR3DevHlp_VMGetSuspendReason,
    pdmR3DevHlp_VMGetResumeReason,
    /* NOTE(review): seven zero entries -- presumably reserved/padding slots
       in PDMDEVHLPR3; confirm against the structure declaration. */
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    /* Trusted-only helpers (replaced by stubs in g_pdmR3DevHlpUnTrusted). */
    pdmR3DevHlp_GetUVM,
    pdmR3DevHlp_GetVM,
    pdmR3DevHlp_GetVMCPU,
    pdmR3DevHlp_GetCurrentCpuId,
    pdmR3DevHlp_RegisterVMMDevHeap,
    pdmR3DevHlp_UnregisterVMMDevHeap,
    pdmR3DevHlp_VMReset,
    pdmR3DevHlp_VMSuspend,
    pdmR3DevHlp_VMSuspendSaveAndPowerOff,
    pdmR3DevHlp_VMPowerOff,
    pdmR3DevHlp_A20IsEnabled,
    pdmR3DevHlp_A20Set,
    pdmR3DevHlp_GetCpuId,
    pdmR3DevHlp_TMTimeVirtGet,
    pdmR3DevHlp_TMTimeVirtGetFreq,
    pdmR3DevHlp_TMTimeVirtGetNano,
    pdmR3DevHlp_GetSupDrvSession,
    PDM_DEVHLPR3_VERSION /* the end */
};
/** @interface_method_impl{PDMDEVHLPR3,pfnGetUVM}
 * Stub for untrusted devices: triggers a release assertion and returns NULL. */
static DECLCALLBACK(PUVM) pdmR3DevHlp_Untrusted_GetUVM(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
    return NULL;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnGetVM}
 * Stub for untrusted devices: triggers a release assertion and returns NULL. */
static DECLCALLBACK(PVM) pdmR3DevHlp_Untrusted_GetVM(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
    return NULL;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnGetVMCPU}
 * Stub for untrusted devices: triggers a release assertion and returns NULL. */
static DECLCALLBACK(PVMCPU) pdmR3DevHlp_Untrusted_GetVMCPU(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
    return NULL;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnGetCurrentCpuId}
 * Stub for untrusted devices: triggers a release assertion and returns NIL_VMCPUID. */
static DECLCALLBACK(VMCPUID) pdmR3DevHlp_Untrusted_GetCurrentCpuId(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
    return NIL_VMCPUID;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnRegisterVMMDevHeap}
 * Stub for untrusted devices: triggers a release assertion and denies access. */
static DECLCALLBACK(int) pdmR3DevHlp_Untrusted_RegisterVMMDevHeap(PPDMDEVINS pDevIns, RTGCPHYS GCPhys, RTR3PTR pvHeap, unsigned cbSize)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    NOREF(GCPhys); NOREF(pvHeap); NOREF(cbSize);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
    return VERR_ACCESS_DENIED;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnUnregisterVMMDevHeap}
 * Stub for untrusted devices: triggers a release assertion and denies access. */
static DECLCALLBACK(int) pdmR3DevHlp_Untrusted_UnregisterVMMDevHeap(PPDMDEVINS pDevIns, RTGCPHYS GCPhys)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    NOREF(GCPhys);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
    return VERR_ACCESS_DENIED;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMReset}
 * Stub for untrusted devices: triggers a release assertion and denies access. */
static DECLCALLBACK(int) pdmR3DevHlp_Untrusted_VMReset(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
    return VERR_ACCESS_DENIED;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMSuspend}
 * Stub for untrusted devices: triggers a release assertion and denies access. */
static DECLCALLBACK(int) pdmR3DevHlp_Untrusted_VMSuspend(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
    return VERR_ACCESS_DENIED;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMSuspendSaveAndPowerOff}
 * Stub for untrusted devices: triggers a release assertion and denies access. */
static DECLCALLBACK(int) pdmR3DevHlp_Untrusted_VMSuspendSaveAndPowerOff(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
    return VERR_ACCESS_DENIED;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnVMPowerOff}
 * Stub for untrusted devices: triggers a release assertion and denies access. */
static DECLCALLBACK(int) pdmR3DevHlp_Untrusted_VMPowerOff(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
    return VERR_ACCESS_DENIED;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnA20IsEnabled}
 * Stub for untrusted devices: triggers a release assertion and returns false. */
static DECLCALLBACK(bool) pdmR3DevHlp_Untrusted_A20IsEnabled(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
    return false;
}
/** @interface_method_impl{PDMDEVHLPR3,pfnA20Set}
 * Stub for untrusted devices: triggers a release assertion; the flag is ignored. */
static DECLCALLBACK(void) pdmR3DevHlp_Untrusted_A20Set(PPDMDEVINS pDevIns, bool fEnable)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
    NOREF(fEnable);
}
/** @interface_method_impl{PDMDEVHLPR3,pfnGetCpuId} */
static DECLCALLBACK(void) pdmR3DevHlp_Untrusted_GetCpuId(PPDMDEVINS pDevIns, uint32_t iLeaf,
                                                         uint32_t *pEax, uint32_t *pEbx, uint32_t *pEcx, uint32_t *pEdx)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    /* Output parameters deliberately left untouched; this stub only asserts. */
    NOREF(iLeaf); NOREF(pEax); NOREF(pEbx); NOREF(pEcx); NOREF(pEdx);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
}
/** @interface_method_impl{PDMDEVHLPR3,pfnGetSupDrvSession} */
static DECLCALLBACK(PSUPDRVSESSION) pdmR3DevHlp_Untrusted_GetSupDrvSession(PPDMDEVINS pDevIns)
{
    PDMDEV_ASSERT_DEVINS(pDevIns);
    AssertReleaseMsgFailed(("Untrusted device called trusted helper! '%s'/%d\n", pDevIns->pReg->szName, pDevIns->iInstance));
    /* Null session handle; never reached when release asserts are armed. */
    return (PSUPDRVSESSION)0;
}
/**
 * The device helper structure for non-trusted devices.
 *
 * Entry order mirrors PDMDEVHLPR3 and must not be changed: each member is a
 * function pointer assigned positionally.  Harmless helpers reuse the normal
 * pdmR3DevHlp_* implementations; privileged ones are replaced by the
 * pdmR3DevHlp_Untrusted_* stubs above, which raise a release assertion.
 */
const PDMDEVHLPR3 g_pdmR3DevHlpUnTrusted =
{
    PDM_DEVHLPR3_VERSION,
    pdmR3DevHlp_IOPortRegister,
    pdmR3DevHlp_IOPortRegisterRC,
    pdmR3DevHlp_IOPortRegisterR0,
    pdmR3DevHlp_IOPortDeregister,
    pdmR3DevHlp_MMIORegister,
    pdmR3DevHlp_MMIORegisterRC,
    pdmR3DevHlp_MMIORegisterR0,
    pdmR3DevHlp_MMIODeregister,
    pdmR3DevHlp_MMIO2Register,
    pdmR3DevHlp_MMIO2Deregister,
    pdmR3DevHlp_MMIO2Map,
    pdmR3DevHlp_MMIO2Unmap,
    pdmR3DevHlp_MMHyperMapMMIO2,
    pdmR3DevHlp_MMIO2MapKernel,
    pdmR3DevHlp_ROMRegister,
    pdmR3DevHlp_ROMProtectShadow,
    pdmR3DevHlp_SSMRegister,
    pdmR3DevHlp_TMTimerCreate,
    pdmR3DevHlp_TMUtcNow,
    pdmR3DevHlp_PhysRead,
    pdmR3DevHlp_PhysWrite,
    pdmR3DevHlp_PhysGCPhys2CCPtr,
    pdmR3DevHlp_PhysGCPhys2CCPtrReadOnly,
    pdmR3DevHlp_PhysReleasePageMappingLock,
    pdmR3DevHlp_PhysReadGCVirt,
    pdmR3DevHlp_PhysWriteGCVirt,
    pdmR3DevHlp_PhysGCPtr2GCPhys,
    pdmR3DevHlp_MMHeapAlloc,
    pdmR3DevHlp_MMHeapAllocZ,
    pdmR3DevHlp_MMHeapFree,
    pdmR3DevHlp_VMState,
    pdmR3DevHlp_VMTeleportedAndNotFullyResumedYet,
    pdmR3DevHlp_VMSetError,
    pdmR3DevHlp_VMSetErrorV,
    pdmR3DevHlp_VMSetRuntimeError,
    pdmR3DevHlp_VMSetRuntimeErrorV,
    pdmR3DevHlp_DBGFStopV,
    pdmR3DevHlp_DBGFInfoRegister,
    pdmR3DevHlp_DBGFRegRegister,
    pdmR3DevHlp_DBGFTraceBuf,
    pdmR3DevHlp_STAMRegister,
    pdmR3DevHlp_STAMRegisterF,
    pdmR3DevHlp_STAMRegisterV,
    pdmR3DevHlp_PCIRegister,
    pdmR3DevHlp_PCIRegisterMsi,
    pdmR3DevHlp_PCIIORegionRegister,
    pdmR3DevHlp_PCISetConfigCallbacks,
    pdmR3DevHlp_PCIPhysRead,
    pdmR3DevHlp_PCIPhysWrite,
    pdmR3DevHlp_PCISetIrq,
    pdmR3DevHlp_PCISetIrqNoWait,
    pdmR3DevHlp_ISASetIrq,
    pdmR3DevHlp_ISASetIrqNoWait,
    pdmR3DevHlp_DriverAttach,
    pdmR3DevHlp_QueueCreate,
    pdmR3DevHlp_CritSectInit,
    pdmR3DevHlp_CritSectGetNop,
    pdmR3DevHlp_CritSectGetNopR0,
    pdmR3DevHlp_CritSectGetNopRC,
    pdmR3DevHlp_SetDeviceCritSect,
    pdmR3DevHlp_ThreadCreate,
    pdmR3DevHlp_SetAsyncNotification,
    pdmR3DevHlp_AsyncNotificationCompleted,
    pdmR3DevHlp_RTCRegister,
    pdmR3DevHlp_PCIBusRegister,
    pdmR3DevHlp_PICRegister,
    pdmR3DevHlp_APICRegister,
    pdmR3DevHlp_IOAPICRegister,
    pdmR3DevHlp_HPETRegister,
    pdmR3DevHlp_PciRawRegister,
    pdmR3DevHlp_DMACRegister,
    pdmR3DevHlp_DMARegister,
    pdmR3DevHlp_DMAReadMemory,
    pdmR3DevHlp_DMAWriteMemory,
    pdmR3DevHlp_DMASetDREQ,
    pdmR3DevHlp_DMAGetChannelMode,
    pdmR3DevHlp_DMASchedule,
    pdmR3DevHlp_CMOSWrite,
    pdmR3DevHlp_CMOSRead,
    pdmR3DevHlp_AssertEMT,
    pdmR3DevHlp_AssertOther,
    pdmR3DevHlp_LdrGetRCInterfaceSymbols,
    pdmR3DevHlp_LdrGetR0InterfaceSymbols,
    pdmR3DevHlp_CallR0,
    pdmR3DevHlp_VMGetSuspendReason,
    pdmR3DevHlp_VMGetResumeReason,
    /* Zero-filled slots — presumably reserved/padding entries in
       PDMDEVHLPR3; confirm against the structure definition. */
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    /* Privileged helpers: untrusted devices get asserting stubs. */
    pdmR3DevHlp_Untrusted_GetUVM,
    pdmR3DevHlp_Untrusted_GetVM,
    pdmR3DevHlp_Untrusted_GetVMCPU,
    pdmR3DevHlp_Untrusted_GetCurrentCpuId,
    pdmR3DevHlp_Untrusted_RegisterVMMDevHeap,
    pdmR3DevHlp_Untrusted_UnregisterVMMDevHeap,
    pdmR3DevHlp_Untrusted_VMReset,
    pdmR3DevHlp_Untrusted_VMSuspend,
    pdmR3DevHlp_Untrusted_VMSuspendSaveAndPowerOff,
    pdmR3DevHlp_Untrusted_VMPowerOff,
    pdmR3DevHlp_Untrusted_A20IsEnabled,
    pdmR3DevHlp_Untrusted_A20Set,
    pdmR3DevHlp_Untrusted_GetCpuId,
    pdmR3DevHlp_TMTimeVirtGet,
    pdmR3DevHlp_TMTimeVirtGetFreq,
    pdmR3DevHlp_TMTimeVirtGetNano,
    pdmR3DevHlp_Untrusted_GetSupDrvSession,
    PDM_DEVHLPR3_VERSION /* the end */
};
/**
 * Queue consumer callback for internal component.
 *
 * Dispatches deferred device-helper tasks (queued IRQ operations) on the
 * consumer side of the PDM device-helper queue.
 *
 * @returns Success indicator.
 *          If false the item will not be removed and the flushing will stop.
 * @param   pVM     Pointer to the VM.
 * @param   pItem   The item to consume. Upon return this item will be freed.
 */
DECLCALLBACK(bool) pdmR3DevHlpQueueConsumer(PVM pVM, PPDMQUEUEITEMCORE pItem)
{
    PPDMDEVHLPTASK pTask = (PPDMDEVHLPTASK)pItem;
    LogFlow(("pdmR3DevHlpQueueConsumer: enmOp=%d pDevIns=%p\n", pTask->enmOp, pTask->pDevInsR3));
    switch (pTask->enmOp)
    {
        case PDMDEVHLPTASKOP_ISA_SET_IRQ:
            PDMIsaSetIrq(pVM, pTask->u.SetIRQ.iIrq, pTask->u.SetIRQ.iLevel, pTask->u.SetIRQ.uTagSrc);
            break;

        case PDMDEVHLPTASKOP_PCI_SET_IRQ:
        {
            /* Same as pdmR3DevHlp_PCISetIrq, except we've got a tag already. */
            PPDMDEVINS pDevIns = pTask->pDevInsR3;
            PPCIDEVICE pPciDev = pDevIns->Internal.s.pPciDeviceR3;
            if (pPciDev)
            {
                PPDMPCIBUS pBus = pDevIns->Internal.s.pPciBusR3; /** @todo the bus should be associated with the PCI device not the PDM device. */
                Assert(pBus);
                /* Forward to the bus's IRQ setter under the PDM lock. */
                pdmLock(pVM);
                pBus->pfnSetIrqR3(pBus->pDevInsR3, pPciDev, pTask->u.SetIRQ.iIrq,
                                  pTask->u.SetIRQ.iLevel, pTask->u.SetIRQ.uTagSrc);
                pdmUnlock(pVM);
            }
            else
                AssertReleaseMsgFailed(("No PCI device registered!\n"));
            break;
        }

        case PDMDEVHLPTASKOP_IOAPIC_SET_IRQ:
            PDMIoApicSetIrq(pVM, pTask->u.SetIRQ.iIrq, pTask->u.SetIRQ.iLevel, pTask->u.SetIRQ.uTagSrc);
            break;

        default:
            AssertReleaseMsgFailed(("Invalid operation %d\n", pTask->enmOp));
            break;
    }
    /* Always consume the item; errors above are fatal asserts. */
    return true;
}
/** @} */<|fim▁end|> | LogFlow(("pdmR3DevHlp_GetCpuId: caller='%s'/%d: returns void - *pEax=%#x *pEbx=%#x *pEcx=%#x *pEdx=%#x\n",
pDevIns->pReg->szName, pDevIns->iInstance, *pEax, *pEbx, *pEcx, *pEdx)); |
<|file_name|>stub.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
#===============================================================================
#
# Dependencies
#
#-------------------------------------------------------------------------------
from layman.utils import path
from layman.overlays.source import OverlaySource
#===============================================================================
#
# Class StubOverlay
#-------------------------------------------------------------------------------
class StubOverlay(OverlaySource):
    ''' Handles overlays with missing modules.

    Placeholder overlay source used when the VCS backend module for an
    overlay type is not installed; every operation reports the missing
    module instead of performing real work.
    '''

    type = 'N/A'
    type_key = 'n/a'

    def __init__(self, parent, config, _location, ignore = 0):
        super(StubOverlay, self).__init__(parent,
            config, _location, ignore)
        self.branch = self.parent.branch
        # Cache name/type for the canned error/hint messages below.
        self.info = {'name': self.parent.name, 'type': self.parent.ovl_type}
        self.missing_msg = 'Overlay "%(name)s" is missing "%(type)s" module!'\
            % self.info
        self.hint = 'Did you install layman with "%(type)s" support?'\
            % self.info

    def add(self, base):
        '''Add overlay.'''
        # Report the missing module; returns True (non-fatal stub behavior).
        self.output.error(self.missing_msg)
        self.output.warn(self.hint)
        return True

    def update(self, base, src):
        '''
        Updates overlay src-url.
        '''
        self.output.error(self.missing_msg)
        self.output.warn(self.hint)
        return True

    def sync(self, base):
        '''Sync overlay.'''
        self.output.error(self.missing_msg)
        self.output.warn(self.hint)
        return True

    def supported(self):
        '''Overlay type supported?'''
        # Never supported: the real backend module is absent.
        return False
|
<|file_name|>sysex.rs<|end_file_name|><|fim▁begin|>// This file is part of a6-tools.
// Copyright (C) 2017 Jeffrey Sharp
//
// a6-tools is free software: you can redistribute it and/or modify it
// under the terms of the GNU General Public License as published
// by the Free Software Foundation, either version 3 of the License,
// or (at your option) any later version.
//
// a6-tools is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
// the GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with a6-tools. If not, see <http://www.gnu.org/licenses/>.
use std::cmp;
use std::io;
use std::io::prelude::*;
use io::*;
use self::SysExReadError::*;
// MIDI byte ranges (status bytes have bit 7 set; data bytes do not).
const DATA_MIN:    u8 = 0x00; // \_ Data bytes
const DATA_MAX:    u8 = 0x7F; // /

const STATUS_MIN:  u8 = 0x80; // \_ Status bytes
const STATUS_MAX:  u8 = 0xEF; // /

const SYSEX_START: u8 = 0xF0; // \_ System exlusive messages
const SYSEX_END:   u8 = 0xF7; // /

const SYSCOM_MIN:  u8 = 0xF1; // \_ System common messages
const SYSCOM_MAX:  u8 = 0xF6; // /

const SYSRT_MIN:   u8 = 0xF8; // \_ System real-time messages
const SYSRT_MAX:   u8 = 0xFF; // /

// Masks
const ALL_BITS:    u8 = 0xFF; // match a byte exactly
const STATUS_BIT:  u8 = 0x80; // match any status byte
/// Consumes the given `input` stream and detects MIDI System Exclusive messages
/// of length `cap` or less. Invokes the handler `on_msg` for each detected
/// message and the handler `on_err` for each error condition.
///
/// Handlers receive the byte offset (`start`) of the message/chunk within the
/// stream.  A handler returning `false` stops processing early, in which case
/// this function returns `Ok(false)`; otherwise it returns `Ok(true)` at
/// end-of-input.  I/O errors from `input` are propagated.
pub fn read_sysex<R, M, E>(
    input:  &mut R,
    cap:    usize,
    on_msg: M,
    on_err: E,
) -> io::Result<bool>
where
    R: BufRead,
    M: Fn(usize, &[u8]) -> bool,
    E: Fn(usize, usize, SysExReadError) -> bool,
{
    let mut start = 0;  // Start position of message or skipped chunk
    let mut next  = 0;  // Position of next unread byte
    let mut len   = 0;  // Length of message data (no start/end bytes) or skipped chunk (all bytes)

    // Message data, without SysEx start/end bytes
    let mut buf = vec![0u8; cap].into_boxed_slice();

    // Helper for invoking the on_msg/on_err handlers; aborts the whole
    // read with Ok(false) when a handler asks to stop.
    macro_rules! fire {
        ($fn:ident, $($arg:expr),+) => {
            if !$fn($($arg),+) { return Ok(false) }
        }
    }

    loop {
        // State A: Not In SysEx Message
        // Skip everything up to (and including) the next SysEx start byte,
        // reporting the skipped span as NotSysEx.
        {
            let (read, found) = input.skip_until_bits(SYSEX_START, ALL_BITS)?;
            next += read;
            let end = match found {
                Some(_) => next - 1,    // exclude the 0xF0 itself
                None    => next,
            };
            // NB: shadows the outer `len` for this chunk-length computation.
            let len = end - start;
            if len != 0 {
                fire!(on_err, start, len, NotSysEx);
            }
            match found {
                Some(_) => start = end,         // message begins at the 0xF0
                None    => return Ok(true),     // clean end-of-input
            }
        }

        // State B: In SysEx Message
        // Accumulate data bytes into `buf` until a status byte appears.
        len = 0;
        loop {
            // Clamp the write position so an oversized message keeps
            // counting bytes without overrunning `buf`.
            let idx = cmp::min(len, cap);
            let (read, found) = input.read_until_bits(STATUS_BIT, STATUS_BIT, &mut buf[idx..])?;
            next += read;
            match found {
                Some(SYSRT_MIN...SYSRT_MAX) => {
                    // Real-time bytes may be interleaved; drop the status
                    // byte itself and keep collecting.
                    len += read - 1;
                    // remain in state B
                },
                Some(SYSEX_START) => {
                    // A new message starts before the old one ended.
                    let end = next - 1;
                    fire!(on_err, start, end - start, UnexpectedByte);
                    start = end;
                    len   = 0;
                    // restart state B
                },
                Some(SYSEX_END) => {
                    len += read - 1;
                    if len > cap {
                        fire!(on_err, start, next - start, Overflow)
                    } else {
                        fire!(on_msg, start, &buf[..len])
                    }
                    start = next;
                    break // to state A
                },
                Some(_) => {
                    // Any other status byte aborts the message.
                    let end = next - 1;
                    fire!(on_err, start, end - start, UnexpectedByte);
                    start = end;
                    break // to State A
                },
                None => {
                    fire!(on_err, start, next - start, UnexpectedEof);
                    return Ok(true)
                }
            }
        }
    }

    // Unreachable: the outer loop only exits via `return`.
    Ok(true)
}
/// Possible error conditions encountered by `read_sysex`.
///
/// Reported to the `on_err` handler together with the offset and length of
/// the offending byte span.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum SysExReadError {
    /// The bytes did not contain a System Exclusive message.
    NotSysEx,

    /// A System Exclusive message exceeded the maximum allowed length.
    Overflow,

    /// A System Exclusive message was interrupted by an unexpected byte.
    UnexpectedByte,

    /// A System Exclusive message was interrupted by end-of-file.
    UnexpectedEof,
}
/// Encodes a sequence of bytes into a sequence of 7-bit values.
///
/// Input bits enter a small shift register least-significant first; every
/// input byte yields one 7-bit output value plus one carried-over bit, and
/// after seven input bytes a full extra 7-bit value is flushed.  Any bits
/// left at the end are emitted zero-padded.
pub fn encode_7bit(src: &[u8], dst: &mut Vec<u8>)
{
    let mut acc     = 0u16; // shift register: pending bits, LSB first
    let mut pending = 0;    // leftover bits carried from previous bytes

    for &byte in src {
        // Stack the next 8 input bits above the leftovers.
        acc |= (byte as u16) << pending;

        // Emit the low 7 bits; one extra bit accrues per input byte.
        dst.push((acc & 0x7F) as u8);
        acc >>= 7;
        pending += 1;

        // Every 7th byte the leftovers form a complete 7-bit group.
        if pending == 7 {
            dst.push((acc & 0x7F) as u8);
            acc = 0;
            pending = 0;
        }
    }

    // Flush any remaining bits as a final, zero-padded value.
    if pending > 0 {
        dst.push((acc & 0x7F) as u8);
    }
}
/// Decodes a sequence of 7-bit values into a sequence of bytes.
///
/// Inverse of `encode_7bit`: 7-bit values are shifted into an accumulator
/// least-significant first; once at least 8 bits are available a byte is
/// emitted.  The first value of every 8-value group only seeds the
/// accumulator (7 bits are not enough for a byte).  Trailing padding bits
/// are discarded.
pub fn decode_7bit(src: &[u8], dst: &mut Vec<u8>)
{
    let mut acc     = 0u16; // shift register: pending bits, LSB first
    let mut pending = 0;    // leftover bits from previous values

    for &raw in src {
        // Only the low 7 bits of each input value carry data.
        let value = (raw & 0x7F) as u16;

        if pending == 0 {
            // Group start: 7 fresh bits, not yet a whole byte.
            acc = value;
            pending = 7;
        } else {
            // Leftovers are least significant, new bits most significant;
            // together they complete one byte.
            acc |= value << pending;
            dst.push((acc & 0xFF) as u8);
            acc >>= 8;
            pending -= 1;
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use self::ReadEvent::*;

    /// One observed callback from `read_sysex`, recorded for assertions.
    #[derive(Clone, PartialEq, Eq, Debug)]
    enum ReadEvent {
        Message { pos: usize, msg: Vec<u8> },
        Error   { pos: usize, len: usize, err: SysExReadError },
    }

    /// Runs `read_sysex` over `bytes` and collects every handler
    /// invocation, asserting a clean (non-aborted) run.
    fn run_read(mut bytes: &[u8], cap: usize) -> Vec<ReadEvent> {
        use std::cell::RefCell;
        // RefCell lets both Fn closures record into the same vector.
        let events = RefCell::new(vec![]);
        let result = read_sysex(
            &mut bytes, cap,
            |pos, msg| {
                events.borrow_mut().push(Message { pos, msg: msg.to_vec() });
                true
            },
            |pos, len, err| {
                events.borrow_mut().push(Error { pos, len, err });
                true
            },
        );
        assert!(result.unwrap());
        events.into_inner()
    }

    #[test]
    fn test_read_sysex_empty() {
        let events = run_read(b"", 10);
        assert_eq!(events.len(), 0);
    }

    #[test]
    fn test_read_sysex_junk() {
        let events = run_read(b"any", 10);
        assert_eq!(events.len(), 1);
        assert_eq!(events[0], Error { pos: 0, len: 3, err: NotSysEx });
    }

    #[test]
    fn test_read_sysex_sysex() {
        let events = run_read(b"\xF0msg\xF7", 10);
        assert_eq!(events.len(), 1);
        assert_eq!(events[0], Message { pos: 0, msg: b"msg".to_vec() });
    }

    #[test]
    fn test_read_sysex_with_junk() {
        let events = run_read(b"abc\xF0def\xF7ghi\xF0jkl\xF7mno", 10);
        assert_eq!(events.len(), 5);
        assert_eq!(events[0], Error   { pos:  0, len: 3, err: NotSysEx });
        assert_eq!(events[1], Message { pos:  3, msg: b"def".to_vec() });
        assert_eq!(events[2], Error   { pos:  8, len: 3, err: NotSysEx });
        assert_eq!(events[3], Message { pos: 11, msg: b"jkl".to_vec() });
        assert_eq!(events[4], Error   { pos: 16, len: 3, err: NotSysEx });
    }

    #[test]
    fn test_read_sysex_with_sysrt() {
        // Interleaved real-time byte (0xF8) is dropped from message data.
        let events = run_read(b"\xF0abc\xF8def\xF7", 10);
        assert_eq!(events.len(), 1);
        assert_eq!(events[0], Message { pos: 0, msg: b"abcdef".to_vec() });
    }

    #[test]
    fn test_read_sysex_interrupted_by_sysex() {
        let events = run_read(b"\xF0abc\xF0def\xF7", 10);
        assert_eq!(events.len(), 2);
        assert_eq!(events[0], Error   { pos: 0, len: 4, err: UnexpectedByte });
        assert_eq!(events[1], Message { pos: 4, msg: b"def".to_vec() });
    }

    #[test]
    fn test_read_sysex_interrupted_by_status() {
        let events = run_read(b"\xF0abc\xA5def\xF7", 10);
        assert_eq!(events.len(), 2);
        assert_eq!(events[0], Error { pos: 0, len: 4, err: UnexpectedByte });
        assert_eq!(events[1], Error { pos: 4, len: 5, err: NotSysEx });
    }

    #[test]
    fn test_read_sysex_interrupted_by_eof() {
        let events = run_read(b"\xF0abc", 10);
        assert_eq!(events.len(), 1);
        assert_eq!(events[0], Error { pos: 0, len: 4, err: UnexpectedEof });
    }

    #[test]
    fn test_read_sysex_overflow() {
        let events = run_read(b"\xF0abc\xF7", 2);
        assert_eq!(events.len(), 1);
        assert_eq!(events[0], Error { pos: 0, len: 5, err: Overflow });
    }

    #[test]
    fn test_read_sysex_overflow_2() {
        let events = run_read(b"\xF0abc\xF8def\xF7", 2);
        assert_eq!(events.len(), 1);
        assert_eq!(events[0], Error { pos: 0, len: 9, err: Overflow });
    }

    #[test]
    fn test_encode_7bit() {
        let data8 = [
            0xF1, 0xE2, 0xD3, 0xC4, 0xB5, 0xA6, 0x97, 0x88, 0x79, 0x6A,
        ];
        let mut data7 = vec![];

        encode_7bit(&data8, &mut data7);

        assert_eq!(data7.len(), 12);

        //            always 0
        //            | new bits
        //            | |       leftover bits
        //            | |       |
        //         0b_x_xxxx_xxx
        assert_eq!(data7[ 0], 0b_0_1110001_);
        assert_eq!(data7[ 1], 0b_0_100010_1);
        assert_eq!(data7[ 2], 0b_0_10011_11);
        assert_eq!(data7[ 3], 0b_0_0100_110);
        assert_eq!(data7[ 4], 0b_0_101_1100);
        assert_eq!(data7[ 5], 0b_0_10_10110);
        assert_eq!(data7[ 6], 0b_0_1_101001);
        assert_eq!(data7[ 7], 0b_0__1001011);
        assert_eq!(data7[ 8], 0b_0_0001000_);
        assert_eq!(data7[ 9], 0b_0_111001_1);
        assert_eq!(data7[10], 0b_0_01010_01);
        assert_eq!(data7[11], 0b_0_0000_011);
        //                         |
        //                         | final leftover bits
        //                         0-padding
    }

    #[test]
    fn test_decode_7bit() {
        let data7 = [
            // don't care
            // | leftover bits
            // | |        new bits
            // | |        |
            // 0b_x_xxxx_xxx
            0b_1_1110001_,
            0b_0_100010_1,
            0b_1_10011_11,
            0b_0_0100_110,
            0b_1_101_1100,
            0b_0_10_10110,
            0b_1_1_101001,
            0b_0__1001011,
            0b_1_0001000_,
            0b_0_111001_1,
            0b_1_01010_01,
            0b_0_1111_011,
        ];
        let mut data8 = vec![];

        decode_7bit(&data7, &mut data8);

        assert_eq!(data8.len(), 10);
        assert_eq!(data8[0], 0xF1);
        assert_eq!(data8[1], 0xE2);
        assert_eq!(data8[2], 0xD3);
        assert_eq!(data8[3], 0xC4);
        assert_eq!(data8[4], 0xB5);
        assert_eq!(data8[5], 0xA6);
        assert_eq!(data8[6], 0x97);
        assert_eq!(data8[7], 0x88);
        assert_eq!(data8[8], 0x79);
        assert_eq!(data8[9], 0x6A);
        // Final leftover 4 bits go unused.
    }
}
<|file_name|>filters.component.ts<|end_file_name|><|fim▁begin|>import { FilterService, Filter, FilterTree, FilterType, FilterIndex } from 'lib/filter';
import { Component, EventEmitter, OnInit, Input, Output } from '@angular/core';
@Component({
selector: 'iw-filters',
templateUrl: './filters.component.html',
styleUrls: ['./filters.component.css'],
providers: [FilterService]
})
export class FiltersComponent implements OnInit {
@Input() rows: any[];
// @Input() keys: string[];
@Input() filters: Filter[] = [];
@Input() overrideFilters: FilterIndex = {};
@Input() advancedFiltering = false; // Disabled by default.
@Input() operator: 'and' | 'or' = 'and';
@Output() filter = new EventEmitter<any[]>();
constructor(private filterService: FilterService) { }
ngOnInit() {
this.filters = this.filterService.detectFilters(this.rows, this.overrideFilters);<|fim▁hole|>
getLabel(filter: Filter) {
return filter.label ? filter.label : filter.key;
}
filterAnyFields(value: any) {
const filterTree: FilterTree = {
operator: 'or',
filters: this.filters.map((f) => {
f.value = value;
return f;
})
};
const filtered = this.filterService.filterByTree(this.rows, filterTree);
this.filter.emit(filtered);
}
toggleAdvancedFiltering() {
this.advancedFiltering = !this.advancedFiltering;
}
changeOperator(operator: 'and' | 'or') {
this.operator = operator;
this.executeFiltering();
}
onFilterChange() {
this.executeFiltering();
}
isSimpleFilter(filter: Filter) {
return [FilterType.Array, FilterType.Object].indexOf(filter.type) < 0;
}
executeFiltering() {
const filterTree = {
operator: this.operator,
filters: this.filters
};
const filteredRows = this.filterService.filterByTree(this.rows, filterTree);
this.filter.emit(filteredRows);
}
onNestedFilterChange(filter: Filter, nestedFilter: Filter) {
filter.value = (<Filter[]>filter.filters).some((nf: any) => nf.value) ? 'any' : undefined;
this.executeFiltering();
}
private createNestedFilters() {
this.filters.forEach((filter) => {
const parentFilter = this.filters.find(f => f.key === filter.key);
if (parentFilter) {
if (filter.type === FilterType.Array) {
parentFilter.filters = this.filterService
.detectFilters(this.getSubRows(filter.key));
} else if (filter.type === FilterType.Object) {
parentFilter.filters = this.filterService
.detectFilters(this.getNestedObjects(filter.key));
}
}
});
}
private getNestedObjects(key: string) {
return this.rows.reduce((result, row) => {
result.push(row[key]);
return result;
}, []);
}
private getSubRows(key: string) {
return this.rows.reduce((result, row) => result.concat(row[key]), []);
}
}<|fim▁end|> | this.createNestedFilters();
} |
<|file_name|>binomial_prob.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
Let's say we play a game where I keep flipping a coin until I get
heads. If the first time I get heads is on the nth coin, then I pay
you 2n-1 dollars. How much would you pay me to play this game?
You should end up with a sequence that you need to find the closed
form of. If you don't know how to do this, write some python code that
sums the first 100.
E(W) = sum_{n >= 1} (2n-1)/2^n = 3
"""
import matplotlib.pyplot as plt
import numpy as np
## simulate the number of flips before heads
def coin():
    """Flip a fair coin until heads appears; return the number of flips."""
    flips = 0
    while True:
        flips += 1
        # binomial(1, 0.5) is a single fair Bernoulli draw: 1 == heads.
        if np.random.binomial(1, 0.5):
            return flips
if __name__ == '__main__':
    ## simulate: play the game 10000 times (Python 2: xrange)
    flips = [coin() for k in xrange(10000)]

    ## get the distribution of counts condition on the number of flips
    range_flips = range(1, max(flips) + 1)
    # *1. forces float counts so the division below is not integer division
    counts = np.array([flips.count(k)*1. for k in range_flips])

    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.bar(range_flips,counts,alpha=0.4)
    ax.set_ylabel("counts")
    ax.set_xlabel("num flips to win")
    #print [int(i) for i in counts]

    # Empirical expectation: sum over n of payout (2n-1) times the
    # observed probability of winning on flip n; should approach 3.
    winnings = sum([counts[k - 1]*(2*(k)-1)/sum(counts) for k in range_flips])
    #print range_flips
    print winnings
    plt.show()
<|file_name|>main.go<|end_file_name|><|fim▁begin|>// Take well-formed json from a sensu check result a context rich html document to be mail to
// one or more addresses.
//
// LICENSE:
// Copyright 2016 Yieldbot. <[email protected]>
// Released under the MIT License; see LICENSE
// for details.
package main
import (
"bytes"
"fmt"
"github.com/codegangsta/cli"<|fim▁hole|> // "log"
"net/smtp"
"os"
// "time"
)
// main configures a cli application that reads a Sensu check result and
// mails a notification via plain SMTP.
func main() {
	// Flag destinations; populated by the cli framework before Action runs.
	var emailAddress string
	var smtpHost string
	var smtpPort string
	var emailSender string
	var debug bool

	app := cli.NewApp()
	app.Name = "handler-mailer"
	app.Usage = "Send context rich html alert notifications via email"
	app.Action = func(c *cli.Context) {

		// Debug mode: print the resolved configuration and exit without
		// sending anything.
		if debug {
			fmt.Printf("This is the sending address: %v \n", emailSender)
			fmt.Printf("This is the recieving address: %v\n", emailAddress)
			fmt.Printf("This is the smtp address: %v:%v\n", smtpHost, smtpPort)
			sensuutil.Exit("debug")
		}

		// Get the sensu event data
		// NOTE(review): the event is read but never used when composing the
		// message below — the body is a hard-coded placeholder.
		sensuEvent := new(sensuhandler.SensuEvent)
		sensuEvent = sensuEvent.AcquireSensuEvent()

		// Connect to the remote SMTP server.
		s, err := smtp.Dial(smtpHost + ":" + smtpPort)
		if err != nil {
			sensuutil.EHndlr(err)
		}
		defer s.Close()

		// Set the sender and recipient.
		// NOTE(review): errors from Mail/Rcpt are ignored — confirm whether
		// failures here should abort before writing the body.
		s.Mail(emailSender)
		s.Rcpt(emailAddress)

		// Send the email body.
		ws, err := s.Data()
		if err != nil {
			sensuutil.EHndlr(err)
		}
		defer ws.Close()
		buf := bytes.NewBufferString("This is the email body.")
		if _, err = buf.WriteTo(ws); err != nil {
			sensuutil.EHndlr(err)
		}
		fmt.Printf("Email sent to %s\n", emailAddress)
	}

	// Configuration flags; each can also be supplied via its EnvVar.
	app.Flags = []cli.Flag{
		cli.StringFlag{
			Name:        "address",
			Value:       "[email protected]",
			Usage:       "email address to send to",
			EnvVar:      "SENSU_HANDLER_EMAIL_ADDRESS",
			Destination: &emailAddress,
		},
		cli.StringFlag{
			Name:        "host",
			Value:       "localhost",
			Usage:       "smtp server",
			EnvVar:      "SENSU_HANDLER_EMAIL_HOST",
			Destination: &smtpHost,
		},
		cli.StringFlag{
			Name:        "port",
			Value:       "25",
			Usage:       "smtp port",
			EnvVar:      "SENSU_HANDLER_EMAIL_PORT",
			Destination: &smtpPort,
		},
		cli.StringFlag{
			Name:        "sender",
			Value:       "[email protected]",
			Usage:       "email sender",
			EnvVar:      "SENSU_HANDLER_EMAIL_SENDER",
			Destination: &emailSender,
		},
		cli.BoolFlag{
			Name:        "debug",
			Usage:       "Print debugging info, no alerts will be sent",
			Destination: &debug,
		},
	}
	app.Run(os.Args)
}
"github.com/yieldbot/sensuplugin/sensuhandler"
"github.com/yieldbot/sensuplugin/sensuutil" |
<|file_name|>Item.js<|end_file_name|><|fim▁begin|>/*
* Paper.js
*
* This file is part of Paper.js, a JavaScript Vector Graphics Library,
* based on Scriptographer.org and designed to be largely API compatible.
* http://paperjs.org/
* http://scriptographer.org/
*
* Copyright (c) 2011, Juerg Lehni & Jonathan Puckey
* http://lehni.org/ & http://jonathanpuckey.com/
*
* Distributed under the MIT license. See LICENSE file for details.
*
* All rights reserved.
*/
module('Item');

// copyTo(project): the copy lands in the target project's active layer,
// leaves the source project untouched, and is a distinct object.
test('copyTo(project)', function() {
    var project = paper.project;
    var path = new Path();
    var secondDoc = new Project();
    var copy = path.copyTo(secondDoc);
    equals(function() {
        return secondDoc.activeLayer.children.indexOf(copy) != -1;
    }, true);
    equals(function() {
        return project.activeLayer.children.indexOf(copy) == -1;
    }, true);
    equals(function() {
        return copy != path;
    }, true);
});

// copyTo(layer): the copy is parented to the given layer only.
test('copyTo(layer)', function() {
    var project = paper.project;
    var path = new Path();
    var layer = new Layer();
    var copy = path.copyTo(layer);
    equals(function() {
        return layer.children.indexOf(copy) != -1;
    }, true);
    equals(function() {
        return project.layers[0].children.indexOf(copy) == -1;
    }, true);
});

// clone(): the clone is added next to the original in the same parent.
test('clone()', function() {
    var project = paper.project;
    var path = new Path();
    var copy = path.clone();
    equals(function() {
        return project.activeLayer.children.length;
    }, 2);
    equals(function() {
        return path != copy;
    }, true);
});

// addChild(): re-adding an already-parented item does not duplicate it.
test('addChild(item)', function() {
    var project = paper.project;
    var path = new Path();
    project.activeLayer.addChild(path);
    equals(function() {
        return project.activeLayer.children.length;
    }, 1);
});

// Moving an item between projects keeps parent/child and layer links
// consistent on both sides.
test('item.parent / item.isChild / item.isParent / item.layer', function() {
    var project = paper.project;
    var secondDoc = new Project();
    var path = new Path();
    project.activeLayer.addChild(path);
    equals(function() {
        return project.activeLayer.children.indexOf(path) != -1;
    }, true);
    equals(function() {
        return path.layer == project.activeLayer;
    }, true);
    secondDoc.activeLayer.addChild(path);
    equals(function() {
        return project.activeLayer.isChild(path);
    }, false);
    equals(function() {
        return path.layer == secondDoc.activeLayer;
    }, true);
    equals(function() {
        return path.isParent(project.activeLayer);
    }, false);
    equals(function() {
        return secondDoc.activeLayer.isChild(path);
    }, true);
    equals(function() {
        return path.isParent(secondDoc.activeLayer);
    }, true);
    equals(function() {
        return project.activeLayer.children.indexOf(path) == -1;
    }, true);
    equals(function() {
        return secondDoc.activeLayer.children.indexOf(path) == 0;
    }, true);
});

// Children are appended in creation order.
test('item.lastChild / item.firstChild', function() {
    var project = paper.project;
    var path = new Path();
    var secondPath = new Path();
    equals(function() {
        return project.activeLayer.firstChild == path;
    }, true);
    equals(function() {
        return project.activeLayer.lastChild == secondPath;
    }, true);
});

// insertChild(0, ...) moves the item to the front of the child list.
test('insertChild(0, item)', function() {
    var project = paper.project;
    var path = new Path();
    var secondPath = new Path();
    project.activeLayer.insertChild(0, secondPath);
    equals(function() {
        return secondPath.index < path.index;
    }, true);
});

// insertAbove() moves the item after its sibling in stacking order.
test('insertAbove(item)', function() {
    var project = paper.project;
    var path = new Path();
    var secondPath = new Path();
    path.insertAbove(secondPath);
    equals(function() {
        return project.activeLayer.lastChild == path;
    }, true);
});
// insertBelow() moves the item before its sibling in stacking order.
test('insertBelow(item)', function() {
    var project = paper.project;
    var firstPath = new Path();
    var secondPath = new Path();
    // Created later, so secondPath starts above firstPath.
    equals(function() {
        return secondPath.index > firstPath.index;
    }, true);
    // Fix: the insertBelow() call under test was missing (lost in
    // extraction), leaving the second assertion unreachable/meaningless.
    secondPath.insertBelow(firstPath);
    equals(function() {
        return secondPath.index < firstPath.index;
    }, true);
});
// Ancestor/descendant queries are strict: an item is never its own
// ancestor or descendant.
test('isDescendant(item) / isAncestor(item)', function() {
    var project = paper.project;
    var path = new Path();
    equals(function() {
        return path.isDescendant(project.activeLayer);
    }, true);
    equals(function() {
        return project.activeLayer.isDescendant(path);
    }, false);
    equals(function() {
        return path.isAncestor(project.activeLayer);
    }, false);
    equals(function() {
        return project.activeLayer.isAncestor(path);
    }, true);

    // an item can't be its own descendant:
    equals(function() {
        return project.activeLayer.isDescendant(project.activeLayer);
    }, false);

    // an item can't be its own ancestor:
    equals(function() {
        return project.activeLayer.isAncestor(project.activeLayer);
    }, false);
});

// isGroupedWith(): true only when two items share a group ancestor.
test('isGroupedWith', function() {
    var project = paper.project;
    var path = new Path();
    var secondPath = new Path();
    var group = new Group([path]);
    var secondGroup = new Group([secondPath]);

    equals(function() {
        return path.isGroupedWith(secondPath);
    }, false);
    secondGroup.addChild(path);
    equals(function() {
        return path.isGroupedWith(secondPath);
    }, true);
    equals(function() {
        return path.isGroupedWith(group);
    }, false);
    equals(function() {
        return path.isDescendant(secondGroup);
    }, true);
    equals(function() {
        return secondGroup.isDescendant(path);
    }, false);
    equals(function() {
        return secondGroup.isDescendant(secondGroup);
    }, false);
    equals(function() {
        return path.isGroupedWith(secondGroup);
    }, false);
    paper.project.activeLayer.addChild(path);
    equals(function() {
        return path.isGroupedWith(secondPath);
    }, false);
    paper.project.activeLayer.addChild(secondPath);
    equals(function() {
        return path.isGroupedWith(secondPath);
    }, false);
});

// Sibling navigation; the last child has no next sibling.
test('getPreviousSibling() / getNextSibling()', function() {
    var firstPath = new Path();
    var secondPath = new Path();
    equals(function() {
        return firstPath.nextSibling == secondPath;
    }, true);
    equals(function() {
        return secondPath.previousSibling == firstPath;
    }, true);
    equals(function() {
        return secondPath.nextSibling == null;
    }, true);
});

// reverseChildren() flips the stacking order in place.
test('reverseChildren()', function() {
    var project = paper.project;
    var path = new Path();
    var secondPath = new Path();
    var thirdPath = new Path();
    equals(function() {
        return project.activeLayer.firstChild == path;
    }, true);
    project.activeLayer.reverseChildren();
    equals(function() {
        return project.activeLayer.firstChild == path;
    }, false);
    equals(function() {
        return project.activeLayer.firstChild == thirdPath;
    }, true);
    equals(function() {
        return project.activeLayer.lastChild == path;
    }, true);
});

// item.project follows the item (and nested children) across projects.
test('Check item#project when moving items across projects', function() {
    var project = paper.project;
    var doc1 = new Project();
    var path = new Path();
    var group = new Group();
    group.addChild(new Path());

    equals(function() {
        return path.project == doc1;
    }, true);
    var doc2 = new Project();
    doc2.activeLayer.addChild(path);
    equals(function() {
        return path.project == doc2;
    }, true);

    doc2.activeLayer.addChild(group);
    equals(function() {
        return group.children[0].project == doc2;
    }, true);
});
// Selection state propagates both ways between a group and its children.
test('group.selected', function() {
    var path = new Path([0, 0]);
    var path2 = new Path([0, 0]);
    var group = new Group([path, path2]);
    path.selected = true;
    equals(function() {
        return group.selected;
    }, true);
    path.selected = false;
    equals(function() {
        return group.selected;
    }, false);
    group.selected = true;
    equals(function() {
        return path.selected;
    }, true);
    equals(function() {
        return path2.selected;
    }, true);
    group.selected = false;
    equals(function() {
        return path.selected;
    }, false);
    equals(function() {
        return path2.selected;
    }, false);
});

// children[name] always resolves to the most recently named item, and
// falls back to an earlier item with the same name after removal.
test('Check parent children object for named item', function() {
    var path = new Path();
    path.name = 'test';
    equals(function() {
        return paper.project.activeLayer.children['test'] == path;
    }, true);
    var path2 = new Path();
    path2.name = 'test';
    equals(function() {
        return paper.project.activeLayer.children['test'] == path2;
    }, true);
    path2.remove();
    equals(function() {
        return paper.project.activeLayer.children['test'] == path;
    }, true);
    path.remove();
    equals(function() {
        return !paper.project.activeLayer.children['test'];
    }, true);
});

test('Named child access 1', function() {
    var path = new Path();
    path.name = 'test';
    var path2 = new Path();
    path2.name = 'test';
    path.remove();
    equals(function() {
        return paper.project.activeLayer.children['test'] == path2;
    }, true);
});

// Removing the last same-named child clears the internal name index too.
test('Named child access 2', function() {
    var path = new Path();
    path.name = 'test';
    var path2 = new Path();
    path2.name = 'test';
    path.remove();
    equals(function() {
        return paper.project.activeLayer.children['test'] == path2;
    }, true);
    equals(function() {
        return paper.project.activeLayer._namedChildren['test'].length == 1;
    }, true);
    path2.remove();
    equals(function() {
        return !paper.project.activeLayer._namedChildren['test'];
    }, true);
    equals(function() {
        return paper.project.activeLayer.children['test'] === undefined;
    }, true);
});

// Name lookups track items as they move between parents.
test('Named child access 3', function() {
    var path = new Path();
    path.name = 'test';
    var path2 = new Path();
    path2.name = 'test';
    var group = new Group();
    group.addChild(path2);
    equals(function() {
        return paper.project.activeLayer.children['test'] == path;
    }, true);
    // TODO: Tests should not access internal properties
    equals(function() {
        return paper.project.activeLayer._namedChildren['test'].length;
    }, 1);
    equals(function() {
        return group.children['test'] == path2;
    }, true);
    equals(function() {
        return group._namedChildren['test'].length == 1;
    }, true);
    equals(function() {
        return paper.project.activeLayer._namedChildren['test'][0] == path;
    }, true);
    paper.project.activeLayer.appendTop(path2);
    equals(function() {
        return group.children['test'] == null;
    }, true);
    equals(function() {
        return group._namedChildren['test'] === undefined;
    }, true);
    equals(function() {
        return paper.project.activeLayer.children['test'] == path2;
    }, true);
    equals(function() {
        return paper.project.activeLayer._namedChildren['test'].length;
    }, 2);
});

// Un-naming items restores lookup to the previous holder of the name.
test('Setting name of child back to null', function() {
    var path = new Path();
    path.name = 'test';
    var path2 = new Path();
    path2.name = 'test';
    equals(function() {
        return paper.project.activeLayer.children['test'] == path2;
    }, true);
    path2.name = null;
    equals(function() {
        return paper.project.activeLayer.children['test'] == path;
    }, true);
    path.name = null;
    equals(function() {
        return paper.project.activeLayer.children['test'] === undefined;
    }, true);
});
test('Renaming item', function() {
var path = new Path();
path.name = 'test';
path.name = 'test2';
equals(function() {
return paper.project.activeLayer.children['test'] === undefined;
}, true);
equals(function() {
return paper.project.activeLayer.children['test2'] == path;
}, true);
});
test('Changing item#position.x', function() {
var path = new Path.Circle(new Point(50, 50), 50);
path.position.x += 5;
equals(path.position.toString(), '{ x: 55, y: 50 }', 'path.position.x += 5');
});
test('Naming a removed item', function() {
var path = new Path();
path.remove();
path.name = 'test';
});
test('Naming a layer', function() {
var layer = new Layer();
layer.name = 'test';
});
test('Cloning a linked size', function() {
var path = new Path([40, 75], [140, 75]);
var error = null;
try {
var cloneSize = path.bounds.size.clone();
} catch (e) {
error = e;
}
var description = 'Cloning a linked size should not throw an error';
if (error)
description += ': ' + error;
equals(error == null, true, description);
});<|fim▁end|> | secondPath.insertBelow(firstPath);
equals(function() { |
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|>var webpack = require('webpack');<|fim▁hole|> entry: './main.jsx',
output: {
path: './output',
publicPath: '/output',
filename: 'bundle.js'
},
module: {
loaders: [
{ test: /\.jsx/, loader: 'jsx-loader' }
]
},
watch: true
};<|fim▁end|> | var path = require('path');
module.exports = { |
<|file_name|>apiKeys.js<|end_file_name|><|fim▁begin|>module.exports = {
visionKey: 'AIzaSyAA14j-7sIJLDTRZd3bYpZrmCEoFA9IN40',
pairingID: 'b5378ca6',
pairingKey: '690be2968f8f08b26fcc1f2c9c8f5b90',
recipesKey: 'qAjqbB5sPamshJwWJJh01Y3exb3Jp1wBzcOjsnrqegcRf1PCXT',<|fim▁hole|> backUpRecipesKey: 'jHbWfZqPEUmsh0NElMAPdMXlfPm1p1M9n5NjsnPD1l0Vjhsjng'
}<|fim▁end|> | |
<|file_name|>ScrollableTabBar.js<|end_file_name|><|fim▁begin|>const React = require('react');
const { ViewPropTypes } = ReactNative = require('react-native');
const {
View,
Animated,
StyleSheet,
ScrollView,
Text,
Platform,
Dimensions,
I18nManager
} = ReactNative;
const Button = require('./Button');
//import { PropTypes } from 'react'
const WINDOW_WIDTH = Dimensions.get('window').width;
const ScrollableTabBar = React.createClass({
propTypes: {
goToPage: React.PropTypes.func,
activeTab: React.PropTypes.number,
tabs: React.PropTypes.array,
backgroundColor: React.PropTypes.string,
activeTextColor: React.PropTypes.string,
inactiveTextColor: React.PropTypes.string,
scrollOffset: React.PropTypes.number,
//style: ViewPropTypes.style,
//tabStyle: ViewPropTypes.style,
//tabsContainerStyle: ViewPropTypes.style,
//tabStyle: ViewPropTypes.style,
textStyle: Text.propTypes.style,
renderTab: React.PropTypes.func,
//underlineStyle: ViewPropTypes.style,
onScroll:React.PropTypes.func,
},
getDefaultProps() {
return {
scrollOffset: 52,
activeTextColor: 'navy',
inactiveTextColor: 'black',
backgroundColor: null,
style: {},
tabStyle: {},
tabsContainerStyle: {},
tabStyle: {},
underlineStyle: {},
};
},
getInitialState() {
this._tabsMeasurements = [];
return {
_leftTabUnderline: new Animated.Value(0),
_widthTabUnderline: new Animated.Value(0),
_containerWidth: null,
};
},
componentDidMount() {
this.props.scrollValue.addListener(this.updateView);
},
updateView(offset) {
//console.log("updateView="+JSON.stringify(offset));
//console.log("updateView="+JSON.stringify(this.props));
const position = Math.floor(offset.value);
const pageOffset = offset.value % 1;
const tabCount = this.props.tabs.length;
const lastTabPosition = tabCount - 1;
if (tabCount === 0 || offset.value < 0 || offset.value > lastTabPosition) {
return;
}
if (this.necessarilyMeasurementsCompleted(position, position === lastTabPosition)) {
this.updateTabPanel(position, pageOffset);
this.updateTabUnderline(position, pageOffset, tabCount);
}
},
necessarilyMeasurementsCompleted(position, isLastTab) {
return this._tabsMeasurements[position] &&
(isLastTab || this._tabsMeasurements[position + 1]) &&
this._tabContainerMeasurements &&
this._containerMeasurements;
},
updateTabPanel(position, pageOffset) {
const containerWidth = this._containerMeasurements.width;
const tabWidth = this._tabsMeasurements[position].width;
//console.log("containerWidth="+containerWidth+" tabWidth="+tabWidth);
const nextTabMeasurements = this._tabsMeasurements[position + 1];
const nextTabWidth = nextTabMeasurements && nextTabMeasurements.width || 0;
const tabOffset = this._tabsMeasurements[position].left;
const absolutePageOffset = pageOffset * tabWidth;
let newScrollX = tabOffset + absolutePageOffset;
// center tab and smooth tab change (for when tabWidth changes a lot between two tabs)
newScrollX -= (containerWidth - (1 - pageOffset) * tabWidth - pageOffset * nextTabWidth) / 2;
newScrollX = newScrollX >= 0 ? newScrollX : 0;
if (Platform.OS === 'android') {
this._scrollView.scrollTo({x: newScrollX, y: 0, animated: false, });
} else {
const rightBoundScroll = this._tabContainerMeasurements.width - (this._containerMeasurements.width);
newScrollX = newScrollX > rightBoundScroll ? rightBoundScroll : newScrollX;
this._scrollView.scrollTo({x: newScrollX, y: 0, animated: false, });
}
<|fim▁hole|> updateTabUnderline(position, pageOffset, tabCount) {
const tabPad = this.props.underlineAlignText?this.props.tabPadding:0;
const lineLeft = this._tabsMeasurements[position].left;
const lineRight = this._tabsMeasurements[position].right;
if (position < tabCount - 1) {
const nextTabLeft = this._tabsMeasurements[position + 1].left;
const nextTabRight = this._tabsMeasurements[position + 1].right;
const newLineLeft = (pageOffset * nextTabLeft + (1 - pageOffset) * lineLeft);
const newLineRight = (pageOffset * nextTabRight + (1 - pageOffset) * lineRight);
this.state._leftTabUnderline.setValue(newLineLeft+tabPad);
this.state._widthTabUnderline.setValue(newLineRight - newLineLeft -tabPad*2);
} else {
this.state._leftTabUnderline.setValue(lineLeft+tabPad);
this.state._widthTabUnderline.setValue(lineRight - lineLeft-tabPad*2);
}
},
renderTab(name, page, isTabActive, onPressHandler, onLayoutHandler) {
const { activeTextColor, inactiveTextColor, textStyle, } = this.props;
const textColor = isTabActive ? activeTextColor : inactiveTextColor;
const fontWeight = isTabActive ? 'bold' : 'normal';
return <Button
key={`${name}_${page}`}
accessible={true}
accessibilityLabel={name}
accessibilityTraits='button'
onPress={() => onPressHandler(page)}
onLayout={onLayoutHandler}
>
<View style={[this.props.tabStyle||styles.tab, ]}>
<Text style={[{color: textColor, fontWeight, }, textStyle, ]}>
{name}
</Text>
</View>
</Button>;
},
measureTab(page, event) {
console.log("measureTab="+page+"layout "+JSON.stringify(event.nativeEvent.layout));
const { x, width, height, } = event.nativeEvent.layout;
this._tabsMeasurements[page] = {left: x, right: x + width, width, height, };
this.updateView({value: this.props.scrollValue._value, });
},
render() {
const tabUnderlineStyle = {
position: 'absolute',
height: 1,
backgroundColor: 'navy',
bottom: 0,
};
const key = I18nManager.isRTL ? 'right' : 'left';
const dynamicTabUnderline = {
[`${key}`]: this.state._leftTabUnderline,
width: this.state._widthTabUnderline
}
return <View
style={[this.props.tabsContainerStyle||styles.container, ]}
onLayout={this.onContainerLayout}
>
<ScrollView
automaticallyAdjustContentInsets={false}
ref={(scrollView) => { this._scrollView = scrollView; }}
horizontal={true}
showsHorizontalScrollIndicator={false}
showsVerticalScrollIndicator={false}
directionalLockEnabled={true}
onScroll={this.props.onScroll}
bounces={false}
scrollsToTop={false}
>
<View
style={[styles.tabs, {width: this.state._containerWidth, }, ]}
ref={'tabContainer'}
onLayout={this.onTabContainerLayout}
>
{this.props.tabs.map((name, page) => {
const isTabActive = this.props.activeTab === page;
const renderTab = this.props.renderTab || this.renderTab;
return renderTab(name, page, isTabActive, this.props.goToPage, this.measureTab.bind(this, page));
})}
<Animated.View style={[tabUnderlineStyle, dynamicTabUnderline, this.props.underlineStyle, ]} />
</View>
</ScrollView>
</View>;
},
componentWillReceiveProps(nextProps) {
// If the tabs change, force the width of the tabs container to be recalculated
if (JSON.stringify(this.props.tabs) !== JSON.stringify(nextProps.tabs) && this.state._containerWidth) {
this.setState({ _containerWidth: null, });
}
},
onTabContainerLayout(e) {
this._tabContainerMeasurements = e.nativeEvent.layout;
let width = this._tabContainerMeasurements.width;
if (width < WINDOW_WIDTH) {
width = WINDOW_WIDTH;
}
this.setState({ _containerWidth: width, });
this.updateView({value: this.props.scrollValue._value, });
},
onContainerLayout(e) {
this._containerMeasurements = e.nativeEvent.layout;
this.updateView({value: this.props.scrollValue._value, });
},
});
module.exports = ScrollableTabBar;
const styles = StyleSheet.create({
tab: {
height: 49,
alignItems: 'center',
justifyContent: 'center',
paddingLeft: 20,
paddingRight: 20,
},
container: {
height: 50,
borderWidth: 1,
borderTopWidth: 0,
borderLeftWidth: 0,
borderRightWidth: 0,
borderColor: '#ccc',
},
tabs: {
flexDirection: 'row',
// justifyContent: 'space-around', android设备可能撞车
},
});<|fim▁end|> | },
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
from django.conf.urls import url, patterns
from rango import views
urlpatterns = patterns('',
url(r'^$', views.index, name='index'),
url(r'^about/$', views.about, name='about'),
# 匹配URL斜杠前所有的字母数字
# 例如 a-z, A-Z, 或者 0-9)和连字符(-
# 然后把这个值作为category_name_slug参数传递给views.category(),
url(r'^category/(?P<category_name_slug>[\w\-]+)/$',views.category, name='category'),
url(r'^add_category/$', views.add_category, name='add_category'),
url(r'^category/(?P<category_name_slug>[\w\-]+)/add_page/$',views.add_page, name='add_page'),<|fim▁hole|>)<|fim▁end|> | |
<|file_name|>send_raw.js<|end_file_name|><|fim▁begin|>/*
* Copyright 2015 Red Hat Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.<|fim▁hole|> * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var container = require('rhea');
var amqp_message = container.message;
var args = require('./options.js').options({
'm': { alias: 'messages', default: 100, describe: 'number of messages to send'},
'n': { alias: 'node', default: 'examples', describe: 'name of node (e.g. queue) to which messages are sent'},
'h': { alias: 'host', default: 'localhost', describe: 'dns or ip name of server where you want to connect'},
'p': { alias: 'port', default: 5672, describe: 'port to connect to'}
}).help('help').argv;
var confirmed = 0, sent = 0;
var total = args.messages;
container.on('sendable', function (context) {
while (context.sender.sendable() && sent < total) {
sent++;
console.log('sent ' + sent);
var stringifiedPayload = JSON.stringify({'sequence':sent});
// In this example, we are sending a byte array containing ascii
// characters though this can be any opaque binary payload
var body = amqp_message.data_section(new Buffer(stringifiedPayload, 'utf8'));
context.sender.send({message_id:sent, body});
}
});
container.on('accepted', function (context) {
if (++confirmed === total) {
console.log('all messages confirmed');
context.connection.close();
}
});
container.on('disconnected', function (context) {
if (context.error) console.error('%s %j', context.error, context.error);
sent = confirmed;
});
container.connect({port: args.port, host: args.host}).open_sender(args.node);<|fim▁end|> | * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* |
<|file_name|>buildVideoXML.py<|end_file_name|><|fim▁begin|>#!/Library/Frameworks/Python.framework/Versions/Current/bin/python
import os
from os.path import join, getsize
from random import randint
def addEntry (XMLFile, finfo, dirs, NASPath):
#finfo[1].replace(' ', '_')
finfo[1] = finfo[1].replace('.', '_', finfo.count('.')-1)
title = finfo[1].split('.')[0]
root = ''<|fim▁hole|> for pathchunk in pathlist:
for dirname in dirs:
if pathchunk == dirname:
genre = dirname
imageRoot = ''
for pathchunk in pathlist:
if pathchunk.find('videos') == -1:
imageRoot = imageRoot + pathchunk + '/'
else:
imageRoot = imageRoot + 'videos/images/'
break
imageFile = imageRoot + title + '.jpg'
if os.path.exists(imageFile):
imageFile = 'images/' + title + '.jpg'
else:
imageFile = 'images/FAM%d.jpg' % randint(1,116)
XMLFile.write("<movie>\n")
XMLFile.write("<num>" + str(finfo[2]) + "</num>\n")
XMLFile.write("<origtitle>" + title + "</origtitle>\n")
XMLFile.write("<year>2009</year>\n")
XMLFile.write("<genre>" + genre + "</genre>\n")
XMLFile.write("<mpaa>Rated G</mpaa>\n")
XMLFile.write("<director></director>\n")
XMLFile.write("<actors></actors>\n")
XMLFile.write("<description></description>\n")
XMLFile.write("<path>" + NASPath + "</path>\n")
XMLFile.write("<length>110</length>\n")
XMLFile.write("<videocodec>MP4</videocodec>\n")
XMLFile.write("<poster>" + imageFile + "</poster>\n")
XMLFile.write("</movie>\n\n")
#------ End of addEntry
videosDir = '/Volumes/Volume_1-1/media/videos'
#videosDir = './videos'
videoXMLFileName = videosDir + '/videos.xml'
NASRoot = "Y:\\media\\videos\\"
allfiles = []
allDirs = []
print 'Reading in files from ' + videosDir;
for root, dirs, files in os.walk(videosDir):
for dirname in dirs:
allDirs.append(dirname)
for name in files:
if (name.find('mp4') > -1 or name.find('MP4') > -1) and name.find('._') == -1:
allfiles.append([root, name, len(allfiles)])
if (name.find('mkv') > -1 or name.find('MKV') > -1) and name.find('._') == -1:
allfiles.append([root, name, len(allfiles)])
if (name.find('avi') > -1 or name.find('AVI') > -1) and name.find('._') == -1:
allfiles.append([root, name, len(allfiles)])
videoXMLFile = open(videoXMLFileName, 'w')
videoXMLFile.write("<xml>\n")
videoXMLFile.write("<viddb>\n")
videoXMLFile.write("<movies>" + str(len(allfiles)) +"</movies>\n\n")
print '...read in ' + str(len(allfiles) + 1) + ' files'
print 'Building XML media file at ' + videoXMLFileName
for finfo in allfiles:
pathlist = finfo[0].split('/')
NASPath = NASRoot
for pathchunk in pathlist[5:]:
NASPath = NASPath + pathchunk + "\\"
NASPath = NASPath + finfo[1]
#print NASPath + " - " + finfo[0] + "/" + finfo[1]
addEntry (videoXMLFile, finfo, allDirs, NASPath)
videoXMLFile.write("</viddb>\n")
videoXMLFile.write("</xml>\n")
videoXMLFile.close()
print 'Built XML media file for ' + str(len(allfiles) + 1) + ' movies'<|fim▁end|> | genre = 'Tom and Frederika'
pathlist = finfo[0].split('/') |
<|file_name|>printbin.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
from pcitweak.bitstring import BitString
for n in range(0x10):
b = BitString(uint=n, length=4)<|fim▁hole|><|fim▁end|> | print " % 3d 0x%02x %s" % (n, n, b.bin) |
<|file_name|>test.js<|end_file_name|><|fim▁begin|>/**
* 'json' test suite
*
* Usage:
* nodeunit test.js
*
* Can limit the tests with the 'TEST_ONLY' environment variable: a
* space-separated lists of dir names to which to limit. E.g.:
* TEST_ONLY=hello-server nodeunit test.js
* Can also prefix with a '-' to *exclude* that test. E.g.: to run all but
* the 'irc' test:
* TEST_ONLY='-irc' nodeunit test.js
*/
var path = require('path');
var exec = require('child_process').exec;
var fs = require('fs');
var testCase = require('nodeunit').testCase;
var ansidiff = require('ansidiff');
var warn = console.warn;
//---- test cases
var data = {
//setUp: function (callback) {
// ...
//},
parseLookup: function (test) {
var parseLookup = require('../lib/json.js').parseLookup;
test.deepEqual(parseLookup('42'), [42]);
test.deepEqual(parseLookup('a'), ['a']);
test.deepEqual(parseLookup('a.b'), ['a', 'b']);
test.deepEqual(parseLookup('a.b.c'), ['a', 'b', 'c']);
test.deepEqual(parseLookup('[42]'), [42]);
test.deepEqual(parseLookup('["a"]'), ['a']);
test.deepEqual(parseLookup('["a"]'), ['a']);
test.deepEqual(parseLookup('b[42]'), ['b', 42]);
test.deepEqual(parseLookup('b["a"]'), ['b', 'a']);
test.deepEqual(parseLookup('b["a"]'), ['b', 'a']);
test.deepEqual(parseLookup('[42].b'), [42, 'b']);
test.deepEqual(parseLookup('["a"].b'), ['a', 'b']);
test.deepEqual(parseLookup('["a"].b'), ['a', 'b']);
test.deepEqual(parseLookup('["a-b"]'), ['a-b']);
test.deepEqual(parseLookup('["a-b"]'), ['a-b']);
test.deepEqual(parseLookup('["a.b"]'), ['a.b']);
test.deepEqual(parseLookup('["a.b"]'), ['a.b']);
test.deepEqual(parseLookup('["a[b"]'), ['a[b']);
test.deepEqual(parseLookup('["a[b"]'), ['a[b']);
test.deepEqual(parseLookup('["a]b"]'), ['a]b']);
test.deepEqual(parseLookup('["a]b"]'), ['a]b']);
/* BEGIN JSSTYLED */
test.deepEqual(parseLookup("['a\\'[b']"), ["a'[b"]);
test.deepEqual(parseLookup("['a\\'[b'].c"), ["a'[b", "c"]);
/* END JSSTYLED */
test.deepEqual(parseLookup('a/b', '/'), ['a', 'b']);
test.deepEqual(parseLookup('a.b/c', '/'), ['a.b', 'c']);
test.deepEqual(parseLookup('a.b/c[42]', '/'), ['a.b', 'c', 42]);
test.deepEqual(parseLookup('["a/b"]', '/'), ['a/b']);
test.done();
}
};
// Process includes and excludes from 'TEST_ONLY'.
var only = [],
excludes = [];
if (process.env.TEST_ONLY) {
warn('Note: Limiting "test.js" tests by $TEST_ONLY: "' +
process.env.TEST_ONLY + '"');
var tokens = process.env.TEST_ONLY.trim().split(/\s+/);
for (var i = 0; i < tokens.length; i++) {
if (tokens[i][0] === '-') {
excludes.push(tokens[i].slice(1));
} else {
only.push(tokens[i]);
}
}<|fim▁hole|>// Add a test case for each dir with a 'test.sh' script.
var names = fs.readdirSync(__dirname);
for (var i = 0; i < names.length; ++i) {
var name = names[i];
if (only.length && only.indexOf(name) == -1) {
continue;
}
if (excludes.length && excludes.indexOf(name) != -1) {
continue;
}
var dir = path.join(__dirname, name);
if (fs.statSync(dir).isDirectory()) {
try {
fs.statSync(path.join(dir, 'cmd'));
} catch (e) {
continue;
}
if (data[name] !== undefined) {
throw ('error: test "' + name + '" already exists');
}
data[name] = (function (dir) {
return function (test) {
var numTests = 0;
var expectedExitCode = null;
try {
var p = path.join(dir, 'expected.exitCode');
if (fs.statSync(p)) {
expectedExitCode = Number(fs.readFileSync(p));
numTests += 1;
}
} catch (e) {}
var expectedStdout = null;
try {
var p = path.join(dir, 'expected.stdout');
if (fs.statSync(p)) {
expectedStdout = fs.readFileSync(p, 'utf8');
numTests += 1;
}
} catch (e) {}
var expectedStderr = null;
try {
var p = path.join(dir, 'expected.stderr');
if (fs.statSync(p)) {
expectedStderr = fs.readFileSync(p, 'utf8');
numTests += 1;
}
} catch (e) {}
test.expect(numTests);
exec('bash cmd', {
'cwd': dir
}, function (error, stdout, stderr) {
var errmsg = ('\n-- return value:\n' +
(error && error.code) + '\n-- expected stdout:\n' +
expectedStdout + '\n-- stdout:\n' + stdout +
'\n-- stdout diff:\n' +
ansidiff.chars(expectedStdout, stdout));
if (expectedStderr !== null) {
errmsg += '\n-- expected stderr:\n' + expectedStderr;
}
if (stderr !== null) {
errmsg += '\n-- stderr:\n' + stderr;
}
if (expectedStderr !== null) {
errmsg += '\n-- stderr diff:\n' +
ansidiff.chars(expectedStderr, stderr);
}
if (expectedExitCode !== null) {
test.equal(expectedExitCode, error && error.code || 0,
'\n\nunexpected exit code' + errmsg);
}
if (expectedStdout !== null) {
test.equal(stdout, expectedStdout,
'\n\nunexpected stdout' + errmsg);
}
if (expectedStderr !== null) {
test.equal(stderr, expectedStderr,
'\n\nunexpected stderr' + errmsg);
}
test.done();
});
}
})(dir);
}
}
exports['test'] = testCase(data);<|fim▁end|> | }
|
<|file_name|>condor_IBCC.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
__author__ = 'greghines'
import numpy as np
import os
import pymongo
import sys
import cPickle as pickle
import bisect
import random
import csv
import matplotlib.pyplot as plt
if os.path.exists("/home/ggdhines"):
base_directory = "/home/ggdhines"
else:
base_directory = "/home/greg"
def index(a, x):
'Locate the leftmost value exactly equal to x'
i = bisect.bisect_left(a, x)
if i != len(a) and a[i] == x:
return i
raise ValueError
sys.path.append(base_directory+"/github/reduction/experimental/classifier")
sys.path.append(base_directory+"/github/pyIBCC/python")
import ibcc
from iterativeEM import IterativeEM
if os.path.exists("/home/ggdhines"):
base_directory = "/home/ggdhines"
else:
base_directory = "/home/greg"
client = pymongo.MongoClient()
db = client['condor_2014-11-23']
classification_collection = db["condor_classifications"]
subject_collection = db["condor_subjects"]
gold = pickle.load(open(base_directory+"/condor_gold.pickle","rb"))
gold.sort(key = lambda x:x[1])
to_sample_from = (zip(*gold)[0])[1301:]
sample = random.sample(to_sample_from,100)
big_userList = []
big_subjectList = []
animal_count = 0
f = open(base_directory+"/Databases/condor_ibcc.csv","wb")
f.write("a,b,c\n")
alreadyDone = []
subjectVote = {}
gold_condor = []
only_one = []
vote_list = []
for count,zooniverse_id in enumerate(sample):
subject = subject_collection.find_one({"zooniverse_id":zooniverse_id})
if subject["classification_count"] < 3:
print "**"
only_one.append(zooniverse_id)
continue
print count
#gold standard
gold_classification = classification_collection.find_one({"user_name":"wreness", "subjects.zooniverse_id":zooniverse_id})
assert gold_classification["tutorial"] == False
found_condor = False
try:
mark_index = [ann.keys() for ann in gold_classification["annotations"]].index(["marks",])
markings = gold_classification["annotations"][mark_index].values()[0]
<|fim▁hole|> animal_type = animal["animal"]
found_condor = (animal_type == "condor")
except KeyError:
continue
except ValueError:
pass
if found_condor:
gold_condor.append(1)
else:
gold_condor.append(0)
alreadyDone = []
classification_count = 0
for classification in classification_collection.find({"subjects.zooniverse_id":zooniverse_id}):
if "user_name" in classification:
user = classification["user_name"]
else:
user = classification["user_ip"]
#print user
if ("user_name" in classification) and (classification["user_name"] == "wreness"):
continue
if user in alreadyDone:
continue
classification_count += 1
if classification_count == 3:
break
alreadyDone.append(user)
if not(user in big_userList):
big_userList.append(user)
if not(zooniverse_id in big_subjectList):
big_subjectList.append(zooniverse_id)
user_index = big_userList.index(user)
subject_index = big_subjectList.index(zooniverse_id)
try:
mark_index = [ann.keys() for ann in classification["annotations"]].index(["marks",])
markings = classification["annotations"][mark_index].values()[0]
found = False
for animal in markings.values():
animal_type = animal["animal"]
if animal_type in ["condor"]:
found = True
break
if found:
vote_list.append((user_index,subject_index,1))
f.write(str(user_index) + ","+str(subject_index) + ",1\n")
if not(zooniverse_id in subjectVote):
subjectVote[zooniverse_id] = [1]
else:
subjectVote[zooniverse_id].append(1)
else:
vote_list.append((user_index,subject_index,0))
f.write(str(user_index) + ","+str(subject_index) + ",0\n")
if not(zooniverse_id in subjectVote):
subjectVote[zooniverse_id] = [0]
else:
subjectVote[zooniverse_id].append(0)
except (ValueError,KeyError):
f.write(str(user_index) + ","+str(subject_index) + ",0\n")
if not(zooniverse_id in subjectVote):
subjectVote[zooniverse_id] = [0]
else:
subjectVote[zooniverse_id].append(0)
if classification_count == 0:
print subject
assert classification_count > 0
condor_count = 0.
total_count = 0.
false_positives = []
true_positives = []
false_negatives = []
true_negatives = []
confusion = [[0.,0.],[0.,0.]]
for votes in subjectVote.values():
if np.mean(votes) >= 0.5:
condor_count += 1
confusion[1][1] += np.mean(votes)
confusion[1][0] += 1 - np.mean(votes)
true_positives.append(np.mean(votes))
#false_negatives.append(1-np.mean(votes))
else:
#false_positives.append(np.mean(votes))
true_negatives.append(1-np.mean(votes))
confusion[0][0] += 1 - np.mean(votes)
confusion[0][1] += np.mean(votes)
total_count += 1
pp = condor_count / total_count
print confusion
confusion = [[max(int(confusion[0][0]),1),max(int(confusion[0][1]),1)],[max(int(confusion[1][0]),1),max(int(confusion[1][1]),1)]]
print confusion
print pp
f.close()
with open(base_directory+"/Databases/condor_ibcc.py","wb") as f:
f.write("import numpy as np\n")
f.write("scores = np.array([0,1])\n")
f.write("nScores = len(scores)\n")
f.write("nClasses = 2\n")
f.write("inputFile = \""+base_directory+"/Databases/condor_ibcc.csv\"\n")
f.write("outputFile = \""+base_directory+"/Databases/condor_ibcc.out\"\n")
f.write("confMatFile = \""+base_directory+"/Databases/condor_ibcc.mat\"\n")
f.write("nu0 = np.array(["+str(int((1-pp)*100))+","+str(int(pp*100))+"])\n")
f.write("alpha0 = np.array("+str(confusion)+")\n")
#f.write("alpha0 = np.array([[185,1],[6,52]])\n")
#f.write("alpha0 = np.array([[3,1],[1,3]])\n")
#start by removing all temp files
try:
os.remove(base_directory+"/Databases/condor_ibcc.out")
except OSError:
pass
try:
os.remove(base_directory+"/Databases/condor_ibcc.mat")
except OSError:
pass
try:
os.remove(base_directory+"/Databases/condor_ibcc.csv.dat")
except OSError:
pass
#pickle.dump((big_subjectList,big_userList),open(base_directory+"/Databases/tempOut.pickle","wb"))
ibcc.runIbcc(base_directory+"/Databases/condor_ibcc.py")
values = []
errors = 0
low = 0
X_positive = []
X_negative = []
with open(base_directory+"/Databases/condor_ibcc.out","rb") as f:
ibcc_results = csv.reader(f, delimiter=' ')
for ii,row in enumerate(ibcc_results):
if ii == 20000:
break
wreness_condor = gold_condor[ii]
ibcc_condor = float(row[2])
if wreness_condor == 0:
X_negative.append(ibcc_condor)
else:
X_positive.append(ibcc_condor)
#print X_negative
# print X_positive
# plt.hist([X_positive,X_negative],10)
# plt.show()
alpha_list = X_negative[:]
alpha_list.extend(X_positive)
alpha_list.sort()
roc_X = []
roc_Y = []
for alpha in alpha_list:
positive_count = sum([1 for x in X_positive if x >= alpha])
positive_rate = positive_count/float(len(X_positive))
negative_count = sum([1 for x in X_negative if x >= alpha])
negative_rate = negative_count/float(len(X_negative))
roc_X.append(negative_rate)
roc_Y.append(positive_rate)
#print roc_X
plt.plot(roc_X,roc_Y,color="red")
X_positive = []
X_negative = []
#repeat with MV
for subject_index,zooniverse_id in enumerate(big_subjectList):
votes = subjectVote[zooniverse_id]
wreness_condor = gold_condor[subject_index]
if wreness_condor == 0:
X_negative.append(np.mean(votes))
else:
X_positive.append(np.mean(votes))
alpha_list = X_negative[:]
alpha_list.extend(X_positive)
alpha_list.sort()
roc_X = []
roc_Y = []
for alpha in alpha_list:
positive_count = sum([1 for x in X_positive if x >= alpha])
positive_rate = positive_count/float(len(X_positive))
negative_count = sum([1 for x in X_negative if x >= alpha])
negative_rate = negative_count/float(len(X_negative))
roc_X.append(negative_rate)
roc_Y.append(positive_rate)
#print roc_X
plt.plot(roc_X,roc_Y,color="green")
classify = IterativeEM()
classify.__classify__(vote_list,2)
estimates = classify.__getEstimates__()
X_positive = []
X_negative = []
for subject_index,zooniverse_id in enumerate(big_subjectList):
probability = estimates[subject_index]
wreness_condor = gold_condor[subject_index]
if wreness_condor == 0:
X_negative.append(probability)
else:
X_positive.append(probability)
alpha_list = X_negative[:]
alpha_list.extend(X_positive)
alpha_list.sort()
roc_X = []
roc_Y = []
for alpha in alpha_list:
positive_count = sum([1 for x in X_positive if x >= alpha])
positive_rate = positive_count/float(len(X_positive))
negative_count = sum([1 for x in X_negative if x >= alpha])
negative_rate = negative_count/float(len(X_negative))
roc_X.append(negative_rate)
roc_Y.append(positive_rate)
#print roc_X
plt.plot(roc_X,roc_Y,color="blue")
#plt.xlim((0,1.05))
plt.plot((0,1),(0,1),'--')
plt.xlabel("False Positive Rate")
plt.ylabel("True Positive Rate")
#plt.plot([0.058],[0.875],'o')
plt.show()<|fim▁end|> | try:
for animal in markings.values(): |
<|file_name|>aarch64.rs<|end_file_name|><|fim▁begin|>use dynasmrt::{dynasm, DynasmApi, DynasmLabelApi};
use std::{io, slice, mem};
use std::io::Write;
fn main() {
let mut ops = dynasmrt::aarch64::Assembler::new().unwrap();
let string = "Hello World!";
dynasm!(ops
; .arch aarch64
; ->hello:
; .bytes string.as_bytes()
; .align 4
; ->print:
; .qword print as _
);
let hello = ops.offset();
dynasm!(ops
; .arch aarch64
; adr x0, ->hello
; movz x1, string.len() as u32
; ldr x9, ->print
; str x30, [sp, #-16]!
; blr x9
; ldr x30, [sp], #16
; ret
);<|fim▁hole|>
assert!(hello_fn());
}
pub extern "C" fn print(buffer: *const u8, length: u64) -> bool {
io::stdout()
.write_all(unsafe { slice::from_raw_parts(buffer, length as usize) })
.is_ok()
}<|fim▁end|> |
let buf = ops.finalize().unwrap();
let hello_fn: extern "C" fn() -> bool = unsafe { mem::transmute(buf.ptr(hello)) }; |
<|file_name|>TcamView.py<|end_file_name|><|fim▁begin|># Copyright 2017 The Imaging Source Europe GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import time
from tcam_capture.CapsDesc import CapsDesc
from tcam_capture.TcamScreen import TcamScreen
from tcam_capture.FileNameGenerator import FileNameGenerator
from tcam_capture.MediaSaver import MediaSaver
from tcam_capture.Settings import Settings
from tcam_capture.Encoder import MediaType, get_encoder_dict
from tcam_capture.TcamCaptureData import TcamCaptureData
from tcam_capture.FPSCounter import FPSCounter
from PyQt5 import QtGui, QtWidgets, QtCore
from PyQt5.QtWidgets import (QWidget, QHBoxLayout)
from PyQt5.QtCore import QObject, pyqtSignal, Qt, QEvent
import logging
import gi
gi.require_version("Gst", "1.0")
gi.require_version("Tcam", "0.1")
gi.require_version("GstVideo", "1.0")
from gi.repository import Tcam, Gst, GLib, GstVideo
log = logging.getLogger(__name__)
class TcamView(QWidget):
image_saved = pyqtSignal(str)
video_saved = pyqtSignal(str)
new_pixel_under_mouse = pyqtSignal(bool, int, int, QtGui.QColor)
current_fps = pyqtSignal(float)
format_selected = pyqtSignal(str, str, str) # format, widthxheight, framerate
first_image = pyqtSignal()
def __init__(self, serial: str, dev_type: str, parent=None):
super(TcamView, self).__init__(parent)
self.layout = QHBoxLayout()
self.container = TcamScreen(self)
self.container.new_pixel_under_mouse.connect(self.new_pixel_under_mouse_slot)
self.fullscreen_container = None # separate widget for fullscreen usage
self.is_fullscreen = False
self.layout.addWidget(self.container)
self.layout.setSizeConstraint(QtWidgets.QLayout.SetMaximumSize)
self.setLayout(self.layout)
self.serial = serial
self.dev_type = dev_type
self.tcam = None
self.pipeline = None
self.image = None
self.mouse_is_pressed = False
self.use_dutils = True
self.current_width = 0
self.current_height = 0
self.device_lost_callbacks = []
self.caps_desc = None
self.video_format = None
self.retry_countdown = 0
self.settings = None
self.video_fng = None
self.image_fng = None
# additional timer to update actual_fps
# when no images arrive
self.fps_timer = QtCore.QTimer()
self.fps_timer.timeout.connect(self.fps_tick)
self.fps = None
self.file_pattern = ""
self.file_location = "/tmp"
self.caps = None
self.state = None
self.videosaver = None
self.imagesaver = None
self.window_id = self.container.winId()
self.displaysink = None
def get_caps_desc(self):
"""
Returns a CapsDesc describing the caps of the currently opened device
Returns None if device is not opened
"""
if not self.caps_desc:
tcam = self.get_tcam()
if not tcam:
return None
caps = tcam.get_static_pad("src").query_caps()
self.caps_desc = CapsDesc(caps)
return self.caps_desc
def new_pixel_under_mouse_slot(self, active: bool,
mouse_x: int, mouse_y: int,
color: QtGui.QColor):
self.new_pixel_under_mouse.emit(active, mouse_x, mouse_y, color)
def eventFilter(self, obj, event):
""""""
if event.type == QEvent.KeyPress:
if event.key() == Qt.Key_F11:
self.toggle_fullscreen()
return True
return QObject.eventFilter(self, obj, event)
def set_settings(self, new_settings: Settings):
"""
Update settings of all subclasses
"""
self.settings = new_settings
self.use_dutils = self.settings.use_dutils
if not self.video_fng:
self.video_fng = FileNameGenerator(self.serial,
self.settings.video_name)
else:
self.video_fng.set_settings(self.settings.video_name)
self.video_fng.location = self.settings.save_location
self.video_fng.file_suffix = get_encoder_dict()[self.settings.video_type].file_ending
if not self.image_fng:
self.image_fng = FileNameGenerator(self.serial,
self.settings.image_name)
else:
self.image_fng.set_settings(self.settings.image_name)
self.image_fng.location = self.settings.save_location
self.image_fng.file_suffix = get_encoder_dict()[self.settings.image_type].file_ending
def toggle_fullscreen(self):
if self.is_fullscreen:
self.is_fullscreen = False
self.showNormal()
self.fullscreen_container.hide()
# self.fullscreen_container.deleteLater()
self.fullscreen_container = None
self.displaysink.set_window_handle(self.window_id)
else:
self.is_fullscreen = True
self.fullscreen_container = TcamScreen()
self.fullscreen_container.is_fullscreen = True
self.fullscreen_container.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.fullscreen_container.showFullScreen()
self.fullscreen_container.show()
self.container.first_image = True
self.displaysink.set_window_handle(self.fullscreen_container.winId())
self.fullscreen_container.setFocusPolicy(QtCore.Qt.StrongFocus)
self.fullscreen_container.installEventFilter(self.fullscreen_container)
self.fullscreen_container.destroy_widget.connect(self.toggle_fullscreen)
# either show info that we are in trigger mode and still waiting for the first image
# or show that last image we had. This way we always have something to show to the user
if self.is_trigger_mode_on() and self.container.first_image:
self.fullscreen_container.wait_for_first_image()
else:
self.fullscreen_container.on_new_pixmap(self.container.pix.pixmap())
def fit_view(self):
if self.is_fullscreen:
self.fullscreen_container.fit_in_view.emit()
else:
self.container.fit_in_view.emit()
def save_image(self, image_type: str):
if not self.imagesaver:
self.imagesaver = MediaSaver(self.serial, self.caps, MediaType.image)
self.imagesaver.saved.connect(self.image_saved_callback)
self.imagesaver.error.connect(self.image_error_callback)
self.image_fng.set_settings(self.settings.image_name)
fn = self.image_fng.create_file_name("image")
self.imagesaver.current_filename = fn
self.imagesaver.save_image(get_encoder_dict()[image_type])
def image_saved_callback(self, image_path: str):
"""
SLOT for imagesaver callback for successfull saving
"""
self.image_saved.emit(image_path)
def image_error_callback(self, error_msg: str):
pass
def video_saved_callback(self, video_path: str):
"""
SLOT for videosaver callback for successfull saving
"""
self.video_saved.emit(video_path)
def start_recording_video(self, video_type: str):
"""
"""
if self.videosaver:
log.error("A video recording is already ongoing.")
return
self.videosaver = MediaSaver(self.serial, self.caps, MediaType.video)
self.videosaver.set_encoder(video_type)
self.videosaver.location = self.file_location
self.videosaver.current_filename = self.video_fng.create_file_name()
self.videosaver.saved.connect(self.video_saved_callback)
self.videosaver.start_recording_video(video_type)
def stop_recording_video(self):
"""
"""
if self.videosaver:
self.videosaver.stop_recording_video()
self.videosaver = None
def get_gst_state(self, timeout=5):
"""
Arguments:
timeout=5, optional
"""
if not self.pipeline:
return None
return self.pipeline.get_state(timeout).state
def play(self, video_format=None):
if self.videosaver:
self.stop_recording_video()
if self.pipeline is None:
self.create_pipeline()
if self.get_gst_state() == Gst.State.PLAYING:
log.debug("Setting state to NULL")
# Set to NULL to ensure that buffers,
# etc are destroyed.
# do this by calling stop
# so that additional steps like fps.stop()
# are taken
self.stop()
self.pipeline.set_state(Gst.State.READY)
if video_format:
caps_desc = self.get_caps_desc()
if caps_desc.contains(video_format):
self.video_format = video_format
else:
log.error("Given format caps could not be found in caps descriptions. {}".format(video_format))
log.error("Falling back to default behavior.")
if self.video_format is not None:
log.info("Setting format to {}".format(video_format))
caps = self.pipeline.get_by_name("bin")
caps.set_property("device-caps",
video_format)
if self.state and self.settings.apply_property_cache:
log.info("Property state found.")
# log.debug("Setting state: ==>{}<==".format(self.state))
self.tcam.set_property("state", str(self.state))
self.state = None
else:
log.info("No property state to be applied. Starting vanilla camera")
log.debug("Setting state to PLAYING")
self.pipeline.set_state(Gst.State.PLAYING)
self.fps_timer.start(1000) # 1 second
self.fps = FPSCounter()
self.fps.start()
self.container.first_image = True
if self.is_trigger_mode_on():
self.container.wait_for_first_image()
def fps_tick(self):
"""
Recalculate the current fps and emit current_fps signal
"""
self.current_fps.emit(self.fps.get_fps())
def new_buffer(self, appsink):
"""
callback for appsink new-sample signal
converts gstbuffer into qpixmap and gives it to the display container
"""
self.fps.tick()
self.fps_tick()
if self.container.first_image:
self.first_image.emit()
self.container.remove_wait_for_fist_image()
buf = self.pipeline.get_by_name("sink").emit("pull-sample")
caps = buf.get_caps()
self.caps = caps
if (not (self.videosaver and self.videosaver.accept_buffer) and
not (self.imagesaver and self.imagesaver.accept_buffer)):
return Gst.FlowReturn.OK
b = buf.get_buffer()
if self.videosaver and self.videosaver.accept_buffer:
self.videosaver.feed_image(b)
if self.imagesaver and self.imagesaver.accept_buffer:
self.imagesaver.feed_image(b)
return Gst.FlowReturn.OK
def create_pipeline(self, video_format=None):
# we cheat
# inject the type into the serial
# this ensures that no matter what we
# always have the correct backend
if self.dev_type:
self.serial = "{}-{}".format(self.serial, self.dev_type.lower())
# the queue element before the sink is important.
# it allows set_state to work as expected.
# the sink is synced with our main thread (the display thread).
# changing the state from out main thread will cause a deadlock,
# since the remaining buffers can not be displayed because our main thread
# is currently in set_state<|fim▁hole|> "! tee name=tee "
"! queue max-size-buffers=2 leaky=downstream "
"! video/x-raw,format=BGRx "
"! appsink name=sink emit-signals=true sync=false drop=true max-buffers=4 "
"tee. "
"! queue max-size-buffers=2 leaky=downstream "
"! videoconvert "
"! xvimagesink double-buffer=true sync=false name=displaysink draw-borders=false")
self.pipeline = None
self.pipeline = Gst.parse_launch(pipeline_str.format(serial=self.serial,
type=self.dev_type.lower(),
dutils=self.use_dutils))
self.displaysink = self.pipeline.get_by_name("displaysink")
sink = self.pipeline.get_by_name("sink")
sink.connect("new-sample", self.new_buffer)
# Create bus to get events from GStreamer pipeline
self.bus = self.pipeline.get_bus()
self.bus.add_signal_watch()
self.bus.enable_sync_message_emission()
self.bus.connect('message::error', self.on_error)
self.bus.connect('message::info', self.on_info)
self.bus.connect("sync-message::element", self.on_sync_message)
self.tcam = self.pipeline.get_by_name("bin")
if video_format:
self.tcam.set_property("device-caps", video_format)
# This ready is required so that get_caps_desc
# works and does not return ANY
self.pipeline.set_state(Gst.State.READY)
log.debug("Created pipeline and set to READY")
log.debug("Pipeline is: {}".format(pipeline_str.format(serial=self.serial,
type=self.dev_type.lower(),
dutils=self.use_dutils)))
def on_sync_message(self, bus, message):
structure = message.get_structure()
if structure is None:
return
message_name = structure.get_name()
if message_name == "prepare-window-handle":
# "Note that trying to get the drawingarea XID in your on_sync_message() handler
# will cause a segfault because of threading issues."
# print 'sinkx_overview win_id: %s (%s)' % (self.gstWindowId, self.video_container.winId())
assert self.window_id
message.src.set_window_handle(self.window_id)
def pause(self):
log.info("Setting state to PAUSED")
if self.pipeline:
self.pipeline.set_state(Gst.State.PAUSED)
else:
log.error("Pipeline object does not exist.")
self.fps_timer.stop()
if self.fps:
self.fps.stop()
def stop(self):
"""
Stop playback
"""
log.info("Setting state to NULL")
self.fps_timer.stop()
if self.fps:
self.fps.stop()
self.pipeline.set_state(Gst.State.NULL)
def on_info(self, bus, msg):
"""
Callback for gst bus info messages
"""
info, dbg = msg.parse_info()
log.info(dbg)
if msg.src.get_name() == "bin":
if dbg.startswith("Working with src caps:"):
log.info("{}".format(dbg.split(": ")[1]))
self.caps = dbg.split(": ")[1]
self.fire_format_selected(dbg.split(": ")[1])
else:
log.error("Info from bin: {}".format(dbg))
else:
log.error("ERROR:", msg.src.get_name())
if dbg:
log.debug("Debug info:", dbg)
def fire_format_selected(self, caps: str):
"""
Emit SIGNAL that the pipeline has selected
src caps and inform listeners what the caps are
"""
if caps is None or caps == "NULL":
log.error("Bin returned faulty source caps. Not firiing format_selected")
return
c = Gst.Caps.from_string(caps)
if c.is_empty():
log.error("Received empty caps. Aborting fire_format_selected")
return
structure = c.get_structure(0)
self.image_fng.set_caps(c)
self.video_fng.set_caps(c)
if structure.get_name() == "image/jpeg":
fmt = "jpeg"
else:
fmt = structure.get_value("format")
resolution = "{}x{}".format(structure.get_value("width"),
structure.get_value("height"))
# compatability problems
# Older python bindings do not know the type Gst.Fraction.
# Thus we have to work around this problem...
results = re.search("framerate=\(fraction\)\d+/\d+", caps)
if results:
fps = results.group()
fps = fps.replace("framerate=(fraction)", "")
else:
fps = None
log.error("Unable to determine framerate settings. This will affect usability.")
self.format_selected.emit(fmt, resolution, str(fps))
def on_error(self, bus, msg):
"""
Callback for gst bus messages
Receives errors and chooses appropriate actions
"""
err, dbg = msg.parse_error()
if "tcamsrc-" in msg.src.get_name():
if err:
if "Device lost (" in err.message:
m = re.search('Device lost \((.*)\)', err.message)
log.error("Received device lost message for {}".format(m.group(1)))
self.fire_device_lost()
else:
log.error("Error from source: {}".format(err.message))
self.retry_countdown -= 1
if self.retry_countdown <= 0:
log.error("Repeatedly retried to start stream. No Success. Giving up.")
return
log.info("Trying restart of stream")
self.stop()
self.play(self.video_format)
else:
log.error("ERROR: {} : {}".format(msg.src.get_name(), err.message))
if dbg:
log.debug("Debug info: {}".format(dbg))
def get_tcam(self):
return self.tcam
def register_device_lost(self, callback):
self.device_lost_callbacks.append(callback)
def fire_device_lost(self):
"""
Notify all callback that our device is gone
"""
for cb in self.device_lost_callbacks:
cb()
def is_trigger_mode_on(self):
if not self.tcam:
return False
names = self.tcam.get_tcam_property_names()
if "Trigger Mode" not in names:
return False
try:
(result, value,
minval, maxval,
defval, step,
valuetype,
flags,
category, group) = self.tcam.get_tcam_property("Trigger Mode")
except TypeError:
log.warning("get_tcam_property failed for '{}'".format("Trigger Mode"))
return False
if valuetype == "boolean":
if value:
return True
return False
elif valuetype == "enum":
if value == "On":
return True
return True
def trigger_image(self):
"""
Checks if trigger mode is active and try to trigger an image
"""
if self.is_trigger_mode_on():
self.tcam.set_tcam_property("Software Trigger", True)
def start_roi_capture(self, finished_signal):
"""
Start capturing a ROI and emit finished_signal once the capture is finished
"""
self.container.start_roi_capture(finished_signal)
def add_roi(self, roi_widget):
"""
Add the given roi_widget for permanent display.
Call remove_roi to undo.
"""
self.container.add_roi(roi_widget)
def remove_roi(self, roi_widget):
"""
Remove roi_widget from display
"""
self.container.remove_roi(roi_widget)
def get_state(self):
"""
Retrieve a json description of the current property settings
Returns:
str or None
"""
if not self.tcam:
return None
return self.tcam.get_property("state")
def load_state(self, state: str):
"""
Arguments:
state:
str containing json descibing the property values
"""
self.state = state
@staticmethod
def has_dutils():
"""
Check to see if the gstreamer module gsttcamdutils is available.
"""
factory = Gst.ElementFactory.find("tcamdutils")
if factory:
return True
return False<|fim▁end|> | pipeline_str = ("tcambin serial={serial} name=bin use-dutils={dutils} "
"! video/x-raw,format=BGRx " |
<|file_name|>ExtrinsicRegistry.cpp<|end_file_name|><|fim▁begin|>#include "Registry.hpp"
<|fim▁hole|><|fim▁end|> | Registry::ExtrinsicRegistry::ExtrinsicRegistry() : window{}, splitter{}, GUI{new GuiSettings{}} {} |
<|file_name|>ref_.rs<|end_file_name|><|fim▁begin|>use serde_json::Value;
use url;<|fim▁hole|>use super::super::scope;
#[allow(missing_copy_implementations)]
pub struct Ref {
pub url: url::Url,
}
impl super::Validator for Ref {
fn validate(&self, val: &Value, path: &str, scope: &scope::Scope) -> super::ValidationState {
let schema = scope.resolve(&self.url);
if schema.is_some() {
schema.unwrap().validate_in(val, path)
} else {
let mut state = super::ValidationState::new();
state.missing.push(self.url.clone());
state
}
}
}<|fim▁end|> | |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>export * from './login.component';<|fim▁hole|><|fim▁end|> | export * from './login-vm.model'; |
<|file_name|>funcion1.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Wed Dec 21 20:05:40 2016
@author: lucia
"""
<|fim▁hole|> print('Primera funcion de mi projecttwo')
print('Modifico la primera funcion 2')<|fim▁end|> | def funcion1(): |
<|file_name|>OpenGamaWebsiteHandler.java<|end_file_name|><|fim▁begin|>/*********************************************************************************************
*
* 'OpenGamaWebsiteHandler.java, in plugin ummisco.gama.ui.shared, is part of the source code of the
* GAMA modeling and simulation platform.
* (v. 1.8.1)
*
* (c) 2007-2020 UMI 209 UMMISCO IRD/UPMC & Partners
*
* Visit https://github.com/gama-platform/gama for license information and developers contact.
*
*
**********************************************************************************************/
package ummisco.gama.ui.commands;
import org.eclipse.core.commands.AbstractHandler;
import org.eclipse.core.commands.ExecutionEvent;<|fim▁hole|>public class OpenGamaWebsiteHandler extends AbstractHandler {
/**
* Method execute()
*
* @see org.eclipse.core.commands.IHandler#execute(org.eclipse.core.commands.ExecutionEvent)
*/
@Override
public Object execute(final ExecutionEvent event) throws ExecutionException {
GAMA.getGui().openWelcomePage(false);
return null;
}
}<|fim▁end|> | import org.eclipse.core.commands.ExecutionException;
import msi.gama.runtime.GAMA;
|
<|file_name|>SipInterfaceListener.java<|end_file_name|><|fim▁begin|><|fim▁hole|> *
* This file is part of MjSip (http://www.mjsip.org)
*
* MjSip is free software; you can redistribute it and/or modify
* it under the terms of the Affero GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* MjSip is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Affero GNU General Public License for more details.
*
* You should have received a copy of the Affero GNU General Public License
* along with MjSip; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* Author(s):
* Luca Veltri ([email protected])
*/
package org.zoolu.sip.provider;
import org.zoolu.sip.message.Message;
/** A SipInterfaceListener listens for SipInterface onReceivedMessage(SipInterfaceListener,Message) events.
*/
public interface SipInterfaceListener
{
/** When a new Message is received by the SipInterface. */
public void onReceivedMessage(SipInterface sip, Message message);
}<|fim▁end|> | /*
* Copyright (C) 2005 Luca Veltri - University of Parma - Italy |
<|file_name|>JsonTreeEditListPropertyJavascriptTest.shouldFailReadLastIndexOfNonArray.js<|end_file_name|><|fim▁begin|>var node = S(input, "application/json");<|fim▁hole|><|fim▁end|> |
node.lastIndexOf("test"); |
<|file_name|>d3dbuffer.cpp<|end_file_name|><|fim▁begin|>/*
SoftTH, Software multihead solution for Direct3D
Copyright (C) 2005-2012 Keijo Ruotsalainen, www.kegetys.fi
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/<|fim▁hole|>
#include "main.h"
// These GUIDs are used to detect new interfaces to override back to originals to pass to D3D
#include <INITGUID.H>
DEFINE_GUID(IID_IDirect3DIndexBuffer9Managed, 0x7c9dd65e, 0xd3f7, 0x4529, 0xac, 0xee, 0x78, 0x58, 0xaa, 0xbb, 0xcc, 0xdd);
DEFINE_GUID(IID_IDirect3DVertexBuffer9Quirk, 0x7c9dd65e, 0xd3f7, 0x4529, 0xac, 0xfe, 0x12, 0x34, 0xaa, 0xbb, 0xcc, 0xdd);
DEFINE_GUID(IID_IDirect3DVertexBuffer9Managed, 0x7c9dd65e, 0xd3f7, 0x4529, 0xac, 0x00, 0xff, 0x23, 0xaa, 0xbb, 0xcc, 0xdd);
#ifdef USE_DISCARD_FLAG
#define FLOCKFLAGS D3DLOCK_DISCARD
#else
#ifdef RECREATE_ON_REUSE
#define FLOCKFLAGS NULL
#else
#define FLOCKFLAGS D3DLOCK_NOOVERWRITE
#endif
#endif
static const int recreateThreshold = 3;
IDirect3DIndexBuffer9Managed::IDirect3DIndexBuffer9Managed(IDirect3DDevice9Ex* device, IDirect3DDevice9New* wantDevFake, UINT wantLength, DWORD wantUsage,D3DFORMAT wantFormat,D3DPOOL Pool,HANDLE* pSharedHandle)
{
ONCE dbg("Using Index buffer manage-emulation");
dbgf("IDirect3DIndexBuffer9Managed 0x%08X: %d bytes, %s %s %s share: 0x%08X", this, Length, getUsage(Usage), getMode(Format), getPool(Pool), pSharedHandle);
bufSys = NULL;
buf = NULL;
#ifdef LATE_IB_UPDATE
fullDirty = false;
#endif
lockSections.reserve(8); // Reserve a bit of space
lastRecreate = 0;
Length = wantLength;
Usage = wantUsage;
Format = wantFormat;
devFake = wantDevFake;
if(Pool != D3DPOOL_MANAGED) {
dbg("IDirect3DIndexBuffer9Managed: Non-managed manage-emulation??");
exit(0);
}
Pool = D3DPOOL_DEFAULT;
if(Usage & D3DUSAGE_DYNAMIC)
Usage -= D3DUSAGE_DYNAMIC;
Usage |= D3DUSAGE_WRITEONLY;
/*
Usage |= D3DUSAGE_DYNAMIC;
*/
dev = device;
if(dev->CreateIndexBuffer(Length, Usage, Format, Pool, &buf, pSharedHandle) != D3D_OK) {
dbg("IDirect3DIndexBuffer9Managed: CreateIndexBuffer failed!");
return;
}
bufSize = Length;
bufSys = new BYTE[bufSize+BUF_EXTRA_BYTES];
ZeroMemory(bufSys, bufSize+BUF_EXTRA_BYTES);
}
bool IDirect3DIndexBuffer9Managed::ReCreate()
{
bool didRecreate = false;
if(!buf) return didRecreate;
if(devFake->getFrameNumber() - lastRecreate < recreateThreshold)
{
dbgf("IDirect3DIndexBuffer9Managed 0x%08X - Recreate at frame %d", this, devFake->getFrameNumber());
int r = buf->Release();
if(r!=0)
{
dbg("IDirect3DIndexBuffer9Managed::ReCreate() Warning: %d refs on old buffer, forcing release", r);
while(buf->Release()) {};
}
didRecreate = true;
D3DCALL( dev->CreateIndexBuffer(Length, Usage, Format, D3DPOOL_DEFAULT, &buf, NULL) );
}
lastRecreate = devFake->getFrameNumber();
return didRecreate;
}
HRESULT IDirect3DIndexBuffer9Managed::Lock(UINT OffsetToLock,UINT SizeToLock,void** ppbData,DWORD Flags)
{
dbgf("IDirect3DIndexBuffer9Managed 0x%08X: lock: off %d, %d bytes, %s", this, OffsetToLock, SizeToLock, getLock(Flags));
if(OffsetToLock+SizeToLock > bufSize)
dbg("WARNING: Application attempted to lock too large indexbuffer (%d > %d)", OffsetToLock+SizeToLock, bufSize);
if(!bufSys || !buf)
return D3DERR_INVALIDCALL;
// Return handle to system memory buffer
*ppbData = bufSys+OffsetToLock;
#ifdef RECREATE_ON_REUSE
if(!fullDirty && lockSections.size() == 0)
{
// First lock
if(ReCreate())
{
fullDirty = true;
}
}
#endif
#ifndef LATE_IB_UPDATE
LOCKSECTION l;
l.lockOffset = OffsetToLock;
l.lockSize = SizeToLock;
lockSections.push_back(l);
#else
if((OffsetToLock == 0 && SizeToLock == 0) || (OffsetToLock == 0 && SizeToLock == bufSize))
{
// Whole IB locked
fullDirty = true;
}
else
{
LOCKSECTION l; // Dirty sections
l.lockOffset = OffsetToLock;
l.lockSize = SizeToLock;
lockSections.push_back(l);
}
#endif
return D3D_OK;
}
HRESULT IDirect3DIndexBuffer9Managed::Unlock()
{
dbgf("IDirect3DIndexBuffer9Managed 0x%08X: Unlock", this);
if(!bufSys || !buf)
return D3DERR_INVALIDCALL;
/*
DWORD lockOffset = lockSections[lockSections.size()-1].lockOffset;
DWORD lockSize = lockSections[lockSections.size()-1].lockSize;
// Copy sysmem to vidmem
void *vb;
if(buf->Lock(lockOffset, lockSize, &vb, D3DLOCK_NOOVERWRITE|D3DLOCK_DONOTWAIT) != D3D_OK) {
dbg("IDirect3DVertexBuffer9Managed: Unlock: FAILED!");
return D3D_OK;
}
memcpy(vb, bufSys+lockOffset, lockSize?lockSize:bufSize);
buf->Unlock();
lockSections.pop_back();
*/
#ifndef LATE_IB_UPDATE
/*DWORD flags = D3DLOCK_DISCARD; // Crashes A10-C
if(config.main.debugD3D)
{
flags = NULL; // Debug D3D doesn't like discard here
}
*/
DWORD flags = NULL;
if(lockSections.size() == 0)
{
dbg("WARNING: Indexbuffer unlock without lock??");
return D3DERR_INVALIDCALL;
}
DWORD lockOffset = lockSections[lockSections.size()-1].lockOffset;
DWORD lockSize = lockSections[lockSections.size()-1].lockSize;
// Copy locked buffer to video memory
void *ib;
if(buf->Lock(lockOffset, lockSize, &ib, flags) != D3D_OK) {
dbg("IDirect3DIndexBuffer9Managed: Unlock: Lock failed!");
return D3DERR_INVALIDCALL;
}
memcpy(ib, bufSys+lockOffset, lockSize?lockSize:bufSize);
buf->Unlock();
lockSections.pop_back();
#endif
return D3D_OK;
}
#ifdef LATE_IB_UPDATE
IDirect3DIndexBuffer9* IDirect3DIndexBuffer9Managed::GetRealBuffer()
{
dbgf("IDirect3DIndexBuffer9Managed 0x%08X: GetRealBuffer", this);
if(lockSections.size() || fullDirty)
{
#ifdef ALWAYS_FULL_UPDATE
fullDirty = true;
#endif
//dbg("IDirect3DIndexBuffer9Managed 0x%08X: UPDATE");
if(!fullDirty)
{
// Partial lock
for(int i=0;i<lockSections.size();i++)
{
DWORD lockOffset = lockSections[i].lockOffset;
DWORD lockSize = lockSections[i].lockSize;
#ifdef RECREATE_ON_REUSE
const DWORD flags = NULL;
//const DWORD flags = D3DLOCK_NOOVERWRITE;
#else
// Improves performance - but isn't safe and not allowed by D3D spec
const DWORD flags = D3DLOCK_NOOVERWRITE;
#endif
// Copy sysmem to vidmem
void *vb;
if(buf->Lock(lockOffset, lockSize, &vb, flags) != D3D_OK) {
dbg("IDirect3DIndexBuffer9Managed: Unlock: Lock failed!");
return buf;
}
memcpy(vb, bufSys+lockOffset, lockSize?lockSize:bufSize);
buf->Unlock();
}
}
else
{
// Full buffer lock
// Copy sysmem to vidmem
void *vb;
if(buf->Lock(0, 0, &vb, FLOCKFLAGS) != D3D_OK) {
dbg("IDirect3DVertexBuffer9Managed: Unlock: Lock failed!");
return buf;
}
memcpy(vb, bufSys, bufSize);
buf->Unlock();
}
lockSections.clear();
fullDirty = false;
}
return buf;
}
#endif
// Managed VB emulation
IDirect3DVertexBuffer9Managed::IDirect3DVertexBuffer9Managed(IDirect3DDevice9Ex* wantDev, IDirect3DDevice9New* wantDevFake, UINT wantLength, DWORD wantUsage, DWORD wantFVF, D3DPOOL Pool, HANDLE* pSharedHandle)
{
ONCE dbg("Using Vertex buffer manage-emulation");
dev = wantDev;
devFake = wantDevFake;
bufSys = NULL;
buf = NULL;
HRESULT ret;
fullDirty = false;
locks = 0;
Length = wantLength;
Usage = wantUsage;
FVF = wantFVF;
lockSections.reserve(8); // Reserve a bit of space
if(Pool != D3DPOOL_MANAGED) {
dbg("IDirect3DIndexBuffer9Managed: Non-managed manage-emulation??");
exit(0);
}
Pool = D3DPOOL_DEFAULT;
if(Usage & D3DUSAGE_DYNAMIC)
Usage -= D3DUSAGE_DYNAMIC;
Usage |= D3DUSAGE_WRITEONLY;
ret = dev->CreateVertexBuffer(Length, Usage, FVF, Pool, &buf, pSharedHandle);
if(ret != D3D_OK)
{
result = ret;
dbg("WARNING: IDirect3DVertexBuffer9Managed: CreateVertexBuffer D3DPOOL_DEFAULT failed!");
return;
}
bufSize = Length;
bufSys = new BYTE[bufSize+BUF_EXTRA_BYTES];
ZeroMemory(bufSys, bufSize+BUF_EXTRA_BYTES);
result = D3D_OK;
}
bool IDirect3DVertexBuffer9Managed::ReCreate()
{
bool didRecreate = false;
if(!buf) return didRecreate;
if(devFake->getFrameNumber() - lastRecreate < recreateThreshold)
{
dbg("IDirect3DVertexBuffer9Managed 0x%08X - Recreate at frame %d (last recreate %d)", this, devFake->getFrameNumber(), lastRecreate);
int r = buf->Release();
if(r!=0)
{
dbg("IDirect3DVertexBuffer9Managed::ReCreate() Warning: %d refs on old buffer, forcing release", r);
while(buf->Release()) {};
}
didRecreate = true;
D3DCALL( dev->CreateVertexBuffer(Length, Usage, FVF, D3DPOOL_DEFAULT, &buf, NULL) );
}
else
dbg("IDirect3DVertexBuffer9Managed 0x%08X - NO recreate at frame %d (last recreate %d)", this, devFake->getFrameNumber(), lastRecreate);
lastRecreate = devFake->getFrameNumber();
return didRecreate;
}
HRESULT IDirect3DVertexBuffer9Managed::Lock(UINT OffsetToLock,UINT SizeToLock,void** ppbData,DWORD Flags)
{
dbgf("IDirect3DVertexBuffer9Managed 0x%08X: lock: off %d, %d bytes, %s", this, OffsetToLock, SizeToLock, getLock(Flags));
if(OffsetToLock+SizeToLock > bufSize)
dbg("WARNING: Application attempted to lock too large vertexbuffer (%d > %d)", OffsetToLock+SizeToLock, bufSize);
if(!bufSys || !buf)
return D3DERR_INVALIDCALL;
// Return handle to system memory buffer
*ppbData = bufSys+OffsetToLock;
#ifdef RECREATE_ON_REUSE
if(!fullDirty && lockSections.size() == 0)
{
// First lock
if(ReCreate())
{
fullDirty = true;
}
//fullDirty = true;
}
#endif
if((OffsetToLock == 0 && SizeToLock == 0) || (OffsetToLock == 0 && SizeToLock == bufSize))
{
// Whole VB locked
fullDirty = true;
}
else
{
LOCKSECTION l; // Dirty sections
l.lockOffset = OffsetToLock;
l.lockSize = SizeToLock;
lockSections.push_back(l);
}
locks++;
return D3D_OK;
}
HRESULT IDirect3DVertexBuffer9Managed::Unlock()
{
dbgf("IDirect3DVertexBuffer9Managed 0x%08X: Unlock", this);
if(!bufSys || !buf)
return D3DERR_INVALIDCALL;
locks--;
/*
DWORD lockOffset = lockSections[lockSections.size()-1].lockOffset;
DWORD lockSize = lockSections[lockSections.size()-1].lockSize;
// Copy sysmem to vidmem
void *vb;
if(buf->Lock(lockOffset, lockSize, &vb, D3DLOCK_NOOVERWRITE|D3DLOCK_DONOTWAIT) != D3D_OK) {
dbg("IDirect3DVertexBuffer9Managed: Unlock: FAILED!");
return D3D_OK;
}
memcpy(vb, bufSys+lockOffset, lockSize?lockSize:bufSize);
buf->Unlock();
lockSections.pop_back();
*/
/*
DWORD flags = D3DLOCK_NOOVERWRITE;
if(lockSections.size() == 0)
{
dbg("WARNING: Vertexbuffer unlock without lock??");
return D3DERR_INVALIDCALL;
}
DWORD lockOffset = lockSections[lockSections.size()-1].lockOffset;
DWORD lockSize = lockSections[lockSections.size()-1].lockSize;
// Copy locked buffer to video memory
void *ib;
if(buf->Lock(lockOffset, lockSize, &ib, flags) != D3D_OK) {
dbg("IDirect3DVertexBuffer9Managed: Unlock: Lock failed!");
return D3DERR_INVALIDCALL;
}
memcpy(ib, bufSys+lockOffset, lockSize?lockSize:bufSize);
buf->Unlock();
lockSections.pop_back();
*/
return D3D_OK;
}
IDirect3DVertexBuffer9* IDirect3DVertexBuffer9Managed::GetRealBuffer()
{
dbgf("IDirect3DVertexBuffer9Managed 0x%08X: GetRealBuffer", this);
if(locks)
dbg("WARNING: IDirect3DVertexBuffer9Managed 0x%08X: GetRealBuffer() with %d active locks", locks);
if(lockSections.size() || fullDirty)
{
#ifdef ALWAYS_FULL_UPDATE
fullDirty = true;
#endif
//dbg("IDirect3DVertexBuffer9Managed 0x%08X: UPDATE");
if(!fullDirty)
{
// Partial buffer lock
for(int i=0;i<lockSections.size();i++)
{
DWORD lockOffset = lockSections[i].lockOffset;
DWORD lockSize = lockSections[i].lockSize;
#ifdef RECREATE_ON_REUSE
const DWORD flags = NULL;
#else
// Improves performance - but isn't safe and not allowed by D3D spec
const DWORD flags = D3DLOCK_NOOVERWRITE;
#endif
// Copy sysmem to vidmem
void *vb;
if(buf->Lock(lockOffset, lockSize, &vb, flags) != D3D_OK) {
dbg("IDirect3DVertexBuffer9Managed: Unlock: Lock failed!");
return buf;
}
memcpy(vb, bufSys+lockOffset, lockSize?lockSize:bufSize);
buf->Unlock();
}
}
else
{
// Full buffer lock
// Copy sysmem to vidmem
void *vb;
if(buf->Lock(0, 0, &vb, FLOCKFLAGS/*|D3DLOCK_NOOVERWRITE*/) != D3D_OK) {
dbg("IDirect3DVertexBuffer9Managed: Unlock: Lock failed!");
return buf;
}
memcpy(vb, bufSys, bufSize);
buf->Unlock();
}
lockSections.clear();
fullDirty = false;
}
return buf;
}<|fim▁end|> |
#include "d3dbuffer.h" |
<|file_name|>todo.py<|end_file_name|><|fim▁begin|>from flask import Flask
from flask.ext.restful import reqparse, abort, Api, Resource
app = Flask(__name__)
api = Api(app)
TODOS = {
'todo1': {'task': 'build an API'},
'todo2': {'task': '?????'},
'todo3': {'task': 'profit!'},
}
def abort_if_todo_doesnt_exist(todo_id):
if todo_id not in TODOS:
abort(404, message="Todo {} doesn't exist".format(todo_id))
<|fim▁hole|>parser = reqparse.RequestParser()
parser.add_argument('task', type=str)
# Todo
# show a single todo item and lets you delete them
class Todo(Resource):
    """Single todo item: fetch, remove, or replace it by id."""

    def get(self, todo_id):
        """Return the stored task for `todo_id` (404 if unknown)."""
        abort_if_todo_doesnt_exist(todo_id)
        return TODOS[todo_id]

    def delete(self, todo_id):
        """Remove the task for `todo_id` (404 if unknown); 204 on success."""
        abort_if_todo_doesnt_exist(todo_id)
        TODOS.pop(todo_id)
        return '', 204

    def put(self, todo_id):
        """Create or replace the task stored under `todo_id`; 201 on success."""
        parsed = parser.parse_args()
        new_task = {'task': parsed['task']}
        TODOS[todo_id] = new_task
        return new_task, 201
# TodoList
# shows a list of all todos, and lets you POST to add new tasks
class TodoList(Resource):
    """Collection endpoint: list every todo or append a new one."""

    def get(self):
        """Return the full id -> task mapping."""
        return TODOS

    def post(self):
        """Store a new task under the next sequential id; 201 on success."""
        parsed = parser.parse_args()
        new_id = 'todo%d' % (len(TODOS) + 1)
        TODOS[new_id] = {'task': parsed['task']}
        return TODOS[new_id], 201
##
##
## Actually setup the Api resource routing here
##
api.add_resource(TodoList, '/todos')
api.add_resource(Todo, '/todos/<string:todo_id>')

# Start the Flask development server only when this module is executed
# directly (not when it is imported). A stray '<|fim end|>' marker token was
# fused onto the app.run(...) line, breaking the module; it has been removed.
if __name__ == '__main__':
    app.run(debug=True)
# --- file: std.py ---
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011, De Verkeersonderneming <[email protected]>
#
# This file is part of PyRITS - A tool for processing and analyzing transport
# management system data.
#
# PyRITS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyRITS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Calculate driving times and driving delays for database Erniesoft.
The following type codes are used in the driving times algorithm. The
description of each type code is described in the table below::
typecode | description
----------+---------------------------------------------------
0 | Basic Sign Of Life
1 | User Login
2 | User Logout
3 | Driver Switch
10 | Activity Started
11 | Activity Cancelled
12 | Activity Report
13 | Activity End
14 | Activity Join
15 | Activity Leave
16 | Activity Switch
17 | PTO alarm
18 | PTO start
19 | PTO end
20 | Task Received
21 | Task Accepted
22 | Task Refused
23 | Task Busy
24 | Task Cancelled
25 | Task Finished
30 | Outgoing Phone Started
31 | Incoming Phone Started
32 | Outgoing Phone Finished
33 | Incoming Phone Finished
40 | Navigation Started
41 | Navigation Cancelled
42 | Navigation ETA update
43 | Navigation Destination Reached
50 | Start of Speed Limit Violation
51 | Start of Peak RPM violation
52 | Start of Average RPM violation
53 | Acceleration Limit Violation
54 | Acceleration Limit Violation End
55 | Engine Idle Violation
56 | Enter Geofence
57 | Exit Geofence
58 | Panic Alert
59 | Maximum Activity Duration Exceeded
60 | End of Speed Limit Violation
61 | End of Peak RPM Lmit Violation
62 | End of Average RPM Limit Violation
65 | Update of Speed Limit Violation
66 | Update of Peak RPM Limit Violation
67 | Update of Average RPM Limit Violation
71 | Contact ON
72 | Contact OFF
73 | System Shutdown
75 | Violation of Engine Lock
76 | Motion Alert (Vehicle Moves while contact is off)
77 | Vehicle Immobilization
78 | Power Disconnect (Run on Battery)
79 | Battery Low
82 | Driving Times State Event
83 | Driving Times Driving Warning
84 | Driving Times Driving Violation
85 | Driving Times Total Driving Warning
90 | Temperature Sensor Violation
91 | Temperature Sensor Violation Reminder
92 | Coolant Temperature Violation
93 | Coolant Temperature Violation Reminder
97 | Wrong Trailer Tethered
98 | Trailer Tethered
99 | Trailer Untethered
200 | GPRS Status Info
"""
import sys
import os
import math
import datetime
import logging
import csv
import psycopg2.extras
import pyrits.config
import pyrits.std
import pyrits.HTML
import pyrits.erniesoft.query
__author__ = "Serrano Pereira"
__copyright__ = "Copyright 2011, De Verkeersonderneming"
__credits__ = ["Serrano Pereira <[email protected]>"]
__license__ = "GPL3"
__version__ = "0.1.2"
__maintainer__ = "Serrano Pereira"
__email__ = "[email protected]"
__status__ = "Production"
__date__ = "2011/11/24"
DEBUG = 0
class Standard(object):
    """Super class for :class:`DrivingDelays` and :class:`DrivingTimes`."""

    def get_vehicles_from_date_range(self, date_start=None, date_end=None):
        """Return all vehicle (terminal) codes that occur in a date range.

        When `date_start` is not given, codes from the entire
        ``hist_bcacties`` table are returned; otherwise only codes whose
        action timestamp falls between `date_start` and `date_end`.
        """
        cursor = self.connection.cursor()
        if not date_start:
            cursor.execute("""SELECT DISTINCT terminalcode
                FROM hist_bcacties
                WHERE terminalcode IS NOT NULL;""")
        else:
            # Use a parameterized query so the date values are escaped by
            # the driver instead of being interpolated into the SQL string
            # (the original string-formatting was injection-prone).
            cursor.execute("""SELECT DISTINCT terminalcode
                FROM hist_bcacties
                WHERE tijd BETWEEN %s AND %s
                AND terminalcode IS NOT NULL;""", (date_start, date_end))
        vehicle_codes = []
        for t in cursor:
            vehicle_codes.append(t[0])
        cursor.close()
        return vehicle_codes
class DrivingDelays(Standard):
    """Driving delays calculator.

    It calculates the clean driving times, the planned driving delays, and the
    realized driving delays.
    """

    def __init__(self, connection):
        # `connection` is expected to be an open psycopg2 connection.
        self.connection = connection
        self.location_combinations = {}
        self.reset()

    def reset(self):
        """Set variables with default values."""
        self.task_routes = {}           # task_id -> sorted [location_a, location_b]
        self.task_delays = {}           # task_id -> planned delay (seconds)
        self.clean_driving_times = {}   # task_id -> clean driving time (seconds)
        self.route_distances = {}       # task_id -> route distance (meters)
        self.static_tasks = []          # task ids for which the vehicle did not move

    def __set_static_tasks(self):
        """Set a variable with all static tasks id's.

        Static tasks are tasks for which the vehicle has not moved. The task
        id's for static tasks are saved to variable `self.static_tasks`. The
        static tasks are required for method :meth:`__set_clean_driving_times`.
        """
        cursor = self.connection.cursor()
        cursor.execute("SELECT task_id FROM driving_times WHERE static = 't';")
        for task_id in cursor:
            self.static_tasks.append(task_id[0])
        cursor.close()

    def __set_clean_driving_times(self, vehicle_code):
        """Calculate the clean driving times.

        In order to calculate the clean driving times, first the driving
        distance for each route is retrieved from the database. Then these
        distances and the average driving time for a given distance are used
        to calculate the clean driving time.

        If the clean driving time is already present in the database, it will
        not calculate the clean driving time.

        It will set the clean driving time for static tasks to 0.

        .. note::

            Usage of method :meth:`__set_clean_driving_times_from_modelit` is
            favored over this method. This method was removed from the default
            routines.
        """
        # First set the static tasks.
        self.__set_static_tasks()
        # This query lists the visited locations in the realized order. This
        # is used to determine the route for each task.
        cursor = self.connection.cursor()
        cursor.execute("""SELECT r.ritregelnr, r.locatiecode
            FROM tbl_ritregels AS r
            INNER JOIN hist_bcacties AS a ON a.elipsref = r.ritregelnr
            INNER JOIN task_to_vehicle as v ON v.task_id = r.ritregelnr
            WHERE v.vehicle_code='%s'
            GROUP BY r.ritregelnr,r.locatiecode
            ORDER BY MIN(a.tijd);""" % (vehicle_code))
        # Define location A and location B for each task.
        location_a = None
        for task_id,location_b in cursor:
            # Convert location names consisting of spaces to an empty string.
            if location_b:
                location_b = location_b.strip()
            # Add a location tuple to the tuples list if both locations are set.
            if bool(location_a) and bool(location_b):
                tuple = [location_a,location_b]
                # By sorting the tuple, retrieval from the database will be
                # faster if to be retrieved tuples are sorted as well.
                tuple.sort()
                # Save the tuple.
                self.task_routes[task_id] = tuple
            # For the next loop location A is the current location.
            if location_b:
                location_a = location_b
            else:
                location_a = None
        # Set the route distance for each task from the database.
        for task_id, route in self.task_routes.iteritems():
            cursor.execute(pyrits.erniesoft.query.sget('get_route_info',
                {':location_a': route[0],
                ':location_b': route[1]}
                ))
            tuple = cursor.fetchone()
            if tuple:
                driving_time, distance = tuple
                # Save clean driving time in seconds for each task.
                if driving_time != None:
                    self.clean_driving_times[task_id] = driving_time
                # Save route distance in meters for each task.
                if distance != None:
                    self.route_distances[task_id] = distance
        # Close database cursor.
        cursor.close()
        # Calculate the clean driving time for each task based on the route
        # distances.
        for task_id, distance in self.route_distances.iteritems():
            # Set the clean driving time to 0 if it's a static task.
            if task_id in self.static_tasks:
                self.clean_driving_times[task_id] = 0
                continue
            # Skip if the clean driving time is already known.
            if task_id in self.clean_driving_times:
                continue
            # Calculate the clean driving time from the distance.
            # Convert distance to kilometers (from meters).
            distance = distance / 1000.0
            # Calculate clean driving time in seconds (from hours).
            driving_time = pyrits.std.driving_time_from_distance(distance) * 3600.0
            # Save the clean driving time for each task to a variable.
            self.clean_driving_times[task_id] = driving_time

    def __set_clean_driving_times_from_modelit(self):
        """Sets clean driving times and planned delays using Modelit data.

        This method replaces methods :meth:`__set_clean_driving_times` and
        :meth:`calculate_planned_delays`.

        In the Erniesoft database, two values are calculated by Modelit:

        * freeflowmin
        * delaymin

        Sometimes freeflowmin is set to 0, while delaymin has a positive
        value. This means that delaymin could not be calculated, and in this
        case the value for freeflowmin is set in field delaymin.

        Sometimes the value for delaymin is negative. The cause of this is
        unknown and such values should be ignored.

        This methods uses these two values to set `clean_driving_time` and
        `planned_delay` in table driving_times.
        """
        cursor = self.connection.cursor()
        cursor2 = self.connection.cursor()
        # Set the static tasks.
        self.__set_static_tasks()
        # Get Modelit data from the database.
        cursor.execute("""SELECT r.ritregelnr, r.freeflowmin, r.delaymin
            FROM tbl_ritregels r
            WHERE r.freeflowmin IS NOT NULL
            -- It's unclear why some delays are negative, so skip these records.
            AND r.delaymin > 0;""")
        # Before saving clean driving times (minutes) and planned delays (minutes)
        # from Modelit to the driving_times table, some preprocessing needs
        # to be done.
        for task_id,freeflowmin,delaymin in cursor:
            # Sometimes freeflowmin is set to 0, while delaymin has a positive
            # value. This means that delaymin could not be calculated, and in
            # this case the value for freeflowmin is set in field delaymin.
            if freeflowmin == 0 and delaymin > 0:
                freeflowmin = delaymin
                # The literal string 'NULL' is deliberately interpolated into
                # the UPDATE below so the column is set to SQL NULL.
                delaymin = 'NULL'
            # Set the clean driving time to 0 if the vehicle hasn't moved.
            if task_id in self.static_tasks:
                freeflowmin = 0
            # Convert minutes to seconds.
            if isinstance(freeflowmin, float):
                freeflowmin *= 60
            if isinstance(delaymin, float):
                delaymin *= 60
            # Save clean driving times (seconds) and planned delays (seconds)
            # to table driving_times.
            cursor2.execute("""UPDATE driving_times
                SET (clean_driving_time, planned_delay) = (%s, %s)
                WHERE task_id = %d;""" % (freeflowmin, delaymin, task_id))
        self.connection.commit()

    def calculate_realized_delays(self, vehicle_code):
        """Calculate the realized driving delays.

        In order to calculate the realized delays, the clean driving times are
        defined as well and both are saved to the ``driving_times`` table in
        the database.

        Returns 0 on success, 1 when the vehicle is skipped.

        .. note::

            Usage of method :meth:`calculate_delays_using_modelit` is
            favored over this method. This method was removed from the default
            routines.
        """
        # Reset all values.
        self.reset()
        # Only vehicles with a code that start with a "W" have a board computer
        # on board. Other vehicles lack the required information and are skipped.
        if not vehicle_code.startswith("W"):
            logging.info("Vehicle with code %s is skipped; codes must start with 'W'" % vehicle_code)
            return 1
        # First define clean driving times which are required for calculating
        # the delays.
        logging.info("Calculating clean driving times for vehicle %s..." % vehicle_code)
        self.__set_clean_driving_times(vehicle_code)
        logging.info("Calculating realized delays for vehicle %s..." % vehicle_code)
        # Get database cursor.
        cursor = self.connection.cursor()
        cursor2 = self.connection.cursor()
        # Calculate the delay for each task and save the delay to the database.
        cursor.execute(pyrits.erniesoft.query.sget('driving_times_from_tasks',
            {':task_ids': str(self.task_routes.keys()).strip('[]')}))
        for task_id,real_time in cursor:
            # Get the clean driving time.
            if task_id not in self.clean_driving_times:
                continue
            clean_time = self.clean_driving_times[task_id]
            # Get the route distance.
            # NOTE(review): assumes every task with a clean driving time also
            # has a route distance; a KeyError is raised otherwise -- confirm.
            distance = self.route_distances[task_id]
            # Calculate the delay in seconds.
            delay = real_time - clean_time
            # If the realized delay is less than 3 minutes, set it to 0.
            if -180 < delay < 180: delay = 0
            # Save delay to database.
            cursor2.execute("""UPDATE driving_times
                SET (clean_driving_time,route_distance,delay) = (%d,%d,%d)
                WHERE task_id=%d;""" % (clean_time,distance,delay,task_id))
        # Commit the database transaction.
        self.connection.commit()
        # Close database cursor.
        cursor.close()
        cursor2.close()
        return 0

    def calculate_delays_using_modelit(self):
        """Calculate the realized driving delays.

        In order to calculate the realized delays, the clean driving times (and
        the planned delays) are first defined with method :meth:`__set_clean_driving_times_from_modelit`.
        Then the realized delays are calculated and saved to the
        ``driving_times`` table in the database.

        Returns 0 on success.
        """
        # Reset all values.
        self.reset()
        # First define clean driving times which are required for calculating
        # the delays.
        logging.info("Setting clean driving times and planned delays...")
        self.__set_clean_driving_times_from_modelit()
        logging.info("Calculating realized delays...")
        # Get database cursor.
        cursor = self.connection.cursor()
        cursor2 = self.connection.cursor()
        # Calculate the delay for each task and save the delay to the database.
        cursor.execute("""SELECT task_id, clean_driving_time, driving_time
            FROM driving_times
            WHERE clean_driving_time IS NOT NULL
            AND driving_time IS NOT NULL;""")
        for task_id, clean_driving_time, driving_time in cursor:
            # Calculate the realized delay in seconds.
            delay = driving_time - clean_driving_time
            # If the realized delay is less than 3 minutes, set it to 0.
            if -180 < delay < 180: delay = 0
            # Save delay to database.
            cursor2.execute("""UPDATE driving_times
                SET (delay) = (%d)
                WHERE task_id = %d;""" % (delay,task_id))
        # Commit the database transaction.
        self.connection.commit()
        # Close database cursor.
        cursor.close()
        cursor2.close()
        # Finally, populate table driving_times_rides.
        logging.info("Setting driving times and delays per ride...")
        self.set_driving_times_rides()
        return 0

    def set_driving_times_rides(self):
        """Populate table :ref:`driving_times_rides <db-erniesoft-driving_times_rides>`.

        Aggregates the per-task rows of ``driving_times`` to one row per ride.
        """
        cursor = self.connection.cursor()
        # Purge the table.
        cursor.execute("DELETE FROM driving_times_rides;")
        self.connection.commit()
        # Populate the table.
        query = """INSERT INTO driving_times_rides
            SELECT ride_id,
            MIN(task_start),
            MAX(task_end),
            SUM(clean_driving_time),
            SUM(driving_time),
            SUM(planned_delay),
            SUM(delay),
            BOOL_OR(overnight),
            BOOL_OR(static)
            FROM driving_times
            GROUP BY ride_id;"""
        cursor.execute(query)
        self.connection.commit()
        cursor.close()

    def calculate_planned_delays(self):
        """Calculate the planned driving delays.

        A task can be visualized as follows::

                       DR              DL       TT
            |-------------------------|------|----------|
            ETA                                         ETA
            Previous task                               Current task

            DR  = Driving time without delay*
            DL  = Planned delay
            TT  = Terminal time; estimated time for loading/unloading/etc.*
            ETA = Estimated Time of Arrival; these mark the estimated beginning
                  and end of a task.*

            * Value is obtained from the database.

        The formula used for calculating the planned driving delay is::

            DL = ETA(curr. task) - ETA(prev. task) - TT - DR

        The clean driving times that are required for this method are
        obtained from the database. Hence it is required to run
        :meth:`calculate_realized_delays` before running this method as that
        method saves the clean driving times to the database.

        .. note::

            This method is no longer part of the default routine because method
            :meth:`__set_clean_driving_times_from_modelit` saves both the
            clean driving times and the planned delays to the database.
        """
        # Reset all values.
        self.reset()
        logging.info("Calculating planned delays...")
        # Get database cursor.
        cursor = self.connection.cursor()
        # Get ride and order information, sorted by ride so that we can
        # perform calculations per ride. We need to list each task, hence the
        # "LEFT OUTER" joins.
        # Marco said that we could just sort by [ritlink,ETA], but this is not
        # reliable as the ETA's sometimes contain errors which would result in
        # wrongly ordered tasks and thus wrongly calculated planned delays.
        # 'r.volgorde' denotes the planned order, and that is what the ETA's
        # are based on.
        cursor.execute("""SELECT r.ritlink,r.ritregelnr,r.actiecode,r.locatiecode,r.eta,o.opdrachttype,c.defaultlengteuurexport,c.defaultlengteuurimport,t.clean_driving_time
            FROM tbl_ritregels AS r
            LEFT OUTER JOIN tbl_orders AS o ON r.orderlink = o.ordernr
            LEFT OUTER JOIN stm_actiecodes AS c ON c.actiecode = r.actiecode
            LEFT OUTER JOIN driving_times AS t ON t.task_id = r.ritregelnr
            ORDER BY r.ritlink, r.volgorde;""")
        # Calculate the planned delay for each ride.
        eta_prev = None
        previous_ride = None
        previous_task_str = None
        previous_location = None
        for ride_id,task_id,task_str,location,eta_current,task_type,term_time_export, \
                term_time_import,clean_driving_time in cursor:
            # Check if we entered a new ride. If so, do not use the last ETA.
            if previous_ride != ride_id:
                eta_prev = None
            # Check for required values and calculate the delay.
            if eta_prev and eta_current and clean_driving_time and task_type in ("EXPORT","IMPORT"):
                # Sometimes, the current date precedes the previous date
                # because of incorrectly defined ETA's. Handle this by either
                # skipping the task or raising an error.
                if eta_prev > eta_current:
                    #raise ValueError("The previous ETA (%s) does not precede the current ETA (%s) for task #%d." % (eta_prev,eta_current,task_id))
                    logging.info("Task #%d is skipped because the previous ETA does not precede the current ETA." % task_id)
                    # Continue with the next task.
                    eta_prev = eta_current
                    previous_ride = ride_id
                    previous_task_str = task_str
                    previous_location = location
                    continue
                # Calculate the time difference in seconds between the two ETA's.
                eta_delta = pyrits.std.total_seconds(eta_current - eta_prev)
                # There are some situations that we need to correct for if both
                # ETA's are the same.
                if eta_delta == 0:
                    # If the ETA's are the same, this will for sure result in
                    # a negative delay. So just set the delay to 0.
                    self.task_delays[task_id] = 0
                    # Continue with the next task.
                    eta_prev = eta_current
                    previous_ride = ride_id
                    previous_task_str = task_str
                    previous_location = location
                    continue
                # Calculate the planned terminal time in seconds.
                if task_type == "EXPORT":
                    # Convert hours to seconds
                    terminal_time = term_time_export * 3600.0
                elif task_type == "IMPORT":
                    # Convert hours to seconds
                    terminal_time = term_time_import * 3600.0
                # Calculate planned delay.
                delay = eta_delta - terminal_time - clean_driving_time
                # Debugging
                if DEBUG and task_id == 125229:
                    print "%s - %s - %s - %s = %s" % (eta_current, eta_prev, terminal_time, clean_driving_time, delay)
                    print "%s - %s - %s = %s" % (eta_delta/3600.0, terminal_time/3600.0, clean_driving_time/3600.0, delay/3600.0)
                # Save the delay.
                self.task_delays[task_id] = delay
            # Set variables for the next loop.
            eta_prev = eta_current
            previous_ride = ride_id
            previous_task_str = task_str
            previous_location = location
        # Calculate the planned delay for each task and save the delay to the
        # database.
        for task_id,delay in self.task_delays.iteritems():
            # Save delay to database.
            cursor.execute("UPDATE driving_times SET (planned_delay) = (%d) WHERE task_id=%d;" % (delay,task_id))
        # Commit the database transaction.
        self.connection.commit()
        # Close database cursor.
        cursor.close()
        return 0
class DrivingTimes(Standard):
"""realized driving times calculator."""
def __init__(self, connection):
self.connection = connection
self.datetime_format = "%Y-%m-%d %H:%M:%S"
self.field_names = ('ritregelnr','terminalcode','type','tijd','speed',
'kmstand','startactie','actie','actiecode','sessionseq')
self.reset()
def reset(self):
"""Set variables with default values."""
self.in_queue = False
self.data = None
self.tasks = []
self.task_tuples = []
self.driving_tuples = []
self.driving_tuples_queue = []
self.driving_tuple = [None,None]
self.driving_times = {}
self.overnight_tasks = []
self.arrivals = []
self.static_tasks = []
def mark_value(self, value):
"""Returns a marked version of a variable.
Marked variables are colored yellow in the HTML output.
"""
return "::"+str(value)
def unmark_value(self, value):
"""Returns the unmarked version of a marked variable."""
if not isinstance(value, str) or not value.startswith('::'):
return value
value = value[2:]
if len(value) == 19:
try:
value = datetime.datetime.strptime(value, self.datetime_format)
except:
pass
return value
def purge(self):
"""Purge the driving times table."""
cursor = self.connection.cursor()
cursor.execute("DELETE FROM driving_times;")
self.connection.commit()
cursor.close()
    def calculate_vehicle(self, vehicle_code, date_start=None, date_end=None):
        """Run all methods necessary for calculating the driving times.

        Calculates the realized driving times for a specific vehicle. If the
        date range is also set, it will only calculate the driving times for
        that date range.

        Returns 0 on success, 1 when there is nothing to calculate (vehicle
        skipped, no actions, or no tasks).
        """
        # Reset all values.
        self.reset()
        # Create database cursor.
        cursor = self.connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        # Only vehicles with a code that start with a "W" have a board computer
        # on board. Other vehicles lack the required information and should be
        # skipped.
        if not vehicle_code.startswith("W"):
            logging.info("Vehicle with code %s is skipped; codes must start with 'W'" % vehicle_code)
            return 1
        if not (date_start and date_end):
            logging.info("Vehicle: %s" % (vehicle_code))
        else:
            logging.info("Date range: %s to %s; Vehicle: %s" % (date_start, date_end, vehicle_code))
        logging.info("Obtaining data...")
        # Select the right query and get all ride actions from the database.
        if not (date_start and date_end):
            query = pyrits.erniesoft.query.sget('actions_from_vehicle',
                {':vehicle_code': vehicle_code})
        else:
            query = pyrits.erniesoft.query.sget('actions_from_date_range',
                {':date_start': date_start,
                ':date_end': date_end,
                ':vehicle_code': vehicle_code})
        cursor.execute(query)
        logging.info("Processing data...")
        # Load all data into memory.
        self.data = pyrits.std.DataDict(cursor, self.field_names)
        cursor.close()
        # Check if any traces were found. If not, abort.
        if len(self.data) == 0:
            logging.info("No ride actions found. Nothing to do.")
            return 1
        logging.info("Calculating driving times...")
        # Define tasks.
        self.set_tasks(self.data)
        # Check if any tasks were found. Sometimes we do find actions, but
        # they are not linked to any task.
        if len(self.tasks) == 0:
            logging.info("No tasks found. Nothing to do.")
            return 1
        # Set the driving tuples.
        self.set_driving_tuples(self.data)
        # Set task tuples.
        self.set_task_tuples(self.data, self.tasks)
        # Calculate driving time for each task.
        self.set_driving_times(self.data, self.driving_tuples, self.task_tuples)
        # Set tasks for which the vehicle has not moved.
        self.set_static_tasks(self.arrivals, self.task_tuples, self.data)
        # Successful termination.
        return 0
    def calculate_ride(self, ride_number):
        """Run all methods necessary for calculating the driving times.

        Calculates the realized driving times for a specific ride ID.
        Returns 0 on success, 1 when there is nothing to calculate.
        """
        # Reset all values.
        self.reset()
        logging.info("Calculating driving times for ride #%s" % ride_number)
        logging.info("Accessing database...")
        # Connect to the database and execute the query.
        cursor = self.connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(pyrits.erniesoft.query.sget('actions_from_ride', {':ridenum': ride_number}))
        logging.info("Processing data...")
        # Load all drive data into memory.
        self.data = pyrits.std.DataDict(cursor, self.field_names)
        cursor.close()
        # Check if any actions were found. If not, abort.
        if len(self.data) == 0:
            logging.info("No actions for this ride number. Nothing to do.")
            return 1
        logging.info("Calculating driving times...")
        # Define tasks.
        self.set_tasks(self.data)
        # Check if any tasks were found. Sometimes we do find actions, but
        # they are not linked to any task.
        if len(self.tasks) == 0:
            logging.info("No tasks found. Nothing to do.")
            return 1
        # Set the driving tuples.
        self.set_driving_tuples(self.data)
        # Set task tuples.
        self.set_task_tuples(self.data, self.tasks)
        # Calculate driving times.
        self.set_driving_times(self.data, self.driving_tuples, self.task_tuples)
        # Set tasks for which the vehicle has not moved.
        self.set_static_tasks(self.arrivals, self.task_tuples, self.data)
        # Successful termination.
        return 0
    def save_to_database(self):
        """Save the driving times to the database.

        Previously calculated driving times will be saved to a table
        ``driving_times`` in the database.

        NOTE(review): relies on `self.task_times` (set by set_task_tuples) and
        `self.get_tasks` (defined elsewhere in this class) -- confirm both are
        populated before calling this method.
        """
        cursor = self.connection.cursor()
        cursor2 = self.connection.cursor()
        logging.info("Saving driving times to database...")
        for task_id, task_name in self.get_tasks(unique=1):
            # Check if this task has been overnight.
            overnight = 'f'
            if task_id in self.overnight_tasks:
                overnight = 't'
            # Check if this is a static task.
            static = 'f'
            if task_id in self.static_tasks:
                static = 't'
            # Get the ride number for this task.
            query = "SELECT ritlink FROM tbl_ritregels WHERE ritregelnr = %d;" % (task_id)
            cursor2.execute(query)
            ride_id = cursor2.fetchone()
            # Construct insert query.
            columns = {
                'task_start': self.task_times[task_id][0],
                'task_end': self.task_times[task_id][1],
                'driving_time': self.driving_times[task_id],
                'overnight': overnight,
                'static': static,
            }
            query = "INSERT INTO driving_times (task_id,ride_id,%%s) VALUES (%d,%d,%%s);" % (task_id, ride_id[0])
            query = pyrits.erniesoft.query.construct_insert_query(query, columns)
            # Insert the driving time into the database. Note that each task
            # should have a corresponding driving time in self.driving_times.
            # If this is not the case, the tasks are computed wrongly.
            #try:
            cursor.execute(query)
            #except:
            #    raise KeyError(task_id)
        # Commit the transaction.
        self.connection.commit()
        cursor.close()
        cursor2.close()
    def output_html(self, filename, open_file=0):
        """Save the realized driving times to a HTML file `filename`.

        If `open_file` is set to 1, the HTML file is automatically opened in
        the default web browser.
        """
        logging.info("Writing results to %s" % filename)
        # Process data for output.
        # NOTE(review): mark_driving_times is defined elsewhere in this class;
        # it is expected to return the action rows with marker annotations.
        marked_data = self.mark_driving_times(self.data, self.driving_tuples, self.task_tuples, self.tasks)
        formatted_driving_times = pyrits.std.driving_times_formatted(self.tasks, self.driving_times)
        # Open output file for writing.
        f = open(filename, 'w')
        # Write HTML header.
        f_header = open(os.path.join(pyrits.std.module_path(), 'include/header.html'), 'r')
        f.writelines(f_header)
        f_header.close()
        # Write driving times to output file.
        html = pyrits.HTML.Table(formatted_driving_times, header_row = ('Taak ID','Taak','Reistijd (s)','Reistijd (h:m:s)'))
        f.writelines(html)
        # Write marked data to output file.
        field_names = list(self.field_names)
        field_names.append('taak')
        html = pyrits.HTML.Table(marked_data, header_row = field_names)
        f.writelines(html)
        # Write HTML footer.
        f_footer = open(os.path.join(pyrits.std.module_path(), 'include/footer.html'), 'r')
        f.writelines(f_footer)
        f_footer.close()
        # Close output file.
        f.close()
        # Open the HTML file in a browser.
        if open_file:
            logging.info("Opening results in web browser.")
            pyrits.std.open_in_browser(filename)
# -----------------------------------
# Setters
def set_driving_tuples(self, data):
"""Define the start and end positions of actual driving.
Driving tuples will be calculated and saved to a list
`self.driving_tuples`. A driving tuple is a list of two integers.
The two integers represent the start and end position respectively.
The integers indicate row numbers of the actions list `data`.
"""
contact_off = 0
parking = 0
washing = 0
resting = 0
departed = None
arrived = None
moved = 0
driving = 0
driving_state = None
last_km_position = None
last_driving_time = None
drive_state = None
finished_tasks = []
# Trace types to be ignored (as indicated by T. Verduijn).
ignored_types = (82,56,42,57)
for i,row in enumerate(data):
# Reset values.
skip_current = 0
# First we set some variables which can tell us whether the
# vehicle is driving or not.
# Are we driving right now? If were are not driving, this is set
# to None.
is_driving = self.driving_tuple[0]
# Find out if the vehicle has moved.
if last_km_position:
if row['kmstand'] > last_km_position:
moved = 1
else:
moved = 0
# Sometimes, a started "PA" action is never ended. To account for
# this, if parking, and we come across an action which is neither
# "UN" or "PA", then unset parking.
if parking:
if row['actie'] not in ("", "UN", "PA"):
parking = 0
# Find out if the vehicle has arrived.
if row['actie'] == "AankAdres":<|fim▁hole|> if row['type'] == 10:
# From this point, put driving time tuples in the queue
# because we don't know if the driver is going to cancel
# this activity.
self.in_queue = True
# Arrived at destination.
elif row['type'] == 13:
# Arrived at destination.
arrived = 1
# Keep track of arrival actions. This is needed later by
# method set_static_tasks().
self.arrivals.append(i)
# The driver didn't cancel the activity, so the driving
# times in the queue can be cleared.
self.driving_tuples_queue = []
# Reset values.
departed = 0
self.in_queue = False
if is_driving:
# If driving, reset driving tuple. Because
# self.in_queue is set to False now, current driving
# times will be saved anyway. This is to prevent that.
self.driving_tuple = [None,None]
is_driving = False
# Arrival cancelled.
elif row['type'] == 11:
# The driver didn't arrive after all. So put the driving
# times from the queue in the actual list.
self.driving_tuples.extend(self.driving_tuples_queue)
# Reset values.
self.driving_tuples_queue = []
self.in_queue = False
# Find out if the vehicle has departed.
elif row['actie'] == "VertrAdres":
# Departure has started.
if row['type'] == 13:
if row['ritregelnr'] not in finished_tasks:
# We've offcially departed.
departed = 1
# We need to keep track of finished tasks because
# sometimes the driver tells the board computer that
# he departed more than once.
finished_tasks.append(row['ritregelnr'])
# Reset values.
arrived = 0
driving_state = None
# Find out if the driver is using the driving button.
elif row['actie'] == "DR":
# The driver seems to use the drive button. Whenever
# driving_state is not set to None, the driving_state has more
# control of deciding when the vehicle is driving.
if row['type'] == 10:
driving_state = 1
elif row['type'] in (13,11):
driving_state = 0
# Find out if the vehicle is parking.
elif row['actie'] == "PA":
if row['type'] == 10:
parking = 1
elif row['type'] in (13,11):
parking = 0
# Washing the vehicle?
elif row['actie'] == "Wassen":
if row['type'] == 10:
washing = 1
elif row['type'] in (13,11):
washing = 0
# Find out if the driver has decided to take a nap.
elif row['actie'] == "NachtRust":
if row['type'] == 10:
resting = 1
elif row['type'] in (13,11):
resting = 0
# "Action" is set to something else.
else:
# Find out if the contact is off.
if row['type'] == 72:
contact_off = 1
elif row['type'] == 71:
contact_off = 0
# The contact was just turned on, so do not log any
# movements for this round.
skip_current = 1
# Decide when the vehicle is driving based on the set variables.
if skip_current:
pass
elif is_driving and row['type'] in ignored_types:
# Ignore these types when already driving.
pass
elif not contact_off and not parking and not resting and not washing:
# The vehicle may be driving.
if departed or departed == None:
# The vehicle has departed. It's now clear to decide if the
# vehicle is driving.
if driving_state:
# If driving_state is not set to None (the driver has
# used the driving button), give this variable more
# control over the driving boolean.
self.set_driving_tuple(1, i)
elif driving_state == 0:
# Check if the driver marked the end of driving.
self.set_driving_tuple(0, i)
driving_state = None
# The driver is not using the driving button. So decide
# when the vehicle is moving based on other variables.
elif moved:
# The vehicle has moved. It must be driving.
self.set_driving_tuple(1, i, offset=-1)
# Set the last driving time.
last_driving_time = row['tijd']
else:
# The vehicle hasn't moved. It's probably not driving.
self.set_driving_tuple(0, i, offset=-1)
elif not departed:
# Don't ever set to driving when the vehicle has not even
# departed.
self.set_driving_tuple(0, i, offset=-1)
else:
# Either the contact is off, it's parking, or the driver is
# resting. So it can't be driving.
self.set_driving_tuple(0, i, offset=-1)
# What's the current position of the kilometer counter?
last_km_position = row['kmstand']
    def set_driving_tuple(self, driving, index, offset=0):
        """Construct and save a single driving tuple.

        A driving tuple is a pair [start_index, end_index] of indexes into
        the main data object.  ``driving=1`` opens a tuple (the start index
        is recorded only once); ``driving=0`` closes the currently open
        tuple, appends it to either the queue or the main list (depending on
        ``self.in_queue``), and resets the working tuple.

        Raises ValueError when asked to close a tuple whose end index was
        already set (should not happen in normal operation).
        """
        if driving:
            # Record the start only once; repeated "driving" calls are no-ops.
            if self.driving_tuple[0] == None:
                self.driving_tuple[0] = index+offset
        else:
            # Nothing to close when no tuple was ever opened.
            if self.driving_tuple[0] == None:
                return
            if self.driving_tuple[1] == None:
                self.driving_tuple[1] = index+offset
                if self.in_queue:
                    self.driving_tuples_queue.append(self.driving_tuple)
                else:
                    self.driving_tuples.append(self.driving_tuple)
                # Reset the working tuple for the next driving interval.
                self.driving_tuple = [None,None]
            else:
                raise ValueError
def set_tasks(self, data):
"""Returns the tasks list (actual order).
This method uses the order as defined in the "actions" table. The
order defined in this table is how the tasks were actually started.
Returns list of tuples (task_no,task_name).
"""
self.tasks = []
last_task = None
for row in data:
if row['ritregelnr'] and row['ritregelnr'] != last_task:
last_task = row['ritregelnr']
tuple = (row['ritregelnr'],row['actiecode'])
self.tasks.append(tuple)
    def set_task_tuples(self, data, tasks):
        """Set the task tuples which define start and end of a task.

        Each tuple is a list [task_id, task_start_id, task_end_id]. The start
        and end id's are indexes of the main data object `data`.

        The end of the current task and the start of the next task is defined
        as the departure from an address.

        This method also keeps a list `self.overnight_tasks` to which all task
        numbers marked with an overnight are saved. By default, a task is
        marked 'overnight' if the time between contact on and contact off has
        been longer than five hours (setting 'time-diff-for-overnight').
        """
        # List which will hold all the task tuples.
        self.task_tuples = []
        # Dictionary which will hold the start and end time for each task.
        self.task_times = {}
        # List which will hold all the task for which an overnight occurred.
        self.overnight_tasks = []
        # Keep track of the last handled task.
        last_finished_task = None
        # The index of the current task in tasks list.
        current_task_index = 0
        # Set the first task to the first task ID (ritregelnr). Task ID's
        # are always linked to a single task and are unique within a ride.
        current_task = tasks[current_task_index][0]
        # Assume that every drive starts with the first task.
        task_tuple = [current_task,0,None]
        # Set the start time for the first task.
        self.task_times[current_task] = [data[0]['tijd'], None]
        # Get total number of tasks.
        n_tasks = len(tasks)
        # Last time that the contact was turned off. Required for defining
        # overnight tasks.
        last_contact_off = None
        i = None
        for i, row in enumerate(data):
            # Find out if the vehicle has departed.
            if self.has_departed(row, tasks, n_tasks, current_task_index, last_finished_task):
                # We've officially ended a task.
                # We need to keep track of the last finished task because
                # sometimes the driver tells the board computer that
                # he left the address twice or more in a row.
                last_finished_task = current_task
                # Set the end time for this task.
                if current_task in self.task_times:
                    self.task_times[current_task][1] = row['tijd']
                else:
                    # In the rare situation where a tasks is ended, but was
                    # never started, only set the end time.
                    self.task_times[current_task] = [None, row['tijd']]
                # Set the current task to the next task, if any.
                if current_task_index+1 < n_tasks:
                    # Set to next task.
                    current_task_index += 1
                    current_task = tasks[current_task_index][0]
                    # Set the start time for the next task.
                    # NOTE(review): data[i+1] assumes a trace follows the
                    # departure row; if departure is the very last trace this
                    # raises IndexError -- confirm upstream guarantees.
                    if current_task not in self.task_times:
                        self.task_times[current_task] = [data[i+1]['tijd'], None]
                else:
                    # There is no follow up task in tasks, so set to
                    # undefined.
                    current_task = None
                # The previous task has ended, so set the end row for
                # the task tuple and add the tuple to the task tuples.
                task_tuple[2] = i
                self.task_tuples.append(task_tuple)
                # As the last task has ended, a new task is started.
                task_tuple = [current_task,i+1,None]
            # We are inside a task.
            if current_task:
                # Keep track of tasks for which there has been an overnight.
                if row['type'] == 72:
                    last_contact_off = row['tijd']
                elif row['type'] == 71:
                    if last_contact_off:
                        # Decide whether there has been an overnight (the
                        # time difference between contact on and contact
                        # off is > 5 hours).
                        time_delta = row['tijd'] - last_contact_off
                        if pyrits.std.total_seconds(time_delta) > pyrits.config.cfg.get('time-diff-for-overnight'):
                            self.overnight_tasks.append(current_task)
                        # Reset values.
                        last_contact_off = None
        # NOTE(review): `if i:` is False both when `data` is empty (i is None)
        # and when the loop ended at index 0 -- in the latter case the final
        # tuple is silently dropped; confirm this is intentional.
        if i:
            # The very last task has ended, so set the end row for the task tuple.
            task_tuple[2] = i
            self.task_tuples.append(task_tuple)
def has_departed(self, row, tasks, len_tasks, current_task_index, last_finished_task):
"""Returns True if the vehicle has departed from a location.
Departure marks the end of the current task and the beginning of the
next task.
"""
# Check if this trace is set as departure.
if row['actie'] == "VertrAdres" and row['type'] == 13 and row['ritregelnr'] != last_finished_task:
return True
# If multiple rides are analyzed, it is possible that a new tasks
# is entered, without leaving an address. In this case, end the current
# task.
elif row['ritregelnr'] and current_task_index+1 < len_tasks and row['ritregelnr'] == tasks[current_task_index+1][0]:
return True
return False
    def set_static_tasks(self, arrivals, task_tuples, data):
        """Set tasks for which the vehicle has not moved between departure and
        arrival.

        Sometimes the vehicle has not even moved before arrival at destination.
        For these tasks we want an expected driving time of 0.

        If the kilometer counter position at departure is the same as for
        arrival, then we assume the vehicle has not moved. Departure is defined
        as the start of a task obtained from `task_tuples` set by
        :meth:`set_task_tuples`. Arrival of each task `arrivals` was set by
        :meth:`set_driving_tuples` in variable `self.arrivals`.

        Matching task ids are appended to ``self.static_tasks``.
        """
        # Check each arrival id.
        for arrival_id in arrivals:
            # First get the departure and task id to which this arrival id
            # belongs.
            departure_id = None
            for task,start,end in task_tuples:
                if start <= arrival_id <= end:
                    task_id = task
                    departure_id = start
                    break
            # Continue with the next arrival id if no task id was found.
            if departure_id == None:
                continue
            # If we have a departure id, we can get the kilometer counter
            # position of both departure and arrival.
            km_departure = data[departure_id]['kmstand']
            km_arrival = data[arrival_id]['kmstand']
            # Now determine if the vehicle has moved. If not, add it to our
            # list of tasks for which the vehicle has not moved.
            if km_departure == km_arrival:
                self.static_tasks.append(task_id)
    def set_driving_times(self, data, driving_tuples, task_tuples):
        """Set the driving time for each task.

        Fills ``self.driving_times``: a dict mapping task id -> total driving
        time in seconds, summed over all driving tuples that fall inside the
        task's span.
        """
        self.driving_times = {}
        # Set driving time for all tasks (that are present in task_tuples) to 0.
        # The first item in each tuple is the task number/ritregelnr.
        # Note that we do not use self.tasks for this, but this shouldn't
        # matter because if the tasks tuples were calculated correctly, all
        # tasks should be present in task_tuples as well.
        for t in task_tuples:
            self.driving_times[t[0]] = 0
        # Calculate total driving time (in seconds) for all tasks.
        for t in driving_tuples:
            # Get the current task.
            task = self.get_taskno_from_driving_tuple(t, task_tuples)
            # If there is no task for this driving tuple, skip it.
            if not task:
                continue
            # Get the duration for the current driving time.
            time_delta = data[t[1]]['tijd'] - data[t[0]]['tijd']
            # Set driving time (total_seconds comes from the pyrits package).
            self.driving_times[task] += pyrits.std.total_seconds(time_delta)
# -----------------------------------
# Getters
    def get_data(self):
        """Return the main data object (list of trace rows)."""
        return self.data
    def get_driving_times(self):
        """Return the dict mapping task id -> total driving time in seconds.

        (Built by :meth:`set_driving_times`; it is a dict, not a list.)
        """
        return self.driving_times
    def get_driving_tuples(self):
        """Return the list of all [start_index, end_index] driving tuples."""
        return self.driving_tuples
def get_tasks(self, unique=0):
"""Returns a list of all task id's.
If `unique` is set to True, a task id's is only returned once.
"""
if not unique:
return self.tasks
else:
seen = []
result = []
for tuple in self.tasks:
if tuple[0] in seen: continue
seen.append(tuple[0])
result.append(tuple)
return result
    def get_task_tuples(self):
        """Return the list of [task_id, start_index, end_index] task tuples."""
        return self.task_tuples
def get_driving_indexes(self, driving_tuples):
"""Return a list of indexes where the vehicle was driving.
Indexes are indexes of the `self.data` object.
"""
indexes = []
for start,end in driving_tuples:
for x in range(start, end+1):
indexes.append(x)
return indexes
def get_taskno_from_driving_tuple(self, driving_tuple, task_tuples):
"""Return the task id that belongs to a driving tuple.
Note that all id's used here are indexes of the main data object.
"""
for task,start,end in task_tuples:
if driving_tuple[0] >= start and driving_tuple[1] <= end:
return task
return None
def get_taskno_from_arrival_id(self, arrival_id, task_tuples):
"""Return the task id that belongs to an arrival id.
Note that all id's used here are indexes of the main data object.
"""
for task,start,end in task_tuples:
if start <= arrival_id <= end:
return task
return None
def get_taskstr_from_taskno(self, tasks, n):
"""Return task name from a task number.
The task number must be a ride rule number.
"""
for task_id,name in tasks:
if task_id == n:
return name
return None
# -----------------------------------
# Generators
    def mark_driving_times(self, data, driving_tuples, task_tuples, tasks):
        """Marks certain items in the main data object `data`.

        Some items need to be colored in the HTML object and are thus marked
        for coloring by this method.  Yields each (possibly modified) row,
        with a 'taak' key added carrying "<name>_<task_no>".
        """
        driving_indexes = self.get_driving_indexes(driving_tuples)
        for i,row in enumerate(data):
            # Get the current task.
            # NOTE(review): if no task tuple covers index i, `current_task`
            # keeps its previous value (or is unbound on the first row) --
            # confirm task_tuples always covers every row.
            for task_id, start, end in task_tuples:
                if start <= i <= end:
                    current_task = task_id
                    break
            # Decide which cells to color.
            # NOTE: iteritems() is Python 2; this module targets Python 2.
            for key, value in row.iteritems():
                # Color timestamps of rows where the vehicle was driving.
                if key == 'tijd' and i in driving_indexes:
                    row[key] = self.mark_value(value)
                # Color arrival/departure actions.
                if key == 'actie' and value in ('AankAdres','VertrAdres'):
                    row[key] = self.mark_value(value)
            # Add the current task to the row.
            row['taak'] = "%s_%s" % (self.get_taskstr_from_taskno(tasks, current_task), current_task)
            # Color the task if it's an overnight task.
            if current_task in self.overnight_tasks:
                row['taak'] = self.mark_value(row['taak'])
            yield row
class Preprocess(object):
    """Perform some precalculations on the database.

    These precalculations must be performed before calculating driving times,
    delays, etc.
    """
    def __init__(self, connection):
        # Open DB-API connection shared by all preprocessing steps.
        self.connection = connection

    def start(self):
        """Run all preprocessing steps in order."""
        self.task_to_vehicle()
        self.task_to_route()
        logging.info("Done")

    def task_to_vehicle(self):
        """Fill the table `task_to_vehicle`.

        Table `task_to_vehicle` is required for calculating the driving times.
        Sometimes more vehicles are found for a single task in the activities
        table. Table `task_to_vehicle` is created to define the vehicle actually
        used for each task.

        Warning: After execution of this function there may still be tasks
        missing from the `task_to_vehicle` table because some `hist_bcacties`
        records lack a vehicle code (this is an inconsistency in the database).
        """
        logging.info("Defining vehicle for each task...")
        cursor = self.connection.cursor()
        cursor2 = self.connection.cursor()
        # First purge the table.
        cursor.execute("DELETE FROM task_to_vehicle;")
        self.connection.commit()
        # First set one vehicle code per task (doesn't matter which one, but
        # this is so tasks that are excluded from the next query have a
        # vehicle code set as well). This also means that tasks for
        # which no task was finished, the vehicle code is set anyway. This
        # is probably not desired as this might lead to incorrect driving times
        # for such tasks. Is there a better way to do this?
        cursor.execute("""INSERT INTO task_to_vehicle (task_id,vehicle_code)
            SELECT elipsref, MAX(terminalcode)
            FROM hist_bcacties
            WHERE elipsref IS NOT NULL
                AND terminalcode IS NOT NULL
            GROUP BY elipsref;""")
        self.connection.commit()
        # Finally update the vehicle code for each task returned by the
        # following query. This query only shows vehicles for which a task
        # was actually finished (typecode=13).
        # NOTE: Notice the MAX() in that query. This means that if we still
        # find more than one vehicle code for a task (this is not supposed to
        # happen), we pick the last one just so the program doesn't crash.
        cursor.execute("""SELECT r.ritregelnr, MAX(a.terminalcode)
            FROM hist_bcacties a
                INNER JOIN tbl_ritregels r ON (a.elipsref = r.ritregelnr)
            WHERE a.typecode=13
                AND a.actie = r.actiecode
                AND a.terminalcode IS NOT NULL
            GROUP BY r.ritregelnr;""")
        for task, vehicle in cursor:
            # NOTE(review): string-formatted SQL; parameterized queries would
            # be safer even though the values originate from the DB itself.
            cursor2.execute("UPDATE task_to_vehicle SET (vehicle_code) = ('%s') WHERE task_id=%d;" % (vehicle,task))
        self.connection.commit()
        # Warning: At this point, there may still be tasks missing from the
        # task_to_vehicle table because some `hist_bcacties` records lack
        # a vehicle code (this is an inconsistency in the database).
        cursor2.close()
        cursor.close()

    def task_to_route(self):
        """Fill the table `task_to_route`.

        Table `task_to_route` is used to easily obtain the from and to location
        for a task.  Routes are built by walking all tasks per vehicle in
        chronological order and pairing each task's location with the previous
        one.
        """
        logging.info("Defining route for each task...")
        cursor = self.connection.cursor()
        cursor2 = self.connection.cursor()
        # First purge the table.
        cursor.execute("DELETE FROM task_to_route;")
        self.connection.commit()
        # Define routes.
        cursor.execute("""SELECT r.ritlink,
                r.ritregelnr,
                r.locatiecode,
                r.locatiepostcode,
                r.locatieplaats,
                r.locatieland,
                v.vehicle_code
            FROM tbl_ritregels r
                INNER JOIN hist_bcacties a ON a.elipsref = r.ritregelnr
                INNER JOIN task_to_vehicle v ON v.task_id = r.ritregelnr
            GROUP BY r.ritlink,r.ritregelnr,r.locatiepostcode,v.vehicle_code
            ORDER BY v.vehicle_code, MIN(a.tijd);""")
        previous_vehicle = None
        from_task = None
        from_code = None
        from_postcode = None
        from_city = None
        from_country = None
        for ride_id,task_id,to_code,to_postcode,to_city,to_country,vehicle in cursor:
            # Skip unsupported vehicles.
            if not vehicle.startswith("W"):
                previous_vehicle = None
                from_task = None
                from_code = None
                from_postcode = None
                from_city = None
                from_country = None
                continue
            # If a new vehicle is encountered, do not count this as a route.
            # Reset "from" location so that the "to" location is saved for the
            # next loop.
            if previous_vehicle != vehicle:
                from_postcode = None
            # Convert post codes consisting of spaces to an empty string.
            if to_postcode:
                to_postcode = to_postcode.strip()
            # Save route to the database.
            if from_postcode and to_postcode:
                # Bug fix: the 'from_country' key appeared twice here; the
                # duplicate (identical) entry has been removed.
                columns = {'from_task': from_task,
                    'from_code': from_code,
                    'from_postcode': from_postcode,
                    'from_city': from_city,
                    'from_country': from_country,
                    'to_code': to_code,
                    'to_postcode': to_postcode,
                    'to_city': to_city,
                    'to_country': to_country
                    }
                query = "INSERT INTO task_to_route (task_id,ride_id,%%s) VALUES (%d,%d,%%s);" % (task_id,ride_id)
                query = pyrits.erniesoft.query.construct_insert_query(query, columns)
                cursor2.execute(query)
            # Set values for the next loop.
            from_task = task_id
            from_code = to_code
            from_postcode = to_postcode
            from_city = to_city
            from_country = to_country
            previous_vehicle = vehicle
        # Commit changes.
        self.connection.commit()
        cursor2.close()
        cursor.close()
<|file_name|>core.py<|end_file_name|><|fim▁begin|>import numpy as np, time, itertools
from collections import OrderedDict
from .misc_utils import *
from . import distributions
concat = np.concatenate
import theano.tensor as T, theano
from importlib import import_module
import scipy.optimize
from .keras_theano_setup import floatX, FNOPTS
from keras.layers.core import Layer
from .filters import *
from .filtered_env import *
import random
import copy
import opensim as osim
from osim.env import *
# ================================================================
# Make agent
# ================================================================
def get_agent_cls(name):
    """Resolve a dotted 'package.module.Name' path to the named attribute.

    The last dot splits the module path from the attribute (usually an
    agent class) looked up on it.
    """
    module_path, attr_name = name.rsplit('.', 1)
    module = import_module(module_path)
    return getattr(module, attr_name)
# ================================================================
# Stats
# ================================================================
def add_episode_stats(stats, paths):
    """Fill `stats` with reward/length summaries over a batch of paths.

    Prefers the unfiltered 'reward_raw' values when the paths carry them,
    so reported statistics reflect the true environment reward.
    """
    key = "reward_raw" if "reward_raw" in paths[0] else "reward"
    ep_rewards = np.array([p[key].sum() for p in paths])
    ep_lengths = np.array([pathlength(p) for p in paths])
    stats["EpisodeRewards"] = ep_rewards
    stats["EpisodeLengths"] = ep_lengths
    stats["NumEpBatch"] = len(ep_rewards)
    stats["EpRewMean"] = ep_rewards.mean()
    stats["EpRewSEM"] = ep_rewards.std() / np.sqrt(len(paths))
    stats["EpRewMax"] = ep_rewards.max()
    stats["EpRewMin"] = ep_rewards.min()
    stats["EpLenMean"] = ep_lengths.mean()
    stats["EpLenMax"] = ep_lengths.max()
    stats["EpLenMin"] = ep_lengths.min()
    stats["RewPerStep"] = ep_rewards.sum() / ep_lengths.sum()
def add_prefixed_stats(stats, prefix, d):
    """Copy every entry of `d` into `stats` under the key '<prefix>_<key>'."""
    for key, value in d.items():
        stats["%s_%s" % (prefix, key)] = value
# ================================================================
# Policy Gradients
# ================================================================
def compute_advantage(vf, paths, gamma, lam):
    """Attach 'return', 'baseline' and 'advantage' arrays to every path.

    Implements generalized advantage estimation (GAE): TD residuals against
    the value-function baseline are discounted with gamma*lam, then the
    advantages are standardized across the whole batch.
    """
    for path in paths:
        path["return"] = discount(path["reward"], gamma)
        baseline = path["baseline"] = vf.predict(path)
        # Bootstrap with the final baseline value unless the episode ended.
        tail = 0 if path["terminated"] else baseline[-1]
        padded = np.append(baseline, tail)
        deltas = path["reward"] + gamma * padded[1:] - padded[:-1]
        path["advantage"] = discount(deltas, gamma * lam)
    all_adv = np.concatenate([path["advantage"] for path in paths])
    # Standardize advantages over the batch.
    mean = all_adv.mean()
    std = all_adv.std()
    for path in paths:
        path["advantage"] = (path["advantage"] - mean) / std
# Hyperparameter schema consumed by update_default_config:
# (name, type, default, description) per entry.
PG_OPTIONS = [
    ("timestep_limit", int, 0, "maximum length of trajectories"),
    ("n_iter", int, 200, "number of batch"),
    ("parallel", int, 0, "collect trajectories in parallel"),
    ("timesteps_per_batch", int, 10000, ""),
    ("gamma", float, 0.99, "discount"),
    ("lam", float, 1.0, "lambda parameter from generalized advantage estimation"),
]
def run_policy_gradient_algorithm(env, agent, usercfg=None, callback=None):
    """Run the batch policy-gradient training loop.

    Per iteration: collect rollouts, compute GAE advantages, fit the
    value-function baseline, update the policy, then report an OrderedDict
    of statistics through `callback`.
    """
    cfg = update_default_config(PG_OPTIONS, usercfg)
    # NOTE(review): dict.update(None) raises TypeError, so despite the
    # default, callers apparently always pass a dict for usercfg -- confirm.
    cfg.update(usercfg)
    print("policy gradient config", cfg)
    # if cfg["parallel"]:
    #     raise NotImplementedError
    tstart = time.time()
    seed_iter = itertools.count()
    for _ in range(cfg["n_iter"]):
        # Rollouts ========
        paths = get_paths(env, agent, cfg, seed_iter)
        paths_subsampled = paths #subsample_paths(paths)
        compute_advantage(agent.baseline, paths_subsampled, gamma=cfg["gamma"], lam=cfg["lam"])
        # VF Update ========
        vf_stats = agent.baseline.fit(paths_subsampled)
        # Pol Update ========
        pol_stats = agent.updater(paths_subsampled)
        # Stats ========
        stats = OrderedDict()
        add_episode_stats(stats, paths)
        add_prefixed_stats(stats, "vf", vf_stats)
        add_prefixed_stats(stats, "pol", pol_stats)
        stats["TimeElapsed"] = time.time() - tstart
        if callback: callback(stats)
def run_policy_gradient_algorithm_hardmining(env, agent, usercfg=None, callback=None, seed_iter=None):
    """Policy-gradient training loop with a caller-supplied seed iterator.

    Identical to run_policy_gradient_algorithm except that `seed_iter` can
    be provided, letting callers replay specific (hard) environment seeds.
    """
    cfg = update_default_config(PG_OPTIONS, usercfg)
    # NOTE(review): dict.update(None) raises TypeError, so despite the
    # default, callers apparently always pass a dict for usercfg -- confirm.
    cfg.update(usercfg)
    print("policy gradient config", cfg)
    # if cfg["parallel"]:
    #     raise NotImplementedError
    tstart = time.time()
    if seed_iter is None:
        seed_iter = itertools.count()
    for _ in range(cfg["n_iter"]):
        # Rollouts ========
        paths = get_paths(env, agent, cfg, seed_iter)
        paths_subsampled = paths #subsample_paths(paths)
        compute_advantage(agent.baseline, paths_subsampled, gamma=cfg["gamma"], lam=cfg["lam"])
        # VF Update ========
        vf_stats = agent.baseline.fit(paths_subsampled)
        # Pol Update ========
        pol_stats = agent.updater(paths_subsampled)
        # Stats ========
        stats = OrderedDict()
        add_episode_stats(stats, paths)
        add_prefixed_stats(stats, "vf", vf_stats)
        add_prefixed_stats(stats, "pol", pol_stats)
        stats["TimeElapsed"] = time.time() - tstart
        if callback: callback(stats)
# def subsample_paths(gpaths):
# paths = copy.deepcopy(gpaths)
# for i in range(len(paths)):
# plen = paths[i]['action'].shape[0]
# rno = random.sample(range(plen), 2*plen/3)
# for j in paths[i].keys():
# paths[i][j] = np.delete(paths[i][j], rno, axis=0)
# return paths
# Pool worker: run serial rollouts for one slice of the batch.
# NOTE: uses Python 2 tuple-parameter syntax and `except Exception, e`;
# this module targets Python 2.
def parallel_rollout_worker((agent, ts_limit, ts_batch, iffilter, seed)):
    try:
        # print("Paralel rollout has been called")
        return do_rollouts_serial(agent, ts_limit, ts_batch, iffilter, seed)
    except Exception, e:
        # Log and re-raise so the parent pool sees the failure.
        print("Exception in rollout worker: %s" % e)
        import traceback; traceback.print_exc()
        raise
def get_paths(env, agent, cfg, seed_iter):
    """Collect a batch of rollout paths, optionally across worker processes.

    With cfg["parallel"] > 0, the per-batch timestep budget is split evenly
    over that many pool workers; otherwise rollouts run serially in-process.
    Returns a list of path dicts.
    """
    paths = []
    if cfg["parallel"]:
        start_time = time.time()
        from multiprocessing import Pool
        # from pathos.multiprocessing import ProcessPool as Pool
        num_processes = int(cfg["parallel"])
        pool = Pool(processes=num_processes)
        # very simple scheme, split work evenly among pool workers (queue would be better)
        try:
            # Callback runs in the parent once all workers finish; it
            # flattens the per-worker lists into `paths` (closure).
            def callback(result):
                print("Length of paths: ", len(result), type(result))
                paths.extend([path for paths_list in result for path in paths_list])
            args_list = [(agent,
                          cfg['timestep_limit'],
                          cfg['timesteps_per_batch'] / num_processes,
                          cfg['filter'], next(seed_iter)
                          ) for _ in range(num_processes)]
            print(args_list)
            result = pool.map_async(parallel_rollout_worker, args_list, callback=callback)
            # result = pool.map(parallel_rollout_worker, args_list)
            result.wait()#1e5)
            if not paths:
                # print("Paths is still empty")
                # raise Exception
                # Re-raise any worker exception (get() propagates it).
                result.get()
        except KeyboardInterrupt:
            pool.terminate()
            raise
        except Exception:
            pool.terminate()
            raise
        else:
            pool.close()
        finally:
            pool.join()
        print("Time elapsed (%d workers): %.2f" % (num_processes, time.time() - start_time))
    else:
        paths = do_rollouts_serial(agent, cfg["timestep_limit"], cfg["timesteps_per_batch"], cfg["filter"], next(seed_iter))
    return paths
def rollout(env, agent, timestep_limit, seed):
    """
    Simulate the env and agent for timestep_limit steps.

    Returns a dict of per-step arrays ("observation", "action", "reward",
    plus whatever the agent/env info dicts contain) and a "terminated" flag.
    The *raw* reward is what gets stored under "reward".
    """
    ob = env._reset(difficulty = 2, seed = seed)
    terminated = False
    data = defaultdict(list)
    for _ in range(timestep_limit):
        ob = agent.obfilt(ob)
        data["observation"].append(ob)
        action, agentinfo = agent.act(ob)
        data["action"].append(action)
        for (k,v) in agentinfo.items():
            data[k].append(v)
        ob,rew,done,envinfo = env.step(action)
        data["reward"].append(rew)
        # Bug fix: the old code assigned `rew = agent.rewfilt(rew)` but the
        # filtered value was never used (dead store). The call is kept only
        # for its side effect of updating the filter's running statistics.
        # NOTE(review): confirm discarding the filtered reward is intended.
        agent.rewfilt(rew)
        for (k,v) in envinfo.items():
            data[k].append(v)
        if done:
            terminated = True
            break
    data = {k:np.array(v) for (k,v) in data.items()}
    data["terminated"] = terminated
    return data
def do_rollouts_serial(agent, timestep_limit, n_timesteps, iffilter, seed):
    """Collect rollouts in this process until at least `n_timesteps` steps.

    `iffilter` selects the observation filter wrapped around the RunEnv:
    2 -> FeatureInducer, 1 -> ConcatPrevious, anything else -> raw.
    Returns the list of path dicts.
    """
    env = RunEnv(False)
    if iffilter==2:
        ofd = FeatureInducer(env.observation_space)
        env = FilteredEnv(env, ob_filter=ofd)
    elif iffilter==1:
        ofd = ConcatPrevious(env.observation_space)
        env = FilteredEnv(env, ob_filter=ofd)
    paths = []
    timesteps_sofar = 0
    while True:
        # NOTE(review): reseeding with the same value before every rollout
        # restarts numpy's RNG from an identical state each time -- confirm
        # this repetition is intentional.
        np.random.seed(seed)
        path = rollout(env, agent, timestep_limit, seed)
        paths.append(path)
        timesteps_sofar += pathlength(path)
        if timesteps_sofar > n_timesteps:
            break
    print("Length of paths: ", len(paths))
    env.close()
    return paths
def pathlength(path):
    """Number of timesteps in a rollout path (one action per step)."""
    return len(path["action"])
def animate_rollout(env, agent, n_timesteps,delay=.01):
    """Run the agent in `env` for up to `n_timesteps`, printing actions.

    Deterministic visualization/debug helper: wraps observations with
    ConcatPrevious and the agent's observation filter, sleeps `delay`
    seconds between steps, and prints the total episode reward at the end.
    (Rendering calls are currently commented out.)
    """
    total_reward = 0.
    ob = env.reset()
    print("Applying filter on Environment")
    ofd = ConcatPrevious(env.observation_space)
    # ob = ofd(ob)
    # env.render()
    # ob = np.array(ob)
    for i in range(n_timesteps):
        ob = ofd(ob)
        ob = agent.obfilt(ob)
        a, _info = agent.act(ob)
        ob, _rew, done, _info = env.step(a)
        # _rew = agent.rewfilt(_rew)
        total_reward += _rew
        # env.render()
        ob = np.array(ob)
        if done:
            print(("terminated after %s timesteps"%i))
            break
        time.sleep(delay)
        print(a.tolist())
    print("Total episode reward = {}".format(total_reward))
# ================================================================
# Stochastic policies
# ================================================================
class StochPolicy(object):
    """Abstract stochastic policy: maps observations to action distributions.

    Subclasses supply the network (`input`/`get_output`) and the
    distribution family (`probtype`); `act` then samples or takes the mode.
    """
    @property
    def probtype(self):
        raise NotImplementedError
    @property
    def trainable_variables(self):
        raise NotImplementedError
    @property
    def input(self):
        raise NotImplementedError
    def get_output(self):
        raise NotImplementedError
    def act(self, ob, stochastic=True):
        """Choose an action for a single observation.

        Returns (action, {"prob": distribution_parameters}).  With
        stochastic=False the distribution's mode is used instead of a sample.
        """
        # ob[None] adds a batch axis of size 1 for the compiled function.
        prob = self._act_prob(ob[None])
        if stochastic:
            return self.probtype.sample(prob)[0], {"prob" : prob[0]}
        else:
            return self.probtype.maxprob(prob)[0], {"prob" : prob[0]}
    def finalize(self):
        # Compile the observation -> distribution-parameters function.
        self._act_prob = theano.function([self.input], self.get_output(), **FNOPTS)
class ProbType(object):
    """Abstract probability-distribution family over actions.

    Implementations provide symbolic (theano) likelihood/KL/entropy ops and
    numpy-level sample/maxprob helpers.
    """
    def sampled_variable(self):
        raise NotImplementedError
    def prob_variable(self):
        raise NotImplementedError
    def likelihood(self, a, prob):
        raise NotImplementedError
    def loglikelihood(self, a, prob):
        raise NotImplementedError
    def kl(self, prob0, prob1):
        raise NotImplementedError
    def entropy(self, prob):
        raise NotImplementedError
    def maxprob(self, prob):
        raise NotImplementedError
class StochPolicyKeras(StochPolicy, EzPickle):
    """Stochastic policy backed by a Keras network; picklable via EzPickle."""
    def __init__(self, net, probtype):
        EzPickle.__init__(self, net, probtype)
        self._net = net
        self._probtype = probtype
        # Compile the action-probability function once up front.
        self.finalize()
    @property
    def probtype(self):
        return self._probtype
    @property
    def net(self):
        return self._net
    @property
    def trainable_variables(self):
        return self._net.trainable_weights
    @property
    def variables(self):
        return self._net.get_params()[0]
    @property
    def input(self):
        return self._net.input
    def get_output(self):
        return self._net.output
    def get_updates(self):
        # Accessing .output forces Keras to build the graph (and updates).
        self._net.output #pylint: disable=W0104
        return self._net.updates
    def get_flat(self):
        """Return all network weights as one flat vector."""
        return flatten(self.net.get_weights())
    def set_from_flat(self, th):
        """Load all network weights from one flat vector `th`."""
        weights = self.net.get_weights()
        self._weight_shapes = [weight.shape for weight in weights]
        self.net.set_weights(unflatten(th, self._weight_shapes))
class Categorical(ProbType):
    """Categorical distribution over `n` classes.

    `prob` rows are class probabilities; sampled actions are integer
    class indices.
    """
    def __init__(self, n):
        self.n = n
    def sampled_variable(self):
        return T.ivector('a')
    def prob_variable(self):
        return T.matrix('prob')
    def likelihood(self, a, prob):
        # Pick each row's probability of its chosen class index.
        return prob[T.arange(prob.shape[0]), a]
    def loglikelihood(self, a, prob):
        return T.log(self.likelihood(a, prob))
    def kl(self, prob0, prob1):
        return (prob0 * T.log(prob0/prob1)).sum(axis=1)
    def entropy(self, prob0):
        return - (prob0 * T.log(prob0)).sum(axis=1)
    def sample(self, prob):
        return distributions.categorical_sample(prob)
    def maxprob(self, prob):
        return prob.argmax(axis=1)
class CategoricalOneHot(ProbType):
    """Categorical distribution whose actions are one-hot row vectors."""
    def __init__(self, n):
        self.n = n
    def sampled_variable(self):
        return T.matrix('a')
    def prob_variable(self):
        return T.matrix('prob')
    def likelihood(self, a, prob):
        # `a` is one-hot, so the elementwise product picks out the
        # probability of the chosen class in each row.
        return (a * prob).sum(axis=1)
    def loglikelihood(self, a, prob):
        return T.log(self.likelihood(a, prob))
    def kl(self, prob0, prob1):
        return (prob0 * T.log(prob0/prob1)).sum(axis=1)
    def entropy(self, prob0):
        return - (prob0 * T.log(prob0)).sum(axis=1)
    def sample(self, prob):
        assert prob.ndim == 2
        inds = distributions.categorical_sample(prob)
        out = np.zeros_like(prob)
        out[np.arange(prob.shape[0]), inds] = 1
        return out
    def maxprob(self, prob):
        """One-hot encoding of each row's argmax class.

        Bug fixes: the old code indexed `out[prob.argmax(axis=1)] = 1`
        (setting whole rows instead of one element per row, unlike the
        pattern used in `sample`) and was missing the `return`.
        """
        out = np.zeros_like(prob)
        out[np.arange(prob.shape[0]), prob.argmax(axis=1)] = 1
        return out
class DiagGauss(ProbType):
    """Diagonal multivariate Gaussian over d dimensions.

    `prob` rows are the concatenation [means (d) | standard deviations (d)].
    """
    def __init__(self, d):
        self.d = d
    def sampled_variable(self):
        return T.matrix('a')
    def prob_variable(self):
        return T.matrix('prob')
    def loglikelihood(self, a, prob):
        mean0 = prob[:,:self.d]
        std0 = prob[:, self.d:]
        # exp[ -(a - mu)^2/(2*sigma^2) ] / sqrt(2*pi*sigma^2)
        return - 0.5 * T.square((a - mean0) / std0).sum(axis=1) - 0.5 * T.log(2.0 * np.pi) * self.d - T.log(std0).sum(axis=1)
    def likelihood(self, a, prob):
        return T.exp(self.loglikelihood(a, prob))
    def kl(self, prob0, prob1):
        # Closed-form KL between two diagonal Gaussians, summed over dims.
        mean0 = prob0[:, :self.d]
        std0 = prob0[:, self.d:]
        mean1 = prob1[:, :self.d]
        std1 = prob1[:, self.d:]
        return T.log(std1 / std0).sum(axis=1) + ((T.square(std0) + T.square(mean0 - mean1)) / (2.0 * T.square(std1))).sum(axis=1) - 0.5 * self.d
    def entropy(self, prob):
        std_nd = prob[:, self.d:]
        return T.log(std_nd).sum(axis=1) + .5 * np.log(2 * np.pi * np.e) * self.d
    def sample(self, prob):
        # Reparameterized draw: mu + sigma * N(0, I).
        mean_nd = prob[:, :self.d]
        std_nd = prob[:, self.d:]
        return np.random.randn(prob.shape[0], self.d).astype(floatX) * std_nd + mean_nd
    def maxprob(self, prob):
        # The mode of a Gaussian is its mean.
        return prob[:, :self.d]
def test_probtypes():
theano.config.floatX = 'float64'
np.random.seed(0)
prob_diag_gauss = np.array([-.2, .3, .4, -.5, 1.1, 1.5, .1, 1.9])
diag_gauss = DiagGauss(prob_diag_gauss.size // 2)
yield validate_probtype, diag_gauss, prob_diag_gauss
prob_categorical = np.array([.2, .3, .5])
categorical = Categorical(prob_categorical.size)<|fim▁hole|> N = 100000
# Check to see if mean negative log likelihood == differential entropy
Mval = np.repeat(prob[None, :], N, axis=0)
M = probtype.prob_variable()
X = probtype.sampled_variable()
calcloglik = theano.function([X, M], T.log(probtype.likelihood(X, M)), allow_input_downcast=True)
calcent = theano.function([M], probtype.entropy(M), allow_input_downcast=True)
Xval = probtype.sample(Mval)
logliks = calcloglik(Xval, Mval)
entval_ll = - logliks.mean()
entval_ll_stderr = logliks.std() / np.sqrt(N)
entval = calcent(Mval).mean()
print(entval, entval_ll, entval_ll_stderr)
assert np.abs(entval - entval_ll) < 3 * entval_ll_stderr # within 3 sigmas
# Check to see if kldiv[p,q] = - ent[p] - E_p[log q]
M2 = probtype.prob_variable()
q = prob + np.random.randn(prob.size) * 0.1
Mval2 = np.repeat(q[None, :], N, axis=0)
calckl = theano.function([M, M2], probtype.kl(M, M2), allow_input_downcast=True)
klval = calckl(Mval, Mval2).mean()
logliks = calcloglik(Xval, Mval2)
klval_ll = - entval - logliks.mean()
klval_ll_stderr = logliks.std() / np.sqrt(N)
print(klval, klval_ll, klval_ll_stderr)
assert np.abs(klval - klval_ll) < 3 * klval_ll_stderr # within 3 sigmas
# ================================================================
# Value functions
# ================================================================
class Baseline(object):
    """Abstract state-value baseline used for advantage estimation."""
    def fit(self, paths):
        raise NotImplementedError
    def predict(self, path):
        raise NotImplementedError
class TimeDependentBaseline(Baseline):
    """Baseline that depends only on the timestep: mean return at each t."""
    def __init__(self):
        self.baseline = None
    def fit(self, paths):
        """Average returns across episodes at each timestep.

        Returns {"EV": explained variance of the fitted baseline}.
        """
        rets = [path["return"] for path in paths]
        maxlen = max(len(ret) for ret in rets)
        retsum = np.zeros(maxlen)
        retcount = np.zeros(maxlen)
        for ret in rets:
            retsum[:len(ret)] += ret
            retcount[:len(ret)] += 1
        retmean = retsum / retcount
        # retcount is non-increasing, so -retcount is sorted ascending;
        # searchsorted locates the first timestep with fewer than 4
        # contributing episodes, beyond which the mean is too noisy to keep.
        i_depletion = np.searchsorted(-retcount, -4)
        self.baseline = retmean[:i_depletion]
        pred = concat([self.predict(path) for path in paths])
        return {"EV" : explained_variance(pred, concat(rets))}
    def predict(self, path):
        """Baseline values for one path, padded with the last value."""
        if self.baseline is None:
            # Not fitted yet: predict zeros.
            return np.zeros(pathlength(path))
        else:
            lenpath = pathlength(path)
            lenbase = len(self.baseline)
            if lenpath > lenbase:
                # Extend with the final baseline value for longer paths.
                return concat([self.baseline, self.baseline[-1] + np.zeros(lenpath-lenbase)])
            else:
                return self.baseline[:lenpath]
class NnRegression(EzPickle):
    """L-BFGS-trained neural-network regressor with L2 weight decay.

    `mixfrac` blends each new target with the previous prediction
    (target*mixfrac + old_pred*(1-mixfrac)) to damp per-update change.
    """
    def __init__(self, net, mixfrac=1.0, maxiter=25):
        EzPickle.__init__(self, net, mixfrac, maxiter)
        self.net = net
        self.mixfrac = mixfrac
        x_nx = net.input
        self.predict = theano.function([x_nx], net.output, **FNOPTS)
        ypred_ny = net.output
        ytarg_ny = T.matrix("ytarg")
        var_list = net.trainable_weights
        # L2 penalty over all trainable weights.
        l2 = 1e-3 * T.add(*[T.square(v).sum() for v in var_list])
        N = x_nx.shape[0]
        mse = T.sum(T.square(ytarg_ny - ypred_ny))/N
        symb_args = [x_nx, ytarg_ny]
        loss = mse + l2
        self.opt = LbfgsOptimizer(loss, var_list, symb_args, maxiter=maxiter, extra_losses={"mse":mse, "l2":l2})
    def fit(self, x_nx, ytarg_ny):
        """Fit toward (mixed) targets; returns optimizer + variance stats."""
        nY = ytarg_ny.shape[1]
        ypredold_ny = self.predict(x_nx)
        out = self.opt.update(x_nx, ytarg_ny*self.mixfrac + ypredold_ny*(1-self.mixfrac))
        yprednew_ny = self.predict(x_nx)
        out["PredStdevBefore"] = ypredold_ny.std()
        out["PredStdevAfter"] = yprednew_ny.std()
        out["TargStdev"] = ytarg_ny.std()
        if nY==1:
            out["EV_before"] =  explained_variance_2d(ypredold_ny, ytarg_ny)[0]
            out["EV_after"] =  explained_variance_2d(yprednew_ny, ytarg_ny)[0]
        else:
            out["EV_avg"] = explained_variance(yprednew_ny.ravel(), ytarg_ny.ravel())
        return out
class NnVf(object):
    """Neural-network value function: regresses returns on observations."""
    def __init__(self, net, timestep_limit, regression_params):
        self.reg = NnRegression(net, **regression_params)
        self.timestep_limit = timestep_limit
    def predict(self, path):
        """Predicted state values for every timestep of one path."""
        ob_no = self.preproc(path["observation"])
        return self.reg.predict(ob_no)[:,0]
    def fit(self, paths):
        """Fit the regressor on (observation, return) pairs from all paths."""
        ob_no = concat([self.preproc(path["observation"]) for path in paths], axis=0)
        vtarg_n1 = concat([path["return"] for path in paths]).reshape(-1,1)
        return self.reg.fit(ob_no, vtarg_n1)
    def preproc(self, ob_no):
        # Append the normalized timestep (t / timestep_limit) as a feature.
        return concat([ob_no, np.arange(len(ob_no)).reshape(-1,1) / float(self.timestep_limit)], axis=1)
class NnCpd(EzPickle):
    """Neural-net conditional probability distribution fit by max likelihood."""
    def __init__(self, net, probtype, maxiter=25):
        EzPickle.__init__(self, net, probtype, maxiter)
        self.net = net
        x_nx = net.input
        prob = net.output
        a = probtype.sampled_variable()
        var_list = net.trainable_weights
        loglik = probtype.loglikelihood(a, prob)
        self.loglikelihood = theano.function([a, x_nx], loglik, **FNOPTS)
        # Maximize mean log-likelihood == minimize its negation.
        loss = - loglik.mean()
        symb_args = [x_nx, a]
        self.opt = LbfgsOptimizer(loss, var_list, symb_args, maxiter=maxiter)
    def fit(self, x_nx, a):
        """Run one L-BFGS optimization on inputs x_nx and actions a."""
        return self.opt.update(x_nx, a)
class SetFromFlat(object):
def __init__(self, var_list):
theta = T.vector()
start = 0
updates = []
for v in var_list:
shape = v.shape
size = T.prod(shape)
updates.append((v, theta[start:start+size].reshape(shape)))
start += size
self.op = theano.function([theta],[], updates=updates,**FNOPTS)
def __call__(self, theta):
self.op(theta.astype(floatX))
class GetFlat(object):
def __init__(self, var_list):
self.op = theano.function([], T.concatenate([v.flatten() for v in var_list]),**FNOPTS)
def __call__(self):
return self.op() #pylint: disable=E1101
class EzFlat(object):
def __init__(self, var_list):
self.gf = GetFlat(var_list)
self.sff = SetFromFlat(var_list)
def set_params_flat(self, theta):
self.sff(theta)
def get_params_flat(self):
return self.gf()
class LbfgsOptimizer(EzFlat):
def __init__(self, loss, params, symb_args, extra_losses=None, maxiter=25):
EzFlat.__init__(self, params)
self.all_losses = OrderedDict()
self.all_losses["loss"] = loss
if extra_losses is not None:
self.all_losses.update(extra_losses)
self.f_lossgrad = theano.function(list(symb_args), [loss, flatgrad(loss, params)],**FNOPTS)
self.f_losses = theano.function(symb_args, list(self.all_losses.values()),**FNOPTS)
self.maxiter=maxiter
def update(self, *args):
thprev = self.get_params_flat()
def lossandgrad(th):
self.set_params_flat(th)
l,g = self.f_lossgrad(*args)
g = g.astype('float64')
return (l,g)
losses_before = self.f_losses(*args)
theta, _, opt_info = scipy.optimize.fmin_l_bfgs_b(lossandgrad, thprev, maxiter=self.maxiter)
del opt_info['grad']
print(opt_info)
self.set_params_flat(theta)
losses_after = self.f_losses(*args)
info = OrderedDict()
for (name,lossbefore, lossafter) in zip(list(self.all_losses.keys()), losses_before, losses_after):
info[name+"_before"] = lossbefore
info[name+"_after"] = lossafter
return info
def numel(x):
return T.prod(x.shape)
def flatgrad(loss, var_list):
grads = T.grad(loss, var_list)
return T.concatenate([g.flatten() for g in grads])
# ================================================================
# Keras
# ================================================================
class ConcatFixedStd(Layer):
input_ndim = 2
def __init__(self, **kwargs):
Layer.__init__(self, **kwargs)
def build(self, input_shape):
input_dim = input_shape[1]
self.logstd = theano.shared(np.zeros(input_dim,floatX), name='{}_logstd'.format(self.name))
self.trainable_weights = [self.logstd]
super(ConcatFixedStd, self).build(input_shape)
def compute_ouput_shape(self, input_shape):
return (input_shape[0], input_shape[1] * 2)
def call(self, x, mask=None):
Mean = x
Std = T.repeat(T.exp(self.logstd)[None, :], Mean.shape[0], axis=0)
return T.concatenate([Mean, Std], axis=1)
# ================================================================
# Video monitoring
# ================================================================
def VIDEO_NEVER(_):
return False
def VIDEO_ALWAYS(_):
return True<|fim▁end|> | yield validate_probtype, categorical, prob_categorical
def validate_probtype(probtype, prob): |
<|file_name|>logging_utils.py<|end_file_name|><|fim▁begin|>import logging
from logging.handlers import RotatingFileHandler
import os
from appdirs import user_cache_dir
def configure_logging():
cache_dir = user_cache_dir(appname='spoppy')
LOG_FILE_NAME = os.path.join(
cache_dir, 'spoppy.log'
)
LOG_LEVEL = getattr(
logging,<|fim▁hole|> os.getenv('SPOPPY_LOG_LEVEL', '').upper(),
logging.INFO
)
if not os.path.isdir(cache_dir):
os.makedirs(cache_dir)
logger = logging.getLogger('spoppy')
logger.setLevel(LOG_LEVEL)
handler = RotatingFileHandler(
LOG_FILE_NAME,
maxBytes=1024 * 1024 * 10,
backupCount=10,
)
handler.setLevel(LOG_LEVEL)
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.debug('Spoppy logger set up')
requests_log = logging.getLogger('urllib3')
requests_log.setLevel(LOG_LEVEL)
requests_log.propagate = True
requests_log.addHandler(handler)
logger.debug('urllib3 logger set up')<|fim▁end|> | |
<|file_name|>LatinLayout.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});<|fim▁hole|><|fim▁end|> | exports.default = [['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'], ['a', 's', 'd', 'f', 'g', 'h', 'j', 'k', 'l'], ['z', 'x', 'c', 'v', 'b', 'n', 'm']];
module.exports = exports['default']; |
<|file_name|>coding.rs<|end_file_name|><|fim▁begin|>//! Coding system handler.
use remacs_macros::lisp_fn;
use crate::{
data::aref,
eval::unbind_to,
hashtable::{
gethash,
HashLookupResult::{Found, Missing},
LispHashTableRef,
},
lisp::LispObject,
lists::{get, put},
minibuf::completing_read,
multibyte::LispStringRef,
obarray::intern_lisp,
remacs_sys::{
code_convert_string as c_code_convert_string, code_convert_string_norecord,
encode_file_name as c_encode_file_name, globals, specbind,
},
remacs_sys::{
safe_eval, Qcoding_system_define_form, Qcoding_system_error, Qcoding_system_history,
Qcoding_system_p, Qcompletion_ignore_case, Qnil, Qno_conversion, Qt, Qutf_8,
Vcoding_system_hash_table,
},
threads::c_specpdl_index,
};
/// Return the spec vector of CODING_SYSTEM_SYMBOL.
/// Same as the CODING_SYSTEM_SPEC C macro.
fn coding_system_spec(coding_system: LispObject) -> LispObject {
gethash(
coding_system,
unsafe { Vcoding_system_hash_table }.into(),
Qnil,
)
}
/// Return the ID of OBJECT.
/// Same as the CODING_SYSTEM_ID C macro.
pub fn coding_system_id(object: LispObject) -> isize {
let h_ref: LispHashTableRef = unsafe { Vcoding_system_hash_table }.into();
match h_ref.lookup(object) {
Found(idx) => idx as isize,
Missing(_) => -1,
}
}
/// Check if X is a coding system or not. If it is, return the spec vector of
/// the coding system.
/// Alternative to the CHECK_CODING_SYSTEM_GET_SPEC C macro.
fn check_coding_system_get_spec(x: LispObject) -> LispObject {
match coding_system_spec(x) {
Qnil => {
check_coding_system_lisp(x);
match coding_system_spec(x) {
Qnil => wrong_type!(Qcoding_system_p, x),
spec => spec,
}
}
spec => spec,
}
}
/// Return t if OBJECT is nil or a coding-system.
/// See the documentation of `define-coding-system' for information
/// about coding-system objects.
#[lisp_fn]
pub fn coding_system_p(object: LispObject) -> bool {
object.is_nil()
|| coding_system_id(object) >= 0
|| object.is_symbol() && get(object.into(), Qcoding_system_define_form).into()
}
/// Check validity of CODING-SYSTEM.
/// If valid, return CODING-SYSTEM, else signal a `coding-system-error' error.
/// It is valid if it is nil or a symbol defined as a coding system by the
/// function `define-coding-system'.
#[lisp_fn(name = "check-coding-system", c_name = "check_coding_system")]
pub fn check_coding_system_lisp(coding_system: LispObject) -> LispObject {
let define_form = get(coding_system.into(), Qcoding_system_define_form);
if define_form.is_not_nil() {
put(coding_system.into(), Qcoding_system_define_form, Qnil);
unsafe { safe_eval(define_form) };
}
if !coding_system_p(coding_system) {
xsignal!(Qcoding_system_error, coding_system);
}
coding_system
}
/// Return the list of aliases of CODING-SYSTEM.
#[lisp_fn]
pub fn coding_system_aliases(coding_system: LispObject) -> LispObject {
let coding_system = match coding_system {
Qnil => Qno_conversion,
coding_system => coding_system,
};
let spec = check_coding_system_get_spec(coding_system);
aref(spec, 1)
}
/// Wrapper for encode_file_name (NOT PORTED)
pub fn encode_file_name(fname: LispStringRef) -> LispStringRef {
unsafe { c_encode_file_name(fname.into()) }.into()
}
/// Implements DECODE_SYSTEM macro
/// Decode the string `input_string` using the specified coding system
/// for system functions, if any.
pub fn decode_system(input_string: LispStringRef) -> LispStringRef {
let local_coding_system: LispObject = unsafe { globals.Vlocale_coding_system };
if local_coding_system.is_nil() {
input_string
} else {
unsafe { code_convert_string_norecord(input_string.into(), Qutf_8, true).into() }
}
}
/// Decode STRING which is encoded in CODING-SYSTEM, and return the result.
/// Optional third arg NOCOPY non-nil means it is OK to return STRING
/// itself if the decoding operation is trivial.
/// Optional fourth arg BUFFER non-nil means that the decoded text is inserted in that buffer after point (point does not move). In this case, the return value is the length of the decoded text.
/// This function sets `last-coding-system-used` to the precise coding system
/// used (which may be different from CODING-SYSTEM if CODING-SYSTEM is not fully specified.)
#[lisp_fn(min = "1")]
pub fn decode_coding_string(
string: LispObject,
coding_system: LispObject,
nocopy: LispObject,
buffer: LispObject,
) -> LispObject {
code_convert_string(
string,
coding_system,
buffer,
false,
nocopy.is_not_nil(),
false,
)
}
/// Encode STRING to CODING-SYSTEM, and return the result.
/// Optional third arg NOCOPY non-nil means it is OK to return STRING
/// itself if the encoding operation is trivial.
/// Optional fourth arg BUFFER non-nil means that the encoded text is inserted in that buffer after point (point does not move). In this case, the return value is the length of the encoded text.
/// This function sets `last-coding-system-used` to the precise coding system
/// used (which may be different from CODING-SYSTEM if CODING-SYSTEM is not fully specified.)
#[lisp_fn(min = "1")]
pub fn encode_coding_string(
string: LispObject,
coding_system: LispObject,
nocopy: LispObject,
buffer: LispObject,
) -> LispObject {<|fim▁hole|> coding_system,
buffer,
true,
nocopy.is_not_nil(),
false,
)
}
// Wrapper for code_convert_string (NOT PORTED)
pub fn code_convert_string(
string: LispObject,
coding_system: LispObject,
dst_object: LispObject,
encodep: bool,
nocopy: bool,
norecord: bool,
) -> LispObject {
unsafe { c_code_convert_string(string, coding_system, dst_object, encodep, nocopy, norecord) }
}
/// Read a coding system from the minibuffer, prompting with string PROMPT.
/// If the user enters null input, return second argument DEFAULT-CODING-SYSTEM.
/// Ignores case when completing coding systems (all Emacs coding systems
/// are lower-case).
#[lisp_fn(min = "1")]
pub fn read_coding_system(prompt: LispObject, mut default_coding_system: LispObject) -> LispObject {
let count = c_specpdl_index();
if let Some(s) = default_coding_system.as_symbol() {
default_coding_system = s.symbol_name();
}
unsafe {
specbind(Qcompletion_ignore_case, Qt);
}
let val = completing_read(
prompt,
unsafe { globals.Vcoding_system_alist },
Qnil,
Qt,
Qnil,
Qcoding_system_history,
default_coding_system,
Qnil,
);
unbind_to(count, Qnil);
let tem: LispStringRef = val.into();
if tem.is_empty() {
Qnil
} else {
intern_lisp(tem, None)
}
}
include!(concat!(env!("OUT_DIR"), "/coding_exports.rs"));<|fim▁end|> | code_convert_string(
string, |
<|file_name|>browserify.js<|end_file_name|><|fim▁begin|>module.exports = {<|fim▁hole|> 'dist/geo.js': ['src/index.js'],
}
}
};<|fim▁end|> | dist: {
files: { |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url
from profiles import views
urlpatterns = [
url(
regex=r"^edit/$",
view=views.ProfileEditUpdateView.as_view(),
name="profile_edit"
),
url(
regex="^confirm_role/(?P<membership_id>[-\w]+)/(?P<action>verify|deny)/$",
view=views.profile_confirm_role,
name="profile_confirm_role",
),
url(
regex="^deny_account/(?P<type_name>[\w]+)/(?P<account_name>[-\.\w]+)/$",
view=views.profile_deny_account,
name="profile_deny_account",
),
url(
regex="^confirm/$",
view=views.profile_confirm,
name="profile_confirm",
),
url(r"^$", views.profile_list, name="profile_list"),
url(r"^(?P<github_account>[-\w]+)/$", views.profile_detail, name="profile_detail"),
url(r"^github/(?P<github_account>[-\w]+)/$", views.profile_detail, name="github_profile_detail"),
url(r"^steem/(?P<steem_account>[-\.\w]+)/$", views.profile_detail, name="steem_profile_detail"),
url(r"^id/(?P<id>[-\w]+)/$", views.profile_detail, name="id_profile_detail"),<|fim▁hole|><|fim▁end|> | ] |
<|file_name|>tri-state-checkbox-test.js<|end_file_name|><|fim▁begin|>import { module, test } from 'qunit';
import { setupRenderingTest } from 'ember-qunit';
import { render, click } from '@ember/test-helpers';
import { hbs } from 'ember-cli-htmlbars';
import styles from 'irene/components/tri-state-checkbox/index.scss';
module('Integration | Component | tri-state-checkbox', function (hooks) {
setupRenderingTest(hooks);
test('it does not render component if label is not passed', async function (assert) {
await render(hbs`<TriStateCheckbox />`);
assert.dom('[data-test-check]').doesNotExist();
const container = this.element.querySelector('[data-test-container]');
assert.equal(container.clientHeight, 0);
});
test('it toggles value on checkbox click', async function (assert) {
this.set('label', 'Test');
this.set('value', true);
this.set('onToggle', () => {});
this.set('onOverrideReset', () => {});
this.set('isToggleRunning', false);
this.set('isOverridden', false);
await render(
hbs`<TriStateCheckbox @label={{this.label}} @value={{this.value}} @onToggle={{this.onToggle}} @onOverrideReset={{this.onOverrideReset}} @isToggleRunning={{this.isToggleRunning}} @isOverridden={{this.isOverridden}} />`
);
const checkbox = this.element.querySelector('[data-test-input]');
assert.equal(checkbox.checked, true);
await click(checkbox);
assert.equal(checkbox.checked, false);
await click(checkbox);
assert.equal(checkbox.checked, true);
});
test('it toggles value on label click', async function (assert) {
this.set('label', 'Test');
this.set('value', true);
this.set('onToggle', () => {});
this.set('onOverrideReset', () => {});
this.set('isToggleRunning', false);
this.set('isOverridden', false);
await render(
hbs`<TriStateCheckbox @label={{this.label}} @value={{this.value}} @onToggle={{this.onToggle}} @onOverrideReset={{this.onOverrideReset}} @isToggleRunning={{this.isToggleRunning}} @isOverridden={{this.isOverridden}} />`
);
const checkbox = this.element.querySelector('[data-test-input]');<|fim▁hole|>
await click(label);
assert.equal(checkbox.checked, false);
await click(label);
assert.equal(checkbox.checked, true);
});
test('it should render label title if passed', async function (assert) {
this.set('label', 'Test label');
this.set('value', true);
this.set('onToggle', () => {});
this.set('onOverrideReset', () => {});
this.set('isToggleRunning', false);
this.set('isOverridden', false);
await render(
hbs`<TriStateCheckbox @label={{this.label}} @value={{this.value}} @onToggle={{this.onToggle}} @onOverrideReset={{this.onOverrideReset}} @isToggleRunning={{this.isToggleRunning}} @isOverridden={{this.isOverridden}} />`
);
let label = this.element.querySelector('[data-test-label]');
assert.equal(label.title, '');
this.set('title', 'Test title');
await render(
hbs`<TriStateCheckbox @label={{this.label}} @title={{this.title}} @value={{this.value}} @onToggle={{this.onToggle}} @onOverrideReset={{this.onOverrideReset}} @isToggleRunning={{this.isToggleRunning}} @isOverridden={{this.isOverridden}} />`
);
label = this.element.querySelector('[data-test-label]');
assert.equal(label.title, 'Test title');
});
test('it toggles value on label click', async function (assert) {
this.set('label', 'Test');
this.set('value', true);
this.set('onToggle', () => {});
this.set('onOverrideReset', () => {});
this.set('isToggleRunning', false);
this.set('isOverridden', false);
await render(
hbs`<TriStateCheckbox @label={{this.label}} @value={{this.value}} @onToggle={{this.onToggle}} @onOverrideReset={{this.onOverrideReset}} @isToggleRunning={{this.isToggleRunning}} @isOverridden={{this.isOverridden}} />`
);
const checkbox = this.element.querySelector('[data-test-input]');
assert.equal(checkbox.checked, true);
const label = this.element.querySelector('[data-test-label]');
await click(label);
assert.equal(checkbox.checked, false);
await click(label);
assert.equal(checkbox.checked, true);
});
test('it should render progress spinner based on isToggleRunning value', async function (assert) {
this.set('label', 'Test');
this.set('value', true);
this.set('onToggle', () => {});
this.set('onOverrideReset', () => {});
this.set('isOverridden', false);
this.set('isToggleRunning', true);
await render(
hbs`<TriStateCheckbox @label={{this.label}} @value={{this.value}} @onToggle={{this.onToggle}} @onOverrideReset={{this.onOverrideReset}} @isToggleRunning={{this.isToggleRunning}} @isOverridden={{this.isOverridden}} />`
);
assert.dom('[data-test-progress-spinner]').exists();
this.set('isToggleRunning', false);
await render(
hbs`<TriStateCheckbox @label={{this.label}} @value={{this.value}} @onToggle={{this.onToggle}} @onOverrideReset={{this.onOverrideReset}} @isToggleRunning={{this.isToggleRunning}} @isOverridden={{this.isOverridden}} />`
);
assert.dom('[data-test-progress-spinner]').doesNotExist();
});
test('it should render switch style based on isOverridden value', async function (assert) {
this.set('label', 'Test');
this.set('value', true);
this.set('onToggle', () => {});
this.set('onOverrideReset', () => {});
this.set('isToggleRunning', false);
this.set('isOverridden', false);
await render(
hbs`<TriStateCheckbox @label={{this.label}} @value={{this.value}} @onToggle={{this.onToggle}} @onOverrideReset={{this.onOverrideReset}} @isToggleRunning={{this.isToggleRunning}} @isOverridden={{this.isOverridden}} />`
);
assert.dom('[data-test-check]').hasClass(styles['inherited']);
assert.dom('[data-test-check]').doesNotHaveClass(styles['overridden']);
this.set('isOverridden', true);
await render(
hbs`<TriStateCheckbox @label={{this.label}} @value={{this.value}} @onToggle={{this.onToggle}} @onOverrideReset={{this.onOverrideReset}} @isToggleRunning={{this.isToggleRunning}} @isOverridden={{this.isOverridden}} />`
);
assert.dom('[data-test-check]').doesNotHaveClass(styles['inherited']);
assert.dom('[data-test-check]').hasClass(styles['overridden']);
});
test('it should render reset button based on isOverridden value', async function (assert) {
this.set('label', 'Test');
this.set('value', true);
this.set('onToggle', () => {});
this.set('onOverrideReset', () => {});
this.set('isToggleRunning', false);
this.set('isOverridden', true);
await render(
hbs`<TriStateCheckbox @label={{this.label}} @value={{this.value}} @onToggle={{this.onToggle}} @onOverrideReset={{this.onOverrideReset}} @isToggleRunning={{this.isToggleRunning}} @isOverridden={{this.isOverridden}} />`
);
assert.dom('[data-test-reset]').exists();
this.set('isOverridden', false);
await render(
hbs`<TriStateCheckbox @label={{this.label}} @value={{this.value}} @onToggle={{this.onToggle}} @onOverrideReset={{this.onOverrideReset}} @isToggleRunning={{this.isToggleRunning}} @isOverridden={{this.isOverridden}} />`
);
assert.dom('[data-test-reset]').doesNotExist();
});
test('it should execute onOverrideReset function on reset button click', async function (assert) {
this.set('label', 'Test');
this.set('value', true);
this.set('onToggle', () => {});
this.set('isToggleRunning', false);
this.set('isOverridden', true);
let flag = 1;
this.set('onOverrideReset', function reset() {
flag = 0;
});
await render(
hbs`<TriStateCheckbox @label={{this.label}} @value={{this.value}} @onToggle={{this.onToggle}} @onOverrideReset={{this.onOverrideReset}} @isToggleRunning={{this.isToggleRunning}} @isOverridden={{this.isOverridden}} />`
);
assert.equal(flag, 1);
const reset = this.element.querySelector('[data-test-reset]');
await click(reset);
assert.equal(flag, 0);
});
});<|fim▁end|> | assert.equal(checkbox.checked, true);
const label = this.element.querySelector('[data-test-label]'); |
<|file_name|>units.py<|end_file_name|><|fim▁begin|>order = ['','K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
class Sizes(object):
_BASE = 1000.
def toSize(self, value, input='', output='K'):
"""
Convert value in other measurement
"""
input = order.index(input)
output = order.index(output)
factor = input - output
return value * (self._BASE ** factor)
def converToBestUnit(self, value, input=''):
devider = len(str(int(self._BASE))) - 1
output = (len(str(value)) -2) / devider
output += order.index(input)
if output > len(order):
output = len(order) - 1
elif output < 0:
output = 0
output = order[output]
return self.toSize(value, input, output), output
<|fim▁hole|><|fim▁end|> | class Bytes(Sizes):
_BASE = 1024. |
<|file_name|>design_stack.py<|end_file_name|><|fim▁begin|>"""
Implementation of stack data structure in Python.
"""
class Stack:
def __init__(self,*vargs):
self.stack = list(vargs)
def __repr__(self):
return str(self.stack)
def top(self):
return self.stack[0]
def push(self,elem):<|fim▁hole|> return self.stack.pop(0)
if __name__ == '__main__':
stk = Stack(1,2,3,4)
print stk
print stk.top()
stk.push(10)
print stk
print stk.pop()
print stk<|fim▁end|> | self.stack.insert(0,elem)
def pop(self): |
<|file_name|>service.go<|end_file_name|><|fim▁begin|>// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package groundstation
import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/client"
"github.com/aws/aws-sdk-go/aws/client/metadata"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/aws/signer/v4"
"github.com/aws/aws-sdk-go/private/protocol/restjson"
)
// GroundStation provides the API operation methods for making requests to
// AWS Ground Station. See this package's package overview docs
// for details on the service.
//
// GroundStation methods are safe to use concurrently. It is not safe to
// modify mutate any of the struct's properties though.
type GroundStation struct {
*client.Client
}
// Used for custom client initialization logic
var initClient func(*client.Client)
<|fim▁hole|>// Used for custom request initialization logic
var initRequest func(*request.Request)
// Service information constants
const (
ServiceName = "GroundStation" // Name of service.
EndpointsID = "groundstation" // ID to lookup a service endpoint with.
ServiceID = "GroundStation" // ServiceID is a unique identifer of a specific service.
)
// New creates a new instance of the GroundStation client with a session.
// If additional configuration is needed for the client instance use the optional
// aws.Config parameter to add your extra config.
//
// Example:
// // Create a GroundStation client from just a session.
// svc := groundstation.New(mySession)
//
// // Create a GroundStation client with additional configuration
// svc := groundstation.New(mySession, aws.NewConfig().WithRegion("us-west-2"))
func New(p client.ConfigProvider, cfgs ...*aws.Config) *GroundStation {
c := p.ClientConfig(EndpointsID, cfgs...)
if c.SigningNameDerived || len(c.SigningName) == 0 {
c.SigningName = "groundstation"
}
return newClient(*c.Config, c.Handlers, c.Endpoint, c.SigningRegion, c.SigningName)
}
// newClient creates, initializes and returns a new service client instance.
func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegion, signingName string) *GroundStation {
svc := &GroundStation{
Client: client.New(
cfg,
metadata.ClientInfo{
ServiceName: ServiceName,
ServiceID: ServiceID,
SigningName: signingName,
SigningRegion: signingRegion,
Endpoint: endpoint,
APIVersion: "2019-05-23",
},
handlers,
),
}
// Handlers
svc.Handlers.Sign.PushBackNamed(v4.SignRequestHandler)
svc.Handlers.Build.PushBackNamed(restjson.BuildHandler)
svc.Handlers.Unmarshal.PushBackNamed(restjson.UnmarshalHandler)
svc.Handlers.UnmarshalMeta.PushBackNamed(restjson.UnmarshalMetaHandler)
svc.Handlers.UnmarshalError.PushBackNamed(restjson.UnmarshalErrorHandler)
// Run custom client initialization if present
if initClient != nil {
initClient(svc.Client)
}
return svc
}
// newRequest creates a new request for a GroundStation operation and runs any
// custom request initialization.
func (c *GroundStation) newRequest(op *request.Operation, params, data interface{}) *request.Request {
req := c.NewRequest(op, params, data)
// Run custom request initialization if present
if initRequest != nil {
initRequest(req)
}
return req
}<|fim▁end|> | |
<|file_name|>grunt-spritesmith.js<|end_file_name|><|fim▁begin|>// Load in dependencies
var fs = require('fs');
var path = require('path');
var _ = require('underscore');
var async = require('async');
var templater = require('spritesheet-templates');
var spritesmith = require('spritesmith');
var url = require('url2');
// Define class to contain different extension handlers
function ExtFormat() {
this.formatObj = {};
}
ExtFormat.prototype = {
add: function (name, val) {
this.formatObj[name] = val;
},
get: function (filepath) {
// Grab the extension from the filepath
var ext = path.extname(filepath);
var lowerExt = ext.toLowerCase();
// Look up the file extenion from our format object
var formatObj = this.formatObj;
var format = formatObj[lowerExt];
return format;
}
};
// Create img and css formats
var imgFormats = new ExtFormat();
var cssFormats = new ExtFormat();
// Add our img formats
imgFormats.add('.png', 'png');
imgFormats.add('.jpg', 'jpeg');
imgFormats.add('.jpeg', 'jpeg');
// Add our css formats
cssFormats.add('.styl', 'stylus');
cssFormats.add('.stylus', 'stylus');
cssFormats.add('.sass', 'sass');
cssFormats.add('.scss', 'scss');
cssFormats.add('.less', 'less');
cssFormats.add('.json', 'json');
cssFormats.add('.css', 'css');
function getCoordinateName(filepath) {
// Extract the image name (exlcuding extension)
var fullname = path.basename(filepath);
var nameParts = fullname.split('.');
// If there is are more than 2 parts, pop the last one
if (nameParts.length >= 2) {
nameParts.pop();
}
// Return our modified filename
return nameParts.join('.');
}
module.exports = function gruntSpritesmith (grunt) {
// Create a SpriteMaker function
function SpriteMaker() {
// Grab the raw configuration
var data = this.data;
// If we were invoked via `grunt-newer`, re-localize the info
if (data.src === undefined && data.files) {
data = data.files[0] || {};
}
// Determine the origin and destinations
var src = data.src;
var destImg = data.dest;
var destCss = data.destCss;
var cssTemplate = data.cssTemplate;
var that = this;
// Verify all properties are here
if (!src || !destImg || !destCss) {
return grunt.fatal('grunt.sprite requires a src, dest (img), and destCss property');
}
// Expand all filepaths (e.g. `*.png` -> `home.png`)
var srcFiles = grunt.file.expand(src);
// If there are settings for retina
var retinaSrcFiles;
var retinaSrcFilter = data.retinaSrcFilter;
var retinaDestImg = data.retinaDest;
if (retinaSrcFilter || retinaDestImg) {
grunt.log.debug('Retina settings detected');
// Verify our required set is present
if (!retinaSrcFilter || !retinaDestImg) {
return grunt.fatal('Retina settings detected. We must have both `retinaSrcFilter` and `retinaDest` ' +
'provided for retina to work');
}
// Filter out our retina files
retinaSrcFiles = [];
srcFiles = srcFiles.filter(function filterSrcFile (filepath) {
// If we have a retina file, filter it out
if (grunt.file.match(retinaSrcFilter, filepath).length) {
retinaSrcFiles.push(filepath);
return false;
// Otherwise, keep it in the src files
} else {
return true;
}
});
grunt.verbose.writeln('Retina images found: ' + retinaSrcFiles.join(', '));
// If we have a different amount of normal and retina images, complain and leave
if (srcFiles.length !== retinaSrcFiles.length) {
return grunt.fatal('Retina settings detected but ' + retinaSrcFiles.length + ' retina images were found. ' +
'We have ' + srcFiles.length + ' normal images and expect these numbers to line up. ' +
'Please double check `retinaSrcFilter`.');
}
}
// Create an async callback
var cb = this.async();
// Determine the format of the image
var imgOpts = data.imgOpts || {};
var imgFormat = imgOpts.format || imgFormats.get(destImg) || 'png';
// Set up the defautls for imgOpts
_.defaults(imgOpts, {format: imgFormat});
// Prepare spritesmith parameters
var spritesmithParams = {
src: srcFiles,
engine: data.engine,
algorithm: data.algorithm,
padding: data.padding || 0,
algorithmOpts: data.algorithmOpts || {},
engineOpts: data.engineOpts || {},
exportOpts: imgOpts
};
// In parallel
async.parallel([
// Run our normal task
function normalSpritesheet (callback) {
spritesmith(spritesmithParams, callback);
},
// If we have a retina task, run it as well
function retinaSpritesheet (callback) {
// DEV: We don't check length since we could have no images passed in
if (retinaSrcFiles) {
var retinaParams = _.defaults({
src: retinaSrcFiles,
padding: spritesmithParams.padding * 2
}, spritesmithParams);
spritesmith(retinaParams, callback);
} else {
process.nextTick(callback);
}
}
], function handleSpritesheets (err, resultArr) {
// If an error occurred, callback with it
if (err) {
grunt.fatal(err);
return cb(err);
}
// Otherwise, write out the result to destImg
var result = resultArr[0];
var destImgDir = path.dirname(destImg);
grunt.file.mkdir(destImgDir);
fs.writeFileSync(destImg, result.image, 'binary');
// Generate a listing of CSS variables
var coordinates = result.coordinates;
var properties = result.properties;
var spritePath = data.imgPath || url.relative(destCss, destImg);
var spritesheetInfo = {
width: properties.width,
height: properties.height,
image: spritePath
};
var cssVarMap = data.cssVarMap || function noop () {};
var cleanCoords = [];
// Clean up the file name of the file
Object.getOwnPropertyNames(coordinates).sort().forEach(function prepareTemplateData (file) {
// Extract out our name
var name = getCoordinateName(file);
var coords = coordinates[file];
// Specify the image for the sprite
coords.name = name;
coords.source_image = file;
// DEV: `image`, `total_width`, `total_height` are deprecated as they are overwritten in `spritesheet-templates`
coords.image = spritePath;
coords.total_width = properties.width;
coords.total_height = properties.height;
// Map the coordinates through cssVarMap
coords = cssVarMap(coords) || coords;
// Save the cleaned name and coordinates
cleanCoords.push(coords);
});
// If we have retina sprites
var retinaCleanCoords;
var retinaGroups;
var retinaResult = resultArr[1];
var retinaSpritesheetInfo;
if (retinaResult) {
// Write out the result to destImg
var retinaDestImgDir = path.dirname(retinaDestImg);
grunt.file.mkdir(retinaDestImgDir);
fs.writeFileSync(retinaDestImg, retinaResult.image, 'binary');
// Generate a listing of CSS variables
var retinaCoordinates = retinaResult.coordinates;
var retinaProperties = retinaResult.properties;
var retinaSpritePath = data.retinaImgPath || url.relative(destCss, retinaDestImg);
retinaSpritesheetInfo = {
width: retinaProperties.width,
height: retinaProperties.height,
image: retinaSpritePath
};
// DEV: We reuse cssVarMap
retinaCleanCoords = [];
// Clean up the file name of the file
Object.getOwnPropertyNames(retinaCoordinates).sort().forEach(function prepareRetinaTemplateData (file) {
var name = getCoordinateName(file);
var coords = retinaCoordinates[file];
coords.name = name;
coords.source_image = file;
coords.image = retinaSpritePath;
coords.total_width = retinaProperties.width;
coords.total_height = retinaProperties.height;
coords = cssVarMap(coords) || coords;<|fim▁hole|> retinaGroups = cleanCoords.map(function getRetinaGroups (normalSprite, i) {
// Assert that image sizes line up for debugging purposes
var retinaSprite = retinaCleanCoords[i];
if (retinaSprite.width !== normalSprite.width * 2 || retinaSprite.height !== normalSprite.height * 2) {
grunt.log.warn('Normal sprite has inconsistent size with retina sprite. ' +
'"' + normalSprite.name + '" is ' + normalSprite.width + 'x' + normalSprite.height + ' while ' +
'"' + retinaSprite.name + '" is ' + retinaSprite.width + 'x' + retinaSprite.height + '.');
}
// Generate our group
// DEV: Name is inherited from `cssVarMap` on normal sprite
return {
name: normalSprite.name,
index: i
};
});
}
// If we have handlebars helpers, register them
var handlebarsHelpers = data.cssHandlebarsHelpers;
if (handlebarsHelpers) {
Object.keys(handlebarsHelpers).forEach(function registerHelper (helperKey) {
templater.registerHandlebarsHelper(helperKey, handlebarsHelpers[helperKey]);
});
}
// If there is a custom template, use it
var cssFormat = 'spritesmith-custom';
var cssOptions = data.cssOpts || {};
if (cssTemplate) {
if (typeof cssTemplate === 'function') {
templater.addTemplate(cssFormat, cssTemplate);
} else {
templater.addHandlebarsTemplate(cssFormat, fs.readFileSync(cssTemplate, 'utf8'));
}
} else {
// Otherwise, override the cssFormat and fallback to 'json'
cssFormat = data.cssFormat;
if (!cssFormat) {
cssFormat = cssFormats.get(destCss) || 'json';
// If we are dealing with retina items, move to retina flavor (e.g. `scss` -> `scss_retina`)
if (retinaGroups) {
cssFormat += '_retina';
}
}
}
// Render the variables via `spritesheet-templates`
var cssStr = templater({
sprites: cleanCoords,
spritesheet: spritesheetInfo,
spritesheet_info: {
name: data.cssSpritesheetName
},
retina_groups: retinaGroups,
retina_sprites: retinaCleanCoords,
retina_spritesheet: retinaSpritesheetInfo,
retina_spritesheet_info: {
name: data.cssRetinaSpritesheetName
},
retina_groups_info: {
name: data.cssRetinaGroupsName
}
}, {
format: cssFormat,
formatOpts: cssOptions
});
// Write it out to the CSS file
var destCssDir = path.dirname(destCss);
grunt.file.mkdir(destCssDir);
fs.writeFileSync(destCss, cssStr, 'utf8');
// Fail task if errors were logged.
if (that.errorCount) { cb(false); }
// Otherwise, print a success message.
if (retinaDestImg) {
grunt.log.writeln('Files "' + destCss + '", "' + destImg + '", "' + retinaDestImg + '" created.');
} else {
grunt.log.writeln('Files "' + destCss + '", "' + destImg + '" created.');
}
// Callback
cb(true);
});
}
// Export the SpriteMaker function
grunt.registerMultiTask('sprite', 'Spritesheet making utility', SpriteMaker);
};<|fim▁end|> | retinaCleanCoords.push(coords);
});
// Generate groups for our coordinates |
<|file_name|>SequenceA.java<|end_file_name|><|fim▁begin|>package com.moon.threadlocal;
import org.junit.Test;
/**
* Created by Paul on 2017/2/12.
*/<|fim▁hole|> private static int number=0;
@Override
public int getNumber(){
number=number+1;
return number;
}
}<|fim▁end|> | public class SequenceA implements Sequence{ |
<|file_name|>HorizonHoe.java<|end_file_name|><|fim▁begin|>package com.kraz.minehr.items;
import com.kraz.minehr.MineHr;
import com.kraz.minehr.reference.Reference;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.item.ItemHoe;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
public class HorizonHoe extends ItemHoe {
public HorizonHoe() {
super(MineHr.HorizonToolMaterial);
}
@SideOnly(Side.CLIENT)
public void registerIcons(IIconRegister iconRegister) {
this.itemIcon = iconRegister.registerIcon(Reference.MOD_ID + ":" + this.getUnlocalizedName().substring(5));
}
<|fim▁hole|><|fim▁end|> |
} |
import mpi4py, petsc4py
from petsc4py import PETSc
import numpy as np
import pytest
import gridPy
import geometryPy

# Fix: removed the stray <|file_name|>/<|fim_begin|> marker tokens that were
# fused onto the first import line and made this module unparseable.

# Initialize PETSc and report the MPI layout this test session runs under.
petsc4py.init()
petscComm = petsc4py.PETSc.COMM_WORLD
comm = petscComm.tompi4py()
rank = comm.Get_rank()
numProcs = comm.Get_size()
PETSc.Sys.Print("Using %d procs" % numProcs)

# Grid resolution and dimensionality come from the pytest command line
# (pytest.config.getoption — presumably registered in a conftest.py; note
# this API is from the pytest 2/3 era, TODO confirm against the pinned pytest).
N1  = int(pytest.config.getoption('N1'))
N2  = int(pytest.config.getoption('N2'))
N3  = int(pytest.config.getoption('N3'))
dim = int(pytest.config.getoption('dim'))

# Geometry parameters
blackHoleSpin = float(pytest.config.getoption('blackHoleSpin'))
hSlope        = float(pytest.config.getoption('hSlope'))

# Unit cube in the computational coordinates X^1, X^2, X^3 with 3 ghost zones.
numGhost = 3
X1Start = 0.; X1End = 1.
X2Start = 0.; X2End = 1.
X3Start = 0.; X3End = 1.
periodicBoundariesX1 = False
periodicBoundariesX2 = False
periodicBoundariesX3 = False

XCoords = gridPy.coordinatesGridPy(N1, N2, N3,
                                   dim, numGhost,
                                   X1Start, X1End,
                                   X2Start, X2End,
                                   X3Start, X3End
                                  )
X1Coords, X2Coords, X3Coords = XCoords.getCoords(gridPy.CENTER)

# Flat-space geometry: spin and hSlope are irrelevant here, so pass zeros.
geomMinkowski = geometryPy.geometryPy(geometryPy.MINKOWSKI,
                                      0., 0.,
                                      XCoords
                                     )
def test_minkowski_params():
    """Grid bookkeeping on the Minkowski geometry matches the test config."""
    checks = [(N1, 'N1'), (N2, 'N2'), (N3, 'N3'),
              (dim, 'dim'), (numGhost, 'numGhost')]
    for expected, attr in checks:
        np.testing.assert_equal(expected, getattr(geomMinkowski, attr))
def test_minkowski_gCov():
    """Covariant flat-space metric is diag(-1, 1, 1, 1)."""
    eta = np.diag([-1., 1., 1., 1.])
    for mu in range(4):
        for nu in range(4):
            np.testing.assert_allclose(geomMinkowski.gCov[mu][nu], eta[mu, nu])
def test_minkowski_gCon():
    """Contravariant flat-space metric is diag(-1, 1, 1, 1).

    Fix: the original source had a stray FIM marker token fused onto the
    gCon[1][3] assertion line, making the module a syntax error. The 16
    hand-written asserts are replaced by an equivalent loop (same components,
    same order, same expected values).
    """
    eta = np.diag([-1., 1., 1., 1.])
    for mu in range(4):
        for nu in range(4):
            np.testing.assert_allclose(geomMinkowski.gCon[mu][nu], eta[mu, nu])
def test_minkowski_g():
    # sqrt(-det(g_{mu nu})) is identically 1 for the Minkowski metric.
    np.testing.assert_allclose(geomMinkowski.g, 1.)
def test_minkowski_alpha():
    """Lapse alpha = 1/sqrt(-g^{tt}) is unity in flat space.

    Bug fix: the original body asserted geomMinkowski.g (an obvious copy-paste
    from test_minkowski_g), so the lapse was never actually exercised. The
    Kerr-Schild counterpart below checks .alpha, confirming the intent.
    """
    np.testing.assert_allclose(geomMinkowski.alpha, 1.)
# Build the Modified Kerr-Schild (MKS) geometry under test.
geomKerrSchild = geometryPy.geometryPy(geometryPy.MODIFIED_KERR_SCHILD,
                                       blackHoleSpin, hSlope,
                                       XCoords
                                      )
# From McKinney and Gammie, 2004
# Check if the coordinate transformations have been done correctly
# Physical Kerr-Schild coordinates (r, theta, phi) from computational (X1, X2, X3):
# log spacing in r; hSlope concentrates theta zones toward the midplane.
r = np.exp(X1Coords)
theta = np.pi*X2Coords + 0.5*(1. - hSlope)*np.sin(2.*np.pi*X2Coords)
phi = 2*np.pi*X3Coords
# Standard Kerr auxiliaries: Sigma = r^2 + (a cos th)^2, Delta = r^2 - 2r + a^2.
# NOTE(review): delta and A are assembled here but not referenced below.
sigma = r**2. + (blackHoleSpin*np.cos(theta) )**2.
delta = r**2. - 2*r + blackHoleSpin**2.
A = (r**2. + blackHoleSpin**2.)**2.
sigmaMinus = r**2. - (blackHoleSpin*np.cos(theta) )**2.
# Coordinate transformation for log spacing in r and concentrating zones in the
# mid plane
# Jacobian factors dr/dX1, dtheta/dX2 and the second derivative d2theta/dX2^2.
dr_dX1 = np.exp(X1Coords)
dtheta_dX2 = np.pi*(1. + (1. - hSlope)*np.cos(2.*np.pi*X2Coords))
# (stray trailing semicolon kept from the original; harmless in Python)
d2theta_dX22 = -2.*np.pi*np.pi*(1-hSlope)*np.sin(2.*np.pi*X2Coords);
# Total zone counts including ghost zones, and the reference arrays to fill.
N1Total = XCoords.N1Total
N2Total = XCoords.N2Total
N3Total = XCoords.N3Total
gCovCheck = np.zeros([4, 4, N3Total, N2Total, N1Total])
gConCheck = np.zeros([4, 4, N3Total, N2Total, N1Total])
gCheck = np.zeros([N3Total, N2Total, N1Total])
# Analytic MKS covariant metric g_{mu nu} in X^mu = (t, X1, X2, phi), i.e. the
# Kerr-Schild line element with the dr/dX1 and dtheta/dX2 Jacobians applied.
gCovCheck[0][0] = -(1. - 2*r/sigma) # dt^2
gCovCheck[0][1] = (2*r/sigma) * dr_dX1 # dt dX1
gCovCheck[0][2] = 0. # dt dX2
gCovCheck[0][3] = -(2.*blackHoleSpin*r*np.sin(theta)**2./sigma) # dt dphi
gCovCheck[1][0] = gCovCheck[0][1]
gCovCheck[1][1] = (1. + 2*r/sigma) * dr_dX1**2. # dX1 dX1
gCovCheck[1][2] = 0.
gCovCheck[1][3] = -blackHoleSpin * (1. + 2*r/sigma)*np.sin(theta)**2. \
                  * dr_dX1 # dX1 dphi
gCovCheck[2][0] = gCovCheck[0][2]
gCovCheck[2][1] = gCovCheck[1][2]
gCovCheck[2][2] = sigma * dtheta_dX2 * dtheta_dX2 # dX2 dX2
gCovCheck[2][3] = 0. # dX2 dphi
gCovCheck[3][0] = gCovCheck[0][3]
gCovCheck[3][1] = gCovCheck[1][3]
gCovCheck[3][2] = gCovCheck[2][3]
gCovCheck[3][3] = np.sin(theta)**2. \
                  * (sigma + blackHoleSpin**2. \
                  * (1. + 2.*r/sigma)*np.sin(theta)**2. \
                  ) # dphi dphi
# Zone-by-zone reference: invert g_{mu nu} with numpy to obtain g^{mu nu} and
# sqrt(-det g). (xrange: this file is Python 2 era.)
gCovPerZone = np.zeros([4, 4])
for k in xrange(N3Total):
    for j in xrange(N2Total):
        for i in xrange(N1Total):
            gCovPerZone[0, 0] = gCovCheck[0][0][k, j, i]
            gCovPerZone[0, 1] = gCovCheck[0][1][k, j, i]
            gCovPerZone[0, 2] = gCovCheck[0][2][k, j, i]
            gCovPerZone[0, 3] = gCovCheck[0][3][k, j, i]
            gCovPerZone[1, 0] = gCovCheck[1][0][k, j, i]
            gCovPerZone[1, 1] = gCovCheck[1][1][k, j, i]
            gCovPerZone[1, 2] = gCovCheck[1][2][k, j, i]
            gCovPerZone[1, 3] = gCovCheck[1][3][k, j, i]
            gCovPerZone[2, 0] = gCovCheck[2][0][k, j, i]
            gCovPerZone[2, 1] = gCovCheck[2][1][k, j, i]
            gCovPerZone[2, 2] = gCovCheck[2][2][k, j, i]
            gCovPerZone[2, 3] = gCovCheck[2][3][k, j, i]
            gCovPerZone[3, 0] = gCovCheck[3][0][k, j, i]
            gCovPerZone[3, 1] = gCovCheck[3][1][k, j, i]
            gCovPerZone[3, 2] = gCovCheck[3][2][k, j, i]
            gCovPerZone[3, 3] = gCovCheck[3][3][k, j, i]
            gConPerZone = np.linalg.inv(gCovPerZone)
            gCheck[k, j, i] = np.sqrt(-np.linalg.det(gCovPerZone))
            gConCheck[0][0][k, j, i] = gConPerZone[0, 0]
            gConCheck[0][1][k, j, i] = gConPerZone[0, 1]
            gConCheck[0][2][k, j, i] = gConPerZone[0, 2]
            gConCheck[0][3][k, j, i] = gConPerZone[0, 3]
            gConCheck[1][0][k, j, i] = gConPerZone[1, 0]
            gConCheck[1][1][k, j, i] = gConPerZone[1, 1]
            gConCheck[1][2][k, j, i] = gConPerZone[1, 2]
            gConCheck[1][3][k, j, i] = gConPerZone[1, 3]
            gConCheck[2][0][k, j, i] = gConPerZone[2, 0]
            gConCheck[2][1][k, j, i] = gConPerZone[2, 1]
            gConCheck[2][2][k, j, i] = gConPerZone[2, 2]
            gConCheck[2][3][k, j, i] = gConPerZone[2, 3]
            gConCheck[3][0][k, j, i] = gConPerZone[3, 0]
            gConCheck[3][1][k, j, i] = gConPerZone[3, 1]
            gConCheck[3][2][k, j, i] = gConPerZone[3, 2]
            gConCheck[3][3][k, j, i] = gConPerZone[3, 3]
# Lapse reference: alpha = 1/sqrt(-g^{tt}).
alphaCheck = 1./np.sqrt(-gConCheck[0][0])
# Analytic connection coefficients Gamma^a_{bc} for the MKS metric (symmetric
# in the lower two indices, so only the upper triangle is written out and the
# rest copied). Kept verbatim — these closed forms are order-sensitive and
# transcribed from computer algebra output.
geomKerrSchild.computeConnectionCoeffs()
gammaUpDownDownCheck = np.zeros([4, 4, 4, N3Total, N2Total, N1Total])
gammaUpDownDownCheck[0][0][0] = 2.*r*sigmaMinus / sigma**3.
gammaUpDownDownCheck[0][0][1] = r * (2*r + sigma) * sigmaMinus / sigma**3.
gammaUpDownDownCheck[0][0][2] = -blackHoleSpin**2. * r * np.sin(2.*theta) \
                                * dtheta_dX2 / sigma**2.
gammaUpDownDownCheck[0][0][3] = -2. * blackHoleSpin * r * np.sin(theta)**2. \
                                * sigmaMinus / sigma**3.
gammaUpDownDownCheck[0][1][0] = gammaUpDownDownCheck[0][0][1]
gammaUpDownDownCheck[0][1][1] = 2.*r**2.*(r**4. + r*sigmaMinus
                                          - (blackHoleSpin*np.cos(theta))**4.
                                         ) / sigma**3.
gammaUpDownDownCheck[0][1][2] = -blackHoleSpin**2. * r**2. * np.sin(2.*theta) \
                                * dtheta_dX2 / sigma**2.
gammaUpDownDownCheck[0][1][3] = blackHoleSpin * r * (-r*(r**3. + 2*sigmaMinus)
                                                     + ( blackHoleSpin
                                                        * np.cos(theta)
                                                       )**4.
                                                    ) * np.sin(theta)**2. \
                                / sigma**3.
gammaUpDownDownCheck[0][2][0] = gammaUpDownDownCheck[0][0][2]
gammaUpDownDownCheck[0][2][1] = gammaUpDownDownCheck[0][1][2]
gammaUpDownDownCheck[0][2][2] = -2. * r**2. * dtheta_dX2**2. / sigma
gammaUpDownDownCheck[0][2][3] = blackHoleSpin**3. * r * np.sin(theta)**2. \
                                * np.sin(2.*theta) * dtheta_dX2 / sigma**2.
gammaUpDownDownCheck[0][3][0] = gammaUpDownDownCheck[0][0][3]
gammaUpDownDownCheck[0][3][1] = gammaUpDownDownCheck[0][1][3]
gammaUpDownDownCheck[0][3][2] = gammaUpDownDownCheck[0][2][3]
gammaUpDownDownCheck[0][3][3] = 2.*r*np.sin(theta)**2. \
                                * (-r*sigma**2. +
                                   blackHoleSpin**2.*np.sin(theta)**2.*sigmaMinus
                                  ) / sigma**3.
gammaUpDownDownCheck[1][0][0] = (blackHoleSpin**2. + r*(-2. + r)) \
                                * sigmaMinus / (r * sigma**3.)
gammaUpDownDownCheck[1][0][1] = sigmaMinus \
                                * ( -2.*r + (blackHoleSpin*np.sin(theta))**2.) \
                                / sigma**3.
gammaUpDownDownCheck[1][0][2] = 0.
gammaUpDownDownCheck[1][0][3] = -blackHoleSpin * np.sin(theta)**2. \
                                * (blackHoleSpin**2. + r*(-2. + r)) * sigmaMinus \
                                / (r * sigma**3.)
gammaUpDownDownCheck[1][1][0] = gammaUpDownDownCheck[1][0][1]
gammaUpDownDownCheck[1][1][1] = \
  ( r**4.*(-2. + r)*(1. + r)
    + blackHoleSpin**2. * ( blackHoleSpin**2.*r*(1. + 3.*r)*np.cos(theta)**4. \
                            + (blackHoleSpin*np.cos(theta))**4. * np.cos(theta)**2. \
                            + r**3.*np.sin(theta)**2. \
                            + r*np.cos(theta)**2. \
                            *(2.*r + 3.*r**3. - (blackHoleSpin*np.sin(theta))**2.)
                          )
  ) / sigma**3.
gammaUpDownDownCheck[1][1][2] = -blackHoleSpin**2. * dtheta_dX2 \
                                * np.sin(2.*theta) \
                                / (blackHoleSpin**2. + 2.*r**2.
                                   + blackHoleSpin**2.*np.cos(2.*theta)
                                  )
gammaUpDownDownCheck[1][1][3] = \
  blackHoleSpin * np.sin(theta)**2. * (blackHoleSpin**4. * r * np.cos(theta)**4.
                                       + r**2*(2.*r + r**3.
                                               -(blackHoleSpin*np.sin(theta))**2.
                                              ) \
                                       + (blackHoleSpin*np.cos(theta))**2. \
                                       * (2.*r*(-1. + r**2.)
                                          + (blackHoleSpin*np.sin(theta))**2.
                                         )
                                      ) / sigma**3.
gammaUpDownDownCheck[1][2][0] = gammaUpDownDownCheck[1][0][2]
gammaUpDownDownCheck[1][2][1] = gammaUpDownDownCheck[1][1][2]
gammaUpDownDownCheck[1][2][2] = -(blackHoleSpin**2. + r*(-2. + r)) \
                                * dtheta_dX2**2. / sigma
gammaUpDownDownCheck[1][2][3] = 0.
gammaUpDownDownCheck[1][3][0] = gammaUpDownDownCheck[1][0][3]
gammaUpDownDownCheck[1][3][1] = gammaUpDownDownCheck[1][1][3]
gammaUpDownDownCheck[1][3][2] = gammaUpDownDownCheck[1][2][3]
gammaUpDownDownCheck[1][3][3] = \
  -(blackHoleSpin**2. + r*(-2. + r) ) * np.sin(theta)**2. \
  * (r * sigma**2. -
     blackHoleSpin**2.*sigmaMinus*np.sin(theta)**2.
    ) / (r * sigma**3.)
gammaUpDownDownCheck[2][0][0] = -blackHoleSpin**2. * r * np.sin(2.*theta) \
                                / sigma**3. / dtheta_dX2
gammaUpDownDownCheck[2][0][1] = r * gammaUpDownDownCheck[2][0][0]
gammaUpDownDownCheck[2][0][2] = 0.
gammaUpDownDownCheck[2][0][3] = blackHoleSpin*r*(blackHoleSpin**2. + r**2.) \
                                * np.sin(2.*theta) / sigma**3. / dtheta_dX2
gammaUpDownDownCheck[2][1][0] = gammaUpDownDownCheck[2][0][1]
gammaUpDownDownCheck[2][1][1] = r**2. * gammaUpDownDownCheck[2][0][0]
gammaUpDownDownCheck[2][1][2] = r**2. / sigma
gammaUpDownDownCheck[2][1][3] = (blackHoleSpin*r*np.cos(theta)*np.sin(theta)
                                 *(r**3.*(2. + r)
                                   + blackHoleSpin**2.
                                   *( 2.*r*(1. + r)*np.cos(theta)**2.
                                      + blackHoleSpin**2.*np.cos(theta)**4.
                                      + 2.*r*np.sin(theta)**2.
                                    )
                                  )
                                ) / sigma**3. / dtheta_dX2
gammaUpDownDownCheck[2][2][0] = gammaUpDownDownCheck[2][0][2]
gammaUpDownDownCheck[2][2][1] = gammaUpDownDownCheck[2][1][2]
gammaUpDownDownCheck[2][2][2] = -blackHoleSpin**2.*np.cos(theta)*np.sin(theta) \
                                *dtheta_dX2/sigma + d2theta_dX22/dtheta_dX2
gammaUpDownDownCheck[2][2][3] = 0.
gammaUpDownDownCheck[2][3][0] = gammaUpDownDownCheck[2][0][3]
gammaUpDownDownCheck[2][3][1] = gammaUpDownDownCheck[2][1][3]
gammaUpDownDownCheck[2][3][2] = gammaUpDownDownCheck[2][2][3]
gammaUpDownDownCheck[2][3][3] = \
  -np.cos(theta)*np.sin(theta) \
  *(sigma**3. + (blackHoleSpin*np.sin(theta))**2. \
    * sigma*(r*(4. + r) + (blackHoleSpin*np.cos(theta)**2.)) \
    + 2.*r*(blackHoleSpin * np.sin(theta))**4. \
   ) / sigma**3. / dtheta_dX2
gammaUpDownDownCheck[3][0][0] = blackHoleSpin * sigmaMinus / sigma**3.
gammaUpDownDownCheck[3][0][1] = r * gammaUpDownDownCheck[3][0][0]
gammaUpDownDownCheck[3][0][2] = -2.*blackHoleSpin*r*np.cos(theta) \
                                * dtheta_dX2 / (np.sin(theta) * sigma**2.)
gammaUpDownDownCheck[3][0][3] = -blackHoleSpin**2. * np.sin(theta)**2. \
                                * sigmaMinus / sigma**3.
gammaUpDownDownCheck[3][1][0] = gammaUpDownDownCheck[3][0][1]
gammaUpDownDownCheck[3][1][1] = blackHoleSpin * r**2. * sigmaMinus \
                                / sigma**3.
gammaUpDownDownCheck[3][1][2] = -2.*blackHoleSpin*r \
                                *(blackHoleSpin**2. + 2.*r*(2. + r)
                                  + blackHoleSpin**2. * np.cos(2.*theta)
                                 ) * np.cos(theta) * dtheta_dX2 \
                                / (np.sin(theta) \
                                   * (blackHoleSpin**2. + 2.*r**2.
                                      + blackHoleSpin**2.*np.cos(2.*theta)
                                     )**2.
                                  )
gammaUpDownDownCheck[3][1][3] = \
  r*(r*sigma**2. - (blackHoleSpin*np.sin(theta))**2.*sigmaMinus)/sigma**3.
gammaUpDownDownCheck[3][2][0] = gammaUpDownDownCheck[3][0][2]
gammaUpDownDownCheck[3][2][1] = gammaUpDownDownCheck[3][1][2]
gammaUpDownDownCheck[3][2][2] = -blackHoleSpin * r * dtheta_dX2**2./sigma
gammaUpDownDownCheck[3][2][3] = \
  dtheta_dX2*(.25*(blackHoleSpin**2.
                   + 2.*r**2. + blackHoleSpin**2.*np.cos(2.*theta)
                  )**2. * np.cos(theta)/np.sin(theta)
              + blackHoleSpin**2. * r * np.sin(2.*theta)
             )/sigma**2.
gammaUpDownDownCheck[3][3][0] = gammaUpDownDownCheck[3][0][3]
gammaUpDownDownCheck[3][3][1] = gammaUpDownDownCheck[3][1][3]
gammaUpDownDownCheck[3][3][2] = gammaUpDownDownCheck[3][2][3]
gammaUpDownDownCheck[3][3][3] = \
  (-blackHoleSpin * r * np.sin(theta)**2. * sigma**2. \
   + blackHoleSpin**3. * np.sin(theta)**4. * sigmaMinus) / sigma**3.
def test_modifiedKerrSchild_params():
    """Grid bookkeeping on the Kerr-Schild geometry matches the test config."""
    checks = [(N1, 'N1'), (N2, 'N2'), (N3, 'N3'),
              (dim, 'dim'), (numGhost, 'numGhost')]
    for expected, attr in checks:
        np.testing.assert_equal(expected, getattr(geomKerrSchild, attr))
def test_modifiedKerrSchild_xCoords():
    """Physical (r, theta, phi) from the geometry match the analytic transforms."""
    for axis, expected in enumerate((r, theta, phi)):
        np.testing.assert_allclose(expected, geomKerrSchild.xCoords[axis])
def test_modifiedKerrSchild_gCov():
    """Covariant metric from the geometry matches the analytic MKS metric."""
    for mu in range(4):
        for nu in range(4):
            np.testing.assert_allclose(gCovCheck[mu][nu],
                                       geomKerrSchild.gCov[mu][nu])
def test_modifiedKerrSchild_gCon():
    """Inverse metric from the geometry matches the numerically inverted check.

    The (t, phi) and (phi, t) components vanish only up to roundoff, so those
    two get an absolute tolerance; everything else uses assert_allclose's
    default relative tolerance — exactly as the component-by-component asserts
    did.
    """
    absTolerance = {(0, 3): 1e-14, (3, 0): 1e-14}
    for mu in range(4):
        for nu in range(4):
            kwargs = {}
            if (mu, nu) in absTolerance:
                kwargs['atol'] = absTolerance[(mu, nu)]
            np.testing.assert_allclose(gConCheck[mu][nu],
                                       geomKerrSchild.gCon[mu][nu],
                                       **kwargs)
def test_modifiedKerrSchild_g():
    # Geometry's sqrt(-det(g)) vs. the numpy-determinant reference built above.
    np.testing.assert_allclose(gCheck, geomKerrSchild.g)
def test_modifiedKerrSchild_alpha():
    # Lapse alpha = 1/sqrt(-g^{tt}) vs. the reference computed from gConCheck.
    np.testing.assert_allclose(alphaCheck, geomKerrSchild.alpha)
def test_modifiedKerrSchild_gammaUpDownDown():
    """Connection coefficients Gamma^a_{bc} from the geometry match the
    analytic expressions assembled in gammaUpDownDownCheck.

    Fix: the original source had a stray FIM marker token fused onto the
    final line, making the module a syntax error. The 64 hand-written asserts
    are also collapsed into one loop driven by a tolerance table — same
    components, same order, same tolerances as before.
    """
    # Components that only vanish (or nearly cancel) analytically are compared
    # with an absolute tolerance; all others use assert_allclose's defaults.
    absTolerance = {}
    for component in [(0, 0, 2), (0, 1, 2), (0, 2, 0), (0, 2, 1), (0, 2, 3),
                      (0, 3, 1), (0, 3, 2),
                      (1, 0, 2), (1, 1, 1), (1, 1, 2), (1, 2, 0), (1, 2, 1),
                      (1, 2, 3), (1, 3, 1), (1, 3, 2),
                      (2, 0, 0), (2, 0, 1), (2, 0, 3), (2, 1, 0), (2, 1, 1),
                      (2, 1, 3), (2, 2, 0), (2, 2, 1), (2, 2, 3), (2, 3, 0),
                      (2, 3, 1),
                      (3, 0, 1), (3, 1, 1)]:
        absTolerance[component] = 1e-7
    # Loosest comparison in the original: the phi-phi component of Gamma^theta.
    absTolerance[(2, 3, 3)] = 3e-3
    for a in range(4):
        for b in range(4):
            for c in range(4):
                kwargs = {}
                if (a, b, c) in absTolerance:
                    kwargs['atol'] = absTolerance[(a, b, c)]
                np.testing.assert_allclose(gammaUpDownDownCheck[a][b][c],
                                           geomKerrSchild.gammaUpDownDown[a][b][c],
                                           **kwargs)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.