| text (stringlengths 2–1.04M) | meta (dict) |
| --- | --- |
#include "mysql.hh"
MySql::MySql() { driver = get_driver_instance(); }
MySql::~MySql() {}
template <typename T>
void MySql::errorCheck(T function) {
try {
function();
} catch (sql::SQLException &error) {
std::cerr << "MySQL error code: " << error.getErrorCode()
<< ", SQLState: " << error.getSQLState() << '\n';
throw;
}
}
void MySql::connectTo(const std::string &url, const std::string &username,
const std::string &password) {
errorCheck([&]() {
connection = std::unique_ptr<sql::Connection>(
driver->connect(url.c_str(), username.c_str(), password.c_str()));
statement =
std::unique_ptr<sql::Statement>(connection->createStatement());
});
}
void MySql::selectDatabase(const std::string &databaseName) {
errorCheck([&]() { connection->setSchema(databaseName.c_str()); });
}
void MySql::executeQuery(const std::string &query) {
errorCheck([&]() { result.reset(statement->executeQuery(query.c_str())); });
}
void MySql::executeQueryNoResult(const std::string &query) {
errorCheck([&]() { statement->execute(query.c_str()); });
}
std::string MySql::getPreviousResponseColumn(const unsigned int &columnNumber) {
std::string columnInformation;
errorCheck([&]() {
if (result->next()) {
columnInformation = result->getString(columnNumber);
}
});
return columnInformation;
}
std::string MySql::getPreviousResponseColumn(const std::string &columnName) {
std::string columnInformation;
errorCheck([&]() {
if (result->next()) {
columnInformation = result->getString(columnName);
}
});
return columnInformation;
}
std::unique_ptr<sql::ResultSet> MySql::getFullResult() { return std::move(result); }
| {
"content_hash": "ec67c715576528c07a3caa71bbda9e19",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 84,
"avg_line_length": 27.71212121212121,
"alnum_prop": 0.6118097320940404,
"repo_name": "R2D2-2017/R2D2-2017",
"id": "c13391d564e6de6701e43b89d2ed1ecc57f22b61",
"size": "2043",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "modules/RFID/src/mysql.cc",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "180306"
},
{
"name": "CMake",
"bytes": "14738"
},
{
"name": "Python",
"bytes": "11228"
},
{
"name": "Shell",
"bytes": "174"
}
],
"symlink_target": ""
} |
import { Image } from "lively.morphic";
export function uploadItem() {}
export async function uploadFile(file, type, options) {
// file is an instance of the Browser File class
// https://developer.mozilla.org/en-US/docs/Web/API/File
if (type.startsWith("image/")) {
// upload as inlined image
let imageUrl = await fileReadAsDataURL(file);
return new Image({
imageUrl,
autoResize: true,
name: file.name
});
}
return null;
}
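// Example (sketch): wiring uploadFile into a hypothetical drop handler.
// The event and target morph below (`evt`, `world`) are assumptions and
// are not part of this module:
// async function onDrop(evt, world) {
//   for (let file of evt.dataTransfer.files) {
//     let morph = await uploadFile(file, file.type);
//     if (morph) world.addMorph(morph);
//   }
// }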
// -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
// helpers
function fileReadAsDataURL(file) {
return new Promise((resolve, reject) => {
let reader = new FileReader();
reader.onload = e => resolve(e.target.result);
reader.onerror = () => reject(reader.error);
reader.readAsDataURL(file);
});
} | {
"content_hash": "7663857b0515753f86c9d6f19311ca54",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 60,
"avg_line_length": 23.59375,
"alnum_prop": 0.6013245033112583,
"repo_name": "LivelyKernel/lively.morphic",
"id": "3602d63ae3db548b697c6f9835ea5f9d848b54c4",
"size": "755",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "events/html-drop-handler.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4636"
},
{
"name": "HTML",
"bytes": "23462"
},
{
"name": "JavaScript",
"bytes": "1572906"
}
],
"symlink_target": ""
} |
#ifndef _INCREMENTAL_PLOT_HH_
#define _INCREMENTAL_PLOT_HH_
#include <map>
#include <list>
#include <qwt/qwt_plot_magnifier.h>
#include <qwt/qwt_plot.h>
#include "gazebo/math/Vector2d.hh"
#include "gazebo/gui/qt.h"
class QwtPlotCurve;
class QwtPlotDirectPainter;
namespace gazebo
{
namespace gui
{
/// \brief A plotting widget that handles incremental addition of data.
class IncrementalPlot : public QwtPlot
{
Q_OBJECT
/// \brief Constructor
/// \param[in] _parent Pointer to a parent widget
public: IncrementalPlot(QWidget *_parent = NULL);
/// \brief Destructor
public: virtual ~IncrementalPlot();
/// \brief Give QT a size hint.
/// \return Default size of the plot.
public: virtual QSize sizeHint() const;
/// \brief Add a new point to a curve.
/// \param[in] _label Name of the curve to add a point to. A curve
/// will be added if it doesn't exist.
/// \param[in] _pt Point to add.
public slots: void Add(const QString &_label, const QPointF &_pt);
/// \brief Add new points to a curve.
/// \param[in] _label Name of the curve to add a point to. A curve
/// will be added if it doesn't exist.
/// \param[in] _pt Points to add.
public slots: void Add(const QString &_label,
const std::list<QPointF> &_pts);
/// \brief Clear a single curve from the plot.
/// \param[in] _label Name of the curve to remove.
public: void Clear(const QString &_label);
/// \brief Clear all points from the plot.
public: void Clear();
/// \brief Return true if the plot has the labeled curve.
/// \param[in] _label Name of the curve to check for.
/// \return True if _label is currently plotted.
public: bool HasCurve(const QString &_label);
/// \brief Update all the curves in the plot
public: void Update();
/// \brief Used to accept drag enter events.
/// \param[in] _evt The drag event.
protected: void dragEnterEvent(QDragEnterEvent *_evt);
/// \brief Used to accept drop events.
/// \param[in] _evt The drop event.
protected: void dropEvent(QDropEvent *_evt);
/// \brief Adjust a curve to fit new data.
/// \param[in] _curve Curve to adjust
private: void AdjustCurve(QwtPlotCurve *_curve);
/// \brief Add a named curve.
/// \param[in] _label Name of the curve.
/// \return A pointer to the new curve.
private: QwtPlotCurve *AddCurve(const QString &_label);
/// \def CurveMap
/// \brief A map of strings to qwt plot curves.
private: typedef std::map<QString, QwtPlotCurve *> CurveMap;
/// \brief The curves to draw.
private: CurveMap curves;
/// \brief Drawing utility
private: QwtPlotDirectPainter *directPainter;
/// \brief Pointer to the plot magnifier
private: QwtPlotMagnifier *magnifier;
};
}
}
#endif
| {
"content_hash": "8bd5e11e821a3316600f217629619093",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 75,
"avg_line_length": 30.232323232323232,
"alnum_prop": 0.6237888406281323,
"repo_name": "thomas-moulard/gazebo-deb",
"id": "475b347516ec8f3561e3ef55e3356c84658abfe8",
"size": "3609",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "gazebo/gui/IncrementalPlot.hh",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "652008"
},
{
"name": "C++",
"bytes": "6417236"
},
{
"name": "JavaScript",
"bytes": "25255"
}
],
"symlink_target": ""
} |
Just another time tracking tool
| {
"content_hash": "ca371b420d9d73294c3bcb9a9d20e474",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 31,
"avg_line_length": 32,
"alnum_prop": 0.84375,
"repo_name": "n-develop/tickettimer",
"id": "6f4701b9de67539977d59a86e96bf269317d8393",
"size": "46",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "70606"
}
],
"symlink_target": ""
} |
package rescuecore2.standard;
/**
Useful constants for the standard package.
*/
public final class StandardConstants {
/** Config key for the number of fire brigades in the scenario. */
public static final String FIRE_BRIGADE_COUNT_KEY = "scenario.agents.fb";
/** Config key for the number of ambulance teams in the scenario. */
public static final String AMBULANCE_TEAM_COUNT_KEY = "scenario.agents.at";
/** Config key for the number of police forces in the scenario. */
public static final String POLICE_FORCE_COUNT_KEY = "scenario.agents.pf";
/** Config key for the number of fire stations in the scenario. */
public static final String FIRE_STATION_COUNT_KEY = "scenario.agents.fs";
/** Config key for the number of ambulance centres in the scenario. */
public static final String AMBULANCE_CENTRE_COUNT_KEY = "scenario.agents.ac";
/** Config key for the number of police offices in the scenario. */
public static final String POLICE_OFFICE_COUNT_KEY = "scenario.agents.po";
private StandardConstants() {}
} | {
"content_hash": "324a8a9a0137b268bc64ca5dd9f54b44",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 81,
"avg_line_length": 41.26923076923077,
"alnum_prop": 0.7166821994408201,
"repo_name": "alim1369/sos",
"id": "d1cdbacd14caa0e669a879c660b5832a6b578a41",
"size": "1073",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/rescuecore2/standard/StandardConstants.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "3170693"
},
{
"name": "Shell",
"bytes": "1292"
}
],
"symlink_target": ""
} |
set -x
# Uncomment the selected web server.
USE_NGINX=1
#USE_APACHE2=1
# Uncomment the selected database.
USE_MYSQL=1
#USE_PGSQL=1
#USE_SQLITE3=1
# Uncomment if extra images libraries for redmine will be installed.
#USE_IMAGEMAGICK=1
# Set password. Change at your preference.
DB_PASSWORD='dbpasswd'
REDMINE_PASSWORD='redminepwd'
set +x
# Set non-interactive installer mode, update repos.
export DEBIAN_FRONTEND=noninteractive
sudo apt-get update
# Install and setup database.
if [[ -n ${USE_MYSQL} ]]; then
# Setup and install mysql-server
echo "redmine redmine/instances/default/database-type select mysql" | sudo debconf-set-selections
echo "redmine redmine/instances/default/mysql/method select unix socket" | sudo debconf-set-selections
echo "redmine redmine/instances/default/mysql/app-pass password ${DB_PASSWORD}" | sudo debconf-set-selections
echo "redmine redmine/instances/default/mysql/admin-pass password ${DB_PASSWORD}" | sudo debconf-set-selections
echo "mysql-server mysql-server/root_password password ${DB_PASSWORD}" | sudo debconf-set-selections
echo "mysql-server mysql-server/root_password_again password ${DB_PASSWORD}" | sudo debconf-set-selections
sudo apt-get install -q -y mysql-server mysql-client
sudo apt-get install -q -y redmine-mysql
elif [[ -n ${USE_PGSQL} ]]; then
# Setup and install pgsql-server
echo "redmine redmine/instances/default/database-type select pgsql" | sudo debconf-set-selections
echo "redmine redmine/instances/default/pgsql/method select unix socket" | sudo debconf-set-selections
echo "redmine redmine/instances/default/pgsql/authmethod-admin select ident" | sudo debconf-set-selections
echo "redmine redmine/instances/default/pgsql/authmethod-user select ident" | sudo debconf-set-selections
echo "redmine redmine/instances/default/pgsql/app-pass password" | sudo debconf-set-selections
echo "redmine redmine/instances/default/pgsql/admin-pass password" | sudo debconf-set-selections
echo "dbconfig-common dbconfig-common/pgsql/authmethod-admin select ident" | sudo debconf-set-selections
echo "dbconfig-common dbconfig-common/pgsql/authmethod-user select ident" | sudo debconf-set-selections
sudo apt-get install -q -y postgresql postgresql-contrib
sudo apt-get install -q -y redmine-pgsql
elif [[ -n ${USE_SQLITE3} ]]; then
echo 'redmine redmine/instances/default/database-type select sqlite3' | sudo debconf-set-selections
echo 'redmine redmine/instances/default/db/basepath string /var/lib/dbconfig-common/sqlite3/redmine/instances/default' | sudo debconf-set-selections
# SQLite is installed and used as the default database if no other is set.
fi
# Install redmine.
echo "redmine redmine/instances/default/app-password password ${REDMINE_PASSWORD}" | sudo debconf-set-selections
echo "redmine redmine/instances/default/app-password-confirm password ${REDMINE_PASSWORD}" | sudo debconf-set-selections
echo "redmine redmine/instances/default/dbconfig-install boolean true" | sudo debconf-set-selections
sudo apt-get install -q -y redmine
# Extra required package for ubuntu 14.04 to make redmine work.
sudo gem install bundler
# Extras
if [[ -n ${USE_IMAGEMAGICK} ]]; then
sudo apt-get install -q -y imagemagick
sudo apt-get install -q -y ruby-rmagick
fi
# Change permissions for redmine directory.
sudo chown www-data:www-data /usr/share/redmine
#############################################
# Install web servers.
#############################################
# nginx as first option.
# ----------------------
if [[ -n ${USE_NGINX} ]]; then
# nginx and thin are required.
sudo apt-get install -q -y nginx thin
# Configure thin.
sudo thin config \
--config /etc/thin1.9.1/redmine.yml \
--chdir /usr/share/redmine \
--environment production \
--servers 2 \
--socket /tmp/thin.redmine.sock \
--pid tmp/pids/thin.pid
# Configure nginx. For now the default config is overridden.
sudo dd of=/etc/nginx/sites-available/default << EOF
upstream redmine_upstream {
server unix:/tmp/thin.redmine.0.sock;
server unix:/tmp/thin.redmine.1.sock;
}
server {
listen 80;
server_name 127.0.0.1;
root /usr/share/redmine/public;
location / {
try_files \$uri @redmine_ruby;
}
location @redmine_ruby {
proxy_set_header X-Real-IP \$remote_addr;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
proxy_set_header Host \$http_host;
proxy_redirect off;
proxy_read_timeout 300;
proxy_pass http://redmine_upstream;
}
}
EOF
# Restart thin
sudo service thin restart
# Restart nginx
sudo service nginx restart
# Else, install apache2.
# ----------------------
elif [[ -n ${USE_APACHE2} ]]; then
# Install apache2
sudo apt-get install -q -y apache2 libapache2-mod-passenger
# Link redmine into apache2.
sudo ln -s /usr/share/redmine/public /var/www/redmine
# Override apache settings.
sudo dd of=/etc/apache2/sites-available/000-default.conf <<EOF
<VirtualHost *:80>
ServerAdmin webmaster@localhost
DocumentRoot /var/www/redmine
ErrorLog \${APACHE_LOG_DIR}/error.log
CustomLog \${APACHE_LOG_DIR}/access.log combined
<Directory /var/www/redmine>
RailsBaseURI /
PassengerResolveSymlinksInDocumentRoot on
</Directory>
</VirtualHost>
EOF
# Configure passenger
sudo dd of=/etc/apache2/mods-available/passenger.conf <<EOF
<IfModule mod_passenger.c>
PassengerDefaultUser www-data
PassengerRoot /usr/lib/ruby/vendor_ruby/phusion_passenger/locations.ini
PassengerDefaultRuby /usr/bin/ruby
</IfModule>
EOF
# Configure security messages.
sudo sed -i 's|ServerTokens .*|ServerTokens Prod|g' /etc/apache2/conf-available/security.conf
# Restart apache2
sudo service apache2 restart
fi
cat <<EOF
################################################
# Now you should be able to see redmine webpage
# http://localhost:8888
################################################
EOF
| {
"content_hash": "9dab0088952a5cb120bbe69240aa638b",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 152,
"avg_line_length": 36.455089820359284,
"alnum_prop": 0.6984231274638634,
"repo_name": "clalarco/vagrant-redmine",
"id": "a27c6ae8bbc9896826d4f3236112204a5bcdeb41",
"size": "6216",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vagrant/bootstrap.sh",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "4778"
},
{
"name": "Shell",
"bytes": "6216"
}
],
"symlink_target": ""
} |
DOCUMENTATION DEPRECATION NOTICE: This file is deprecated. Please refer to the
[new migrated
location](https://github.com/kubernetes-sigs/prow/tree/main/site/content/en/docs/Legacy%20Snapshot/prow/cmd/jenkins-operator/README.md).
Please do not edit this file; instead, make changes to the new location!
The new location is served on the web at
https://docs.prow.k8s.io/docs/legacy-snapshot/.
This file will be deleted on 2022-11-30.
| {
"content_hash": "1f40504214d4e2a15cd7beaac0d67f61",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 136,
"avg_line_length": 43.6,
"alnum_prop": 0.7889908256880734,
"repo_name": "dims/test-infra",
"id": "2361787630b6de555d339b890986ca02c275d253",
"size": "436",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "prow/cmd/jenkins-operator/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "39760"
},
{
"name": "Dockerfile",
"bytes": "38096"
},
{
"name": "Go",
"bytes": "8914262"
},
{
"name": "HCL",
"bytes": "3593"
},
{
"name": "HTML",
"bytes": "82013"
},
{
"name": "JavaScript",
"bytes": "55688"
},
{
"name": "Jinja",
"bytes": "7868"
},
{
"name": "Jsonnet",
"bytes": "77541"
},
{
"name": "Makefile",
"bytes": "42715"
},
{
"name": "Mermaid",
"bytes": "1882"
},
{
"name": "Python",
"bytes": "734681"
},
{
"name": "Shell",
"bytes": "355873"
},
{
"name": "TypeScript",
"bytes": "216504"
}
],
"symlink_target": ""
} |
using UnityEngine;
using System.Collections;
public class CharacterController : MonoBehaviour
{
private void Update ()
{
Vector2 speed = transform.position - Camera.main.ScreenToWorldPoint(Input.mousePosition);
rigidbody2D.velocity = -0.4f* speed;
}
}
| {
"content_hash": "17fbd62bbb3a3aec75fccf2bec369d19",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 97,
"avg_line_length": 21.384615384615383,
"alnum_prop": 0.7194244604316546,
"repo_name": "teinemaa/worldgen2d",
"id": "e8512d15cd9d3a8ff94d0363a3e000cf18582e9f",
"size": "280",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Assets/Scripts/CharacterController.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "193987"
},
{
"name": "GLSL",
"bytes": "1031"
}
],
"symlink_target": ""
} |
import React from 'react';
import classNames from 'classnames';
import RcInputNumber from 'rc-input-number';
import StepProps from './PropsType';
import Icon from '../icon';
export default class Stepper extends React.Component<StepProps, any> {
static defaultProps = {
prefixCls: 'am-stepper',
step: 1,
readOnly: false,
showNumber: false,
focusOnUpDown: false,
useTouch: true,
};
render() {
const { className, showNumber, ...restProps } = this.props;
const stepperClass = classNames({
[className as string]: !!className,
['showNumber']: !!showNumber,
});
return (
<RcInputNumber
upHandler={<Icon type={require('./style/assets/plus.svg')} size="xxs" />}
downHandler={<Icon type={require('./style/assets/minus.svg')} size="xxs" />}
{...restProps}
ref="inputNumber"
className={stepperClass}
/>
);
}
}
| {
"content_hash": "58e0e22b0cbdfd8fd95ad332cc0ccf6a",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 84,
"avg_line_length": 26.970588235294116,
"alnum_prop": 0.6259541984732825,
"repo_name": "can-yin-quan/antd-mobile-cyq",
"id": "d99aa09628e4ed287fc1cb8719fe282d904c799b",
"size": "917",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "components/stepper/index.web.tsx",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "150197"
},
{
"name": "HTML",
"bytes": "9680"
},
{
"name": "Java",
"bytes": "4761"
},
{
"name": "JavaScript",
"bytes": "125034"
},
{
"name": "Objective-C",
"bytes": "24702"
},
{
"name": "Python",
"bytes": "1632"
},
{
"name": "Shell",
"bytes": "876"
},
{
"name": "TypeScript",
"bytes": "414891"
}
],
"symlink_target": ""
} |
using System;
using System.Text;
namespace ShortUrl.Core.Patterns
{
public sealed class SimplePatternGenerator : IPatternGenerator
{
private static readonly string _allowedCharacters = @"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
private static readonly Random _random = new Random();
public string Generate()
{
var builder = new StringBuilder(10);
int randomMax = _allowedCharacters.Length; // Random.Next's upper bound is exclusive, so use the full length
for (int i = 0; i < 10; i++)
{
builder.Append(_allowedCharacters[_random.Next(0, randomMax)]);
}
return builder.ToString();
}
}
} | {
"content_hash": "43d76fbdf134892ca4f178ad8503f5cb",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 120,
"avg_line_length": 24.458333333333332,
"alnum_prop": 0.7257240204429302,
"repo_name": "ManuelRauber/ShortUrl",
"id": "f7d702e3a9e4b9f618669cdf7c17c74828f4ffdd",
"size": "589",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ShortUrl.Core/Patterns/SimplePatternGenerator.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "102"
},
{
"name": "C#",
"bytes": "34104"
}
],
"symlink_target": ""
} |
use algorithmia::prelude::*;
use std::error::Error;
fn apply(input: AlgoIo) -> Result<AlgoIo, Box<Error>> {
Ok(input)
}
fn main() {
handler::run(apply)
}
| {
"content_hash": "17274b814608e2dc3e13b829e7b0d47e",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 55,
"avg_line_length": 16.4,
"alnum_prop": 0.6280487804878049,
"repo_name": "algorithmiaio/algorithmia-rust",
"id": "14cc1b6e4aa0e871c2ade67cb11e1376173760fe",
"size": "164",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/handlers/src/bin/faas_algo_io.rs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Rust",
"bytes": "77669"
}
],
"symlink_target": ""
} |
package pp
import (
"errors"
"fmt"
"github.com/mattn/go-colorable"
"io"
"os"
)
var out io.Writer
func init() {
out = colorable.NewColorableStdout()
}
func Print(a ...interface{}) (n int, err error) {
return fmt.Fprint(out, formatAll(a)...)
}
func Printf(format string, a ...interface{}) (n int, err error) {
return fmt.Fprintf(out, format, formatAll(a)...)
}
func Println(a ...interface{}) (n int, err error) {
return fmt.Fprintln(out, formatAll(a)...)
}
func Sprint(a ...interface{}) string {
return fmt.Sprint(formatAll(a)...)
}
func Sprintf(format string, a ...interface{}) string {
return fmt.Sprintf(format, formatAll(a)...)
}
func Sprintln(a ...interface{}) string {
return fmt.Sprintln(formatAll(a)...)
}
func Fprint(w io.Writer, a ...interface{}) (n int, err error) {
return fmt.Fprint(w, formatAll(a)...)
}
func Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) {
return fmt.Fprintf(w, format, formatAll(a)...)
}
func Fprintln(w io.Writer, a ...interface{}) (n int, err error) {
return fmt.Fprintln(w, formatAll(a)...)
}
func Errorf(format string, a ...interface{}) error {
return errors.New(Sprintf(format, a...))
}
func Fatal(a ...interface{}) {
fmt.Fprint(out, formatAll(a)...)
os.Exit(1)
}
func Fatalf(format string, a ...interface{}) {
fmt.Fprintf(out, format, formatAll(a)...)
os.Exit(1)
}
func Fatalln(a ...interface{}) {
fmt.Fprintln(out, formatAll(a)...)
os.Exit(1)
}
func formatAll(objects []interface{}) []interface{} {
results := []interface{}{}
for _, object := range objects {
results = append(results, format(object))
}
return results
}
| {
"content_hash": "367a7372a471782c4fd283bb3a5c4451",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 79,
"avg_line_length": 20.833333333333332,
"alnum_prop": 0.6516923076923077,
"repo_name": "dmnlk/gomadare",
"id": "7044f734b0d06794888b0e87d89780c684477c83",
"size": "1625",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Godeps/_workspace/src/github.com/k0kubun/pp/pp.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "27701"
}
],
"symlink_target": ""
} |
title: Quality Assurance and Testing with Chai
localeTitle: Garantia de Qualidade e Teste com Chai
---
## Quality Assurance and Testing with Chai
This is a stub. [Help our community expand it](https://github.com/freecodecamp/guides/tree/master/src/pages/mathematics/quadratic-equations/index.md).
[This quick style guide will help ensure your pull request gets accepted](https://github.com/freecodecamp/guides/blob/master/README.md).
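As a starting point, here is a minimal sketch of Chai's `assert` style (it assumes the `chai` package is installed from npm):
```js
const chai = require('chai');
const assert = chai.assert;
// A few basic assertions from Chai's assert interface
assert.isNull(null, 'null is null');
assert.isDefined(1, '1 is not undefined');
assert.approximately(Math.PI, 3.14, 0.01, 'pi is roughly 3.14');
```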
#### More Information: | {
"content_hash": "2fa7f793aabdda4dd9e0325032bc6969",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 160,
"avg_line_length": 49,
"alnum_prop": 0.7877551020408163,
"repo_name": "pahosler/freecodecamp",
"id": "f7e8ff8c928f6cbae979b831fd0677278dde39b5",
"size": "502",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "guide/portuguese/certifications/information-security-and-quality-assurance/quality-assurance-and-testing-with-chai/index.md",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "35491"
},
{
"name": "HTML",
"bytes": "17600"
},
{
"name": "JavaScript",
"bytes": "777274"
}
],
"symlink_target": ""
} |
#ifndef _U_BOOT_H_
#define _U_BOOT_H_
typedef struct bd_info {
unsigned long bi_memstart; /* start of DRAM memory */
phys_size_t bi_memsize; /* size of DRAM memory in bytes */
unsigned long bi_flashstart; /* start of FLASH memory */
unsigned long bi_flashsize; /* size of FLASH memory */
unsigned long bi_flashoffset; /* reserved area for startup monitor */
unsigned long bi_sramstart; /* start of SRAM memory */
unsigned long bi_sramsize; /* size of SRAM memory */
unsigned int bi_baudrate; /* Console Baudrate */
ulong bi_boot_params; /* where this board expects params */
} bd_t;
/* For image.h:image_check_target_arch() */
#define IH_ARCH_DEFAULT IH_ARCH_MICROBLAZE
#endif /* _U_BOOT_H_ */
| {
"content_hash": "c68c6901b7cc24ea0eabb1525dc15003",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 70,
"avg_line_length": 33.904761904761905,
"alnum_prop": 0.6952247191011236,
"repo_name": "EleVenPerfect/S3C2440",
"id": "ab3f23202d67e326b4922074f3e29eece37fa908",
"size": "1205",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bootloader/u-boot-2014.04 for tq2440/arch/microblaze/include/asm/u-boot.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "107623"
},
{
"name": "Awk",
"bytes": "145"
},
{
"name": "C",
"bytes": "53471003"
},
{
"name": "C++",
"bytes": "4794882"
},
{
"name": "CSS",
"bytes": "7584"
},
{
"name": "GDB",
"bytes": "3642"
},
{
"name": "Makefile",
"bytes": "507759"
},
{
"name": "Objective-C",
"bytes": "33048"
},
{
"name": "PHP",
"bytes": "108169"
},
{
"name": "Perl",
"bytes": "213214"
},
{
"name": "Python",
"bytes": "223908"
},
{
"name": "Roff",
"bytes": "197018"
},
{
"name": "Shell",
"bytes": "86972"
},
{
"name": "Tcl",
"bytes": "967"
},
{
"name": "XSLT",
"bytes": "445"
}
],
"symlink_target": ""
} |
'use strict';
var is = require( '../../is' );
var Heap = require( '../../heap' );
var elesfn = ({
dijkstra: function( root, weightFn, directed ){
var options;
if( is.plainObject( root ) && !is.elementOrCollection( root ) ){
options = root;
root = options.root;
weightFn = options.weight;
directed = options.directed;
}
var cy = this._private.cy;
weightFn = is.fn( weightFn ) ? weightFn : function(){ return 1; }; // if not specified, assume each edge has equal weight (1)
var source = is.string( root ) ? this.filter( root )[0] : root[0];
var dist = {};
var prev = {};
var knownDist = {};
var edges = this.edges().filter( function(){ return !this.isLoop(); } );
var nodes = this.nodes();
var getDist = function( node ){
return dist[ node.id() ];
};
var setDist = function( node, d ){
dist[ node.id() ] = d;
Q.updateItem( node );
};
var Q = new Heap( function( a, b ){
return getDist( a ) - getDist( b );
} );
for( var i = 0; i < nodes.length; i++ ){
var node = nodes[ i ];
dist[ node.id() ] = node.same( source ) ? 0 : Infinity;
Q.push( node );
}
var distBetween = function( u, v ){
var uvs = ( directed ? u.edgesTo( v ) : u.edgesWith( v ) ).intersect( edges );
var smallestDistance = Infinity;
var smallestEdge;
for( var i = 0; i < uvs.length; i++ ){
var edge = uvs[ i ];
var weight = weightFn.apply( edge, [ edge ] );
if( weight < smallestDistance || !smallestEdge ){
smallestDistance = weight;
smallestEdge = edge;
}
}
return {
edge: smallestEdge,
dist: smallestDistance
};
};
while( Q.size() > 0 ){
var u = Q.pop();
var smallestDist = getDist( u );
var uid = u.id();
knownDist[ uid ] = smallestDist;
if( smallestDist === Infinity ){
break;
}
var neighbors = u.neighborhood().intersect( nodes );
for( var i = 0; i < neighbors.length; i++ ){
var v = neighbors[ i ];
var vid = v.id();
var vDist = distBetween( u, v );
var alt = smallestDist + vDist.dist;
if( alt < getDist( v ) ){
setDist( v, alt );
prev[ vid ] = {
node: u,
edge: vDist.edge
};
}
} // for
} // while
return {
distanceTo: function( node ){
var target = is.string( node ) ? nodes.filter( node )[0] : node[0];
return knownDist[ target.id() ];
},
pathTo: function( node ){
var target = is.string( node ) ? nodes.filter( node )[0] : node[0];
var S = [];
var u = target;
if( target.length > 0 ){
S.unshift( target );
while( prev[ u.id() ] ){
var p = prev[ u.id() ];
S.unshift( p.edge );
S.unshift( p.node );
u = p.node;
}
}
return cy.collection( S );
}
};
}
});
module.exports = elesfn;
| {
"content_hash": "a94da9af47faded64937dd3b3de55ae9",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 129,
"avg_line_length": 23.633587786259543,
"alnum_prop": 0.4935400516795866,
"repo_name": "rlugojr/cytoscape.js",
"id": "aa1dbe5e7d64f3a83cc2e42d9f01c9f983eadc1b",
"size": "3096",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/collection/algorithms/dijkstra.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5756"
},
{
"name": "HTML",
"bytes": "13896"
},
{
"name": "JavaScript",
"bytes": "2300943"
}
],
"symlink_target": ""
} |
package com.arjuna.databroker.data.jee.store;
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
@Entity
public class DataFlowNodeLinkEntity implements Serializable
{
private static final long serialVersionUID = 1636112437064485251L;
public DataFlowNodeLinkEntity()
{
}
public DataFlowNodeLinkEntity(String id, DataFlowNodeEntity nodeSource, DataFlowNodeEntity nodeSink, DataFlowEntity dataFlow)
{
_id = id;
_nodeSource = nodeSource;
_nodeSink = nodeSink;
_dataFlow = dataFlow;
}
public String getId()
{
return _id;
}
public void setId(String id)
{
_id = id;
}
public DataFlowNodeEntity getNodeSource()
{
return _nodeSource;
}
public void setNodeSource(DataFlowNodeEntity nodeSource)
{
_nodeSource = nodeSource;
}
public DataFlowNodeEntity getNodeSink()
{
return _nodeSink;
}
public void setNodeSink(DataFlowNodeEntity nodeSink)
{
_nodeSink = nodeSink;
}
public void setDataFlow(DataFlowEntity dataFlow)
{
_dataFlow = dataFlow;
}
public DataFlowEntity getDataFlow()
{
return _dataFlow;
}
@Id
@Column(name = "id")
protected String _id;
@ManyToOne
@JoinColumn(name = "nodeSource", nullable=false)
protected DataFlowNodeEntity _nodeSource;
@ManyToOne
@JoinColumn(name = "nodeSink", nullable=false)
protected DataFlowNodeEntity _nodeSink;
@ManyToOne
@JoinColumn(name="dataFlow", nullable=false)
public DataFlowEntity _dataFlow;
}
| {
"content_hash": "b2bb668999eed6d87c920340326f753d",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 129,
"avg_line_length": 21.071428571428573,
"alnum_prop": 0.6649717514124294,
"repo_name": "RISBIC/DataBroker",
"id": "047fb72e4cfdcec42dc3c99c6a2fa7549042f603",
"size": "1884",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data-core-jee-store/src/main/java/com/arjuna/databroker/data/jee/store/DataFlowNodeLinkEntity.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "48278"
},
{
"name": "CSS",
"bytes": "73765"
},
{
"name": "HTML",
"bytes": "222155"
},
{
"name": "Java",
"bytes": "702545"
},
{
"name": "JavaScript",
"bytes": "200980"
}
],
"symlink_target": ""
} |
class ResizeOtrsTicketId < ActiveRecord::Migration
def up
change_column :activities, :otrs_ticket_id, :integer, limit: 8
end
def down
change_column :activities, :otrs_ticket_id, :integer
end
end
| {
"content_hash": "1ec8b430c85b0ea722ab6fe32d010713",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 66,
"avg_line_length": 23.555555555555557,
"alnum_prop": 0.7216981132075472,
"repo_name": "ninech/uberzeit",
"id": "be3a4ab49585a3db9122bd617fd5fa039f3ad6f8",
"size": "212",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/migrate/20130618132736_resize_otrs_ticket_id.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "106509"
},
{
"name": "CoffeeScript",
"bytes": "18302"
},
{
"name": "HTML",
"bytes": "91271"
},
{
"name": "JavaScript",
"bytes": "171934"
},
{
"name": "Ruby",
"bytes": "448217"
},
{
"name": "Shell",
"bytes": "57"
}
],
"symlink_target": ""
} |
/**
* Automatically generated file. Please do not edit.
* @author Highcharts Config Generator by Karasiq
* @see [[http://api.highcharts.com/highmaps]]
*/
package com.highmaps.config
import scalajs.js, js.`|`
import com.highcharts.CleanJsObject
import com.highcharts.HighchartsUtils._
/**
* @note JavaScript name: <code>series<vector>-data</code>
*/
@js.annotation.ScalaJSDefined
class SeriesVectorData extends com.highcharts.HighchartsGenericObject {
/**
* <p>The length of the vector. The rendered length will relate to the
* <code>vectorLength</code> setting.</p>
*/
val length: js.UndefOr[Double] = js.undefined
/**
* <p>The vector direction in degrees, where 0 is north (pointing towards south).</p>
*/
val direction: js.UndefOr[Double] = js.undefined
/**
* <p>Point specific options for the draggable-points module. Overrides options on
* <code>series.dragDrop</code>.</p>
* <p>Requires the <code>draggable-points</code> module.</p>
* @since 6.2.0
*/
val dragDrop: js.UndefOr[CleanJsObject[SeriesVectorDataDragDrop]] = js.undefined
/**
* <p>The <code>id</code> of a series in the <a href="#drilldown.series">drilldown.series</a>
* array to use for a drilldown for this point.</p>
* @example <a href="https://jsfiddle.net/gh/library/pure/highcharts/highcharts/tree/master/samples/highcharts/drilldown/basic/">Basic drilldown</a>
* @since 3.0.8
*/
val drilldown: js.UndefOr[String] = js.undefined
/**
* <p>An additional, individual class name for the data point's graphic
* representation.</p>
* @since 5.0.0
*/
val className: js.UndefOr[String] = js.undefined
/**
* <p>Individual color for the point. By default the color is pulled from
* the global <code>colors</code> array.</p>
* <p>In styled mode, the <code>color</code> option doesn't take effect. Instead, use
* <code>colorIndex</code>.</p>
* @example <a href="https://jsfiddle.net/gh/library/pure/highcharts/highcharts/tree/master/samples/highcharts/point/color/">Mark the highest point</a>
*/
val color: js.UndefOr[String | js.Object] = js.undefined
/**
* <p>A specific color index to use for the point, so its graphic representations
* are given the class name <code>highcharts-color-{n}</code>. In styled mode this will
* change the color of the graphic. In non-styled mode, the color by is set by
* the <code>fill</code> attribute, so the change in class name won't have a visual effect
* by default.</p>
* @since 5.0.0
*/
val colorIndex: js.UndefOr[Double] = js.undefined
/**
* <p>Individual data label for each point. The options are the same as
* the ones for <a href="#plotOptions.series.dataLabels">plotOptions.series.dataLabels</a>.</p>
* @example <a href="https://jsfiddle.net/gh/library/pure/highcharts/highcharts/tree/master/samples/highcharts/point/datalabels/">Show a label for the last value</a>
*/
val dataLabels: js.Any = js.undefined
/**
* <p>A description of the point to add to the screen reader information
* about the point. Requires the Accessibility module.</p>
* @since 5.0.0
*/
val description: js.UndefOr[String] = js.undefined
/**
* <p>An id for the point. This can be used after render time to get a
* pointer to the point object through <code>chart.get()</code>.</p>
* @example <a href="https://jsfiddle.net/gh/library/pure/highcharts/highcharts/tree/master/samples/highcharts/point/id/">Remove an id'd point</a>
* @since 1.2.0
*/
val id: js.UndefOr[String] = js.undefined
/**
* <p>The rank for this point's data label in case of collision. If two
* data labels are about to overlap, only the one with the highest <code>labelrank</code>
* will be drawn.</p>
*/
val labelrank: js.UndefOr[Double] = js.undefined
/**
* <p>The name of the point as shown in the legend, tooltip, dataLabel
* etc.</p>
* @example <a href="https://jsfiddle.net/gh/library/pure/highcharts/highcharts/tree/master/samples/highcharts/series/data-array-of-objects/">Point names</a>
*/
val name: js.UndefOr[String] = js.undefined
/**
* <p>Whether the data point is selected initially.</p>
*/
val selected: js.UndefOr[Boolean] = js.undefined
/**
* <p>The x value of the point. For datetime axes, the X value is the timestamp
* in milliseconds since 1970.</p>
*/
val x: js.UndefOr[Double] = js.undefined
/**
* <p>The y value of the point.</p>
*/
val y: js.UndefOr[Double] = js.undefined
/**
* <p>Individual point events</p>
*/
val events: js.UndefOr[CleanJsObject[SeriesVectorDataEvents]] = js.undefined
val marker: js.UndefOr[CleanJsObject[SeriesVectorDataMarker]] = js.undefined
}
object SeriesVectorData {
/**
* @param length <p>The length of the vector. The rendered length will relate to the. <code>vectorLength</code> setting.</p>
* @param direction <p>The vector direction in degrees, where 0 is north (pointing towards south).</p>
* @param dragDrop <p>Point specific options for the draggable-points module. Overrides options on. <code>series.dragDrop</code>.</p>. <p>Requires the <code>draggable-points</code> module.</p>
* @param drilldown <p>The <code>id</code> of a series in the <a href="#drilldown.series">drilldown.series</a>. array to use for a drilldown for this point.</p>
* @param className <p>An additional, individual class name for the data point's graphic. representation.</p>
* @param color <p>Individual color for the point. By default the color is pulled from. the global <code>colors</code> array.</p>. <p>In styled mode, the <code>color</code> option doesn't take effect. Instead, use. <code>colorIndex</code>.</p>
* @param colorIndex <p>A specific color index to use for the point, so its graphic representations. are given the class name <code>highcharts-color-{n}</code>. In styled mode this will. change the color of the graphic. In non-styled mode, the color by is set by. the <code>fill</code> attribute, so the change in class name won't have a visual effect. by default.</p>
* @param dataLabels <p>Individual data label for each point. The options are the same as. the ones for <a href="#plotOptions.series.dataLabels">plotOptions.series.dataLabels</a>.</p>
* @param description <p>A description of the point to add to the screen reader information. about the point. Requires the Accessibility module.</p>
* @param id <p>An id for the point. This can be used after render time to get a. pointer to the point object through <code>chart.get()</code>.</p>
* @param labelrank <p>The rank for this point's data label in case of collision. If two. data labels are about to overlap, only the one with the highest <code>labelrank</code>. will be drawn.</p>
* @param name <p>The name of the point as shown in the legend, tooltip, dataLabel. etc.</p>
* @param selected <p>Whether the data point is selected initially.</p>
* @param x <p>The x value of the point. For datetime axes, the X value is the timestamp. in milliseconds since 1970.</p>
* @param y <p>The y value of the point.</p>
* @param events <p>Individual point events</p>
*/
def apply(length: js.UndefOr[Double] = js.undefined, direction: js.UndefOr[Double] = js.undefined, dragDrop: js.UndefOr[CleanJsObject[SeriesVectorDataDragDrop]] = js.undefined, drilldown: js.UndefOr[String] = js.undefined, className: js.UndefOr[String] = js.undefined, color: js.UndefOr[String | js.Object] = js.undefined, colorIndex: js.UndefOr[Double] = js.undefined, dataLabels: js.UndefOr[js.Any] = js.undefined, description: js.UndefOr[String] = js.undefined, id: js.UndefOr[String] = js.undefined, labelrank: js.UndefOr[Double] = js.undefined, name: js.UndefOr[String] = js.undefined, selected: js.UndefOr[Boolean] = js.undefined, x: js.UndefOr[Double] = js.undefined, y: js.UndefOr[Double] = js.undefined, events: js.UndefOr[CleanJsObject[SeriesVectorDataEvents]] = js.undefined, marker: js.UndefOr[CleanJsObject[SeriesVectorDataMarker]] = js.undefined): SeriesVectorData = {
val lengthOuter: js.UndefOr[Double] = length
val directionOuter: js.UndefOr[Double] = direction
val dragDropOuter: js.UndefOr[CleanJsObject[SeriesVectorDataDragDrop]] = dragDrop
val drilldownOuter: js.UndefOr[String] = drilldown
val classNameOuter: js.UndefOr[String] = className
val colorOuter: js.UndefOr[String | js.Object] = color
val colorIndexOuter: js.UndefOr[Double] = colorIndex
val dataLabelsOuter: js.Any = dataLabels
val descriptionOuter: js.UndefOr[String] = description
val idOuter: js.UndefOr[String] = id
val labelrankOuter: js.UndefOr[Double] = labelrank
val nameOuter: js.UndefOr[String] = name
val selectedOuter: js.UndefOr[Boolean] = selected
val xOuter: js.UndefOr[Double] = x
val yOuter: js.UndefOr[Double] = y
val eventsOuter: js.UndefOr[CleanJsObject[SeriesVectorDataEvents]] = events
val markerOuter: js.UndefOr[CleanJsObject[SeriesVectorDataMarker]] = marker
com.highcharts.HighchartsGenericObject.toCleanObject(new SeriesVectorData {
override val length: js.UndefOr[Double] = lengthOuter
override val direction: js.UndefOr[Double] = directionOuter
override val dragDrop: js.UndefOr[CleanJsObject[SeriesVectorDataDragDrop]] = dragDropOuter
override val drilldown: js.UndefOr[String] = drilldownOuter
override val className: js.UndefOr[String] = classNameOuter
override val color: js.UndefOr[String | js.Object] = colorOuter
override val colorIndex: js.UndefOr[Double] = colorIndexOuter
override val dataLabels: js.Any = dataLabelsOuter
override val description: js.UndefOr[String] = descriptionOuter
override val id: js.UndefOr[String] = idOuter
override val labelrank: js.UndefOr[Double] = labelrankOuter
override val name: js.UndefOr[String] = nameOuter
override val selected: js.UndefOr[Boolean] = selectedOuter
override val x: js.UndefOr[Double] = xOuter
override val y: js.UndefOr[Double] = yOuter
override val events: js.UndefOr[CleanJsObject[SeriesVectorDataEvents]] = eventsOuter
override val marker: js.UndefOr[CleanJsObject[SeriesVectorDataMarker]] = markerOuter
})
}
}
| {
"content_hash": "77695d0ea56b3cecee7c210f21e0a5dd",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 884,
"avg_line_length": 55.037234042553195,
"alnum_prop": 0.7066782642311781,
"repo_name": "Karasiq/scalajs-highcharts",
"id": "6d912be48e12b592b7a0ef9d73839d5a9ed5eb52",
"size": "10347",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/scala/com/highmaps/config/SeriesVectorData.scala",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Scala",
"bytes": "131509301"
}
],
"symlink_target": ""
} |
import { terser } from 'rollup-plugin-terser';
import ts from '@wessberg/rollup-plugin-ts';
import resolve from '@rollup/plugin-node-resolve';
import babel from '@rollup/plugin-babel';
import pkg from './package.json';
import license from 'rollup-plugin-license';
import filesize from 'rollup-plugin-filesize';
const extensions = ['.js', '.ts'];
export default [
{
input: 'src/HanziWriter.ts',
output: [
{
file: pkg.main,
format: 'cjs',
sourcemap: true,
exports: 'auto',
},
{
file: 'dist/hanzi-writer.min.js',
format: 'iife',
name: 'HanziWriter',
sourcemap: true,
plugins: [terser()],
exports: 'default',
},
{
file: 'dist/hanzi-writer.js',
format: 'iife',
name: 'HanziWriter',
exports: 'default',
},
{
file: pkg.module,
format: 'es',
sourcemap: true,
exports: 'default',
},
],
plugins: [
filesize(),
ts({
transpiler: 'babel',
}),
resolve({ extensions }),
babel({
exclude: 'node_modules/**',
extensions,
babelHelpers: 'bundled',
}),
license({
banner: `Hanzi Writer v<%= pkg.version %> | https://chanind.github.io/hanzi-writer`,
}),
],
},
];
| {
"content_hash": "e18451261bb438ca44dc8bbf6ced127a",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 92,
"avg_line_length": 23.20689655172414,
"alnum_prop": 0.5237741456166419,
"repo_name": "chanind/hanzi-writer",
"id": "1cf57c0cce2ecc502e5d3859aaf3dc50f32f3477",
"size": "1346",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rollup.config.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "2148"
},
{
"name": "TypeScript",
"bytes": "263748"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, minimum-scale=1, user-scalable=no">
<meta name="description" content="Star burst overlapping billboards.">
<meta name="cesium-sandcastle-labels" content="Showcases">
<title>Cesium Demo</title>
<script type="text/javascript" src="../Sandcastle-header.js"></script>
<script type="text/javascript" src="../../../ThirdParty/requirejs-2.1.20/require.js"></script>
<script type="text/javascript">
require.config({
baseUrl : '../../../Source',
waitSeconds : 60
});
</script>
</head>
<body class="sandcastle-loading" data-sandcastle-bucket="bucket-requirejs.html">
<style>
@import url(../templates/bucket.css);
</style>
<div id="cesiumContainer" class="fullSize"></div>
<div id="loadingOverlay"><h1>Loading...</h1></div>
<div id="toolbar">
<div id="zoomButtons"></div>
</div>
<script id="cesium_sandcastle_script">
function startup(Cesium) {
'use strict';
//Sandcastle_Begin
var viewer = new Cesium.Viewer('cesiumContainer', {
selectionIndicator : false
});
// Add labels clustered at the same location
var numBillboards = 30;
for (var i = 0; i < numBillboards; ++i) {
var position = Cesium.Cartesian3.fromDegrees(-75.59777, 40.03883);
viewer.entities.add({
position : position,
billboard : {
image : '../images/facility.gif',
scale : 2.5
},
label : {
text : 'Label' + i,
show : false
}
});
}
var scene = viewer.scene;
var camera = scene.camera;
var handler = new Cesium.ScreenSpaceEventHandler(scene.canvas);
handler.setInputAction(function(movement) {
// Star burst on left mouse click.
starBurst(movement.position);
}, Cesium.ScreenSpaceEventType.LEFT_CLICK);
handler.setInputAction(function(movement) {
// Remove the star burst when the mouse exits the circle or show the label of the billboard the mouse is hovering over.
updateStarBurst(movement.endPosition);
}, Cesium.ScreenSpaceEventType.MOUSE_MOVE);
camera.moveStart.addEventListener(function() {
// Reset the star burst on camera move because the line end points rely on the screen space positions of the billboards.
undoStarBurst();
});
// State saved across mouse click and move events
var starBurstState = {
enabled : false,
pickedEntities : undefined,
billboardEyeOffsets : undefined,
labelEyeOffsets : undefined,
linePrimitive : undefined,
radius : undefined,
center : undefined,
pixelPadding : 10.0,
angleStart : 0.0,
angleEnd : Cesium.Math.PI,
maxDimension : undefined
};
function offsetBillboard(entity, entityPosition, angle, magnitude, lines, billboardEyeOffsets, labelEyeOffsets) {
var x = magnitude * Math.cos(angle);
var y = magnitude * Math.sin(angle);
var offset = new Cesium.Cartesian2(x, y);
var drawingBufferWidth = scene.drawingBufferWidth;
var drawingBufferHeight = scene.drawingBufferHeight;
var diff = Cesium.Cartesian3.subtract(entityPosition, camera.positionWC, new Cesium.Cartesian3());
var distance = Cesium.Cartesian3.dot(camera.directionWC, diff);
var dimensions = camera.frustum.getPixelDimensions(drawingBufferWidth, drawingBufferHeight, distance, new Cesium.Cartesian2());
Cesium.Cartesian2.multiplyByScalar(offset, Cesium.Cartesian2.maximumComponent(dimensions), offset);
var labelOffset;
var billboardOffset = entity.billboard.eyeOffset;
var eyeOffset = new Cesium.Cartesian3(offset.x, offset.y, 0.0);
entity.billboard.eyeOffset = eyeOffset;
if (Cesium.defined(entity.label)) {
labelOffset = entity.label.eyeOffset;
entity.label.eyeOffset = new Cesium.Cartesian3(offset.x, offset.y, -10.0);
}
var endPoint = Cesium.Matrix4.multiplyByPoint(camera.viewMatrix, entityPosition, new Cesium.Cartesian3());
Cesium.Cartesian3.add(eyeOffset, endPoint, endPoint);
Cesium.Matrix4.multiplyByPoint(camera.inverseViewMatrix, endPoint, endPoint);
lines.push(endPoint);
billboardEyeOffsets.push(billboardOffset);
labelEyeOffsets.push(labelOffset);
}
function starBurst(mousePosition) {
if (Cesium.defined(starBurstState.pickedEntities)) {
return;
}
var pickedObjects = scene.drillPick(mousePosition);
if (!Cesium.defined(pickedObjects) || pickedObjects.length < 2) {
return;
}
var billboardEntities = [];
var length = pickedObjects.length;
var i;
for (i = 0; i < length; ++i) {
var pickedObject = pickedObjects[i];
if (pickedObject.primitive instanceof Cesium.Billboard) {
billboardEntities.push(pickedObject);
}
}
if (billboardEntities.length === 0) {
return;
}
var pickedEntities = starBurstState.pickedEntities = [];
var billboardEyeOffsets = starBurstState.billboardEyeOffsets = [];
var labelEyeOffsets = starBurstState.labelEyeOffsets = [];
var lines = [];
starBurstState.maxDimension = Number.NEGATIVE_INFINITY;
var angleStart = starBurstState.angleStart;
var angleEnd = starBurstState.angleEnd;
var angle = angleStart;
var angleIncrease;
var magnitude;
var magIncrease;
var maxDimension;
// Drill pick gets all of the entities under the mouse pointer.
// Find the billboards and set their pixel offsets in a circle pattern.
length = billboardEntities.length;
i = 0;
while (i < length) {
var object = billboardEntities[i];
if (pickedEntities.length === 0) {
starBurstState.center = Cesium.Cartesian3.clone(object.primitive.position);
}
if (!Cesium.defined(angleIncrease)) {
var width = object.primitive.width;
var height = object.primitive.height;
maxDimension = Math.max(width, height) * object.primitive.scale + starBurstState.pixelPadding;
magnitude = maxDimension + maxDimension * 0.5;
magIncrease = magnitude;
angleIncrease = maxDimension / magnitude;
}
offsetBillboard(object.id, object.primitive.position, angle, magnitude, lines, billboardEyeOffsets, labelEyeOffsets);
pickedEntities.push(object);
var reflectedAngle = angleEnd - angle;
if (i + 1 < length && reflectedAngle - angleIncrease * 0.5 > angle + angleIncrease * 0.5) {
object = billboardEntities[++i];
offsetBillboard(object.id, object.primitive.position, reflectedAngle, magnitude, lines, billboardEyeOffsets, labelEyeOffsets);
pickedEntities.push(object);
}
angle += angleIncrease;
if (reflectedAngle - angleIncrease * 0.5 < angle + angleIncrease * 0.5) {
magnitude += magIncrease;
angle = angleStart;
angleIncrease = maxDimension / magnitude;
}
++i;
}
// Add lines from the pick center out to the translated billboard.
var instances = [];
length = lines.length;
for (i = 0; i < length; ++i) {
var pickedEntity = pickedEntities[i];
starBurstState.maxDimension = Math.max(pickedEntity.primitive.width, pickedEntity.primitive.height, starBurstState.maxDimension);
instances.push(new Cesium.GeometryInstance({
geometry : new Cesium.SimplePolylineGeometry({
positions : [starBurstState.center, lines[i]],
followSurface : false,
granularity : Cesium.Math.PI_OVER_FOUR
}),
attributes : {
color : Cesium.ColorGeometryInstanceAttribute.fromColor(Cesium.Color.WHITE)
}
}));
}
starBurstState.linePrimitive = scene.primitives.add(new Cesium.Primitive({
geometryInstances : instances,
appearance : new Cesium.PerInstanceColorAppearance({
flat : true,
translucent : false
}),
asynchronous : false
}));
viewer.selectedEntity = undefined;
starBurstState.radius = magnitude + magIncrease;
}
function updateStarBurst(mousePosition) {
if (!Cesium.defined(starBurstState.pickedEntities)) {
return;
}
if (!starBurstState.enabled) {
// For some reason we get a mousemove event on click, so
// do not show a label on the first event.
starBurstState.enabled = true;
return;
}
// Remove the star burst if the mouse exits the screen space circle.
// If the mouse is inside the circle, show the label of the billboard the mouse is hovering over.
var screenPosition = Cesium.SceneTransforms.wgs84ToWindowCoordinates(scene, starBurstState.center);
var fromCenter = Cesium.Cartesian2.subtract(mousePosition, screenPosition, new Cesium.Cartesian2());
var radius = starBurstState.radius;
if (Cesium.Cartesian2.magnitudeSquared(fromCenter) > radius * radius || fromCenter.y > 3.0 * (starBurstState.maxDimension + starBurstState.pixelPadding)) {
undoStarBurst();
} else {
showLabels(mousePosition);
}
}
function undoStarBurst() {
var pickedEntities = starBurstState.pickedEntities;
if (!Cesium.defined(pickedEntities)) {
return;
}
var billboardEyeOffsets = starBurstState.billboardEyeOffsets;
var labelEyeOffsets = starBurstState.labelEyeOffsets;
// Reset billboard and label pixel offsets.
// Hide overlapping labels.
for (var i = 0; i < pickedEntities.length; ++i) {
var entity = pickedEntities[i].id;
entity.billboard.eyeOffset = billboardEyeOffsets[i];
if (Cesium.defined(entity.label)) {
entity.label.eyeOffset = labelEyeOffsets[i];
entity.label.show = false;
}
}
// Remove lines from the scene.
// Free resources and reset state.
scene.primitives.remove(starBurstState.linePrimitive);
starBurstState.linePrimitive = undefined;
starBurstState.pickedEntities = undefined;
starBurstState.billboardEyeOffsets = undefined;
starBurstState.labelEyeOffsets = undefined;
starBurstState.radius = undefined;
starBurstState.enabled = false;
}
var currentObject;
function showLabels(mousePosition) {
var pickedObjects = scene.drillPick(mousePosition);
var pickedObject;
if (Cesium.defined(pickedObjects)) {
var length = pickedObjects.length;
for (var i = 0; i < length; ++i) {
if (pickedObjects[i].primitive instanceof Cesium.Billboard) {
pickedObject = pickedObjects[i];
break;
}
}
}
if (pickedObject !== currentObject) {
if (Cesium.defined(pickedObject) && Cesium.defined(pickedObject.id.label)) {
if (Cesium.defined(currentObject)) {
currentObject.id.label.show = false;
}
currentObject = pickedObject;
pickedObject.id.label.show = true;
} else if (Cesium.defined(currentObject)) {
currentObject.id.label.show = false;
currentObject = undefined;
}
}
}
//Sandcastle_End
Sandcastle.finishedLoading();
}
if (typeof Cesium !== "undefined") {
startup(Cesium);
} else if (typeof require === "function") {
require(["Cesium"], startup);
}
</script>
</body>
</html> | {
"content_hash": "3f51e4a732bd77173f0c1621f54ae721",
"timestamp": "",
"source": "github",
"line_count": 330,
"max_line_length": 159,
"avg_line_length": 34.84848484848485,
"alnum_prop": 0.6614782608695652,
"repo_name": "ceos-seo/Data_Cube_v2",
"id": "1815226732e31bc9736380419b1e444fa3be7f93",
"size": "11500",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ui/django_site_v2/data_cube_ui/static/assets/js/Cesium-1.23/Apps/Sandcastle/gallery/Star Burst.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "1959"
},
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "CSS",
"bytes": "772075"
},
{
"name": "GLSL",
"bytes": "165400"
},
{
"name": "HTML",
"bytes": "1457619"
},
{
"name": "JavaScript",
"bytes": "50036576"
},
{
"name": "Jupyter Notebook",
"bytes": "16917211"
},
{
"name": "Makefile",
"bytes": "6773"
},
{
"name": "Python",
"bytes": "1174107"
},
{
"name": "Shell",
"bytes": "7641"
}
],
"symlink_target": ""
} |
// Definition of StatisticsSchema
import { SimpleSchema } from 'meteor/aldeed:simple-schema';
// schema for options used in each questions
export const StatisticsSchema = new SimpleSchema({
session: {
type: String,
},
// scales are one of: ['anxiety', 'depression', 'hopeless', 'suicide']
scale: {
type: String,
},
score: {
type: Number,
},
category: {
type: String,
},
// optional batch
batch: {
type: String,
optional: true,
defaultValue: ""
},
// auto-managed properties
createdAt: {
type: Date,
optional: true,
autoValue() {
if (this.isInsert) {
return new Date();
}
if (this.isUpsert) {
return { $setOnInsert: new Date() };
}
// Otherwise prevent user from supplying their own value
this.unset();
},
},
updatedAt: {
type: Date,
denyInsert: true,
optional: true,
autoValue() {
if (this.isUpdate) {
return new Date();
}
},
},
});
/*
_id: {
type: String,
unique: true,
autoValue: function () {
return Meteor.uuid();
}
},
*/ | {
"content_hash": "f806f11f1d09f2cdb44d240005ac3724",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 74,
"avg_line_length": 20.58730158730159,
"alnum_prop": 0.48342328450269856,
"repo_name": "dipu-bd/depression-detection-system",
"id": "2369058cde813dcecf15a9f213fb4e8f9af35a24",
"size": "1297",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "imports/schema/statistics.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "21711"
},
{
"name": "HTML",
"bytes": "23935"
},
{
"name": "JavaScript",
"bytes": "64133"
}
],
"symlink_target": ""
} |
import json
from os import path
import warnings
import numpy
import six
from chainer import reporter
from chainer import serializer as serializer_module
from chainer.training import extension
from chainer.training import trigger as trigger_module
try:
import matplotlib # NOQA
_available = True
except (ImportError, TypeError):
_available = False
def _check_available():
if not _available:
warnings.warn('matplotlib is not installed on your environment, '
'so nothing will be plotted at this time. '
'Please install matplotlib to plot figures.\n\n'
' $ pip install matplotlib\n')
class PlotReport(extension.Extension):
"""Trainer extension to output plots.
This extension accumulates the observations of the trainer to
:class:`~chainer.DictSummary` at a regular interval specified by a supplied
trigger, and plots a graph using them.
There are two triggers to handle this extension. One is the trigger to
invoke this extension, which is used to handle the timing of accumulating
the results. It is set to ``1, 'iteration'`` by default. The other is the
trigger to determine when to emit the result. When this trigger returns
True, this extension appends the summary of accumulated values to the list
of past summaries, and redraws the plot image from that list. Then, this
extension creates a fresh summary object which is used until the next
time that the trigger fires.
It also adds ``'epoch'`` and ``'iteration'`` entries to each result
dictionary, which are the epoch and iteration counts at the output.
.. warning::
If your environment needs to specify a backend of matplotlib
explicitly, please call ``matplotlib.use`` before calling
``trainer.run``. For example:
.. code-block:: python
import matplotlib
matplotlib.use('Agg')
trainer.extend(
extensions.PlotReport(['main/loss', 'validation/main/loss'],
'epoch', file_name='loss.png'))
trainer.run()
Then, once one of instances of this extension is called,
``matplotlib.use`` will have no effect.
For the details, please see here:
https://matplotlib.org/faq/usage_faq.html#what-is-a-backend
Args:
y_keys (iterable of strs): Keys of values regarded as y. If this is
None, nothing is output to the graph.
x_key (str): Keys of values regarded as x. The default value is
'iteration'.
trigger: Trigger that decides when to aggregate the result and output
the values. This is distinct from the trigger of this extension
itself. If it is a tuple in the form ``<int>, 'epoch'`` or ``<int>,
'iteration'``, it is passed to :class:`IntervalTrigger`.
postprocess: Callback to postprocess the result dictionaries. Figure
object, Axes object, and all plot data are passed to this callback
in this order. This callback can modify the figure.
file_name (str): Name of the figure file under the output directory.
It can be a format string.
marker (str): The marker used to plot the graph. Default is ``'x'``. If
``None`` is given, it draws with no markers.
grid (bool): Set the axis grid on if True. Default is True.
"""
def __init__(self, y_keys, x_key='iteration', trigger=(1, 'epoch'),
postprocess=None, file_name='plot.png', marker='x',
grid=True):
_check_available()
self._x_key = x_key
if isinstance(y_keys, str):
y_keys = (y_keys,)
self._y_keys = y_keys
self._trigger = trigger_module.get_trigger(trigger)
self._file_name = file_name
self._marker = marker
self._grid = grid
self._postprocess = postprocess
self._init_summary()
self._data = {k: [] for k in y_keys}
@staticmethod
def available():
_check_available()
return _available
def __call__(self, trainer):
if _available:
# Dynamically import pyplot to call matplotlib.use()
# after importing chainer.training.extensions
import matplotlib.pyplot as plt
else:
return
keys = self._y_keys
observation = trainer.observation
summary = self._summary
if keys is None:
summary.add(observation)
else:
summary.add({k: observation[k] for k in keys if k in observation})
if self._trigger(trainer):
stats = self._summary.compute_mean()
stats_cpu = {}
for name, value in six.iteritems(stats):
stats_cpu[name] = float(value) # copy to CPU
updater = trainer.updater
stats_cpu['epoch'] = updater.epoch
stats_cpu['iteration'] = updater.iteration
x = stats_cpu[self._x_key]
data = self._data
for k in keys:
if k in stats_cpu:
data[k].append((x, stats_cpu[k]))
f = plt.figure()
a = f.add_subplot(111)
a.set_xlabel(self._x_key)
if self._grid:
a.grid()
for k in keys:
xy = data[k]
if len(xy) == 0:
continue
xy = numpy.array(xy)
a.plot(xy[:, 0], xy[:, 1], marker=self._marker, label=k)
if a.has_data():
if self._postprocess is not None:
self._postprocess(f, a, summary)
l = a.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
f.savefig(path.join(trainer.out, self._file_name),
bbox_extra_artists=(l,), bbox_inches='tight')
plt.close()
self._init_summary()
def serialize(self, serializer):
if isinstance(serializer, serializer_module.Serializer):
serializer('_plot_{}'.format(self._file_name),
json.dumps(self._data))
else:
self._data = json.loads(
serializer('_plot_{}'.format(self._file_name), ''))
def _init_summary(self):
self._summary = reporter.DictSummary()
| {
"content_hash": "ec609db5795d22b39e3881ae9f506f4d",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 79,
"avg_line_length": 34.89673913043478,
"alnum_prop": 0.5854228313346831,
"repo_name": "anaruse/chainer",
"id": "4877dc4f8ec7bac858ad21afa01d82faefe83cc1",
"size": "6421",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "chainer/training/extensions/plot_report.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3368"
},
{
"name": "PowerShell",
"bytes": "7197"
},
{
"name": "Python",
"bytes": "3723858"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Mycotaxon 43: 255 (1992)
#### Original name
Chrysosporium globiferum var. niveum Skou, 1992
### Remarks
null | {
"content_hash": "a50b397ef177901f62c7b92c20d40496",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 47,
"avg_line_length": 15.153846153846153,
"alnum_prop": 0.7258883248730964,
"repo_name": "mdoering/backbone",
"id": "2c1513278f28392a009fe752528b907bd03c52cc",
"size": "268",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Ascomycota/Eurotiomycetes/Onygenales/Onygenaceae/Chrysosporium/Chrysosporium globiferum/Chrysosporium globiferum niveum/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
#ifndef ScriptGCEvent_h
#define ScriptGCEvent_h
#include "v8.h"
#include <wtf/Vector.h>
namespace WebCore {
struct HeapInfo {
HeapInfo()
: usedJSHeapSize(0)
, totalJSHeapSize(0)
, jsHeapSizeLimit(0)
{
}
size_t usedJSHeapSize;
size_t totalJSHeapSize;
size_t jsHeapSizeLimit;
};
class ScriptGCEventListener;
class GCEventData {
public:
typedef Vector<ScriptGCEventListener*> GCEventListeners;
GCEventData()
: m_startTime(0.0)
, m_usedHeapSize(0)
{ }
void clear()
{
m_startTime = 0.0;
m_usedHeapSize = 0;
}
GCEventListeners& listeners() { return m_listeners; }
double startTime() { return m_startTime; }
void setStartTime(double startTime) { m_startTime = startTime; }
size_t usedHeapSize() { return m_usedHeapSize; }
void setUsedHeapSize(size_t usedHeapSize) { m_usedHeapSize = usedHeapSize; }
private:
double m_startTime;
size_t m_usedHeapSize;
GCEventListeners m_listeners;
};
#if ENABLE(INSPECTOR)
class ScriptGCEvent
{
public:
static void addEventListener(ScriptGCEventListener*);
static void removeEventListener(ScriptGCEventListener*);
static void getHeapSize(HeapInfo&);
private:
static void gcEpilogueCallback(v8::GCType type, v8::GCCallbackFlags flags);
static void gcPrologueCallback(v8::GCType type, v8::GCCallbackFlags flags);
static size_t getUsedHeapSize();
};
#endif // ENABLE(INSPECTOR)
} // namespace WebCore
#endif // !defined(ScriptGCEvent_h)
| {
"content_hash": "b7fbd2f30b24926c41114ecc42955716",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 80,
"avg_line_length": 21.08219178082192,
"alnum_prop": 0.6842105263157895,
"repo_name": "leighpauls/k2cro4",
"id": "2fcd0fa1f080cfd80f0b6a5dbaac0bd8df04aacb",
"size": "3073",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "third_party/WebKit/Source/WebCore/bindings/v8/ScriptGCEvent.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "3062"
},
{
"name": "AppleScript",
"bytes": "25392"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "68131038"
},
{
"name": "C",
"bytes": "242794338"
},
{
"name": "C#",
"bytes": "11024"
},
{
"name": "C++",
"bytes": "353525184"
},
{
"name": "Common Lisp",
"bytes": "3721"
},
{
"name": "D",
"bytes": "1931"
},
{
"name": "Emacs Lisp",
"bytes": "1639"
},
{
"name": "F#",
"bytes": "4992"
},
{
"name": "FORTRAN",
"bytes": "10404"
},
{
"name": "Java",
"bytes": "3845159"
},
{
"name": "JavaScript",
"bytes": "39146656"
},
{
"name": "Lua",
"bytes": "13768"
},
{
"name": "Matlab",
"bytes": "22373"
},
{
"name": "Objective-C",
"bytes": "21887598"
},
{
"name": "PHP",
"bytes": "2344144"
},
{
"name": "Perl",
"bytes": "49033099"
},
{
"name": "Prolog",
"bytes": "2926122"
},
{
"name": "Python",
"bytes": "39863959"
},
{
"name": "R",
"bytes": "262"
},
{
"name": "Racket",
"bytes": "359"
},
{
"name": "Ruby",
"bytes": "304063"
},
{
"name": "Scheme",
"bytes": "14853"
},
{
"name": "Shell",
"bytes": "9195117"
},
{
"name": "Tcl",
"bytes": "1919771"
},
{
"name": "Verilog",
"bytes": "3092"
},
{
"name": "Visual Basic",
"bytes": "1430"
},
{
"name": "eC",
"bytes": "5079"
}
],
"symlink_target": ""
} |
#include "cfg.h"
#include "instr.h"
#include "PinDisasm.h"
#include "prog.h"
extern "C" {
#include "xed-interface.h"
}
BasicBlock *Cfg::addBasicBlock(addr_t addr) {
BasicBlock *b;
b = new BasicBlock(addr);
assert(b);
cfg_t::addVertex(b);
b->cfg = this;
assert(cfg_t::hasVertex(b));
debug3("Created new bb %.8x %p\n", addr, b);
can_have_self_loops = true;
return b;
}
void Cfg::delBasicBlock(BasicBlock *bb) {
debug3("Deleting BB %p %.8x\n", bb, bb->getAddress());
cfg_t::delVertex(bb);
if (entry == bb)
entry = NULL;
exits.erase(bb);
delete bb;
}
void Cfg::linkBasicBlocks(BasicBlock *sbb, BasicBlock *dbb) {
BasicBlockEdge *e;
bool r;
e = new BasicBlockEdge(sbb, dbb);
assert(e);
debug3("Linking BBS %.8x-%.8x %.8x-%.8x %p %p %p\n", sbb->getAddress(),
sbb->getAddress() + sbb->getSize(),
dbb->getAddress(), dbb->getAddress() + dbb->getSize(),
sbb, dbb, e);
r = cfg_t::addEdge(sbb, dbb, e);
if (!r) {
delete e;
}
assert(cfg_t::hasEdge(sbb, dbb));
sanityCheck();
can_have_self_loops = true;
}
void Cfg::unlinkBasicBlocks(BasicBlock *sbb, BasicBlock *dbb) {
BasicBlockEdge *e;
e = cfg_t::delEdge(sbb, dbb);
debug3("Unlinking BBS %.8x-%.8x %.8x-%.8x %p %p %p\n", sbb->getAddress(),
sbb->getAddress() + sbb->getSize(),
dbb->getAddress(), dbb->getAddress() + dbb->getSize(),
sbb, dbb, e);
if (e)
delete e;
}
void Cfg::clear() {
std::set<BasicBlockEdge *> edges;
std::set<BasicBlock *> bbs;
for (Cfg::const_edge_iterator it = edges_begin(); it != edges_end();
it++) {
edges.insert(*it);
}
for (std::set<BasicBlockEdge *>::iterator it = edges.begin();
it != edges.end(); it++) {
unlinkBasicBlocks((*it)->getSource(), (*it)->getTarget());
}
for (Cfg::const_bb_iterator it = bb_begin(); it != bb_end(); it++) {
bbs.insert(*it);
}
for (std::set<BasicBlock *>::iterator it = bbs.begin();
it != bbs.end(); it++) {
delBasicBlock(*it);
}
addr2bb.clear();
calls.clear();
exits.clear();
entry = NULL;
assert(getNumVertex() == 0);
assert(getNumEdges() == 0);
}
BasicBlock *Cfg::splitBasicBlock(BasicBlock *oldbb, addr_t before) {
BasicBlock *newbb1, *newbb2;
cfg_t::vertex_descriptor oldbb_vd, newbb1_vd, newbb2_vd;
cfg_t::out_edge_iterator oei, oee;
cfg_t::in_edge_iterator iei, iee;
std::list< std::pair<BasicBlock *, BasicBlock *> > todel;
std::list< std::pair<BasicBlock *, BasicBlock *> > toadd;
assert(addr2bb.find(before) != addr2bb.end());
assert(oldbb->address <= before && oldbb->address + oldbb->size > before);
debug3("Splitting BB %.8x-%.8x (%p) @ %.8x (%d instructions)\n",
oldbb->address, oldbb->address + oldbb->size, oldbb, before,
oldbb->instructions.size());
newbb1 = addBasicBlock(oldbb->address);
newbb2 = addBasicBlock(before);
// Scan the list of instructions and assign those before the split point to
// newbb1 and the remaining ones to newbb2
for (size_t i = 0; i < oldbb->instructions.size(); i++) {
Instruction *inst;
inst = oldbb->instructions[i];
debug3(" Instruction %.8x-%.8x -- %.8x\n", inst->address,
inst->address + inst->size, before);
assert((inst->address < before &&
inst->address + inst->size - 1 < before) ||
inst->address >= before);
if (inst->address < before) {
debug3(" adding to bb1 %.8x\n", newbb1->address);
newbb1->addInstruction(inst);
addr2bb[inst->address] = newbb1;
} else {
debug3(" adding to bb2 %.8x\n", newbb2->address);
newbb2->addInstruction(inst);
addr2bb[inst->address] = newbb2;
}
}
// Update the entry point of the cfg if needed
if (entry == oldbb) {
setEntry(newbb1);
}
// Update the exit points of the cfg if needed
if (exits.find(oldbb) != exits.end()) {
exits.erase(oldbb);
exits.insert(newbb2);
}
oldbb_vd = cfg_t::getVertex(oldbb);
newbb1_vd = cfg_t::getVertex(newbb1);
newbb2_vd = cfg_t::getVertex(newbb2);
// Link the predecessors of the oldbb with newbb1
for (boost::tie(iei, iee) = boost::in_edges(oldbb_vd, cfg_t::graph);
iei != iee; ++iei) {
BasicBlock *pred = cfg_t::getVertex(boost::source(*iei, cfg_t::graph));
debug3(" Processing incoming edge %.8x-%.8x -- %.8x-%.8x\n",
pred->address, pred->address + pred->size, oldbb->address,
oldbb->address + oldbb->size);
todel.push_back(std::pair<BasicBlock *, BasicBlock *>(pred, oldbb));
toadd.push_back(std::pair<BasicBlock *, BasicBlock *>(pred, newbb1));
// linkBasicBlocks(pred, newbb1);
}
// Link the successors of the oldbb with newbb2
for (tie(oei, oee) = boost::out_edges(oldbb_vd, cfg_t::graph);
oei != oee; ++oei) {
BasicBlock *succ = cfg_t::getVertex(boost::target(*oei, cfg_t::graph));
debug3(" Processing outgoing edge %.8x-%.8x -- %.8x-%.8x\n",
oldbb->address, oldbb->address + oldbb->size, succ->address,
succ->address + succ->size);
todel.push_back(std::pair<BasicBlock *, BasicBlock *>(oldbb, succ));
toadd.push_back(std::pair<BasicBlock *, BasicBlock *>(newbb2, succ));
// linkBasicBlocks(newbb2, succ);
}
// Remove old links
while (!todel.empty()) {
unlinkBasicBlocks(todel.front().first, todel.front().second);
todel.pop_front();
}
while (!toadd.empty()) {
linkBasicBlocks(toadd.front().first, toadd.front().second);
toadd.pop_front();
}
// XXX: hack to delete instructions when the oldbb is destroyed
oldbb->instructions.clear();
delBasicBlock(oldbb);
linkBasicBlocks(newbb1, newbb2);
debug3("Splitted\n");
return newbb2;
}
bool Cfg::addInstruction(addr_t addr, byte_t *bytes, size_t len, int pos,
addr_t prev, bool isret) {
bool changed = false;
BasicBlock *curbb, *prevbb = NULL;
Instruction *inst = NULL;
bool isrep = false, prev_isrep = false;
// Dirty hack to detect whether previous instruction was a rep
isrep = len == 2 && (*bytes == 0xf3 || *bytes == 0xf2);
prev_isrep = prev && (*((byte_t *) prev) == 0xf3 || *((byte_t *) prev) == 0xf2);
debug3("Processing instruction %.8x-%.8x %d %.8x\n", addr, addr + len - 1,
pos, prev);
// Check if instruction has been processed already and if any BB must be
// split
if (addr2bb.find(addr) != addr2bb.end()) {
// Instruction already processed
prevbb = curbb = addr2bb[addr];
// Sanity checks
assert(prevbb->instructions.size() > 0);
if (pos == BASICBLOCK_TAIL) {
// Temporary disabled
// assert_msg(addr + len == prevbb->address + prevbb->size,
// "eip:%.8x len:%d prevbb:%.8x size:%.8x", addr, len,
// prevbb->address, prevbb->size);
;
} else if (pos == BASICBLOCK_HEAD || pos == BASICBLOCK_MIDDLE) {
assert_msg(addr >= prevbb->address, "%.8x >= %.8x (%.8x)", addr, prevbb->address, prev);
assert(addr + len <= prevbb->address + prevbb->size);
}
// Check whether we need to split the BB. Need to split if pos = head,
// but the instruction is not currently the first of the BB
if ((pos == BASICBLOCK_HEAD && prevbb->address < addr)) {
// Split
curbb = splitBasicBlock(prevbb, addr);
// Sanity checks
assert(prev);
assert(addr2bb.find(prev) != addr2bb.end());
prevbb = addr2bb[prev];
debug3(" linking %.8x-%.8x with %.8x-%.8x\n",
prevbb->address, prevbb->address + prevbb->size,
curbb->address, curbb->address + curbb->size);
linkBasicBlocks(prevbb, curbb);
changed = true;
} else {
assert(prev == 0 || addr2bb.find(prev) != addr2bb.end());
if (prev) {
prevbb = addr2bb[prev];
// Check if we have to add and edge or if it already exists
if (prevbb != curbb && !hasEdge(prevbb, curbb)) {
debug3(" linking %.8x-%.8x with %.8x-%.8x "
"(already executed but edge was missing)\n",
prevbb->address, prevbb->address + prevbb->size,
curbb->address, curbb->address + curbb->size);
linkBasicBlocks(prevbb, curbb);
}
}
}
} else {
debug3("Instruction seen for the first time\n");
// First time we see the instruction
assert(addr);
inst = new Instruction(addr, bytes, len);
assert(inst);
if (!prev) {
// First instruction of the function
if (addr2bb.size()) {
for (std::map<addr_t, BasicBlock *>::const_iterator it = addr2bb.begin();
it != addr2bb.end(); it++) {
debug2(" %.8x %.8x\n", it->first, it->second->getAddress());
}
}
assert_msg(addr2bb.size() == 0, "eip:%.8x prev:%.8x bbs:%d", addr,
prev, addr2bb.size());
assert(!entry);
// This assertion claims that the first instruction in
// a function should not be a return.
// This initially sounded reasonable, but it failed for
// instance in a binary for g++ (cc1plus) we tested with.
//assert(!isret);
curbb = addBasicBlock(addr);
setEntry(curbb);
curbb->addInstruction(inst);
changed = true;
debug3("Creating new BB @%.8x (%p) to hold the instruction\n",
addr, curbb);
} else {
assert(addr2bb.find(prev) != addr2bb.end());
prevbb = curbb = addr2bb[prev];
if (pos != BASICBLOCK_HEAD && !isrep && !prev_isrep) {
curbb->addInstruction(inst);
changed = true;
debug3("Appending instruction %.8x to BB %.8x (%p)\n",
addr, curbb->address, curbb);
} else {
if (isrep || prev_isrep) {
debug2("Creating a new BB because %s%s\n",
isrep ? "isrep" : "",
prev_isrep ? "prev_isrep" : "");
}
curbb = addBasicBlock(addr);
curbb->addInstruction(inst);
linkBasicBlocks(prevbb, curbb);
changed = true;
debug3("Creating new BB @%.8x (%p) to hold the instruction\n",
addr, curbb);
debug3("Linking BB %.8x with BB %.8x\n", prevbb->address,
curbb->address);
}
if (isret) {
exits.insert(curbb);
}
if (isrep) {
debug2("Adding self-loop for rep\n");
linkBasicBlocks(curbb, curbb);
}
}
addr2bb[addr] = curbb;
}
// debug3("\n", "");
return changed;
}
size_t Cfg::getBasicBlocksNo() {
return cfg_t::getNumVertex();
}
void Cfg::addCall(addr_t caller, Function *callee) {
// Mark the node as an exitpoint if it is calling exit
if (strcmp(callee->getName(), "exit") == 0) {
assert(addr2bb.find(caller) != addr2bb.end());
exits.insert(addr2bb[caller]);
}
calls[caller].insert(callee);
}
void Cfg::check() {
;
}
void Cfg::decode() {
if (!decoded) {
for(Cfg::const_bb_iterator it = bb_begin(); it != bb_end(); it++) {
BasicBlock *bb = *it;
bb->decode();
}
}
decoded = true;
}
std::string Cfg::dot() {
std::set<Function *> functions;
std::string r = "";
char tmp[1024];
int j = 0;
if (getNumVertex() > 0) {
computeWeakTopologicalOrdering();
debug2("Weak topological ordering: %s\n", wto2string().c_str());
}
r = "digraph G {\n";
for (Cfg::const_bb_iterator bbit = bb_begin();
bbit != bb_end(); bbit++) {
BasicBlock *bb = *bbit;
if (entry == bb) {
sprintf(tmp, "bb_%.8x [label=\"%.8x-%.8x (%d)\", "
"color=\"green\" %s];\n",
bb->getAddress(), bb->getAddress(),
bb->getAddress() + bb->getSize() - 1, j,
bb->isExecuted() ? "style=filled" : "");
} else if (exits.find(bb) != exits.end()) {
sprintf(tmp, "bb_%.8x [label=\"%.8x-%.8x (%d)\", color=\"red\" %s];"
"\n", bb->getAddress(), bb->getAddress(),
bb->getAddress() + bb->getSize() - 1, j,
bb->isExecuted() ? "style=filled" : "");
} else {
sprintf(tmp, "bb_%.8x [label=\"%.8x-%.8x (%d)\" %s];\n",
bb->getAddress(), bb->getAddress(),
bb->getAddress() + bb->getSize() - 1, j,
bb->isExecuted() ? "style=filled" : "");
}
r += " " + std::string(tmp);
j++;
}
for (std::map<addr_t, functions_t>::iterator it = calls.begin();
it != calls.end(); it++) {
functions.insert(it->second.begin(), it->second.end());
}
for (std::set<Function *>::iterator it = functions.begin();
it != functions.end(); it++) {
sprintf(tmp, "func_%.8x [label=\"%s@%.8x [%s]\", color=blue, "
"shape=rectangle,URL=\"%.8x.svg\"];\n",
(*it)->getAddress(), (*it)->getName(),
(*it)->getAddress(), (*it)->getModule(),
(*it)->getAddress());
r += " " + std::string(tmp);
}
for (std::map<addr_t, functions_t>::iterator it1 = calls.begin();
it1 != calls.end(); it1++) {
for (functions_t::iterator it2 = it1->second.begin();
it2 != it1->second.end(); it2++) {
sprintf(tmp, "bb_%.8x -> func_%.8x [color=blue];\n",
addr2bb[it1->first]->getAddress(),
(*it2)->getAddress());
r += " " + std::string(tmp);
}
}
for (Cfg::const_edge_iterator eit = edge_begin();
eit != edge_end(); eit++) {
BasicBlockEdge *e = *eit;
BasicBlock *source = e->getSource(), *target = e->getTarget();
if (isSubComponentOf(source, target)) {
sprintf(tmp, "bb_%.8x -> bb_%.8x [color=purple];\n",
e->getSource()->getAddress(),
e->getTarget()->getAddress());
r += " " + std::string(tmp);
} else {
sprintf(tmp, "bb_%.8x -> bb_%.8x;\n", e->getSource()->getAddress(),
e->getTarget()->getAddress());
r += " " + std::string(tmp);
}
}
/*
if (idoms[bb2vertex[bb1]] != cfg_traits::null_vertex()) {
sprintf(tmp, "bb_%.8x -> bb_%.8x [color=\"cyan\"];\n",
basicblocks[idoms[bb2vertex[bb1]]]->getAddress(),
bb1->getAddress());
r += " " + std::string(tmp);
}
// */
#if 0
for (Cfg::const_bb_iterator bbit = bb_begin();
bbit != bb_end(); bbit++) {
BasicBlock *c = getComponent(*bbit);
if (c)
sprintf(tmp, "bb_%.8x -> bb_%.8x[color=pink,style=dashed];\n",
(*bbit)->getAddress(), c->getAddress());
r += " " + std::string(tmp);
}
#endif
r += "}";
return r;
}
std::string Cfg::vcg() {
std::set<Function *> functions;
std::string r = "";
char tmp[1024];
int j = 0;
r = "graph: {\n";
for (Cfg::const_bb_iterator bbit = bb_begin();
bbit != bb_end(); bbit++) {
BasicBlock *bb = *bbit;
if (entry == bb) {
sprintf(tmp, "node: { title: \"bb_%.8x\" "
"label: \"%.8x-%.8x (%d)\" color: green}\n",
bb->getAddress(), bb->getAddress(),
bb->getAddress() + bb->getSize() - 1, j);
} else if (exits.find(bb) != exits.end()) {
sprintf(tmp, "node: { title: \"bb_%.8x\" "
"label: \"%.8x-%.8x (%d)\" color: red}\n",
bb->getAddress(), bb->getAddress(),
bb->getAddress() + bb->getSize() - 1, j);
} else {
sprintf(tmp, "node: { title: \"bb_%.8x\" "
"label: \"%.8x-%.8x (%d)\"}\n",
bb->getAddress(), bb->getAddress(),
bb->getAddress() + bb->getSize() - 1, j);
}
r += " " + std::string(tmp);
j++;
}
for (std::map<addr_t, functions_t>::iterator it = calls.begin();
it != calls.end(); it++) {
functions.insert(it->second.begin(), it->second.end());
}
for (std::set<Function *>::iterator it = functions.begin();
it != functions.end(); it++) {
sprintf(tmp, "node: { title: \"func_%.8x\" "
"label: \"%s@%.8x [%s]\" color: blue}\n",
(*it)->getAddress(), (*it)->getName(),
(*it)->getAddress(), (*it)->getModule());
r += " " + std::string(tmp);
}
for (std::map<addr_t, functions_t>::iterator it1 = calls.begin();
it1 != calls.end(); it1++) {
for (functions_t::iterator it2 = it1->second.begin();
it2 != it1->second.end(); it2++) {
sprintf(tmp, "edge: { sourcename: \"bb_%.8x\" "
"targetname: \"func_%.8x\" color: blue}\n",
addr2bb[it1->first]->getAddress(),
(*it2)->getAddress());
r += " " + std::string(tmp);
}
}
for (Cfg::const_edge_iterator eit = edge_begin();
eit != edge_end(); eit++) {
BasicBlockEdge *e = *eit;
sprintf(tmp, "edge: { sourcename: \"bb_%.8x\" "
"targetname: \"bb_%.8x\"}\n", e->getSource()->getAddress(),
e->getTarget()->getAddress());
r += " " + std::string(tmp);
}
/*
if (idoms[bb2vertex[bb1]] != cfg_traits::null_vertex()) {
sprintf(tmp, "edge: { sourcename: \"bb_%.8x\" "
"targetname: \"bb_%.8x\" color: cyan}\n",
basicblocks[idoms[bb2vertex[bb1]]]->getAddress(),
bb1->getAddress());
r += " " + std::string(tmp);
}
// */
r += "}";
return r;
}
void Cfg::sanityCheck(bool aggressive) {
cfg_t::vertex_iterator vi, ve;
cfg_t::edge_iterator ei, ee;
cfg_t::out_edge_iterator oei, oee;
cfg_t::in_edge_iterator iei, iee;
assert(boost::num_vertices(graph) == vertex_rev_map.size());
assert(boost::num_edges(graph) == edge_rev_map.size());
for (boost::tie(vi, ve) = boost::vertices(graph); vi != ve; vi++) {
assert(vertex_rev_map.find(vertex_map[*vi]) != vertex_rev_map.end());
assert(hasVertex(vertex_map[*vi]));
assert(hasVertex(vertex_rev_map[vertex_map[*vi]]));
}
for (boost::tie(ei, ee) = boost::edges(graph); ei != ee; ei++) {
assert(edge_rev_map.find(edge_map[*ei]) != edge_rev_map.end());
assert(hasEdge(edge_map[*ei]->getSource(), edge_map[*ei]->getTarget()));
}
if (!aggressive)
return;
if (exits.empty())
debug("Function %.8x has no exit nodes\n",
function->getAddress());
#if 0
assert_msg(!exits.empty(), "Function %.8x has no exit nodes",
function->getAddress());
for (const_bb_iterator it = bb_begin(); it != bb_end(); it++) {
assert_msg(getNumSuccessors(*it) > 0 ||
exits.find(*it) != exits.end(),
"BasicBlock %.8x in function %.8x has not successor and is "
"not an exit node", (*it)->getAddress(),
function->getAddress());
}
#endif
}
int disassemble(addr_t addr, addr_t &next1, addr_t &next2,
xed_category_enum_t &category, char *buf = NULL,
size_t bufsize = 0) {
xed_state_t dstate;
xed_decoded_inst_t xedd;
xed_error_enum_t xed_error;
int len;
xed_tables_init();
xed_state_zero(&dstate);
xed_state_init(&dstate,
XED_MACHINE_MODE_LEGACY_32,
XED_ADDRESS_WIDTH_32b,
XED_ADDRESS_WIDTH_32b);
xed_decoded_inst_zero_set_mode(&xedd, &dstate);
xed_error = xed_decode(&xedd, (const xed_uint8_t*) addr, 16);
assert(xed_error == XED_ERROR_NONE);
const xed_inst_t *inst= xed_decoded_inst_inst(&xedd);
category = xed_decoded_inst_get_category(&xedd);
len = xed_decoded_inst_get_length(&xedd);
if (buf)
xed_decoded_inst_dump(&xedd, buf, bufsize);
next1 = next2 = 0xFFFFFFFF;
switch (category) {
case XED_CATEGORY_COND_BR:
next1 = addr + len;
if (xed_operand_name(xed_inst_operand(inst, 0)) == XED_OPERAND_RELBR)
next2 = addr + len +
xed_decoded_inst_get_branch_displacement(&xedd);
else
debug("!! Instruction %.8x uses an indirect jump target\n", addr);
break;
case XED_CATEGORY_UNCOND_BR:
if (xed_operand_name(xed_inst_operand(inst, 0)) == XED_OPERAND_RELBR)
next1 = addr + len +
xed_decoded_inst_get_branch_displacement(&xedd);
else if (xed_operand_name(xed_inst_operand(inst, 0)) ==
XED_OPERAND_IMM0)
next1 = xed_decoded_inst_get_unsigned_immediate(&xedd);
else
debug("!! Instruction %.8x uses an indirect jump target\n", addr);
break;
case XED_CATEGORY_RET:
break;
case XED_CATEGORY_CALL:
next1 = addr + len;
if (xed_operand_name(xed_inst_operand(inst, 0)) == XED_OPERAND_RELBR)
next2 = addr + len +
xed_decoded_inst_get_branch_displacement(&xedd);
else if (xed_operand_name(xed_inst_operand(inst, 0)) ==
XED_OPERAND_IMM0)
next2 = xed_decoded_inst_get_unsigned_immediate(&xedd);
else
debug("!! Instruction %.8x uses an indirect call target\n", addr);
break;
default:
next1 = addr + len;
break;
}
return len;
}
void Cfg::augmentCfg(std::list<std::pair<addr_t, addr_t> > &wlist,
std::set<addr_t> &done,
std::map<addr_t, Function *> &funcs) {
addr_t curr, prev, next1, next2;
int len = 0, pos = 0;
bool isret;
xed_category_enum_t category, prev_category;
BasicBlock *prevbb = NULL;
static char assembly[128];
curr = wlist.front().first;
prev = wlist.front().second;
wlist.pop_front();
if (prev)
disassemble(prev, next1, next2, prev_category);
len = disassemble(curr, next1, next2, category, assembly, sizeof(assembly));
if (!prev || prev_category == XED_CATEGORY_RET
|| prev_category == XED_CATEGORY_CALL
|| prev_category == XED_CATEGORY_COND_BR
|| prev_category == XED_CATEGORY_UNCOND_BR)
// Previous instruction is a tail
pos = BASICBLOCK_HEAD;
else if (category == XED_CATEGORY_RET
|| category == XED_CATEGORY_CALL
|| category == XED_CATEGORY_COND_BR
|| category == XED_CATEGORY_UNCOND_BR)
// Current instruction is a tail
pos = BASICBLOCK_TAIL;
else
// Anything else
pos = BASICBLOCK_MIDDLE;
debug2(" Statically processing instruction %.8x "
"(%d bytes long, successor of %.8x, pos %d)\n",
curr, len, prev, pos);
// Add instruction to the CFG if not in there already
addInstruction(curr, (byte_t *) curr, len, pos, prev,
category == XED_CATEGORY_RET);
// Add a call target if necessary
if (category == XED_CATEGORY_CALL && next2 != 0xFFFFFFFF) {
if (function->getProg()->isPlt(next2)) {
next2 = derefplt(curr, next2,
function->getProg()->getBase(curr, ".got.plt"));
}
if (next2) {
if (funcs.find(next2) == funcs.end()) {
// Function already seen
funcs[next2] = new Function(next2);
funcs[next2]->setPending(true);
}
addCall(curr, funcs.find(next2)->second);
} else {
// This should not happen, but it happens and I don't know why!
debug("Invalid NULL call target\n");
}
}
// Update the worklist
if (done.find(curr) == done.end()) {
if (next1 != 0xFFFFFFFF) {
debug2("\t adding %.8x to the worklist\n", next1);
wlist.push_back(std::pair<addr_t, addr_t>(next1, curr));
}
if (next2 != 0xFFFFFFFF && category != XED_CATEGORY_CALL) {
debug2("\t adding %.8x to the worklist\n", next2);
wlist.push_back(std::pair<addr_t, addr_t>(next2, curr));
}
}
    // Mark instruction as processed
done.insert(curr);
}
// Statically augment the CFG. The process consists of two passes: (1)
// recursive traversal disassembly starting from the entry point, (2) recursive
// traversal starting from indirect control transfer instructions
void Cfg::augmentCfg(addr_t start, std::map<addr_t, Function *> &funcs) {
std::list<std::pair<addr_t, addr_t> > wlist;
std::set<addr_t> done;
addr_t prev = 0;
debug2("Augmenting CFG of %.8x\n", start);
if (strcmp(function->getName(), "exit") == 0 ||
strcmp(function->getName(), "pthread_exit") == 0) {
debug2("Skipping exit because we do not want to know what happens "
"after\n");
clear();
addInstruction(function->getAddress(), (byte_t *) "\xc3", 1,
BASICBLOCK_HEAD, 0, true);
entry = addr2bb[function->getAddress()];
exits.insert(entry);
return;
}
// First pass, recursive traversal disassembly
wlist.push_back(std::pair<addr_t, addr_t>(start, prev));
while (!wlist.empty()) {
augmentCfg(wlist, done, funcs);
}
debug2("First pass completed\n");
// Second pass, disassembly targets of indirect calls and jumps that have
// been reached dynamically but couldn't be reached during the first pass
// for obvious reasons
for (Cfg::const_bb_iterator bbit = bb_begin();
bbit != bb_end(); bbit++) {
// The basic block hasn't been processed yet
if (done.find((*bbit)->getAddress()) == done.end()) {
// Schedule the block for disassemly (one entry in the worklist for
// each predecessor)
for (Cfg::const_pred_iterator pit = pred_begin(*bbit);
pit != pred_end(*bbit); pit++) {
addr_t tmp0;
xed_category_enum_t tmp1;
static char buf[128];
prev = (*((*pit)->inst_end() - 1))->getAddress();
disassemble(prev, tmp0, tmp0, tmp1, buf, sizeof(buf));
debug2("Found unprocessed basic block %.8x-%.8x "
"(reached from %.8x %s)\n",
(*bbit)->getAddress(),
(*bbit)->getAddress() + (*bbit)->getSize(), prev, buf);
wlist.push_back(std::pair<addr_t, addr_t>
((*bbit)->getAddress(), prev));
}
}
}
while (!wlist.empty()) {
augmentCfg(wlist, done, funcs);
}
debug2("Second pass completed\n");
}
void Cfg::setExecuted(addr_t i) {
assert(addr2bb.find(i) != addr2bb.end());
BasicBlock *bb = addr2bb[i];
assert(bb->getAddress() <= i && bb->getAddress() + bb->getSize() > i);
for (instructions_t::const_iterator iit = bb->instructions.begin();
iit != bb->instructions.end(); iit++) {
if ((*iit)->getAddress() == i) {
(*iit)->setExecuted();
return;
}
}
assert(0);
}
bool Cfg::isExecuted(addr_t i) {
assert_msg(addr2bb.find(i) != addr2bb.end(), "%.8x not in %.8x", i, entry->getAddress());
BasicBlock *bb = addr2bb[i];
assert(bb->getAddress() <= i && bb->getAddress() + bb->getSize() > i);
for (instructions_t::const_iterator iit = bb->instructions.begin();
iit != bb->instructions.end(); iit++) {
if ((*iit)->getAddress() == i) {
return (*iit)->isExecuted();
}
}
assert(0);
return false;
}
// Remove self loops in the graph to simplify abstract interpretation
void Cfg::removeSelfLoops() {
bool done = !can_have_self_loops;
while (!done) {
done = true;
for (Cfg::const_bb_iterator bbit = bb_begin();
bbit != bb_end(); bbit++) {
if (hasEdge(*bbit, *bbit)) {
// Found a self loop
debug2("Detected self loop in %.8x (%.8x -> %.8x)\n",
function->getAddress(), (*bbit)->getAddress(),
(*bbit)->getAddress());
// Create a new empty basic block
BasicBlock *dummybb = addBasicBlock(0);
// Remove the self loop
unlinkBasicBlocks(*bbit, *bbit);
// Build a list of predecessors to process (can't modify edges
// during the iteration)
std::list<BasicBlock *> preds;
for (Cfg::const_pred_iterator pbbit = pred_begin(*bbit);
pbbit != pred_end(*bbit); pbbit++) {
preds.push_back(*pbbit);
}
// Remove old links and add new ones
while (!preds.empty()) {
unlinkBasicBlocks(preds.front(), *bbit);
linkBasicBlocks(preds.front(), dummybb);
preds.pop_front();
}
// Link the dummy bb with the one with the self loop and vice
// versa
linkBasicBlocks(dummybb, *bbit);
linkBasicBlocks(*bbit, dummybb);
// Update the entry point of the cfg if needed
if (entry == *bbit) {
setEntry(dummybb);
}
done = false;
break;
}
}
}
can_have_self_loops = false;
}
functions_t::const_iterator Cfg::call_targets_begin(const BasicBlock &bb) {
assert(!bb.instructions.empty());
Instruction *i = bb.instructions.back();
assert(i->isCall());
return calls.find(i->getAddress() & ~0x80000000)->second.begin();
}
functions_t::const_iterator Cfg::call_targets_end(const BasicBlock &bb) {
assert(!bb.instructions.empty());
Instruction *i = bb.instructions.back();
assert(i->isCall());
return calls.find(i->getAddress() & ~0x80000000)->second.end();
}
std::string Cfg::wto2string() {
int j = 0, k;
char buf[32];
std::string r = "";
assert(wto_computed);
for (const_wto_iterator bbit = wto_begin(); bbit !=
wto_end(); bbit++) {
BasicBlock *bb = getVertex(*bbit);
k = j;
while (k > 0 && getComponentNo(bb) < k--) {
r += ")";
}
sprintf(buf, " %s", (j < getComponentNo(bb)) ? "(" : "");
r += buf;
sprintf(buf, "%.8x|%d", bb->getAddress(), getComponentNo(bb));
r += buf;
j = getComponentNo(bb);
}
k = j;
while (k-- > 0) {
r += ")";
}
return r;
}
// ****************************************************************************
BasicBlock::~BasicBlock() {
for (instructions_t::iterator it = instructions.begin();
it != instructions.end(); it++) {
delete *it;
}
}
void BasicBlock::addInstruction(Instruction *i) {
instructions.push_back(i);
i->basicblock = this;
size += i->getSize();
}
size_t BasicBlock::getInstructionsNo() {
return instructions.size();
}
addr_t BasicBlock::getAddress() {
return address;
}
size_t BasicBlock::getSize() {
return size;
}
Cfg *BasicBlock::getCfg() {
return cfg;
}
void BasicBlock::decode() {
if (!decoded) {
for (instructions_t::iterator it = instructions.begin();
it != instructions.end(); it++) {
(*it)->decode();
}
}
decoded = true;
}
int BasicBlock::getNumPredecessors() {
return cfg->getNumPredecessors(this);
}
bool BasicBlock::isCall() {
decode();
return !instructions.empty() && instructions.back()->isCall();
}
bool BasicBlock::isReturn() {
decode();
    return !instructions.empty() && instructions.back()->isReturn();
}
bool BasicBlock::isExecuted() {
for (instructions_t::iterator it = instructions.begin();
it != instructions.end(); it++) {
if ((*it)->isExecuted())
return true;
}
return false;
}
Instruction *BasicBlock::getInstruction(addr_t i) {
assert(i >= address && i < address + size);
for (instructions_t::iterator it = instructions.begin();
it != instructions.end(); it++) {
if ((*it)->getAddress() == i)
return *it;
}
fprintf(stderr, "ERROR: Could not find instruction %.8x", i);
assert(0);
return 0;
}
// ****************************************************************************
BasicBlockEdge::BasicBlockEdge(BasicBlock *s, BasicBlock *t) {
source = s;
target = t;
}
BasicBlockEdge::~BasicBlockEdge() {
;
}
BasicBlock *BasicBlockEdge::getSource() {
return source;
}
BasicBlock *BasicBlockEdge::getTarget() {
return target;
}
std::ostream& operator<<(std::ostream& os, const BasicBlock& B) {
for (instructions_t::const_iterator I = B.inst_begin(), E =
B.inst_end(); I != E; ++I) {
os << **I << "----- next BB -----" << std::endl;
}
return os;
}
// Local Variables:
// mode: c++
// c-basic-offset: 4
// compile-command: "dchroot -c typeinfer -d make"
// End:
| {
"content_hash": "2676fc37e707c189e15251b5b2642f37",
"timestamp": "",
"source": "github",
"line_count": 1069,
"max_line_length": 93,
"avg_line_length": 28.128157156220766,
"alnum_prop": 0.5880474907712262,
"repo_name": "bitblaze-fuzzball/d-s-se-directed-tests",
"id": "d5e0c85fdb318e10244fea6fa3b6d3b2f54e584d",
"size": "30069",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cfg.cc",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "257"
},
{
"name": "Batchfile",
"bytes": "13545"
},
{
"name": "C",
"bytes": "534200"
},
{
"name": "C++",
"bytes": "549713"
},
{
"name": "Makefile",
"bytes": "20692"
},
{
"name": "OCaml",
"bytes": "20346"
},
{
"name": "Perl",
"bytes": "13918"
},
{
"name": "Python",
"bytes": "5430"
},
{
"name": "Shell",
"bytes": "12483"
}
],
"symlink_target": ""
} |
<?php
use yii\helpers\Html;
/* @var $this yii\web\View */
/* @var $model backend\modules\masterdata\models\Quotationdetail */
$this->title = 'Create Quotationdetail';
$this->params['breadcrumbs'][] = ['label' => 'Quotationdetails', 'url' => ['index']];
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="quotationdetail-create">
<h1><?= Html::encode($this->title) ?></h1>
<?= $this->render('_form_qd', [
'model' => $model,
]) ?>
</div>
| {
"content_hash": "1173284efc24bcde1329f225ca2253ea",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 85,
"avg_line_length": 22.571428571428573,
"alnum_prop": 0.5928270042194093,
"repo_name": "ariandi/ktmavia",
"id": "16794ce618fcf0fa8971c026b05947429180db42",
"size": "474",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "backend/modules/masterdata/views/quotation/create_qd.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1541"
},
{
"name": "CSS",
"bytes": "838998"
},
{
"name": "HTML",
"bytes": "3011603"
},
{
"name": "JavaScript",
"bytes": "2866501"
},
{
"name": "PHP",
"bytes": "1968627"
},
{
"name": "Python",
"bytes": "32324"
}
],
"symlink_target": ""
} |
package lazarus;
import org.newdawn.slick.Animation;
import org.newdawn.slick.GameContainer;
import org.newdawn.slick.geom.Rectangle;
import org.newdawn.slick.geom.Vector2f;
import org.newdawn.slick.state.StateBasedGame;
public class FireBall extends Bullet {
/**
     * Constructor of the FireBall
     * @param pSprite Animation used to render the fireball
     * @param p initial position of the fireball
     * @param v initial velocity vector of the fireball
*/
public FireBall( Animation pSprite, Vector2f p, Vector2f v) {
super( pSprite, p, v);
}
/**
* Bullet update function
* @param gc GameContainer
* @param sb StateBasedGame
* @param delta Time between frame
*/
public void update(GameContainer gc, StateBasedGame sb, int delta){
time += delta;
v.y += CONST.FIREBALL_MASS * CONST.G_FORCE * delta;
Rectangle rec = new Rectangle( p.x + v.x * delta, p.y + v.y * delta, aniSprite.getWidth(), aniSprite.getHeight());
if( playstate.mainLevel.isSolid( rec)){
rec = new Rectangle( p.x + v.x * delta, p.y, aniSprite.getWidth(), aniSprite.getHeight());
Rectangle recHor = new Rectangle( p.x, p.y + v.y * delta, aniSprite.getWidth(), aniSprite.getHeight());
if( !playstate.mainLevel.isSolid( rec))
v.y *= -1;
else if( !playstate.mainLevel.isSolid( recHor))
v.x *= -1;
}
p.x += v.x * delta;
p.y += v.y * delta;
if( time > 6000)
delete = true;
doCollision();
}
public void doCollision(){
for( Mob mob : playstate.getMobs()){
if( getCollisionRect().intersects( mob.getCollisionRect())){
if( mob.ping()){
mob.dmg( 30);
}
}
}
}
}
| {
"content_hash": "d6756a6bcf4402978c07d591c3d79023",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 119,
"avg_line_length": 26.296875,
"alnum_prop": 0.6173499702911468,
"repo_name": "PhiBabin/Project-Lazarus",
"id": "95f62ffa443709f05a26772fc7bda00e1f3039bf",
"size": "1683",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/lazarus/FireBall.java",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Java",
"bytes": "67322"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<title>CMSIS-SVD: SVDConv.exe</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<link href="cmsis.css" rel="stylesheet" type="text/css" />
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<script type="text/javascript" src="navtree.js"></script>
<script type="text/javascript">
$(document).ready(initResizable);
</script>
<link href="stylsheetf" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 46px;">
<td id="projectlogo"><img alt="Logo" src="CMSIS_Logo_Final.png"/></td>
<td style="padding-left: 0.5em;">
<div id="projectname">CMSIS-SVD
 <span id="projectnumber">Version 1.2</span>
</div>
<div id="projectbrief">CMSIS System View Description</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<div id="CMSISnav" class="tabs1">
<ul class="tablist">
<li><a href="../../General/html/index.html"><span>CMSIS</span></a></li>
<li><a href="../../Core/html/index.html"><span>CORE</span></a></li>
<li><a href="../../Driver/html/index.html"><span>Driver</span></a></li>
<li><a href="../../DSP/html/index.html"><span>DSP</span></a></li>
<li><a href="../../RTOS/html/index.html"><span>RTOS API</span></a></li>
<li><a href="../../Pack/html/index.html"><span>Pack</span></a></li>
<li class="current"><a href="../../SVD/html/index.html"><span>SVD</span></a></li>
</ul>
</div>
<!-- Generated by Doxygen 1.8.2 -->
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="index.html"><span>Main Page</span></a></li>
<li class="current"><a href="pages.html"><span>Usage and Description</span></a></li>
<li><a href="modules.html"><span>Reference</span></a></li>
</ul>
</div>
</div><!-- top -->
<div id="side-nav" class="ui-resizable side-nav-resizable">
<div id="nav-tree">
<div id="nav-tree-contents">
<div id="nav-sync" class="sync"></div>
</div>
</div>
<div id="splitbar" style="-moz-user-select:none;"
class="ui-resizable-handle">
</div>
</div>
<script type="text/javascript">
$(document).ready(function(){initNavTree('svd__s_v_d_conv_pg.html','');});
</script>
<div id="doc-content">
<div class="header">
<div class="headertitle">
<div class="title">SVDConv.exe </div> </div>
</div><!--header-->
<div class="contents">
<div class="textblock"><p><b>SVDConv.exe</b> is a command-line utility to validate CMSIS-SVD files and to generate CMSIS-compliant device header files. <b>SVDConv.exe</b> is distributed with the <b>ARM::CMSIS</b> Pack (in the CMSIS\Utilities directory) together with the CMSIS-SVD schema file.</p>
<p><b>SVDConv.exe</b> performs the following operations:</p>
<ul>
<li>Checks the syntactical and structural compliance with the specified CMSIS-SVD format.</li>
<li>Checks the consistency, correctness, and completeness of the CMSIS-SVD file against the CMSIS-SVD schema file.</li>
<li>Generates CMSIS-compliant device header files, which can be used for software development.</li>
</ul>
<h2>Operation</h2>
<p><b>SVDConv.exe</b> is invoked form the command line. The general command format is: </p>
<div class="fragment"><div class="line">SVDConv.exe <SVD_file> <options></div>
</div><!-- fragment --><p> </p>
<table class="cmtable" summary="SVDConv Args">
<tr>
<th><options> </th><th>Short Name </th><th>Description </th></tr>
<tr>
<td><em>none</em> </td><td>Validation </td><td>Perform a validation check of the SVD file. Errors and warnings are printed on screen. </td></tr>
<tr>
<td>-b </td><td>Log File </td><td>Specify the log file name for writing messages. Default: screen. </td></tr>
<tr>
<td>-o </td><td>Output Path </td><td>Specify an output path for the generated device header file or log file. Default: current directory. </td></tr>
<tr>
<td>—generate=header </td><td>Generate Device Header File </td><td>Generates the device header file. The name of the generated file is derived from the value of the tag <device><name> in the CMSIS-SVD file. Refer to <a class="el" href="group__svd__xml__device__gr.html">Device Level</a>. </td></tr>
<tr>
<td>—fields=macro </td><td>Bit-field Macros </td><td>Generates position and mask C-Macros for each field description contained in the CMSIS-SVD input file. Must be used in combination with <em>—generate=header</em>. </td></tr>
<tr>
<td>—fields=struct </td><td>Bit-field Structs </td><td>Generates bit fields for each field description contained in the CMSIS-SVD input file. Must be used in combination with <em>—generate=header</em>. </td></tr>
<tr>
<td>—fields=struct-ansic </td><td>ANSI Bit-field Structs </td><td>Generates MISRA-compliant structures for each bitfield. The generated code <b>is not CMSIS-compliant</b>! Must be used in combination with <em>—generate=header</em>. </td></tr>
</table>
<h2>Return Codes</h2>
<p><b>SVDConv.exe</b> returns the following codes: <br/>
</p>
<table class="doxtable">
<tr>
<th align="center">Code </th><th align="left">Description </th><th align="left">Action</th></tr>
<tr>
<td align="center">0 </td><td align="left">OK </td><td align="left">No action required. Validation and conversion performed without errors. </td></tr>
<tr>
<td align="center">1 </td><td align="left">WARNINGS </td><td align="left">Warnings should be checked and possibly removed. The header file is created and could be used. </td></tr>
<tr>
<td align="center">2 </td><td align="left">ERRORS </td><td align="left">Errors in the SVD description file. Important elements are missing and must be corrected. </td></tr>
<tr>
<td align="center">3 </td><td align="left">Error in command line </td><td align="left">Check and correct the command line arguments. </td></tr>
</table>
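<p>As an illustrative sketch (the SVD file name, output directory, and log file name below are placeholders, not part of SVDConv itself), the return code can be evaluated in a build script so that header generation is only accepted when no errors were reported: <br/>
</p>
<div class="fragment"><div class="line">SVDConv.exe MyDevice.svd --generate=header -o .\generated -b svdconv.log</div>
<div class="line">IF ERRORLEVEL 2 (</div>
<div class="line">   ECHO SVD description contains errors, see svdconv.log</div>
<div class="line">   EXIT /B 1</div>
<div class="line">)</div>
</div><!-- fragment -->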
<p><b>Examples</b> <br/>
</p>
<ol type="1">
<li>Retrieve help information on screen. <div class="fragment"><div class="line">SVDConv.exe </div>
</div><!-- fragment --> <br/>
</li>
<li>Perform a consistency check by passing only the SVD file name. Errors and warnings are printed on screen. <div class="fragment"><div class="line">SVDConv.exe ARM_Example.svd </div>
</div><!-- fragment --> <br/>
The result is printed on screen: <pre class="fragment"> MVCM3110.svd(1688) : info
<description> missing for value '2 : MODE2'
MVCM3110.svd(1692) : info
<description> missing for value '3 : MODE3'
MVCM3110.svd(1696) : info
<description> missing for value '4 : MODE4'
Area of improvements:
* Description contains 267 <fields> defined without associated <enumeratedValues>
Found 0 Errors and 1 Warnings
Return Code: 1 (WARNINGS)</pre> <br/>
</li>
<li>Generate the header file. Performs a consistency check. Errors and warnings are printed on screen. <div class="fragment"><div class="line">SVDConv.exe ARM_Example.svd --generate=header</div>
</div><!-- fragment --> <br/>
Code snippet from the generated header file showing the structure for <b>TIMER0</b>. <br/>
<div class="fragment"><div class="line"><span class="comment">/* ================ TIMER0 ================ */</span></div>
<div class="line"><span class="keyword">typedef</span> <span class="keyword">struct </span>{ </div>
<div class="line"> __IO uint32_t CR; </div>
<div class="line"> __IO uint16_t SR; </div>
<div class="line"> __I uint16_t RESERVED0[5];</div>
<div class="line"> __IO uint16_t INT; </div>
<div class="line"> __I uint16_t RESERVED1[7];</div>
<div class="line"> __IO uint32_t COUNT; </div>
<div class="line"> __IO uint32_t MATCH; </div>
<div class="line"> <span class="keyword">union </span>{</div>
<div class="line"> __O uint32_t PRESCALE_WR; </div>
<div class="line"> __I uint32_t PRESCALE_RD; </div>
<div class="line"> };</div>
<div class="line"> __I uint32_t RESERVED2[9];</div>
<div class="line"> __IO uint32_t RELOAD[4]; </div>
<div class="line">} TIMER0_Type;</div>
</div><!-- fragment --> <br/>
</li>
<li>Generate the header file containing bit fields. Performs a consistency check. Errors and warnings are printed on screen. <div class="fragment"><div class="line">SVDConv.exe ARM_Example.svd --generate=header --fields=<span class="keyword">struct</span></div>
</div><!-- fragment --> <br/>
Code snippet from the generated header file showing the structure for <b>TIMER0</b>. <br/>
Compare to the code snippet above. <div class="fragment"><div class="line"><span class="comment">/* ================ TIMER0 ================ */</span></div>
<div class="line"><span class="keyword">typedef</span> <span class="keyword">struct </span>{ </div>
<div class="line"> <span class="keyword">union </span>{</div>
<div class="line"> __IO uint32_t CR; </div>
<div class="line"> <span class="keyword">struct </span>{</div>
<div class="line"> __IO uint32_t EN : 1; </div>
<div class="line"> __O uint32_t RST : 1; </div>
<div class="line"> __IO uint32_t CNT : 2; </div>
<div class="line"> __IO uint32_t MODE : 3; </div>
<div class="line"> __IO uint32_t PSC : 1; </div>
<div class="line"> __IO uint32_t CNTSRC : 4; </div>
<div class="line"> __IO uint32_t CAPSRC : 4; </div>
<div class="line"> __IO uint32_t CAPEDGE : 2; </div>
<div class="line"> uint32_t : 2;</div>
<div class="line"> __IO uint32_t TRGEXT : 2; </div>
<div class="line"> uint32_t : 2;</div>
<div class="line"> __IO uint32_t RELOAD : 2; </div>
<div class="line"> __IO uint32_t IDR : 2; </div>
<div class="line"> uint32_t : 3;</div>
<div class="line"> __IO uint32_t S : 1; </div>
<div class="line"> } CR_b; </div>
<div class="line"> };</div>
<div class="line"> </div>
<div class="line"> <span class="keyword">union </span>{</div>
<div class="line"> __IO uint16_t SR; </div>
<div class="line"> <span class="keyword">struct </span>{</div>
<div class="line"> __I uint16_t RUN : 1; </div>
<div class="line"> uint16_t : 7;</div>
<div class="line"> __IO uint16_t MATCH : 1; </div>
<div class="line"> __IO uint16_t UN : 1; </div>
<div class="line"> __IO uint16_t OV : 1; </div>
<div class="line"> uint16_t : 1;</div>
<div class="line"> __I uint16_t RST : 1; </div>
<div class="line"> uint16_t : 1;</div>
<div class="line"> __I uint16_t RELOAD : 2; </div>
<div class="line"> } SR_b; </div>
<div class="line"> };</div>
<div class="line"> __I uint16_t RESERVED0[5];</div>
<div class="line"> </div>
<div class="line"> <span class="keyword">union </span>{</div>
<div class="line"> __IO uint16_t INT; </div>
<div class="line"> <span class="keyword">struct </span>{</div>
<div class="line"> __IO uint16_t EN : 1; </div>
<div class="line"> uint16_t : 3;</div>
<div class="line"> __IO uint16_t MODE : 3; </div>
<div class="line"> } INT_b; </div>
<div class="line"> };</div>
<div class="line"> __I uint16_t RESERVED1[7];</div>
<div class="line"> __IO uint32_t COUNT; </div>
<div class="line"> __IO uint32_t MATCH; </div>
<div class="line"> <span class="keyword">union </span>{</div>
<div class="line"> __O uint32_t PRESCALE_WR; </div>
<div class="line"> __I uint32_t PRESCALE_RD; </div>
<div class="line"> };</div>
<div class="line"> __I uint32_t RESERVED2[9];</div>
<div class="line"> __IO uint32_t RELOAD[4]; </div>
<div class="line">} TIMER0_Type;</div>
</div><!-- fragment --> </li>
</ol>
</div></div><!-- contents -->
</div><!-- doc-content -->
<!-- start footer part -->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
<ul>
<li class="footer">Generated on Wed Sep 24 2014 12:16:31 for CMSIS-SVD by ARM Ltd. All rights reserved.
<!--
<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.2
-->
</li>
</ul>
</div>
</body>
</html>
| {
"content_hash": "5ded7dad1406046fe15a73299aaa2b1c",
"timestamp": "",
"source": "github",
"line_count": 236,
"max_line_length": 315,
"avg_line_length": 58.021186440677965,
"alnum_prop": 0.5783246914481852,
"repo_name": "AtmelUniversityFrance/SAM4E-XPRO",
"id": "300fc5f8053332dfb96cccac3ac3d2fcf207e4d3",
"size": "13693",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Examples/CMSIS/CMSIS/Documentation/SVD/html/svd__s_v_d_conv_pg.html",
"mode": "33188",
"license": "bsd-2-clause",
"language": [],
"symlink_target": ""
} |
package io.mediachain.rpc
import io.grpc.{Status, StatusRuntimeException}
sealed trait RPCError {
val statusCode: Status.Code
val description: String
def asStatus: Status =
Status.fromCodeValue(statusCode.value)
.withDescription(description)
def asException: StatusRuntimeException = {
new StatusRuntimeException(asStatus)
}
}
object RPCError {
def fromException(e: StatusRuntimeException): RPCError = {
val status = e.getStatus
status.getCode match {
case c if c == Status.NOT_FOUND.getCode =>
NotFound(status.getDescription)
case c if c == Status.FAILED_PRECONDITION.getCode =>
FailedPrecondition(status.getDescription)
case _ =>
GRPCError(status)
}
}
// Catch-all for error types not specifically handled below
case class GRPCError(status: Status) extends RPCError {
val statusCode = status.getCode
val description = status.getDescription
override def asStatus = status
}
// Resource not found (404)
case class NotFound(description: String) extends RPCError {
val statusCode = Status.NOT_FOUND.getCode
}
case class FailedPrecondition(description: String) extends RPCError {
val statusCode = Status.FAILED_PRECONDITION.getCode
}
}
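// Illustrative usage sketch (comments only; the identifiers below are
// hypothetical, not defined in this file):
//
//   // server side: surface a missing resource as a gRPC status exception
//   //   throw NotFound(s"no record found for id $id").asException
//
//   // client side: map a caught StatusRuntimeException back to an RPCError
//   //   val err: RPCError = RPCError.fromException(statusRuntimeException)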
| {
"content_hash": "9c2bcff352d33804138e02294cbc1195",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 71,
"avg_line_length": 24.784313725490197,
"alnum_prop": 0.7151898734177216,
"repo_name": "mediachain/L-SPACE",
"id": "280f24f7e901f44bd9b2813a74f6825e41edbefe",
"size": "1264",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rpc/src/main/scala/io/mediachain/rpc/RPCError.scala",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Scala",
"bytes": "141028"
},
{
"name": "Shell",
"bytes": "103"
}
],
"symlink_target": ""
} |
namespace Microsoft.Azure.CognitiveServices.Vision.Face
{
using Models;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Extension methods for FaceListOperations.
/// </summary>
public static partial class FaceListOperationsExtensions
{
/// <summary>
/// Create an empty face list with user-specified faceListId, name, an optional
/// userData and recognitionModel. Up to 64 face lists are allowed in one
/// subscription.
/// <br /> Face list is a list of faces, up to 1,000 faces, and used by
/// [Face - Find
/// Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar).
/// <br /> After creation, user should use [FaceList - Add
/// Face](https://docs.microsoft.com/rest/api/faceapi/facelist/addfacefromurl)
/// to import the faces. No image will be stored. Only the extracted face
/// features are stored on server until [FaceList -
/// Delete](https://docs.microsoft.com/rest/api/faceapi/facelist/delete) is
/// called.
/// <br /> Find Similar is used for scenario like finding celebrity-like
/// faces, similar face filtering, or as a light way face identification. But
/// if the actual use is to identify person, please use
/// [PersonGroup](https://docs.microsoft.com/rest/api/faceapi/persongroup) /
/// [LargePersonGroup](https://docs.microsoft.com/rest/api/faceapi/largepersongroup)
/// and [Face -
/// Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify).
/// <br /> Please consider
/// [LargeFaceList](https://docs.microsoft.com/rest/api/faceapi/largefacelist)
/// when the face number is large. It can support up to 1,000,000 faces.
/// <br />'recognitionModel' should be specified to associate with this
/// face list. The default value for 'recognitionModel' is 'recognition_01', if
        /// the latest model is needed, please explicitly specify the model you need in
/// this parameter. New faces that are added to an existing face list will use
/// the recognition model that's already associated with the collection.
/// Existing face features in a face list can't be updated to features
/// extracted by another version of recognition model.
/// Please Refer to [Specify a face recognition
/// model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model).
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='faceListId'>
/// Id referencing a particular face list.
/// </param>
/// <param name='name'>
/// User defined name, maximum length is 128.
/// </param>
/// <param name='userData'>
/// User specified data. Length should not exceed 16KB.
/// </param>
/// <param name='recognitionModel'>
/// Possible values include: 'recognition_01', 'recognition_02',
/// 'recognition_03', 'recognition_04'
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task CreateAsync(this IFaceListOperations operations, string faceListId, string name = default(string), string userData = default(string), string recognitionModel = default(string), CancellationToken cancellationToken = default(CancellationToken))
{
(await operations.CreateWithHttpMessagesAsync(faceListId, name, userData, recognitionModel, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
/// <summary>
/// Retrieve a face list’s faceListId, name, userData, recognitionModel and
/// faces in the face list.
///
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='faceListId'>
/// Id referencing a particular face list.
/// </param>
/// <param name='returnRecognitionModel'>
/// A value indicating whether the operation should return 'recognitionModel'
/// in response.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<FaceList> GetAsync(this IFaceListOperations operations, string faceListId, bool? returnRecognitionModel = false, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetWithHttpMessagesAsync(faceListId, returnRecognitionModel, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Update information of a face list.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='faceListId'>
/// Id referencing a particular face list.
/// </param>
/// <param name='name'>
/// User defined name, maximum length is 128.
/// </param>
/// <param name='userData'>
/// User specified data. Length should not exceed 16KB.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task UpdateAsync(this IFaceListOperations operations, string faceListId, string name = default(string), string userData = default(string), CancellationToken cancellationToken = default(CancellationToken))
{
(await operations.UpdateWithHttpMessagesAsync(faceListId, name, userData, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
/// <summary>
/// Delete a specified face list.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='faceListId'>
/// Id referencing a particular face list.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteAsync(this IFaceListOperations operations, string faceListId, CancellationToken cancellationToken = default(CancellationToken))
{
(await operations.DeleteWithHttpMessagesAsync(faceListId, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
/// <summary>
/// List face lists’ faceListId, name, userData and recognitionModel. <br
/// />
/// To get face information inside faceList use [FaceList -
/// Get](https://docs.microsoft.com/rest/api/faceapi/facelist/get)
///
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='returnRecognitionModel'>
/// A value indicating whether the operation should return 'recognitionModel'
/// in response.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IList<FaceList>> ListAsync(this IFaceListOperations operations, bool? returnRecognitionModel = false, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListWithHttpMessagesAsync(returnRecognitionModel, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Delete a face from a face list by specified faceListId and persistedFaceId.
        /// <br /> Adding/deleting faces to/from the same face list are processed
/// sequentially and to/from different face lists are in parallel.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='faceListId'>
/// Id referencing a particular face list.
/// </param>
/// <param name='persistedFaceId'>
/// Id referencing a particular persistedFaceId of an existing face.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteFaceAsync(this IFaceListOperations operations, string faceListId, System.Guid persistedFaceId, CancellationToken cancellationToken = default(CancellationToken))
{
(await operations.DeleteFaceWithHttpMessagesAsync(faceListId, persistedFaceId, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
/// <summary>
/// Add a face to a specified face list, up to 1,000 faces.
        /// <br /> To deal with an image that contains multiple faces, the input face can
/// be specified as an image with a targetFace rectangle. It returns a
/// persistedFaceId representing the added face. No image will be stored. Only
/// the extracted face feature will be stored on server until [FaceList -
/// Delete
/// Face](https://docs.microsoft.com/rest/api/faceapi/facelist/deleteface) or
/// [FaceList -
/// Delete](https://docs.microsoft.com/rest/api/faceapi/facelist/delete) is
/// called.
/// <br /> Note persistedFaceId is different from faceId generated by
/// [Face -
/// Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
/// * Higher face image quality means better detection and recognition
/// precision. Please consider high-quality faces: frontal, clear, and face
/// size is 200x200 pixels (100 pixels between eyes) or bigger.
        /// * JPEG, PNG, GIF (the first frame), and BMP formats are supported. The
/// allowed image file size is from 1KB to 6MB.
/// * "targetFace" rectangle should contain one face. Zero or multiple faces
/// will be regarded as an error. If the provided "targetFace" rectangle is not
/// returned from [Face -
/// Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl),
/// there’s no guarantee to detect and add the face successfully.
/// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose,
/// or large occlusions will cause failures.
        /// * Adding/deleting faces to/from the same face list are processed sequentially
/// and to/from different face lists are in parallel.
/// * The minimum detectable face size is 36x36 pixels in an image no larger
/// than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels
/// will need a proportionally larger minimum face size.
/// * Different 'detectionModel' values can be provided. To use and compare
/// different detection models, please refer to [How to specify a detection
/// model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='faceListId'>
/// Id referencing a particular face list.
/// </param>
/// <param name='url'>
/// Publicly reachable URL of an image
/// </param>
/// <param name='userData'>
/// User-specified data about the face for any purpose. The maximum length is
/// 1KB.
/// </param>
/// <param name='targetFace'>
/// A face rectangle to specify the target face to be added to a person in the
/// format of "targetFace=left,top,width,height". E.g.
/// "targetFace=10,10,100,100". If there is more than one face in the image,
/// targetFace is required to specify which face to add. No targetFace means
/// there is only one face detected in the entire image.
/// </param>
/// <param name='detectionModel'>
/// Name of detection model. Detection model is used to detect faces in the
/// submitted image. A detection model name can be provided when performing
/// Face - Detect or (Large)FaceList - Add Face or (Large)PersonGroup - Add
/// Face. The default value is 'detection_01', if another model is needed,
/// please explicitly specify it. Possible values include: 'detection_01',
/// 'detection_02', 'detection_03'
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<PersistedFace> AddFaceFromUrlAsync(this IFaceListOperations operations, string faceListId, string url, string userData = default(string), IList<int> targetFace = default(IList<int>), string detectionModel = default(string), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.AddFaceFromUrlWithHttpMessagesAsync(faceListId, url, userData, targetFace, detectionModel, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Add a face to a specified face list, up to 1,000 faces.
        /// <br /> To deal with an image that contains multiple faces, the input face can
/// be specified as an image with a targetFace rectangle. It returns a
/// persistedFaceId representing the added face. No image will be stored. Only
/// the extracted face feature will be stored on server until [FaceList -
/// Delete
/// Face](https://docs.microsoft.com/rest/api/faceapi/facelist/deleteface) or
/// [FaceList -
/// Delete](https://docs.microsoft.com/rest/api/faceapi/facelist/delete) is
/// called.
/// <br /> Note persistedFaceId is different from faceId generated by
/// [Face -
/// Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
/// * Higher face image quality means better detection and recognition
/// precision. Please consider high-quality faces: frontal, clear, and face
/// size is 200x200 pixels (100 pixels between eyes) or bigger.
        /// * JPEG, PNG, GIF (the first frame), and BMP formats are supported. The
/// allowed image file size is from 1KB to 6MB.
/// * "targetFace" rectangle should contain one face. Zero or multiple faces
/// will be regarded as an error. If the provided "targetFace" rectangle is not
/// returned from [Face -
/// Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl),
/// there’s no guarantee to detect and add the face successfully.
/// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose,
/// or large occlusions will cause failures.
        /// * Adding/deleting faces to/from the same face list are processed sequentially
/// and to/from different face lists are in parallel.
/// * The minimum detectable face size is 36x36 pixels in an image no larger
/// than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels
/// will need a proportionally larger minimum face size.
/// * Different 'detectionModel' values can be provided. To use and compare
/// different detection models, please refer to [How to specify a detection
/// model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='faceListId'>
/// Id referencing a particular face list.
/// </param>
/// <param name='image'>
/// An image stream.
/// </param>
/// <param name='userData'>
/// User-specified data about the face for any purpose. The maximum length is
/// 1KB.
/// </param>
/// <param name='targetFace'>
/// A face rectangle to specify the target face to be added to a person in the
/// format of "targetFace=left,top,width,height". E.g.
/// "targetFace=10,10,100,100". If there is more than one face in the image,
/// targetFace is required to specify which face to add. No targetFace means
/// there is only one face detected in the entire image.
/// </param>
/// <param name='detectionModel'>
/// Name of detection model. Detection model is used to detect faces in the
/// submitted image. A detection model name can be provided when performing
/// Face - Detect or (Large)FaceList - Add Face or (Large)PersonGroup - Add
/// Face. The default value is 'detection_01', if another model is needed,
/// please explicitly specify it. Possible values include: 'detection_01',
/// 'detection_02', 'detection_03'
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<PersistedFace> AddFaceFromStreamAsync(this IFaceListOperations operations, string faceListId, Stream image, string userData = default(string), IList<int> targetFace = default(IList<int>), string detectionModel = default(string), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.AddFaceFromStreamWithHttpMessagesAsync(faceListId, image, userData, targetFace, detectionModel, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
}
}
| {
"content_hash": "ecde74b5c62c817322e2345f75b19694",
"timestamp": "",
"source": "github",
"line_count": 334,
"max_line_length": 331,
"avg_line_length": 58.47904191616767,
"alnum_prop": 0.5918492729879172,
"repo_name": "AsrOneSdk/azure-sdk-for-net",
"id": "b2752e1041f556f46ae304a213bca0a52a24a80b",
"size": "19893",
"binary": false,
"copies": "3",
"ref": "refs/heads/psSdkJson6Current",
"path": "sdk/cognitiveservices/Vision.Face/src/Generated/FaceListOperationsExtensions.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "15473"
},
{
"name": "Bicep",
"bytes": "13438"
},
{
"name": "C#",
"bytes": "72203239"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "5652"
},
{
"name": "HTML",
"bytes": "6169271"
},
{
"name": "JavaScript",
"bytes": "16012"
},
{
"name": "PowerShell",
"bytes": "649218"
},
{
"name": "Shell",
"bytes": "31287"
},
{
"name": "Smarty",
"bytes": "11135"
}
],
"symlink_target": ""
} |
package backup
import (
"archive/tar"
"compress/gzip"
"context"
"fmt"
"io"
"net/http"
"os"
"strconv"
"time"
"emperror.dev/errors"
"github.com/cenkalti/backoff/v4"
"github.com/pterodactyl/wings/server/filesystem"
"github.com/juju/ratelimit"
"github.com/pterodactyl/wings/config"
"github.com/pterodactyl/wings/remote"
)
type S3Backup struct {
Backup
}
var _ BackupInterface = (*S3Backup)(nil)
func NewS3(client remote.Client, uuid string, ignore string) *S3Backup {
return &S3Backup{
Backup{
client: client,
Uuid: uuid,
Ignore: ignore,
adapter: S3BackupAdapter,
},
}
}
// Remove removes a backup from the system.
func (s *S3Backup) Remove() error {
return os.Remove(s.Path())
}
// WithLogContext attaches additional context to the log output for this backup.
func (s *S3Backup) WithLogContext(c map[string]interface{}) {
s.logContext = c
}
// Generate creates a new backup on the disk, moves it into the S3 bucket via
// the provided presigned URL, and then deletes the backup from the disk.
func (s *S3Backup) Generate(ctx context.Context, basePath, ignore string) (*ArchiveDetails, error) {
defer s.Remove()
a := &filesystem.Archive{
BasePath: basePath,
Ignore: ignore,
}
s.log().WithField("path", s.Path()).Info("creating backup for server")
if err := a.Create(s.Path()); err != nil {
return nil, err
}
s.log().Info("created backup successfully")
rc, err := os.Open(s.Path())
if err != nil {
return nil, errors.Wrap(err, "backup: could not read archive from disk")
}
defer rc.Close()
if err := s.generateRemoteRequest(ctx, rc); err != nil {
return nil, err
}
ad, err := s.Details(ctx)
if err != nil {
return nil, errors.WrapIf(err, "backup: failed to get archive details after upload")
}
return ad, nil
}
// Restore will read from the provided reader assuming that it is a gzipped
// tar reader. When a file is encountered in the archive the callback function
// will be triggered. If the callback returns an error the entire process is
// stopped, otherwise this function will run until all files have been written.
//
// This restoration uses a workerpool to use up to the number of CPUs available
// on the machine when writing files to the disk.
func (s *S3Backup) Restore(ctx context.Context, r io.Reader, callback RestoreCallback) error {
reader := r
// Steal the logic we use for making backups which will be applied when restoring
// this specific backup. This allows us to prevent overloading the disk unintentionally.
if writeLimit := int64(config.Get().System.Backups.WriteLimit * 1024 * 1024); writeLimit > 0 {
reader = ratelimit.Reader(r, ratelimit.NewBucketWithRate(float64(writeLimit), writeLimit))
}
gr, err := gzip.NewReader(reader)
if err != nil {
return err
}
defer gr.Close()
tr := tar.NewReader(gr)
for {
select {
case <-ctx.Done():
return nil
default:
// Do nothing, fall through to the next block of code in this loop.
}
header, err := tr.Next()
if err != nil {
if err == io.EOF {
break
}
return err
}
if header.Typeflag == tar.TypeReg {
if err := callback(header.Name, tr, header.FileInfo().Mode(), header.AccessTime, header.ModTime); err != nil {
return err
}
}
}
return nil
}
// Generates the remote S3 request and begins the upload.
func (s *S3Backup) generateRemoteRequest(ctx context.Context, rc io.ReadCloser) error {
defer rc.Close()
s.log().Debug("attempting to get size of backup...")
size, err := s.Backup.Size()
if err != nil {
return err
}
s.log().WithField("size", size).Debug("got size of backup")
s.log().Debug("attempting to get S3 upload urls from Panel...")
urls, err := s.client.GetBackupRemoteUploadURLs(context.Background(), s.Backup.Uuid, size)
if err != nil {
return err
}
s.log().Debug("got S3 upload urls from the Panel")
s.log().WithField("parts", len(urls.Parts)).Info("attempting to upload backup to s3 endpoint...")
uploader := newS3FileUploader(rc)
for i, part := range urls.Parts {
// Get the size for the current part.
var partSize int64
if i+1 < len(urls.Parts) {
partSize = urls.PartSize
} else {
// This is the remaining size for the last part,
// there is not a minimum size limit for the last part.
partSize = size - (int64(i) * urls.PartSize)
}
// Attempt to upload the part.
if _, err := uploader.uploadPart(ctx, part, partSize); err != nil {
s.log().WithField("part_id", i+1).WithError(err).Warn("failed to upload part")
return err
}
s.log().WithField("part_id", i+1).Info("successfully uploaded backup part")
}
s.log().WithField("parts", len(urls.Parts)).Info("backup has been successfully uploaded")
return nil
}
type s3FileUploader struct {
io.ReadCloser
client *http.Client
}
// newS3FileUploader returns a new file uploader instance.
func newS3FileUploader(file io.ReadCloser) *s3FileUploader {
return &s3FileUploader{
ReadCloser: file,
// We purposefully use a super high timeout on this request since we need to upload
// a 5GB file. This assumes at worst a 10Mbps connection for uploading. While technically
// you could go slower we're targeting mostly hosted servers that should have 100Mbps
// connections anyways.
client: &http.Client{Timeout: time.Hour * 2},
}
}
// backoff returns a new exponential backoff implementation using a context that
// will also stop the backoff if it is canceled.
func (fu *s3FileUploader) backoff(ctx context.Context) backoff.BackOffContext {
b := backoff.NewExponentialBackOff()
b.Multiplier = 2
b.MaxElapsedTime = time.Minute
return backoff.WithContext(b, ctx)
}
// uploadPart attempts to upload a given S3 file part to the S3 system. If a
// 5xx error is returned from the endpoint this will continue with an exponential
// backoff to try and successfully upload the part.
//
// Once uploaded the ETag is returned to the caller.
func (fu *s3FileUploader) uploadPart(ctx context.Context, part string, size int64) (string, error) {
r, err := http.NewRequestWithContext(ctx, http.MethodPut, part, nil)
if err != nil {
return "", errors.Wrap(err, "backup: could not create request for S3")
}
r.ContentLength = size
r.Header.Add("Content-Length", strconv.Itoa(int(size)))
r.Header.Add("Content-Type", "application/x-gzip")
// Limit the reader to the size of the part.
r.Body = Reader{Reader: io.LimitReader(fu.ReadCloser, size)}
var etag string
err = backoff.Retry(func() error {
res, err := fu.client.Do(r)
if err != nil {
if errors.Is(err, context.DeadlineExceeded) || errors.Is(err, context.Canceled) {
return backoff.Permanent(err)
}
// Don't use a permanent error here, if there is a temporary resolution error with
// the URL due to DNS issues we want to keep re-trying.
return errors.Wrap(err, "backup: S3 HTTP request failed")
}
_ = res.Body.Close()
if res.StatusCode != http.StatusOK {
err := errors.New(fmt.Sprintf("backup: failed to put S3 object: [HTTP/%d] %s", res.StatusCode, res.Status))
// Only attempt a backoff retry if this error is because of a 5xx error from
// the S3 endpoint. Any 4xx error should be treated as an error that a retry
// would not fix.
if res.StatusCode >= http.StatusInternalServerError {
return err
}
return backoff.Permanent(err)
}
// Get the ETag from the uploaded part, this should be sent with the
// CompleteMultipartUpload request.
etag = res.Header.Get("ETag")
return nil
}, fu.backoff(ctx))
if err != nil {
if v, ok := err.(*backoff.PermanentError); ok {
return "", v.Unwrap()
}
return "", err
}
return etag, nil
}
// Reader provides a wrapper around an existing io.Reader
// but implements io.Closer in order to satisfy an io.ReadCloser.
type Reader struct {
io.Reader
}
func (Reader) Close() error {
return nil
}
| {
"content_hash": "41db77e2b033c999bc9e7c5733837828",
"timestamp": "",
"source": "github",
"line_count": 265,
"max_line_length": 113,
"avg_line_length": 29.475471698113207,
"alnum_prop": 0.7009345794392523,
"repo_name": "Pterodactyl/wings",
"id": "85faa59ddfee564a55cc6e135a828a8468054374",
"size": "7811",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "server/backup/backup_s3.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "49324"
},
{
"name": "Makefile",
"bytes": "570"
},
{
"name": "Shell",
"bytes": "1141"
}
],
"symlink_target": ""
} |
using System;
using System.Threading;
namespace Polly.Specs.Helpers.Custom.AddBehaviourIfHandle
{
internal class AddBehaviourIfHandlePolicy : Policy
{
private readonly Action<Exception> _behaviourIfHandle;
internal AddBehaviourIfHandlePolicy(Action<Exception> behaviourIfHandle, PolicyBuilder policyBuilder)
: base(policyBuilder)
{
_behaviourIfHandle = behaviourIfHandle ?? throw new ArgumentNullException(nameof(behaviourIfHandle));
}
protected override TResult Implementation<TResult>(
Func<Context, CancellationToken, TResult> action,
Context context,
CancellationToken cancellationToken)
{
return AddBehaviourIfHandleEngine.Implementation(
ExceptionPredicates,
ResultPredicates<TResult>.None,
outcome => _behaviourIfHandle(outcome.Exception),
action,
context,
cancellationToken
);
}
}
internal class AddBehaviourIfHandlePolicy<TResult> : Policy<TResult>
{
private readonly Action<DelegateResult<TResult>> _behaviourIfHandle;
internal AddBehaviourIfHandlePolicy(
Action<DelegateResult<TResult>> behaviourIfHandle,
PolicyBuilder<TResult> policyBuilder)
: base(policyBuilder)
{
_behaviourIfHandle = behaviourIfHandle ?? throw new ArgumentNullException(nameof(behaviourIfHandle));
}
protected override TResult Implementation(Func<Context, CancellationToken, TResult> action, Context context, CancellationToken cancellationToken)
{
return AddBehaviourIfHandleEngine.Implementation(
ExceptionPredicates,
ResultPredicates,
_behaviourIfHandle,
action,
context,
cancellationToken
);
}
}
}
| {
"content_hash": "6b9328371c3016d33e93e2516c87cd9c",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 153,
"avg_line_length": 35.32142857142857,
"alnum_prop": 0.6334681496461072,
"repo_name": "michael-wolfenden/Polly",
"id": "71b5709b49a37c39034fad41c4757e525e606602",
"size": "1980",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/Polly.Specs/Helpers/Custom/AddBehaviourIfHandle/AddBehaviourIfHandlePolicy.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "129"
},
{
"name": "C#",
"bytes": "448209"
},
{
"name": "PowerShell",
"bytes": "3558"
}
],
"symlink_target": ""
} |
package net.sourceforge.plantuml.flashcode;
import java.awt.Color;
import java.awt.image.BufferedImage;
public interface FlashCodeUtils {
public BufferedImage exportFlashcode(String s, Color fore, Color back);
}
| {
"content_hash": "6067f31af7b4a380cd0dc629e64ffd3c",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 72,
"avg_line_length": 19.818181818181817,
"alnum_prop": 0.8027522935779816,
"repo_name": "talsma-ict/umldoclet",
"id": "c682dde8b15fcb0d3166dc52d233cc14dc955a37",
"size": "1307",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/plantuml-asl/src/net/sourceforge/plantuml/flashcode/FlashCodeUtils.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "56"
},
{
"name": "CSS",
"bytes": "99"
},
{
"name": "Java",
"bytes": "11407401"
},
{
"name": "JavaScript",
"bytes": "2582"
}
],
"symlink_target": ""
} |
#ifndef itkDivideRealFunctor_h
#define itkDivideRealFunctor_h
#include <itkNumericTraits.h>
namespace itk
{
namespace Functor
{
/**
* \class DivReal
* \brief Promotes arguments to real type and performs division
*
* The result is then static_cast'ed to the output pixel type.
*
* \ingroup SimpleITKFiltersModule
*/
template< class TInput1, class TInput2, class TOutput >
class DivReal
{
public:
// Use default copy, assigned and destructor
bool operator!=(const DivReal &) const
{
return false;
}
bool operator==(const DivReal & other) const
{
return !( *this != other );
}
inline TOutput operator()(const TInput1 & A, const TInput2 & B) const
{
if ( B != (TInput2)0 )
{
return static_cast<TOutput>( static_cast<typename NumericTraits<TInput1>::RealType>(A)
/
static_cast<typename NumericTraits<TInput2>::RealType >(B) );
}
else
{
return NumericTraits< TOutput >::max( static_cast<TOutput>(A) );
}
}
};
}
}
#endif // itkDivideRealFunctor_h
| {
"content_hash": "a5c931beb9b381cb2fb4d234fa55d2ea",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 96,
"avg_line_length": 21.58823529411765,
"alnum_prop": 0.6303360581289736,
"repo_name": "SimpleITK/itkSimpleITKFiltersModule",
"id": "96d22d83724f38e8616e587ddc016c3c7c385451",
"size": "1860",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "include/itkDivideRealFunctor.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "104840"
},
{
"name": "CMake",
"bytes": "15787"
},
{
"name": "Shell",
"bytes": "529"
}
],
"symlink_target": ""
} |
CREATE SEQUENCE SEQ_IP_REV_ACTIVITY_ID INCREMENT BY 1 START WITH 1 NOCACHE;
-- Add proposal_id to PROPOSAL_IP_REV_ACTIVITY table
alter table PROPOSAL_IP_REV_ACTIVITY add PROPOSAL_ID NUMBER(12,0) NOT NULL;
-- Add FK constraint to PROPOSAL table
ALTER TABLE PROPOSAL_IP_REV_ACTIVITY
ADD CONSTRAINT FK_PROPOSAL_ID
FOREIGN KEY (PROPOSAL_ID)
REFERENCES PROPOSAL (PROPOSAL_ID);
| {
"content_hash": "da951648d72298db203fcf0f6799f03b",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 76,
"avg_line_length": 32,
"alnum_prop": 0.7708333333333334,
"repo_name": "blackcathacker/kc.preclean",
"id": "bbd40214f0526372533fa25d6b1bdcaa39237c4d",
"size": "429",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "coeus-db/coeus-db-sql/src/main/resources/org/kuali/coeus/coeus-sql/log/Release_2_0_logs/KRACOEUS-2524.sql",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "96034"
},
{
"name": "Java",
"bytes": "27623677"
},
{
"name": "JavaScript",
"bytes": "749782"
},
{
"name": "Perl",
"bytes": "1278"
},
{
"name": "Scheme",
"bytes": "8283377"
},
{
"name": "Shell",
"bytes": "69314"
},
{
"name": "XSLT",
"bytes": "20298494"
}
],
"symlink_target": ""
} |
license: Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
---
SQLResultSetList
=======
One of the properties of the SQLResultSet, containing the rows returned from a SQL query.
Properties
-------
- __length__: the number of rows returned by the SQL query
Methods
-------
- __item__: returns the row at the specified index as a JavaScript object
Details
-------
The SQLResultSetList contains the data returned from a SQL select statement. The object has a length property indicating how many rows the select statement returned. To retrieve a row of data, call the `item` method with the row number. The item method returns a JavaScript object whose properties are the columns of the database against which the select statement was executed.
Supported Platforms
-------------------
- Android
- BlackBerry WebWorks (OS 6.0 and higher)
- iPhone
- webOS
- Tizen
Execute SQL Quick Example
------------------
function queryDB(tx) {
tx.executeSql('SELECT * FROM DEMO', [], querySuccess, errorCB);
}
function querySuccess(tx, results) {
var len = results.rows.length;
        console.log("DEMO table: " + len + " rows found.");
for (var i=0; i<len; i++){
            console.log("Row = " + i + " ID = " + results.rows.item(i).id + " Data = " + results.rows.item(i).data);
}
}
function errorCB(err) {
        alert("Error processing SQL: "+err.code);
}
var db = window.openDatabase("Database", "1.0", "Cordova Demo", 200000);
db.transaction(queryDB, errorCB);
Full Example
------------
<!DOCTYPE html>
<html>
<head>
        <title>Storage Example</title>
<script type="text/javascript" charset="utf-8" src="cordova-2.1.0.js"></script>
<script type="text/javascript" charset="utf-8">
        // Wait for Cordova to load
//
document.addEventListener("deviceready", onDeviceReady, false);
        // Populate the database
//
function populateDB(tx) {
tx.executeSql('DROP TABLE IF EXISTS DEMO');
tx.executeSql('CREATE TABLE IF NOT EXISTS DEMO (id unique, data)');
tx.executeSql('INSERT INTO DEMO (id, data) VALUES (1, "First row")');
tx.executeSql('INSERT INTO DEMO (id, data) VALUES (2, "Second row")');
}
        // Query the database
//
function queryDB(tx) {
tx.executeSql('SELECT * FROM DEMO', [], querySuccess, errorCB);
}
        // Query success callback
//
function querySuccess(tx, results) {
var len = results.rows.length;
            console.log("DEMO table: " + len + " rows found.");
for (var i=0; i<len; i++){
                console.log("Row = " + i + " ID = " + results.rows.item(i).id + " Data = " + results.rows.item(i).data);
}
}
        // Transaction error callback
//
function errorCB(err) {
            console.log("Error processing SQL: "+err.code);
}
        // Transaction success callback
//
function successCB() {
var db = window.openDatabase("Database", "1.0", "Cordova Demo", 200000);
db.transaction(queryDB, errorCB);
}
        // Cordova is ready
//
function onDeviceReady() {
var db = window.openDatabase("Database", "1.0", "Cordova Demo", 200000);
db.transaction(populateDB, errorCB, successCB);
}
</script>
</head>
<body>
        <h1>Example</h1>
        <p>Database</p>
</body>
</html>
| {
"content_hash": "d708b0eb7607026a3ae07d5dad4bb0c7",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 251,
"avg_line_length": 29.86764705882353,
"alnum_prop": 0.5873953717380601,
"repo_name": "kant2002/cordova-docs",
"id": "1f1ab37c0fa9f17cfc8196fb3cba6e99f82bbaaa",
"size": "4906",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "docs/ja/2.1.0/cordova/storage/sqlresultsetlist/sqlresultsetlist.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5116"
},
{
"name": "CSS",
"bytes": "10901"
},
{
"name": "HTML",
"bytes": "110669"
},
{
"name": "JavaScript",
"bytes": "62601"
}
],
"symlink_target": ""
} |
/*
Concepts and parts of this file have been contributed by Andre R.
*/
/**
* @file cmsis_os.h
* @brief CMSIS RTOS module macros and structures.
*
* @addtogroup CMSIS_OS
* @{
*/
#ifndef _CMSIS_OS_H_
#define _CMSIS_OS_H_
#include "ch.h"
/*===========================================================================*/
/* Module constants. */
/*===========================================================================*/
/**
* @brief API version.
*/
#define osCMSIS 0x10002
/**
* @brief Kernel version.
*/
#define osKernelSystemId "KERNEL V1.00"
/**
* @brief ChibiOS/RT version encoded for CMSIS.
*/
#define osCMSIS_KERNEL ((CH_KERNEL_MAJOR << 16) | \
(CH_KERNEL_MINOR << 8) | \
(CH_KERNEL_PATCH))
/**
* @name CMSIS Capabilities
* @{
*/
#define osFeature_MainThread 1
#define osFeature_Pool 1
#define osFeature_MailQ 0
#define osFeature_MessageQ 1
#define osFeature_Signals 24
#define osFeature_Semaphore ((1U << 31) - 1U)
#define osFeature_Wait 0
#define osFeature_SysTick 1
/**< @} */
/**
* @brief Wait forever specification for timeouts.
*/
#define osWaitForever TIME_INFINITE
/**
* @brief System tick frequency.
*/
#define osKernelSysTickFrequency CH_CFG_FREQUENCY
/*===========================================================================*/
/* Module pre-compile time settings. */
/*===========================================================================*/
/**
* @brief Number of pre-allocated static semaphores/mutexes.
*/
#if !defined(CMSIS_CFG_DEFAULT_STACK)
#define CMSIS_CFG_DEFAULT_STACK 256
#endif
/**
* @brief Number of pre-allocated static semaphores/mutexes.
*/
#if !defined(CMSIS_CFG_NUM_SEMAPHORES)
#define CMSIS_CFG_NUM_SEMAPHORES 4
#endif
/**
* @brief Number of pre-allocated static timers.
*/
#if !defined(CMSIS_CFG_NUM_TIMERS)
#define CMSIS_CFG_NUM_TIMERS 4
#endif
/*===========================================================================*/
/* Derived constants and error checks. */
/*===========================================================================*/
#if !CH_CFG_USE_MEMPOOLS
#error "CMSIS RTOS requires CH_CFG_USE_MEMPOOLS"
#endif
#if !CH_CFG_USE_EVENTS
#error "CMSIS RTOS requires CH_CFG_USE_EVENTS"
#endif
#if !CH_CFG_USE_EVENTS_TIMEOUT
#error "CMSIS RTOS requires CH_CFG_USE_EVENTS_TIMEOUT"
#endif
#if !CH_CFG_USE_SEMAPHORES
#error "CMSIS RTOS requires CH_CFG_USE_SEMAPHORES"
#endif
#if !CH_CFG_USE_DYNAMIC
#error "CMSIS RTOS requires CH_CFG_USE_DYNAMIC"
#endif
/*===========================================================================*/
/* Module data structures and types. */
/*===========================================================================*/
/**
* @brief Type of priority levels.
*/
typedef enum {
osPriorityIdle = -3,
osPriorityLow = -2,
osPriorityBelowNormal = -1,
osPriorityNormal = 0,
osPriorityAboveNormal = +1,
osPriorityHigh = +2,
osPriorityRealtime = +3,
osPriorityError = 0x84
} osPriority;
/**
* @brief Type of error codes.
*/
typedef enum {
osOK = 0,
osEventSignal = 0x08,
osEventMessage = 0x10,
osEventMail = 0x20,
osEventTimeout = 0x40,
osErrorParameter = 0x80,
osErrorResource = 0x81,
osErrorTimeoutResource = 0xC1,
osErrorISR = 0x82,
osErrorISRRecursive = 0x83,
osErrorPriority = 0x84,
osErrorNoMemory = 0x85,
osErrorValue = 0x86,
osErrorOS = 0xFF,
os_status_reserved = 0x7FFFFFFF
} osStatus;
/**
* @brief Type of a timer mode.
*/
typedef enum {
osTimerOnce = 0,
osTimerPeriodic = 1
} os_timer_type;
/**
* @brief Type of thread functions.
*/
typedef void (*os_pthread) (void const *argument);
/**
* @brief Type of timer callback.
*/
typedef void (*os_ptimer) (void const *argument);
/**
* @brief Type of pointer to thread control block.
*/
typedef thread_t *osThreadId;
/**
* @brief Type of pointer to timer control block.
*/
typedef struct os_timer_cb {
virtual_timer_t vt;
os_timer_type type;
os_ptimer ptimer;
void *argument;
uint32_t millisec;
} *osTimerId;
/**
* @brief Type of pointer to mutex control block.
*/
typedef binary_semaphore_t *osMutexId;
/**
* @brief Type of pointer to semaphore control block.
*/
typedef semaphore_t *osSemaphoreId;
/**
* @brief Type of pointer to memory pool control block.
*/
typedef memory_pool_t *osPoolId;
/**
* @brief Type of pointer to message queue control block.
*/
typedef struct mailbox *osMessageQId;
/**
* @brief Type of an event.
*/
typedef struct {
osStatus status;
union {
uint32_t v;
void *p;
int32_t signals;
} value;
union {
/* osMailQId mail_id;*/
osMessageQId message_id;
} def;
} osEvent;
/**
* @brief Type of a thread definition block.
*/
typedef struct os_thread_def {
os_pthread pthread;
osPriority tpriority;
uint32_t stacksize;
} osThreadDef_t;
/**
* @brief Type of a timer definition block.
*/
typedef struct os_timer_def {
os_ptimer ptimer;
} osTimerDef_t;
/**
* @brief Type of a mutex definition block.
*/
typedef struct os_mutex_def {
uint32_t dummy;
} osMutexDef_t;
/**
* @brief Type of a semaphore definition block.
*/
typedef struct os_semaphore_def {
uint32_t dummy;
} osSemaphoreDef_t;
/**
* @brief Type of a memory pool definition block.
*/
typedef struct os_pool_def {
uint32_t pool_sz;
uint32_t item_sz;
memory_pool_t *pool;
void *items;
} osPoolDef_t;
/**
* @brief Type of a message queue definition block.
*/
typedef struct os_messageQ_def {
uint32_t queue_sz;
uint32_t item_sz;
mailbox_t *mailbox;
void *items;
} osMessageQDef_t;
/*===========================================================================*/
/* Module macros. */
/*===========================================================================*/
/**
* @brief Convert a microseconds value to a RTOS kernel system timer value.
*/
#define osKernelSysTickMicroSec(microsec) (((uint64_t)microsec * \
(osKernelSysTickFrequency)) / \
1000000)
/**
* @brief Create a Thread definition.
*/
#if defined(osObjectsExternal)
#define osThreadDef(name, priority, instances, stacksz) \
extern const osThreadDef_t os_thread_def_##name
#else
#define osThreadDef(name, priority, stacksz) \
const osThreadDef_t os_thread_def_##name = { \
(name), \
(priority), \
(stacksz) \
}
#endif
/**
* @brief Access a Thread definition.
*/
#define osThread(name) &os_thread_def_##name
/**
* @brief Define a Timer object.
*/
#if defined(osObjectsExternal)
#define osTimerDef(name, function) \
extern const osTimerDef_t os_timer_def_##name
#else
#define osTimerDef(name, function) \
const osTimerDef_t os_timer_def_##name = { \
(function) \
}
#endif
/**
* @brief Access a Timer definition.
*/
#define osTimer(name) &os_timer_def_##name
/**
* @brief Define a Mutex.
*/
#if defined (osObjectsExternal)
#define osMutexDef(name) extern const osMutexDef_t os_mutex_def_##name
#else
#define osMutexDef(name) const osMutexDef_t os_mutex_def_##name = {0}
#endif
/**
* @brief Access a Mutex definition.
*/
#define osMutex(name) &os_mutex_def_##name
/**
* @brief Define a Semaphore.
*/
#if defined (osObjectsExternal)
#define osSemaphoreDef(name) \
extern const osSemaphoreDef_t os_semaphore_def_##name
#else // define the object
#define osSemaphoreDef(name) \
const osSemaphoreDef_t os_semaphore_def_##name = {0}
#endif
/**
* @brief Access a Semaphore definition.
*/
#define osSemaphore(name) &os_semaphore_def_##name
/**
* @brief Define a Memory Pool.
*/
#if defined(osObjectsExternal)
#define osPoolDef(name, no, type) \
extern const osPoolDef_t os_pool_def_##name
#else
#define osPoolDef(name, no, type) \
static const type os_pool_buf_##name[no]; \
static memory_pool_t os_pool_obj_##name; \
const osPoolDef_t os_pool_def_##name = { \
(no), \
sizeof (type), \
(void *)&os_pool_obj_##name, \
(void *)&os_pool_buf_##name[0] \
}
#endif
/**
* @brief Access a Memory Pool definition.
*/
#define osPool(name) &os_pool_def_##name
/**
* @brief Define a Message Queue.
*/
#if defined (osObjectsExternal)
#define osMessageQDef(name, queue_sz, type) \
extern const osMessageQDef_t os_messageQ_def_##name
#else
#define osMessageQDef(name, queue_sz, type) \
static const msg_t os_messageQ_buf_##name[queue_sz]; \
static mailbox_t os_messageQ_obj_##name; \
const osMessageQDef_t os_messageQ_def_##name = { \
(queue_sz), \
    sizeof (type), \
(void *)&os_messageQ_obj_##name, \
(void *)&os_messageQ_buf_##name[0] \
}
#endif
/**
* @brief Access a Message Queue definition.
*/
#define osMessageQ(name) &os_messageQ_def_##name
/*===========================================================================*/
/* External declarations. */
/*===========================================================================*/
extern int32_t cmsis_os_started;
#ifdef __cplusplus
extern "C" {
#endif
osStatus osKernelInitialize(void);
osStatus osKernelStart(void);
osThreadId osThreadCreate(const osThreadDef_t *thread_def, void *argument);
osStatus osThreadTerminate(osThreadId thread_id);
osStatus osThreadSetPriority(osThreadId thread_id, osPriority newprio);
/*osEvent osWait(uint32_t millisec);*/
osTimerId osTimerCreate(const osTimerDef_t *timer_def,
os_timer_type type,
void *argument);
osStatus osTimerStart(osTimerId timer_id, uint32_t millisec);
osStatus osTimerStop(osTimerId timer_id);
osStatus osTimerDelete(osTimerId timer_id);
int32_t osSignalSet(osThreadId thread_id, int32_t signals);
int32_t osSignalClear(osThreadId thread_id, int32_t signals);
osEvent osSignalWait(int32_t signals, uint32_t millisec);
osSemaphoreId osSemaphoreCreate(const osSemaphoreDef_t *semaphore_def,
int32_t count);
int32_t osSemaphoreWait(osSemaphoreId semaphore_id, uint32_t millisec);
osStatus osSemaphoreRelease(osSemaphoreId semaphore_id);
osStatus osSemaphoreDelete(osSemaphoreId semaphore_id);
osMutexId osMutexCreate(const osMutexDef_t *mutex_def);
osStatus osMutexWait(osMutexId mutex_id, uint32_t millisec);
osStatus osMutexRelease(osMutexId mutex_id);
osStatus osMutexDelete(osMutexId mutex_id);
osPoolId osPoolCreate(const osPoolDef_t *pool_def);
void *osPoolAlloc(osPoolId pool_id);
void *osPoolCAlloc(osPoolId pool_id);
osStatus osPoolFree(osPoolId pool_id, void *block);
osMessageQId osMessageCreate(const osMessageQDef_t *queue_def,
osThreadId thread_id);
osStatus osMessagePut(osMessageQId queue_id,
uint32_t info,
uint32_t millisec);
osEvent osMessageGet(osMessageQId queue_id,
uint32_t millisec);
#ifdef __cplusplus
}
#endif
/*===========================================================================*/
/* Module inline functions. */
/*===========================================================================*/
/**
 * @brief Checks if the kernel is running.
*/
static inline int32_t osKernelRunning(void) {
return cmsis_os_started;
}
/**
* @brief System ticks since start.
*/
static inline uint32_t osKernelSysTick(void) {
return (uint32_t)chVTGetSystemTimeX();
}
/**
* @brief Returns the current thread.
*/
static inline osThreadId osThreadGetId(void) {
return (osThreadId)chThdGetSelfX();
}
/**
* @brief Thread time slice yield.
*/
static inline osStatus osThreadYield(void) {
chThdYield();
return osOK;
}
/**
* @brief Returns priority of a thread.
*/
static inline osPriority osThreadGetPriority(osThreadId thread_id) {
return thread_id->p_prio;
}
/**
* @brief Thread delay in milliseconds.
*/
static inline osStatus osDelay(uint32_t millisec) {
chThdSleepMilliseconds(millisec);
return osOK;
}
#endif /* _CMSIS_OS_H_ */
/** @} */
| {
"content_hash": "95a584086334c0b0e73642adb15352e1",
"timestamp": "",
"source": "github",
"line_count": 503,
"max_line_length": 79,
"avg_line_length": 28.75944333996024,
"alnum_prop": 0.5036637633070649,
"repo_name": "SVentas/SmartMDC",
"id": "4ae6af943ca602ab623e7a85c64a49e72f12f494",
"size": "15207",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "chibios_3.0.x/rt/ports/ARMCMx/cmsis_os/cmsis_os.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "14346"
},
{
"name": "C",
"bytes": "3866354"
},
{
"name": "C++",
"bytes": "282925"
},
{
"name": "Makefile",
"bytes": "44526"
},
{
"name": "Objective-C",
"bytes": "158295"
}
],
"symlink_target": ""
} |
/*!
* Angular Material Design
* https://github.com/angular/material
* @license MIT
* v0.7.1
*/
goog.provide('ng.material.components.toolbar');
goog.require('ng.material.components.content');
goog.require('ng.material.core');
(function() {
'use strict';
/**
* @ngdoc module
* @name material.components.toolbar
*/
angular.module('material.components.toolbar', [
'material.core',
'material.components.content'
])
.directive('mdToolbar', mdToolbarDirective);
/**
* @ngdoc directive
* @name mdToolbar
* @module material.components.toolbar
* @restrict E
* @description
* `md-toolbar` is used to place a toolbar in your app.
*
* Toolbars are usually used above a content area to display the title of the
* current page, and show relevant action buttons for that page.
*
* You can change the height of the toolbar by adding either the
* `md-medium-tall` or `md-tall` class to the toolbar.
*
* @usage
* <hljs lang="html">
* <div layout="column" layout-fill>
* <md-toolbar>
*
* <div class="md-toolbar-tools">
* <span>My App's Title</span>
*
* <!-- fill up the space between left and right area -->
* <span flex></span>
*
* <md-button>
* Right Bar Button
* </md-button>
* </div>
*
* </md-toolbar>
* <md-content>
* Hello!
* </md-content>
* </div>
* </hljs>
*
* @param {boolean=} md-scroll-shrink Whether the header should shrink away as
* the user scrolls down, and reveal itself as the user scrolls up.
* Note: for scrollShrink to work, the toolbar must be a sibling of a
* `md-content` element, placed before it. See the scroll shrink demo.
*
*
* @param {number=} md-shrink-speed-factor How much to change the speed of the toolbar's
* shrinking by. For example, if 0.25 is given then the toolbar will shrink
* at one fourth the rate at which the user scrolls down. Default 0.5.
*/
function mdToolbarDirective($$rAF, $mdConstant, $mdUtil, $mdTheming) {
return {
restrict: 'E',
controller: angular.noop,
link: function(scope, element, attr) {
$mdTheming(element);
if (angular.isDefined(attr.mdScrollShrink)) {
setupScrollShrink();
}
function setupScrollShrink() {
// Current "y" position of scroll
var y = 0;
// Store the last scroll top position
var prevScrollTop = 0;
var shrinkSpeedFactor = attr.mdShrinkSpeedFactor || 0.5;
var toolbarHeight;
var contentElement;
var debouncedContentScroll = $$rAF.throttle(onContentScroll);
var debouncedUpdateHeight = $mdUtil.debounce(updateToolbarHeight, 5 * 1000);
// Wait for $mdContentLoaded event from mdContent directive.
// If the mdContent element is a sibling of our toolbar, hook it up
// to scroll events.
scope.$on('$mdContentLoaded', onMdContentLoad);
function onMdContentLoad($event, newContentEl) {
// Toolbar and content must be siblings
if (element.parent()[0] === newContentEl.parent()[0]) {
// unhook old content event listener if exists
if (contentElement) {
contentElement.off('scroll', debouncedContentScroll);
}
newContentEl.on('scroll', debouncedContentScroll);
newContentEl.attr('scroll-shrink', 'true');
contentElement = newContentEl;
$$rAF(updateToolbarHeight);
}
}
function updateToolbarHeight() {
toolbarHeight = element.prop('offsetHeight');
// Add a negative margin-top the size of the toolbar to the content el.
// The content will start transformed down the toolbarHeight amount,
// so everything looks normal.
//
// As the user scrolls down, the content will be transformed up slowly
// to put the content underneath where the toolbar was.
contentElement.css(
'margin-top',
(-toolbarHeight * shrinkSpeedFactor) + 'px'
);
onContentScroll();
}
function onContentScroll(e) {
var scrollTop = e ? e.target.scrollTop : prevScrollTop;
debouncedUpdateHeight();
y = Math.min(
toolbarHeight / shrinkSpeedFactor,
Math.max(0, y + scrollTop - prevScrollTop)
);
element.css(
$mdConstant.CSS.TRANSFORM,
'translate3d(0,' + (-y * shrinkSpeedFactor) + 'px,0)'
);
contentElement.css(
$mdConstant.CSS.TRANSFORM,
'translate3d(0,' + ((toolbarHeight - y) * shrinkSpeedFactor) + 'px,0)'
);
prevScrollTop = scrollTop;
}
}
}
};
}
mdToolbarDirective.$inject = ["$$rAF", "$mdConstant", "$mdUtil", "$mdTheming"];
})();
| {
"content_hash": "68b97e45fbef07b94335c07982147bb6",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 88,
"avg_line_length": 29.8944099378882,
"alnum_prop": 0.613546644504467,
"repo_name": "tw4qa/sw2at-ui",
"id": "a198b0e193025c5b165ceaeed5eb725f00791cb0",
"size": "4813",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "app/assets/javascripts/swat/bower_components/angular-material/modules/closure/toolbar/toolbar.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "69551"
},
{
"name": "CoffeeScript",
"bytes": "17094"
},
{
"name": "HTML",
"bytes": "10949"
},
{
"name": "JavaScript",
"bytes": "18979"
},
{
"name": "Ruby",
"bytes": "103402"
}
],
"symlink_target": ""
} |
! Program to test dummy procedures
subroutine bar()
end subroutine
subroutine foo2(p)
external p
call p()
end subroutine
subroutine foo(p)
external p
! We never actually discover if this is a function or a subroutine
call foo2(p)
end subroutine
program intrinsic_minmax
implicit none
external bar
call foo(bar)
end program
| {
"content_hash": "3d06947d09f055c84e158f6cbf88a58e",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 68,
"avg_line_length": 15.130434782608695,
"alnum_prop": 0.7442528735632183,
"repo_name": "the-linix-project/linix-kernel-source",
"id": "06fa21614ed2df2e32954e011d4ec876ef3fa711",
"size": "348",
"binary": false,
"copies": "210",
"ref": "refs/heads/master",
"path": "gccsrc/gcc-4.7.2/gcc/testsuite/gfortran.fortran-torture/execute/mystery_proc.f90",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Ada",
"bytes": "38139979"
},
{
"name": "Assembly",
"bytes": "3723477"
},
{
"name": "Awk",
"bytes": "83739"
},
{
"name": "C",
"bytes": "103607293"
},
{
"name": "C#",
"bytes": "55726"
},
{
"name": "C++",
"bytes": "38577421"
},
{
"name": "CLIPS",
"bytes": "6933"
},
{
"name": "CSS",
"bytes": "32588"
},
{
"name": "Emacs Lisp",
"bytes": "13451"
},
{
"name": "FORTRAN",
"bytes": "4294984"
},
{
"name": "GAP",
"bytes": "13089"
},
{
"name": "Go",
"bytes": "11277335"
},
{
"name": "Haskell",
"bytes": "2415"
},
{
"name": "Java",
"bytes": "45298678"
},
{
"name": "JavaScript",
"bytes": "6265"
},
{
"name": "Matlab",
"bytes": "56"
},
{
"name": "OCaml",
"bytes": "148372"
},
{
"name": "Objective-C",
"bytes": "995127"
},
{
"name": "Objective-C++",
"bytes": "436045"
},
{
"name": "PHP",
"bytes": "12361"
},
{
"name": "Pascal",
"bytes": "40318"
},
{
"name": "Perl",
"bytes": "358808"
},
{
"name": "Python",
"bytes": "60178"
},
{
"name": "SAS",
"bytes": "1711"
},
{
"name": "Scilab",
"bytes": "258457"
},
{
"name": "Shell",
"bytes": "2610907"
},
{
"name": "Tcl",
"bytes": "17983"
},
{
"name": "TeX",
"bytes": "1455571"
},
{
"name": "XSLT",
"bytes": "156419"
}
],
"symlink_target": ""
} |
API Version: 2018-06-01-preview
## Template format
To create a Microsoft.ApiManagement/service/apis/policies resource, add the following JSON to the resources section of your template.
```json
{
"name": "policy",
"type": "Microsoft.ApiManagement/service/apis/policies",
"apiVersion": "2018-06-01-preview",
"properties": {
"policyContent": "string",
"contentFormat": "string"
}
}
```
## Property values
The following tables describe the values you need to set in the schema.
<a id="Microsoft.ApiManagement/service/apis/policies" />
### Microsoft.ApiManagement/service/apis/policies object
| Name | Type | Required | Value |
| ---- | ---- | ---- | ---- |
| name | enum | Yes | The identifier of the Policy. - policy |
| type | enum | Yes | Microsoft.ApiManagement/service/apis/policies |
| apiVersion | enum | Yes | 2018-06-01-preview |
| properties | object | Yes | Properties of the Policy. - [PolicyContractProperties object](#PolicyContractProperties) |
<a id="PolicyContractProperties" />
### PolicyContractProperties object
| Name | Type | Required | Value |
| ---- | ---- | ---- | ---- |
| policyContent | string | Yes | Json escaped Xml Encoded contents of the Policy. |
| contentFormat | enum | No | Format of the policyContent. - xml, xml-link, rawxml, rawxml-link |
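For reference, here is a minimal sketch of a complete resource (the policy body is assumed for illustration; it is just the default `<base />` pass-through skeleton) showing an inline policy supplied through `policyContent` with `contentFormat` set to `xml`:
```json
{
  "name": "policy",
  "type": "Microsoft.ApiManagement/service/apis/policies",
  "apiVersion": "2018-06-01-preview",
  "properties": {
    "policyContent": "<policies><inbound><base /></inbound><backend><base /></backend><outbound><base /></outbound><on-error><base /></on-error></policies>",
    "contentFormat": "xml"
  }
}
```
Since the XML travels inside a JSON string, any double quotes in the policy markup (for example in policy attributes) must be escaped as \", which is what "Json escaped Xml Encoded contents" means for the `xml` and `rawxml` formats; the `xml-link` and `rawxml-link` formats reference a policy document by URL instead.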
| {
"content_hash": "13e8cd026bead39bb30b789fd385bd5a",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 133,
"avg_line_length": 35.4054054054054,
"alnum_prop": 0.683206106870229,
"repo_name": "waynekuo/azure-resource-manager-schemas",
"id": "b9c0fad8bcad850604df44437f064eee9daf8edf",
"size": "1377",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "schemas/2018-06-01-preview/service/apis/policies.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "244"
},
{
"name": "JavaScript",
"bytes": "44014"
}
],
"symlink_target": ""
} |
FROM qnib/supervisor
MAINTAINER "Christian Kniep <[email protected]>"
VOLUME /videos/input
VOLUME /videos/output
CMD /bin/supervisord -c /etc/supervisord.conf
| {
"content_hash": "40a95a63508665e94b72ff4c64645283",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 49,
"avg_line_length": 23.142857142857142,
"alnum_prop": 0.7962962962962963,
"repo_name": "ChristianKniep/docker-convert",
"id": "7bbfcde160d1a5d3202edffa2e442b181f8df8d3",
"size": "195",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Dockerfile",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Shell",
"bytes": "195"
}
],
"symlink_target": ""
} |
using System;
namespace Memento.Core.Ntfs
{
/// <summary>
/// This is where the parts of the file are located on the volume.
/// </summary>
public interface IFragment
{
/// <summary>
/// Logical cluster number, location on disk.
/// </summary>
UInt64 Lcn { get; }
/// <summary>
/// Virtual cluster number of next fragment.
/// </summary>
UInt64 NextVcn { get; }
}
}
| {
"content_hash": "226a7799e1352a01048ee402b6bf518f",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 70,
"avg_line_length": 21.857142857142858,
"alnum_prop": 0.5359477124183006,
"repo_name": "Memento1990/Memento",
"id": "10e3517a43590b1551de8a363afdd98ab52a1712",
"size": "1545",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Memento.Core/Memento.Core.Ntfs/IFragment.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "221"
},
{
"name": "C",
"bytes": "607"
},
{
"name": "C#",
"bytes": "1853639"
},
{
"name": "C++",
"bytes": "5660"
},
{
"name": "CSS",
"bytes": "14798"
},
{
"name": "HTML",
"bytes": "10675"
},
{
"name": "JavaScript",
"bytes": "1798677"
},
{
"name": "Visual Basic",
"bytes": "60280"
}
],
"symlink_target": ""
} |
<!-- Static navbar -->
<nav class="navbar navbar-default navbar-static-top">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false" aria-controls="navbar">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="{% url 'home' %}">MVP Landing</a>
</div>
<div id="navbar" class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<li><a href="{% url 'home' %}">Home</a></li>
</ul>
</div><!--/.nav-collapse -->
</div>
</nav> | {
"content_hash": "c731c5ff7d651edccb8743ef38985f9c",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 154,
"avg_line_length": 43.63157894736842,
"alnum_prop": 0.5367913148371531,
"repo_name": "ahsanwtc/django-boilerplate",
"id": "9fe2e21219f7a99f5b6131ed042cea67d79b2aff",
"size": "829",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "expshare/templates/navbar.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "78"
},
{
"name": "HTML",
"bytes": "5661"
},
{
"name": "JavaScript",
"bytes": "2771"
},
{
"name": "Python",
"bytes": "5587"
}
],
"symlink_target": ""
} |
import React from 'react';
import { storiesOf } from '@storybook/react';
import { withNotes } from '@storybook/addon-notes';
// For these stories to work, you must build the static version of the
// example storybooks *before* running this storybook.
const chapter = storiesOf('App|acceptance', module);
const style = {
border: 0,
position: 'absolute',
top: 0,
left: 0,
width: '100vw',
height: '100vh',
};
[
'cra-kitchen-sink',
'vue-kitchen-sink',
'angular-cli',
'polymer-cli',
'mithril-kitchen-sink',
].forEach(name => {
chapter.add(
name,
withNotes(`You must build the storybook for the ${name} example for this story to work.`)(
() => <iframe style={style} title={name} src={`${name}/index.html`} />
)
);
});
| {
"content_hash": "53865c287840f1e2b84ce0233cae5e9b",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 94,
"avg_line_length": 23.8125,
"alnum_prop": 0.6443569553805775,
"repo_name": "rhalff/storybook",
"id": "84e4915b2d555f561d401dd7461395ff6848bdf3",
"size": "762",
"binary": false,
"copies": "1",
"ref": "refs/heads/addon-actions",
"path": "examples/official-storybook/stories/app-acceptance.stories.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2668"
},
{
"name": "HTML",
"bytes": "19272"
},
{
"name": "Java",
"bytes": "2658"
},
{
"name": "JavaScript",
"bytes": "740774"
},
{
"name": "Objective-C",
"bytes": "8846"
},
{
"name": "Python",
"bytes": "3468"
},
{
"name": "Shell",
"bytes": "7425"
},
{
"name": "TypeScript",
"bytes": "30458"
},
{
"name": "Vue",
"bytes": "13203"
}
],
"symlink_target": ""
} |
package com.amazonaws.services.cloudformation.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.stream.events.XMLEvent;
import com.amazonaws.services.cloudformation.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.MapEntry;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* Stack StAX Unmarshaller
*/
public class StackStaxUnmarshaller implements Unmarshaller<Stack, StaxUnmarshallerContext> {
public Stack unmarshall(StaxUnmarshallerContext context) throws Exception {
Stack stack = new Stack();
int originalDepth = context.getCurrentDepth();
int targetDepth = originalDepth + 1;
if (context.isStartOfDocument()) targetDepth += 2;
while (true) {
XMLEvent xmlEvent = context.nextEvent();
if (xmlEvent.isEndDocument()) return stack;
if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
if (context.testExpression("StackId", targetDepth)) {
stack.setStackId(StringStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("StackName", targetDepth)) {
stack.setStackName(StringStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("Description", targetDepth)) {
stack.setDescription(StringStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("Parameters/member", targetDepth)) {
stack.getParameters().add(ParameterStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("CreationTime", targetDepth)) {
stack.setCreationTime(DateStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("LastUpdatedTime", targetDepth)) {
stack.setLastUpdatedTime(DateStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("StackStatus", targetDepth)) {
stack.setStackStatus(StringStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("StackStatusReason", targetDepth)) {
stack.setStackStatusReason(StringStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("DisableRollback", targetDepth)) {
stack.setDisableRollback(BooleanStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("NotificationARNs/member", targetDepth)) {
stack.getNotificationARNs().add(StringStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("TimeoutInMinutes", targetDepth)) {
stack.setTimeoutInMinutes(IntegerStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("Capabilities/member", targetDepth)) {
stack.getCapabilities().add(StringStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("Outputs/member", targetDepth)) {
stack.getOutputs().add(OutputStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("Tags/member", targetDepth)) {
stack.getTags().add(TagStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
} else if (xmlEvent.isEndElement()) {
if (context.getCurrentDepth() < originalDepth) {
return stack;
}
}
}
}
private static StackStaxUnmarshaller instance;
public static StackStaxUnmarshaller getInstance() {
if (instance == null) instance = new StackStaxUnmarshaller();
return instance;
}
}
| {
"content_hash": "8efe717f8cf6e9645bba2360b003c393",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 110,
"avg_line_length": 43.820754716981135,
"alnum_prop": 0.5913885898815932,
"repo_name": "XidongHuang/aws-sdk-for-java",
"id": "91b7aef8bbe64f73161c47ee1d41e2d0372b050c",
"size": "5232",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/main/java/com/amazonaws/services/cloudformation/model/transform/StackStaxUnmarshaller.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "33052271"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<!--
Copyright (c) 2013 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<link rel="import" href="/tracing/base/task.html">
<link rel="import" href="/tracing/core/test_utils.html">
<link rel="import" href="/tracing/model/event_set.html">
<link rel="import" href="/tracing/model/model.html">
<link rel="import" href="/tracing/ui/timeline_view.html">
<script>
'use strict';
tr.b.unittest.testSuite(function() {
const Task = tr.b.Task;
function setupTimeline() {
const container = document.createElement('track-view-container');
container.id = 'track_view_container';
const view = document.createElement('tr-ui-timeline-view');
Polymer.dom(view).appendChild(container);
view.trackViewContainer_ = container;
return view;
}
const createFullyPopulatedModel = function(opt_withError, opt_withMetadata) {
const withError = opt_withError !== undefined ? opt_withError : true;
const withMetadata = opt_withMetadata !== undefined ?
opt_withMetadata : true;
const numTests = 50;
let testIndex = 0;
const startTime = 0;
const model = new tr.Model();
const io = new tr.importer.ImportOptions();
io.showImportWarnings = false;
model.importOptions = io;
for (testIndex = 0; testIndex < numTests; ++testIndex) {
const process = model.getOrCreateProcess(10000 + testIndex);
if (testIndex % 2 === 0) {
const thread = process.getOrCreateThread('Thread Name Here');
thread.sliceGroup.pushSlice(new tr.model.ThreadSlice(
'foo', 'a', 0, startTime, {}, 1));
thread.sliceGroup.pushSlice(new tr.model.ThreadSlice(
'bar', 'b', 0, startTime + 23, {}, 10));
} else {
const thread = process.getOrCreateThread('Name');
thread.sliceGroup.pushSlice(new tr.model.ThreadSlice(
'foo', 'a', 0, startTime + 4, {}, 11));
thread.sliceGroup.pushSlice(new tr.model.ThreadSlice(
'bar', 'b', 0, startTime + 22, {}, 14));
}
}
const p1000 = model.getOrCreateProcess(1000);
const objects = p1000.objects;
objects.idWasCreated('0x1000', 'tr.e.cc', 'LayerTreeHostImpl', 10);
objects.addSnapshot('0x1000', 'tr.e.cc', 'LayerTreeHostImpl', 10,
'snapshot-1');
objects.addSnapshot('0x1000', 'tr.e.cc', 'LayerTreeHostImpl', 25,
'snapshot-2');
objects.addSnapshot('0x1000', 'tr.e.cc', 'LayerTreeHostImpl', 40,
'snapshot-3');
objects.idWasDeleted('0x1000', 'tr.e.cc', 'LayerTreeHostImpl', 45);
model.updateCategories_();
// Add a known problematic piece of data to test the import errors UI.
model.importWarning({
type: 'test_error',
message: 'Synthetic Import Error'
});
model.updateBounds();
// Add data with metadata information stored
model.metadata.push({name: 'a', value: 'testA'});
model.metadata.push({name: 'b', value: 'testB'});
model.metadata.push({name: 'c', value: 'testC'});
return model;
};
const visibleTracks = function(trackButtons) {
return trackButtons.reduce(function(numVisible, button) {
const style = button.parentElement.style;
const visible = (style.display.indexOf('none') === -1);
return visible ? numVisible + 1 : numVisible;
}, 0);
};
const modelsEquivalent = function(lhs, rhs) {
if (lhs.length !== rhs.length) return false;
return lhs.every(function(lhsItem, index) {
const rhsItem = rhs[index];
return rhsItem.regexpText === lhsItem.regexpText &&
rhsItem.isOn === lhsItem.isOn;
});
};
test('instantiate', function() {
const model11 = createFullyPopulatedModel(true, true);
const view = setupTimeline();
view.style.height = '400px';
view.style.border = '1px solid black';
view.model = model11;
const simpleButton1 = document.createElement('tr-ui-b-toolbar-button');
Polymer.dom(simpleButton1).textContent = 'M';
Polymer.dom(view.leftControls).appendChild(simpleButton1);
const simpleButton2 = document.createElement('tr-ui-b-toolbar-button');
Polymer.dom(simpleButton2).textContent = 'am button';
Polymer.dom(view.leftControls).appendChild(simpleButton2);
this.addHTMLOutput(view);
});
test('changeModelToSomethingDifferent', function() {
const model00 = createFullyPopulatedModel(false, false);
const model11 = createFullyPopulatedModel(true, true);
const view = setupTimeline();
view.style.height = '400px';
view.model = model00;
view.model = undefined;
view.model = model11;
view.model = model00;
});
test('setModelToSameThingAgain', function() {
const model = createFullyPopulatedModel(false, false);
    // Create a view with a model.
const view = setupTimeline();
view.style.height = '400px';
view.model = model;
const sc = view.brushingStateController;
// Mutate the model and update the view.
const t123 = model.getOrCreateProcess(123).getOrCreateThread(123);
t123.sliceGroup.pushSlice(tr.c.TestUtils.newSliceEx(
{title: 'somethingUnusual', start: 0, duration: 5}));
view.model = model;
// Verify that the new bits of the model show up in the view.
const selection = new tr.model.EventSet();
const filter = new tr.c.TitleOrCategoryFilter('somethingUnusual');
const filterTask = sc.addAllEventsMatchingFilterToSelectionAsTask(
filter, selection);
Task.RunSynchronously(filterTask);
assert.strictEqual(selection.length, 1);
});
});
</script>
| {
"content_hash": "f4feb2a647bd2466249be8371c4f93ed",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 79,
"avg_line_length": 35.05625,
"alnum_prop": 0.6655375289712961,
"repo_name": "benschmaus/catapult",
"id": "f6431a503f67fb9c131d881ea14b40118e7caa6e",
"size": "5609",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tracing/tracing/ui/timeline_view_test.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4902"
},
{
"name": "C++",
"bytes": "43486"
},
{
"name": "CSS",
"bytes": "24873"
},
{
"name": "Go",
"bytes": "58279"
},
{
"name": "HTML",
"bytes": "11801772"
},
{
"name": "JavaScript",
"bytes": "518002"
},
{
"name": "Makefile",
"bytes": "1588"
},
{
"name": "Python",
"bytes": "6141932"
},
{
"name": "Shell",
"bytes": "2288"
}
],
"symlink_target": ""
} |
package hudson;
import java.io.InputStream;
import java.nio.file.Files;
import jenkins.util.SystemProperties;
import com.google.common.collect.Lists;
import hudson.Plugin.DummyImpl;
import hudson.PluginWrapper.Dependency;
import hudson.model.Hudson;
import jenkins.util.AntClassLoader;
import hudson.util.CyclicGraphDetector;
import hudson.util.CyclicGraphDetector.CycleDetectedException;
import hudson.util.IOUtils;
import hudson.util.MaskingClassLoader;
import hudson.util.VersionNumber;
import jenkins.ClassLoaderReflectionToolkit;
import jenkins.ExtensionFilter;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.taskdefs.Expand;
import org.apache.tools.ant.taskdefs.Zip;
import org.apache.tools.ant.types.FileSet;
import org.apache.tools.ant.types.PatternSet;
import org.apache.tools.ant.types.Resource;
import org.apache.tools.ant.types.ZipFileSet;
import org.apache.tools.ant.types.resources.MappedResourceCollection;
import org.apache.tools.ant.util.GlobPatternMapper;
import org.apache.tools.zip.ZipEntry;
import org.apache.tools.zip.ZipExtraField;
import org.apache.tools.zip.ZipOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Vector;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jenkinsci.bytecode.Transformer;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import javax.annotation.Nonnull;
import static org.apache.commons.io.FilenameUtils.getBaseName;
public class ClassicPluginStrategy implements PluginStrategy {
/**
* Filter for jar files.
*/
private static final FilenameFilter JAR_FILTER = new FilenameFilter() {
public boolean accept(File dir,String name) {
return name.endsWith(".jar");
}
};
private PluginManager pluginManager;
/**
* All the plugins eventually delegate this classloader to load core, servlet APIs, and SE runtime.
*/
private final MaskingClassLoader coreClassLoader = new MaskingClassLoader(getClass().getClassLoader());
public ClassicPluginStrategy(PluginManager pluginManager) {
this.pluginManager = pluginManager;
}
@Override public String getShortName(File archive) throws IOException {
Manifest manifest;
if (isLinked(archive)) {
manifest = loadLinkedManifest(archive);
} else {
try (JarFile jf = new JarFile(archive, false)) {
manifest = jf.getManifest();
}
}
return PluginWrapper.computeShortName(manifest, archive.getName());
}
private static boolean isLinked(File archive) {
return archive.getName().endsWith(".hpl") || archive.getName().endsWith(".jpl");
}
private static Manifest loadLinkedManifest(File archive) throws IOException {
// resolve the .hpl file to the location of the manifest file
try {
// Locate the manifest
String firstLine;
try (InputStream manifestHeaderInput = Files.newInputStream(archive.toPath())) {
firstLine = IOUtils.readFirstLine(manifestHeaderInput, "UTF-8");
}
if (firstLine.startsWith("Manifest-Version:")) {
// this is the manifest already
} else {
// indirection
archive = resolve(archive, firstLine);
}
// Read the manifest
try (InputStream manifestInput = Files.newInputStream(archive.toPath())) {
return new Manifest(manifestInput);
}
} catch (IOException e) {
throw new IOException("Failed to load " + archive, e);
}
}
@Override public PluginWrapper createPluginWrapper(File archive) throws IOException {
final Manifest manifest;
URL baseResourceURL = null;
File expandDir = null;
// if .hpi, this is the directory where war is expanded
boolean isLinked = isLinked(archive);
if (isLinked) {
manifest = loadLinkedManifest(archive);
} else {
if (archive.isDirectory()) {// already expanded
expandDir = archive;
} else {
File f = pluginManager.getWorkDir();
expandDir = new File(f == null ? archive.getParentFile() : f, getBaseName(archive.getName()));
explode(archive, expandDir);
}
File manifestFile = new File(expandDir, PluginWrapper.MANIFEST_FILENAME);
if (!manifestFile.exists()) {
throw new IOException(
"Plugin installation failed. No manifest at "
+ manifestFile);
}
try (InputStream fin = Files.newInputStream(manifestFile.toPath())) {
manifest = new Manifest(fin);
}
}
final Attributes atts = manifest.getMainAttributes();
// TODO: define a mechanism to hide classes
// String export = manifest.getMainAttributes().getValue("Export");
List<File> paths = new ArrayList<File>();
if (isLinked) {
parseClassPath(manifest, archive, paths, "Libraries", ",");
parseClassPath(manifest, archive, paths, "Class-Path", " +"); // backward compatibility
baseResourceURL = resolve(archive,atts.getValue("Resource-Path")).toURI().toURL();
} else {
File classes = new File(expandDir, "WEB-INF/classes");
if (classes.exists())
paths.add(classes);
File lib = new File(expandDir, "WEB-INF/lib");
File[] libs = lib.listFiles(JAR_FILTER);
if (libs != null)
paths.addAll(Arrays.asList(libs));
baseResourceURL = expandDir.toPath().toUri().toURL();
}
File disableFile = new File(archive.getPath() + ".disabled");
if (disableFile.exists()) {
LOGGER.info("Plugin " + archive.getName() + " is disabled");
}
// compute dependencies
List<PluginWrapper.Dependency> dependencies = new ArrayList<PluginWrapper.Dependency>();
List<PluginWrapper.Dependency> optionalDependencies = new ArrayList<PluginWrapper.Dependency>();
String v = atts.getValue("Plugin-Dependencies");
if (v != null) {
for (String s : v.split(",")) {
PluginWrapper.Dependency d = new PluginWrapper.Dependency(s);
if (d.optional) {
optionalDependencies.add(d);
} else {
dependencies.add(d);
}
}
}
fix(atts,optionalDependencies);
// Register global classpath mask. This is useful for hiding JavaEE APIs that you might see from the container,
// such as database plugin for JPA support. The Mask-Classes attribute is insufficient because those classes
// also need to be masked by all the other plugins that depend on the database plugin.
String masked = atts.getValue("Global-Mask-Classes");
if(masked!=null) {
for (String pkg : masked.trim().split("[ \t\r\n]+"))
coreClassLoader.add(pkg);
}
ClassLoader dependencyLoader = new DependencyClassLoader(coreClassLoader, archive, Util.join(dependencies,optionalDependencies));
dependencyLoader = getBaseClassLoader(atts, dependencyLoader);
return new PluginWrapper(pluginManager, archive, manifest, baseResourceURL,
createClassLoader(paths, dependencyLoader, atts), disableFile, dependencies, optionalDependencies);
}
private static void fix(Attributes atts, List<PluginWrapper.Dependency> optionalDependencies) {
String pluginName = atts.getValue("Short-Name");
String jenkinsVersion = atts.getValue("Jenkins-Version");
if (jenkinsVersion==null)
jenkinsVersion = atts.getValue("Hudson-Version");
optionalDependencies.addAll(getImpliedDependencies(pluginName, jenkinsVersion));
}
/**
* Returns all the plugin dependencies that are implicit based on a particular Jenkins version
* @since 2.0
*/
@Nonnull
public static List<PluginWrapper.Dependency> getImpliedDependencies(String pluginName, String jenkinsVersion) {
List<PluginWrapper.Dependency> out = new ArrayList<>();
for (DetachedPlugin detached : DETACHED_LIST) {
// don't fix the dependency for itself, or else we'll have a cycle
if (detached.shortName.equals(pluginName)) {
continue;
}
if (BREAK_CYCLES.contains(pluginName + '/' + detached.shortName)) {
LOGGER.log(Level.FINE, "skipping implicit dependency {0} → {1}", new Object[] {pluginName, detached.shortName});
continue;
}
            // some earlier versions of maven-hpi-plugin apparently put "null" as a literal in Hudson-Version. Watch out for them.
if (jenkinsVersion == null || jenkinsVersion.equals("null") || new VersionNumber(jenkinsVersion).compareTo(detached.splitWhen) <= 0) {
out.add(new PluginWrapper.Dependency(detached.shortName + ':' + detached.requiredVersion));
LOGGER.log(Level.FINE, "adding implicit dependency {0} → {1} because of {2}", new Object[] {pluginName, detached.shortName, jenkinsVersion});
}
}
return out;
}
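    // Illustrative sketch (not from the original source): with the DETACHED_LIST defined
    // below, a plugin whose manifest declares Jenkins-Version 1.500 would pick up implied
    // optional dependencies for every plugin that was detached in a later release, e.g.
    //
    //   List<PluginWrapper.Dependency> implied =
    //       ClassicPluginStrategy.getImpliedDependencies("my-plugin", "1.500");
    //   // contains, among others, "junit:1.0" (junit was split off in 1.577.*)
    //
    // "my-plugin" is a hypothetical short name used only for this example.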
@Deprecated
protected ClassLoader createClassLoader(List<File> paths, ClassLoader parent) throws IOException {
return createClassLoader( paths, parent, null );
}
/**
* Creates the classloader that can load all the specified jar files and delegate to the given parent.
*/
protected ClassLoader createClassLoader(List<File> paths, ClassLoader parent, Attributes atts) throws IOException {
if (atts != null) {
String usePluginFirstClassLoader = atts.getValue( "PluginFirstClassLoader" );
if (Boolean.valueOf( usePluginFirstClassLoader )) {
PluginFirstClassLoader classLoader = new PluginFirstClassLoader();
classLoader.setParentFirst( false );
classLoader.setParent( parent );
classLoader.addPathFiles( paths );
return classLoader;
}
}
AntClassLoader2 classLoader = new AntClassLoader2(parent);
classLoader.addPathFiles(paths);
return classLoader;
}
/**
* Get the list of all plugins that have ever been {@link DetachedPlugin detached} from Jenkins core.
* @return A {@link List} of {@link DetachedPlugin}s.
*/
@Restricted(NoExternalUse.class)
public static @Nonnull List<DetachedPlugin> getDetachedPlugins() {
return DETACHED_LIST;
}
/**
* Get the list of plugins that have been detached since a specific Jenkins release version.
* @param since The Jenkins version.
* @return A {@link List} of {@link DetachedPlugin}s.
*/
@Restricted(NoExternalUse.class)
public static @Nonnull List<DetachedPlugin> getDetachedPlugins(@Nonnull VersionNumber since) {
List<DetachedPlugin> detachedPlugins = new ArrayList<>();
for (DetachedPlugin detachedPlugin : DETACHED_LIST) {
if (!detachedPlugin.getSplitWhen().isOlderThan(since)) {
detachedPlugins.add(detachedPlugin);
}
}
return detachedPlugins;
}
/**
* Is the named plugin a plugin that was detached from Jenkins at some point in the past.
* @param pluginId The plugin ID.
* @return {@code true} if the plugin is a plugin that was detached from Jenkins at some
* point in the past, otherwise {@code false}.
*/
@Restricted(NoExternalUse.class)
public static boolean isDetachedPlugin(@Nonnull String pluginId) {
for (DetachedPlugin detachedPlugin : DETACHED_LIST) {
if (detachedPlugin.getShortName().equals(pluginId)) {
return true;
}
}
return false;
}
/**
* Information about plugins that were originally in the core.
* <p>
* A detached plugin is one that has any of the following characteristics:
* <ul>
* <li>
     * Was an existing plugin that was at some time previously bundled with the Jenkins war file.
     * </li>
     * <li>
     * Was previously code in jenkins core that was split into a separate plugin (but may not have
* ever been bundled in a jenkins war file - i.e. it gets split after this 2.0 update).
* </li>
* </ul>
*/
@Restricted(NoExternalUse.class)
public static final class DetachedPlugin {
private final String shortName;
/**
* Plugins built for this Jenkins version (and earlier) will automatically be assumed to have
         * this plugin in their dependencies.
*
* When core/pom.xml version is 1.123-SNAPSHOT when the code is removed, then this value should
* be "1.123.*" (because 1.124 will be the first version that doesn't include the removed code.)
*/
private final VersionNumber splitWhen;
private final String requiredVersion;
private DetachedPlugin(String shortName, String splitWhen, String requiredVersion) {
this.shortName = shortName;
this.splitWhen = new VersionNumber(splitWhen);
this.requiredVersion = requiredVersion;
}
/**
* Get the short name of the plugin.
* @return The short name of the plugin.
*/
public String getShortName() {
return shortName;
}
/**
* Get the Jenkins version from which the plugin was detached.
* @return The Jenkins version from which the plugin was detached.
*/
public VersionNumber getSplitWhen() {
return splitWhen;
}
/**
* Gets the minimum required version for the current version of Jenkins.
*
* @return the minimum required version for the current version of Jenkins.
         * @since 2.16
*/
public VersionNumber getRequiredVersion() {
return new VersionNumber(requiredVersion);
}
}
private static final List<DetachedPlugin> DETACHED_LIST = Collections.unmodifiableList(Arrays.asList(
new DetachedPlugin("maven-plugin", "1.296", "1.296"),
new DetachedPlugin("subversion", "1.310", "1.0"),
new DetachedPlugin("cvs", "1.340", "0.1"),
new DetachedPlugin("ant", "1.430.*", "1.0"),
new DetachedPlugin("javadoc", "1.430.*", "1.0"),
new DetachedPlugin("external-monitor-job", "1.467.*", "1.0"),
new DetachedPlugin("ldap", "1.467.*", "1.0"),
new DetachedPlugin("pam-auth", "1.467.*", "1.0"),
new DetachedPlugin("mailer", "1.493.*", "1.2"),
new DetachedPlugin("matrix-auth", "1.535.*", "1.0.2"),
new DetachedPlugin("windows-slaves", "1.547.*", "1.0"),
new DetachedPlugin("antisamy-markup-formatter", "1.553.*", "1.0"),
new DetachedPlugin("matrix-project", "1.561.*", "1.0"),
new DetachedPlugin("junit", "1.577.*", "1.0"),
new DetachedPlugin("bouncycastle-api", "2.16.*", "2.16.0")
));
/** Implicit dependencies that are known to be unnecessary and which must be cut out to prevent a dependency cycle among bundled plugins. */
private static final Set<String> BREAK_CYCLES = new HashSet<String>(Arrays.asList(
"script-security/matrix-auth",
"script-security/windows-slaves",
"script-security/antisamy-markup-formatter",
"script-security/matrix-project",
"credentials/matrix-auth",
"credentials/windows-slaves"
));
/**
* Computes the classloader that takes the class masking into account.
*
* <p>
* This mechanism allows plugins to have their own versions for libraries that core bundles.
*/
private ClassLoader getBaseClassLoader(Attributes atts, ClassLoader base) {
String masked = atts.getValue("Mask-Classes");
if(masked!=null)
base = new MaskingClassLoader(base, masked.trim().split("[ \t\r\n]+"));
return base;
}
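    // Illustrative manifest entry (the package name is hypothetical): a plugin that bundles
    // its own copy of a library also shipped by core can hide the core copy with
    //
    //   Mask-Classes: com.example.shadedlib.
    //
    // so that classes under this prefix resolve from the plugin's own jars instead of the
    // parent class loader.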
public void initializeComponents(PluginWrapper plugin) {
}
public <T> List<ExtensionComponent<T>> findComponents(Class<T> type, Hudson hudson) {
List<ExtensionFinder> finders;
if (type==ExtensionFinder.class) {
// Avoid infinite recursion of using ExtensionFinders to find ExtensionFinders
finders = Collections.<ExtensionFinder>singletonList(new ExtensionFinder.Sezpoz());
} else {
finders = hudson.getExtensionList(ExtensionFinder.class);
}
/**
         * See {@link ExtensionFinder#scout(Class, Hudson)} for the deadlock issue and what this does.
*/
if (LOGGER.isLoggable(Level.FINER))
LOGGER.log(Level.FINER,"Scout-loading ExtensionList: "+type, new Throwable());
for (ExtensionFinder finder : finders) {
finder.scout(type, hudson);
}
List<ExtensionComponent<T>> r = Lists.newArrayList();
for (ExtensionFinder finder : finders) {
try {
r.addAll(finder.find(type, hudson));
} catch (AbstractMethodError e) {
// backward compatibility
for (T t : finder.findExtensions(type, hudson))
r.add(new ExtensionComponent<T>(t));
}
}
List<ExtensionComponent<T>> filtered = Lists.newArrayList();
for (ExtensionComponent<T> e : r) {
if (ExtensionFilter.isAllowed(type,e))
filtered.add(e);
}
return filtered;
}
public void load(PluginWrapper wrapper) throws IOException {
// override the context classloader. This no longer makes sense,
        // but it is left for backward compatibility
ClassLoader old = Thread.currentThread().getContextClassLoader();
Thread.currentThread().setContextClassLoader(wrapper.classLoader);
try {
String className = wrapper.getPluginClass();
if(className==null) {
// use the default dummy instance
wrapper.setPlugin(new DummyImpl());
} else {
try {
Class<?> clazz = wrapper.classLoader.loadClass(className);
Object o = clazz.newInstance();
if(!(o instanceof Plugin)) {
throw new IOException(className+" doesn't extend from hudson.Plugin");
}
wrapper.setPlugin((Plugin) o);
} catch (LinkageError | ClassNotFoundException e) {
throw new IOException("Unable to load " + className + " from " + wrapper.getShortName(),e);
} catch (IllegalAccessException | InstantiationException e) {
throw new IOException("Unable to create instance of " + className + " from " + wrapper.getShortName(),e);
}
}
// initialize plugin
try {
Plugin plugin = wrapper.getPlugin();
plugin.setServletContext(pluginManager.context);
startPlugin(wrapper);
} catch(Throwable t) {
// gracefully handle any error in plugin.
throw new IOException("Failed to initialize",t);
}
} finally {
Thread.currentThread().setContextClassLoader(old);
}
}
public void startPlugin(PluginWrapper plugin) throws Exception {
plugin.getPlugin().start();
}
@Override
public void updateDependency(PluginWrapper depender, PluginWrapper dependee) {
DependencyClassLoader classLoader = findAncestorDependencyClassLoader(depender.classLoader);
if (classLoader != null) {
classLoader.updateTransientDependencies();
LOGGER.log(Level.INFO, "Updated dependency of {0}", depender.getShortName());
}
}
private DependencyClassLoader findAncestorDependencyClassLoader(ClassLoader classLoader)
{
for (; classLoader != null; classLoader = classLoader.getParent()) {
if (classLoader instanceof DependencyClassLoader) {
return (DependencyClassLoader)classLoader;
}
if (classLoader instanceof AntClassLoader) {
// AntClassLoaders hold parents not only as AntClassLoader#getParent()
// but also as AntClassLoader#getConfiguredParent()
DependencyClassLoader ret = findAncestorDependencyClassLoader(
((AntClassLoader)classLoader).getConfiguredParent()
);
if (ret != null) {
return ret;
}
}
}
return null;
}
private static File resolve(File base, String relative) {
File rel = new File(relative);
if(rel.isAbsolute())
return rel;
else
return new File(base.getParentFile(),relative);
}
private static void parseClassPath(Manifest manifest, File archive, List<File> paths, String attributeName, String separator) throws IOException {
String classPath = manifest.getMainAttributes().getValue(attributeName);
if(classPath==null) return; // attribute not found
for (String s : classPath.split(separator)) {
File file = resolve(archive, s);
if(file.getName().contains("*")) {
// handle wildcard
FileSet fs = new FileSet();
File dir = file.getParentFile();
fs.setDir(dir);
fs.setIncludes(file.getName());
for( String included : fs.getDirectoryScanner(new Project()).getIncludedFiles() ) {
paths.add(new File(dir,included));
}
} else {
if(!file.exists())
throw new IOException("No such file: "+file);
paths.add(file);
}
}
}
/**
* Explodes the plugin into a directory, if necessary.
*/
private static void explode(File archive, File destDir) throws IOException {
destDir.mkdirs();
// timestamp check
File explodeTime = new File(destDir,".timestamp2");
if(explodeTime.exists() && explodeTime.lastModified()==archive.lastModified())
return; // no need to expand
// delete the contents so that old files won't interfere with new files
Util.deleteRecursive(destDir);
try {
Project prj = new Project();
unzipExceptClasses(archive, destDir, prj);
createClassJarFromWebInfClasses(archive, destDir, prj);
} catch (BuildException x) {
throw new IOException("Failed to expand " + archive,x);
}
try {
new FilePath(explodeTime).touch(archive.lastModified());
} catch (InterruptedException e) {
throw new AssertionError(e); // impossible
}
}
/**
* Repackage classes directory into a jar file to make it remoting friendly.
* The remoting layer can cache jar files but not class files.
*/
private static void createClassJarFromWebInfClasses(File archive, File destDir, Project prj) throws IOException {
File classesJar = new File(destDir, "WEB-INF/lib/classes.jar");
ZipFileSet zfs = new ZipFileSet();
zfs.setProject(prj);
zfs.setSrc(archive);
zfs.setIncludes("WEB-INF/classes/");
MappedResourceCollection mapper = new MappedResourceCollection();
mapper.add(zfs);
GlobPatternMapper gm = new GlobPatternMapper();
gm.setFrom("WEB-INF/classes/*");
gm.setTo("*");
mapper.add(gm);
final long dirTime = archive.lastModified();
// this ZipOutputStream is reused and not created for each directory
try (ZipOutputStream wrappedZOut = new ZipOutputStream(new NullOutputStream()) {
@Override
public void putNextEntry(ZipEntry ze) throws IOException {
ze.setTime(dirTime+1999); // roundup
super.putNextEntry(ze);
}
}) {
Zip z = new Zip() {
/**
* Forces the fixed timestamp for directories to make sure
             * classes.jar always gets a consistent checksum.
*/
protected void zipDir(Resource dir, ZipOutputStream zOut, String vPath,
int mode, ZipExtraField[] extra)
throws IOException {
// use wrappedZOut instead of zOut
super.zipDir(dir,wrappedZOut,vPath,mode,extra);
}
};
z.setProject(prj);
z.setTaskType("zip");
classesJar.getParentFile().mkdirs();
z.setDestFile(classesJar);
z.add(mapper);
z.execute();
}
}
private static void unzipExceptClasses(File archive, File destDir, Project prj) {
Expand e = new Expand();
e.setProject(prj);
e.setTaskType("unzip");
e.setSrc(archive);
e.setDest(destDir);
PatternSet p = new PatternSet();
p.setExcludes("WEB-INF/classes/");
e.addPatternset(p);
e.execute();
}
/**
* Used to load classes from dependency plugins.
*/
final class DependencyClassLoader extends ClassLoader {
/**
* This classloader is created for this plugin. Useful during debugging.
*/
private final File _for;
private List<Dependency> dependencies;
/**
         * Topologically sorted list of transitive dependencies.
*/
private volatile List<PluginWrapper> transientDependencies;
public DependencyClassLoader(ClassLoader parent, File archive, List<Dependency> dependencies) {
super(parent);
this._for = archive;
this.dependencies = dependencies;
}
private void updateTransientDependencies() {
            // This will be recalculated the next time it is needed.
transientDependencies = null;
}
private List<PluginWrapper> getTransitiveDependencies() {
if (transientDependencies==null) {
CyclicGraphDetector<PluginWrapper> cgd = new CyclicGraphDetector<PluginWrapper>() {
@Override
protected List<PluginWrapper> getEdges(PluginWrapper pw) {
List<PluginWrapper> dep = new ArrayList<PluginWrapper>();
for (Dependency d : pw.getDependencies()) {
PluginWrapper p = pluginManager.getPlugin(d.shortName);
if (p!=null && p.isActive())
dep.add(p);
}
return dep;
}
};
try {
for (Dependency d : dependencies) {
PluginWrapper p = pluginManager.getPlugin(d.shortName);
if (p!=null && p.isActive())
cgd.run(Collections.singleton(p));
}
} catch (CycleDetectedException e) {
throw new AssertionError(e); // such error should have been reported earlier
}
transientDependencies = cgd.getSorted();
}
return transientDependencies;
}
// public List<PluginWrapper> getDependencyPluginWrappers() {
// List<PluginWrapper> r = new ArrayList<PluginWrapper>();
// for (Dependency d : dependencies) {
// PluginWrapper w = pluginManager.getPlugin(d.shortName);
// if (w!=null) r.add(w);
// }
// return r;
// }
@Override
protected Class<?> findClass(String name) throws ClassNotFoundException {
if (PluginManager.FAST_LOOKUP) {
for (PluginWrapper pw : getTransitiveDependencies()) {
try {
Class<?> c = ClassLoaderReflectionToolkit._findLoadedClass(pw.classLoader, name);
if (c!=null) return c;
return ClassLoaderReflectionToolkit._findClass(pw.classLoader, name);
} catch (ClassNotFoundException e) {
//not found. try next
}
}
} else {
for (Dependency dep : dependencies) {
PluginWrapper p = pluginManager.getPlugin(dep.shortName);
if(p!=null)
try {
return p.classLoader.loadClass(name);
} catch (ClassNotFoundException _) {
// try next
}
}
}
throw new ClassNotFoundException(name);
}
@Override
protected Enumeration<URL> findResources(String name) throws IOException {
HashSet<URL> result = new HashSet<URL>();
if (PluginManager.FAST_LOOKUP) {
for (PluginWrapper pw : getTransitiveDependencies()) {
Enumeration<URL> urls = ClassLoaderReflectionToolkit._findResources(pw.classLoader, name);
while (urls != null && urls.hasMoreElements())
result.add(urls.nextElement());
}
} else {
for (Dependency dep : dependencies) {
PluginWrapper p = pluginManager.getPlugin(dep.shortName);
if (p!=null) {
Enumeration<URL> urls = p.classLoader.getResources(name);
while (urls != null && urls.hasMoreElements())
result.add(urls.nextElement());
}
}
}
return Collections.enumeration(result);
}
@Override
protected URL findResource(String name) {
if (PluginManager.FAST_LOOKUP) {
for (PluginWrapper pw : getTransitiveDependencies()) {
URL url = ClassLoaderReflectionToolkit._findResource(pw.classLoader, name);
if (url!=null) return url;
}
} else {
for (Dependency dep : dependencies) {
PluginWrapper p = pluginManager.getPlugin(dep.shortName);
if(p!=null) {
URL url = p.classLoader.getResource(name);
if (url!=null)
return url;
}
}
}
return null;
}
}
/**
* {@link AntClassLoader} with a few methods exposed, {@link Closeable} support, and {@link Transformer} support.
*/
private final class AntClassLoader2 extends AntClassLoader implements Closeable {
private final Vector pathComponents;
private AntClassLoader2(ClassLoader parent) {
super(parent,true);
try {
Field $pathComponents = AntClassLoader.class.getDeclaredField("pathComponents");
$pathComponents.setAccessible(true);
pathComponents = (Vector)$pathComponents.get(this);
} catch (NoSuchFieldException | IllegalAccessException e) {
throw new Error(e);
}
}
public void addPathFiles(Collection<File> paths) throws IOException {
for (File f : paths)
addPathFile(f);
}
public void close() throws IOException {
cleanup();
}
/**
* As of 1.8.0, {@link AntClassLoader} doesn't implement {@link #findResource(String)}
* in any meaningful way, which breaks fast lookup. Implement it properly.
*/
@Override
protected URL findResource(String name) {
URL url = null;
// try and load from this loader if the parent either didn't find
// it or wasn't consulted.
Enumeration e = pathComponents.elements();
while (e.hasMoreElements() && url == null) {
File pathComponent = (File) e.nextElement();
url = getResourceURL(pathComponent, name);
if (url != null) {
log("Resource " + name + " loaded from ant loader", Project.MSG_DEBUG);
}
}
return url;
}
@Override
protected Class defineClassFromData(File container, byte[] classData, String classname) throws IOException {
if (!DISABLE_TRANSFORMER)
classData = pluginManager.getCompatibilityTransformer().transform(classname, classData, this);
return super.defineClassFromData(container, classData, classname);
}
}
public static boolean useAntClassLoader = SystemProperties.getBoolean(ClassicPluginStrategy.class.getName()+".useAntClassLoader");
private static final Logger LOGGER = Logger.getLogger(ClassicPluginStrategy.class.getName());
public static boolean DISABLE_TRANSFORMER = SystemProperties.getBoolean(ClassicPluginStrategy.class.getName()+".noBytecodeTransformer");
}
| {
"content_hash": "fb5c1ba810d396ac4c271f07ca3f056f",
"timestamp": "",
"source": "github",
"line_count": 857,
"max_line_length": 157,
"avg_line_length": 40.34422403733956,
"alnum_prop": 0.5912075198843094,
"repo_name": "ajshastri/jenkins",
"id": "17722cef44742cb0561288fad27f5be94cdbc0d5",
"size": "35794",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/src/main/java/hudson/ClassicPluginStrategy.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2091"
},
{
"name": "CSS",
"bytes": "289559"
},
{
"name": "GAP",
"bytes": "6289"
},
{
"name": "Groovy",
"bytes": "155051"
},
{
"name": "HTML",
"bytes": "813899"
},
{
"name": "Java",
"bytes": "8784395"
},
{
"name": "JavaScript",
"bytes": "347514"
},
{
"name": "Perl",
"bytes": "14402"
},
{
"name": "Ruby",
"bytes": "21112"
},
{
"name": "Shell",
"bytes": "8998"
}
],
"symlink_target": ""
} |
<?php
namespace DDiff\Database\Schema;
use DDiff\Configuration\Database\DatabaseConfigurationInterface;
use DDiff\Database\SchemaInterface;
/**
* Interface ProviderInterface
* @package DDiff\Database\Schema
*/
interface ProviderInterface
{
/**
* @param \PDO $pdo
* @param DatabaseConfigurationInterface $configuration
* @return SchemaInterface
*/
public function getSchema(\PDO $pdo, DatabaseConfigurationInterface $configuration) : SchemaInterface;
}
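/*
 * Illustrative sketch (not part of this package; the class name and query details are hypothetical):
 * a concrete provider would typically inspect the connected database and build a schema, e.g.
 *
 * class PdoSchemaProvider implements ProviderInterface
 * {
 *     public function getSchema(\PDO $pdo, DatabaseConfigurationInterface $configuration) : SchemaInterface
 *     {
 *         // e.g. query information_schema for the configured database name and
 *         // hydrate a SchemaInterface implementation from the discovered tables
 *     }
 * }
 */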
| {
"content_hash": "7396e5130d58719b830986af6cea3722",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 106,
"avg_line_length": 24.3,
"alnum_prop": 0.7530864197530864,
"repo_name": "oncesk/ddiff",
"id": "b55180b0f7bf44511e98d4b5d06190ac429434c5",
"size": "486",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Database/Schema/ProviderInterface.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "135859"
}
],
"symlink_target": ""
} |
use na::Bounded;
use na;
use num::Float;
use ncollide::geometry::Contact;
use volumetric::InertiaTensor;
use resolution::constraint::velocity_constraint::VelocityConstraint;
use object::RigidBody;
use math::{Scalar, Point, Vect, Orientation};
/// The correction coefficient used by the constraint solver.
pub enum CorrectionMode {
    /// Penetrations are solved by the penalty method.
Velocity(Scalar),
    /// Penetrations are solved by the penalty method together with a hard repositioning.
    VelocityAndPosition(Scalar, Scalar, Scalar),
    /// Penetrations are solved by the penalty method together with a hard repositioning.
    ///
    /// The amount of velocity correction is bounded by a threshold.
VelocityAndPositionThresold(Scalar, Scalar, Scalar)
}
impl CorrectionMode {
#[inline]
/// The velocity correction coefficient.
pub fn vel_corr_factor(&self) -> Scalar {
match *self {
CorrectionMode::Velocity(ref v) => v.clone(),
CorrectionMode::VelocityAndPosition(ref v, _, _) => v.clone(),
CorrectionMode::VelocityAndPositionThresold(ref v, _, _) => v.clone()
}
}
#[inline]
/// The position correction coefficient.
pub fn pos_corr_factor(&self) -> Scalar {
match *self {
CorrectionMode::VelocityAndPosition(_, ref p, _) => p.clone(),
CorrectionMode::VelocityAndPositionThresold(_, ref p, _) => p.clone(),
CorrectionMode::Velocity(_) => na::zero()
}
}
#[inline]
/// The minimum penetration depth required to switch on the hard repositioning based method.
pub fn min_depth_for_pos_corr(&self) -> Scalar {
match *self {
CorrectionMode::VelocityAndPosition(_, _, ref t) => t.clone(),
CorrectionMode::VelocityAndPositionThresold(_, _, ref t) => t.clone(),
CorrectionMode::Velocity(_) => Bounded::max_value()
}
}
#[inline]
/// The max penetration depth the velocity correction will attempt to correct.
pub fn max_depth_for_vel_corr(&self) -> Scalar {
match *self {
CorrectionMode::VelocityAndPosition(_, _, _) => Bounded::max_value(),
CorrectionMode::VelocityAndPositionThresold(_, _, ref t) => t.clone(),
CorrectionMode::Velocity(_) => Bounded::max_value()
}
}
}
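// Illustrative usage (not part of the original crate; the numeric values are arbitrary):
//
//     let corr = CorrectionMode::VelocityAndPosition(0.2, 0.1, 0.01);
//     corr.vel_corr_factor();        // 0.2  -- penalty (velocity) coefficient
//     corr.pos_corr_factor();        // 0.1  -- hard repositioning coefficient
//     corr.min_depth_for_pos_corr(); // 0.01 -- depth at which repositioning kicks in
//     corr.max_depth_for_vel_corr(); // unbounded for this variant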
pub struct CorrectionParameters {
pub corr_mode: CorrectionMode,
pub joint_corr: Scalar,
pub rest_eps: Scalar
}
pub fn reinit_to_first_order_equation(dt: Scalar,
coll: &Contact<Point>,
constraint: &mut VelocityConstraint,
correction: &CorrectionParameters) {
/*
* Fill b
*/
if coll.depth >= correction.corr_mode.min_depth_for_pos_corr() {
constraint.objective = correction.corr_mode.pos_corr_factor() * coll.depth.max(na::zero()) / dt;
}
else {
constraint.objective = na::zero();
}
/*
* Reset forces
*/
constraint.impulse = na::zero();
}
pub fn fill_second_order_equation(dt: Scalar,
coll: &Contact<Point>,
rb1: &RigidBody,
rb2: &RigidBody,
rconstraint: &mut VelocityConstraint,
idr: usize,
fconstraints: &mut [VelocityConstraint],
idf: usize,
cache: &[Scalar],
correction: &CorrectionParameters) {
let restitution = rb1.restitution() * rb2.restitution();
let center = na::center(&coll.world1, &coll.world2);
fill_velocity_constraint(dt.clone(),
coll.normal.clone(),
center.clone(),
restitution,
coll.depth.clone(),
cache[0].clone(), // coll.impulses[0].clone(),
na::zero(),
Bounded::max_value(),
rb1,
rb2,
rconstraint,
correction);
let friction = rb1.friction() * rb2.friction();
// To bound the friction we use the last frame normal impulse.
// That means we have to make a special case for the first time the contact appears.
    // In that case, we estimate the impulse by the desired normal correction.
let mut i = 0;
na::orthonormal_subspace_basis(&coll.normal, |friction_axis| {
let constraint = &mut fconstraints[idf + i];
fill_velocity_constraint(dt.clone(),
friction_axis,
center.clone(),
na::zero(),
na::zero(),
cache[i + 1].clone(), // coll.impulses[i].clone(),
na::zero(), // dont setup the limit now
na::zero(), // dont setup the limit now
rb1,
rb2,
constraint,
correction);
constraint.friction_coeff = friction.clone();
constraint.friction_limit_id = idr;
i = i + 1;
true
})
}
pub fn fill_constraint_geometry(normal: Vect,
rot_axis1: Orientation,
rot_axis2: Orientation,
rb1: &Option<&RigidBody>,
rb2: &Option<&RigidBody>,
constraint: &mut VelocityConstraint) {
constraint.normal = normal;
constraint.inv_projected_mass = na::zero();
match *rb1 {
Some(ref rb) => {
// rotation axis
constraint.weighted_normal1 = constraint.normal * rb.inv_mass();
constraint.rot_axis1 = rot_axis1;
constraint.weighted_rot_axis1 = rb.inv_inertia().apply(&constraint.rot_axis1);
constraint.inv_projected_mass = constraint.inv_projected_mass +
na::dot(&constraint.normal, &constraint.weighted_normal1) +
na::dot(&constraint.rot_axis1, &constraint.weighted_rot_axis1);
},
None => { }
}
match *rb2 {
Some(ref rb) => {
// rotation axis
constraint.weighted_normal2 = constraint.normal * rb.inv_mass();
constraint.rot_axis2 = rot_axis2;
constraint.weighted_rot_axis2 = rb.inv_inertia().apply(&constraint.rot_axis2);
constraint.inv_projected_mass = constraint.inv_projected_mass +
na::dot(&constraint.normal, &constraint.weighted_normal2) +
na::dot(&constraint.rot_axis2, &constraint.weighted_rot_axis2);
},
None => { }
}
let _1: Scalar = na::one();
constraint.inv_projected_mass = _1 / constraint.inv_projected_mass;
}
fn fill_velocity_constraint(dt: Scalar,
normal: Vect,
center: Point,
restitution: Scalar,
depth: Scalar,
initial_impulse: Scalar,
lobound: Scalar,
hibound: Scalar,
rb1: &RigidBody,
rb2: &RigidBody,
constraint: &mut VelocityConstraint,
correction: &CorrectionParameters) {
let rot_axis1 = na::cross(&(center - *rb1.center_of_mass()), &-normal);
let rot_axis2 = na::cross(&(center - *rb2.center_of_mass()), &normal);
let opt_rb1 = if rb1.can_move() { Some(rb1) } else { None };
let opt_rb2 = if rb2.can_move() { Some(rb2) } else { None };
fill_constraint_geometry(normal, rot_axis1, rot_axis2, &opt_rb1, &opt_rb2, constraint);
/*
* Fill indice
*/
constraint.id1 = rb1.index();
constraint.id2 = rb2.index();
/*
* correction amount
*/
constraint.objective = relative_velocity(
&opt_rb1,
&opt_rb2,
&constraint.normal,
&constraint.rot_axis1,
&constraint.rot_axis2,
&dt);
if constraint.objective < -correction.rest_eps {
constraint.objective = constraint.objective + restitution * constraint.objective
}
constraint.objective = -constraint.objective;
if depth < na::zero() {
constraint.objective = constraint.objective + depth / dt
}
else if depth < correction.corr_mode.max_depth_for_vel_corr() {
constraint.objective = constraint.objective + depth * correction.corr_mode.vel_corr_factor() / dt
}
// for warm-starting
constraint.impulse = if depth < na::zero() { na::zero() } else { initial_impulse };
/*
* constraint bounds
*/
constraint.lobound = lobound;
constraint.hibound = hibound;
}
pub fn relative_velocity(rb1: &Option<&RigidBody>,
rb2: &Option<&RigidBody>,
normal: &Vect,
rot_axis1: &Orientation,
rot_axis2: &Orientation,
dt: &Scalar)
-> Scalar {
let mut dvel: Scalar = na::zero();
match *rb1 {
Some(ref rb) => {
dvel = dvel - na::dot(&(rb.lin_vel() + rb.lin_acc() * *dt), normal)
+ na::dot(&(rb.ang_vel() + rb.ang_acc() * *dt), rot_axis1);
},
None => { }
}
match *rb2 {
Some(ref rb) => {
dvel = dvel + na::dot(&(rb.lin_vel() + rb.lin_acc() * *dt), normal)
+ na::dot(&(rb.ang_vel() + rb.ang_acc() * *dt), rot_axis2);
},
None => { }
}
dvel
}
| {
"content_hash": "b24d82a5b7c5ab991fc0374f8e6bfe18",
"timestamp": "",
"source": "github",
"line_count": 277,
"max_line_length": 105,
"avg_line_length": 37.667870036101085,
"alnum_prop": 0.4928119608970673,
"repo_name": "MichaelRiss/nphysics",
"id": "ef54db00db0b02c97c1895ec6ba5a4228fb82a3e",
"size": "10434",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/resolution/constraint/contact_equation.rs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "701"
},
{
"name": "Rust",
"bytes": "298728"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML>
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (11.0.16) on Thu Sep 15 09:38:29 CEST 2022 -->
<title>RoutingSearchParameters.ImprovementSearchLimitParametersOrBuilder (com.google.ortools:ortools-java 9.4.1874 API)</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="dc.created" content="2022-09-15">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
<link rel="stylesheet" type="text/css" href="../../../../jquery/jquery-ui.css" title="Style">
<script type="text/javascript" src="../../../../script.js"></script>
<script type="text/javascript" src="../../../../jquery/jszip/dist/jszip.min.js"></script>
<script type="text/javascript" src="../../../../jquery/jszip-utils/dist/jszip-utils.min.js"></script>
<!--[if IE]>
<script type="text/javascript" src="../../../../jquery/jszip-utils/dist/jszip-utils-ie.min.js"></script>
<![endif]-->
<script type="text/javascript" src="../../../../jquery/jquery-3.5.1.js"></script>
<script type="text/javascript" src="../../../../jquery/jquery-ui.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="RoutingSearchParameters.ImprovementSearchLimitParametersOrBuilder (com.google.ortools:ortools-java 9.4.1874 API)";
}
}
catch(err) {
}
//-->
var data = {"i0":6,"i1":6};
var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]};
var altColor = "altColor";
var rowColor = "rowColor";
var tableTab = "tableTab";
var activeTableTab = "activeTableTab";
var pathtoroot = "../../../../";
var useModuleDirectories = true;
loadScripts(document, 'script');</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<header role="banner">
<nav role="navigation">
<div class="fixedNav">
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a id="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a id="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../index.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/RoutingSearchParameters.ImprovementSearchLimitParametersOrBuilder.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses.html">All Classes</a></li>
</ul>
<ul class="navListSearch">
<li><label for="search">SEARCH:</label>
<input type="text" id="search" value="search" disabled="disabled">
<input type="reset" id="reset" value="reset" disabled="disabled">
</li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
</div>
<div>
<ul class="subNavList">
<li>Summary:&nbsp;</li>
<li>Nested&nbsp;|&nbsp;</li>
<li>Field&nbsp;|&nbsp;</li>
<li>Constr&nbsp;|&nbsp;</li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail:&nbsp;</li>
<li>Field&nbsp;|&nbsp;</li>
<li>Constr&nbsp;|&nbsp;</li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a id="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
</div>
<div class="navPadding"> </div>
<script type="text/javascript"><!--
$('.navPadding').css('padding-top', $('.fixedNav').css("height"));
//-->
</script>
</nav>
</header>
<!-- ======== START OF CLASS DATA ======== -->
<main role="main">
<div class="header">
<div class="subTitle"><span class="packageLabelInType">Package</span> <a href="package-summary.html">com.google.ortools.constraintsolver</a></div>
<h2 title="Interface RoutingSearchParameters.ImprovementSearchLimitParametersOrBuilder" class="title">Interface RoutingSearchParameters.ImprovementSearchLimitParametersOrBuilder</h2>
</div>
<div class="contentContainer">
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Superinterfaces:</dt>
<dd><code>com.google.protobuf.MessageLiteOrBuilder</code>, <code>com.google.protobuf.MessageOrBuilder</code></dd>
</dl>
<dl>
<dt>All Known Implementing Classes:</dt>
<dd><code><a href="RoutingSearchParameters.ImprovementSearchLimitParameters.html" title="class in com.google.ortools.constraintsolver">RoutingSearchParameters.ImprovementSearchLimitParameters</a></code>, <code><a href="RoutingSearchParameters.ImprovementSearchLimitParameters.Builder.html" title="class in com.google.ortools.constraintsolver">RoutingSearchParameters.ImprovementSearchLimitParameters.Builder</a></code></dd>
</dl>
<dl>
<dt>Enclosing class:</dt>
<dd><a href="RoutingSearchParameters.html" title="class in com.google.ortools.constraintsolver">RoutingSearchParameters</a></dd>
</dl>
<hr>
<pre>public static interface <span class="typeNameLabel">RoutingSearchParameters.ImprovementSearchLimitParametersOrBuilder</span>
extends com.google.protobuf.MessageOrBuilder</pre>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ========== METHOD SUMMARY =========== -->
<section role="region">
<ul class="blockList">
<li class="blockList"><a id="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="memberSummary">
<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd"> </span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd"> </span></span><span id="t3" class="tableTab"><span><a href="javascript:show(4);">Abstract Methods</a></span><span class="tabEnd"> </span></span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colSecond" scope="col">Method</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tr id="i0" class="altColor">
<td class="colFirst"><code>double</code></td>
<th class="colSecond" scope="row"><code><span class="memberNameLink"><a href="#getImprovementRateCoefficient()">getImprovementRateCoefficient</a></span>()</code></th>
<td class="colLast">
<div class="block">
Parameter that regulates exchange rate between objective improvement and
number of neighbors spent.</div>
</td>
</tr>
<tr id="i1" class="rowColor">
<td class="colFirst"><code>int</code></td>
<th class="colSecond" scope="row"><code><span class="memberNameLink"><a href="#getImprovementRateSolutionsDistance()">getImprovementRateSolutionsDistance</a></span>()</code></th>
<td class="colLast">
<div class="block">
Parameter that specifies the distance between improvements taken into
consideration for calculating the improvement rate.</div>
</td>
</tr>
</table>
<ul class="blockList">
<li class="blockList"><a id="methods.inherited.from.class.com.google.protobuf.MessageLiteOrBuilder">
<!-- -->
</a>
<h3>Methods inherited from interface com.google.protobuf.MessageLiteOrBuilder</h3>
<code>isInitialized</code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a id="methods.inherited.from.class.com.google.protobuf.MessageOrBuilder">
<!-- -->
</a>
<h3>Methods inherited from interface com.google.protobuf.MessageOrBuilder</h3>
<code>findInitializationErrors, getAllFields, getDefaultInstanceForType, getDescriptorForType, getField, getInitializationErrorString, getOneofFieldDescriptor, getRepeatedField, getRepeatedFieldCount, getUnknownFields, hasField, hasOneof</code></li>
</ul>
</li>
</ul>
</section>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ METHOD DETAIL ========== -->
<section role="region">
<ul class="blockList">
<li class="blockList"><a id="method.detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a id="getImprovementRateCoefficient()">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getImprovementRateCoefficient</h4>
<pre class="methodSignature">double getImprovementRateCoefficient()</pre>
<div class="block"><pre>
Parameter that regulates exchange rate between objective improvement and
number of neighbors spent. The smaller the value, the sooner the limit
stops the search. Must be positive.
</pre>
<code>double improvement_rate_coefficient = 38;</code></div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The improvementRateCoefficient.</dd>
</dl>
</li>
</ul>
<a id="getImprovementRateSolutionsDistance()">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>getImprovementRateSolutionsDistance</h4>
<pre class="methodSignature">int getImprovementRateSolutionsDistance()</pre>
<div class="block"><pre>
Parameter that specifies the distance between improvements taken into
consideration for calculating the improvement rate.
Example: For 5 objective improvements = (10, 8, 6, 4, 2), and the
solutions_distance parameter of 2, then the improvement_rate will be
computed for (10, 6), (8, 4), and (6, 2).
</pre>
<code>int32 improvement_rate_solutions_distance = 39;</code></div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The improvementRateSolutionsDistance.</dd>
</dl>
</li>
</ul>
</li>
</ul>
</section>
</li>
</ul>
</div>
</div>
</main>
<!-- ========= END OF CLASS DATA ========= -->
<footer role="contentinfo">
<p class="legalCopy"><small>Copyright © 2022. All rights reserved.</small></p>
</footer>
</body>
</html>
| {
"content_hash": "f0331e7aed2dfe30e91c40aee5d4516d",
"timestamp": "",
"source": "github",
"line_count": 324,
"max_line_length": 423,
"avg_line_length": 35.98456790123457,
"alnum_prop": 0.678102753237842,
"repo_name": "or-tools/docs",
"id": "05566be0ee7aa77688151d74f468477c0be6b5ad",
"size": "11659",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "docs/javadoc/com/google/ortools/constraintsolver/RoutingSearchParameters.ImprovementSearchLimitParametersOrBuilder.html",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
Imports System.Collections.Immutable
Imports Microsoft.CodeAnalysis.ExpressionEvaluator
Imports Microsoft.CodeAnalysis.VisualBasic.Symbols
Imports Microsoft.CodeAnalysis.VisualBasic.Symbols.Metadata.PE
Namespace Microsoft.CodeAnalysis.VisualBasic.ExpressionEvaluator
Friend NotInheritable Class EETypeNameDecoder
Inherits TypeNameDecoder(Of PEModuleSymbol, TypeSymbol)
Private ReadOnly _compilation As VisualBasicCompilation
Friend Sub New(compilation As VisualBasicCompilation, moduleSymbol As PEModuleSymbol)
MyBase.New(SymbolFactory.Instance, moduleSymbol)
_compilation = compilation
End Sub
Protected Overrides Function GetIndexOfReferencedAssembly(identity As AssemblyIdentity) As Integer
Dim assemblies = GetAssemblies()
' Find assembly matching identity.
Dim index = assemblies.IndexOf(Function(assembly, id) id.Equals(assembly.Identity), identity)
If index >= 0 Then
Return index
End If
If identity.IsWindowsComponent() Then
' Find placeholder Windows.winmd assembly (created
' in MetadataUtilities.MakeAssemblyReferences).
index = assemblies.IndexOf(Function(assembly, unused) assembly.Identity.IsWindowsRuntime(), DirectCast(Nothing, Object))
If index >= 0 Then
' Find module in Windows.winmd matching identity.
Dim modules = assemblies(index).Modules
Dim moduleIndex = modules.IndexOf(Function(m, id) id.Equals(GetComponentAssemblyIdentity(m)), identity)
If moduleIndex >= 0 Then
Return index
End If
End If
End If
Return -1
End Function
Protected Overrides Function IsContainingAssembly(identity As AssemblyIdentity) As Boolean
Return False
End Function
Protected Overrides Function LookupNestedTypeDefSymbol(container As TypeSymbol, ByRef emittedName As MetadataTypeName) As TypeSymbol
Return container.LookupMetadataType(emittedName)
End Function
Protected Overrides Function LookupTopLevelTypeDefSymbol(referencedAssemblyIndex As Integer, ByRef emittedName As MetadataTypeName) As TypeSymbol
Dim assembly = GetAssemblies()(referencedAssemblyIndex)
Return assembly.LookupTopLevelMetadataType(emittedName, digThroughForwardedTypes:=True)
End Function
Protected Overrides Function LookupTopLevelTypeDefSymbol(ByRef emittedName As MetadataTypeName, ByRef isNoPiaLocalType As Boolean) As TypeSymbol
Return moduleSymbol.LookupTopLevelMetadataType(emittedName, isNoPiaLocalType)
End Function
Private Function GetAssemblies() As ImmutableArray(Of AssemblySymbol)
Return _compilation.Assembly.Modules.Single().GetReferencedAssemblySymbols()
End Function
Private Shared Function GetComponentAssemblyIdentity([module] As ModuleSymbol) As AssemblyIdentity
Return DirectCast([module], PEModuleSymbol).Module.ReadAssemblyIdentityOrThrow()
End Function
End Class
End Namespace
| {
"content_hash": "8496c6c167b3bb46d6298cdf41267bd0",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 153,
"avg_line_length": 48.029411764705884,
"alnum_prop": 0.7033067973055726,
"repo_name": "yetangye/roslyn",
"id": "1c4787b7e149a47e2a041c488cd159b81c4288f9",
"size": "3268",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/ExpressionEvaluator/VisualBasic/Source/ExpressionCompiler/EETypeNameDecoder.vb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "7448"
},
{
"name": "C#",
"bytes": "68988450"
},
{
"name": "C++",
"bytes": "403096"
},
{
"name": "F#",
"bytes": "421"
},
{
"name": "PowerShell",
"bytes": "801"
},
{
"name": "Shell",
"bytes": "8828"
},
{
"name": "Smalltalk",
"bytes": "1269462"
},
{
"name": "Visual Basic",
"bytes": "56924947"
}
],
"symlink_target": ""
} |
#ifndef COMPONENTS_POLICY_CORE_COMMON_POLICY_LOADER_IOS_H_
#define COMPONENTS_POLICY_CORE_COMMON_POLICY_LOADER_IOS_H_
namespace policy {
// A policy loader that loads policy from the managed app configuration
// introduced in iOS 7.
class POLICY_EXPORT PolicyLoaderIOS : public AsyncPolicyLoader {
public:
explicit PolicyLoaderIOS(
scoped_refptr<base::SequencedTaskRunner> task_runner);
~PolicyLoaderIOS() override;
// AsyncPolicyLoader implementation.
void InitOnBackgroundThread() override;
scoped_ptr<PolicyBundle> Load() override;
base::Time LastModificationTime() override;
private:
void UserDefaultsChanged();
// Loads the Chrome policies in |dictionary| into the given |bundle|.
static void LoadNSDictionaryToPolicyBundle(NSDictionary* dictionary,
PolicyBundle* bundle);
// Used to manage the registration for NSNotificationCenter notifications.
base::scoped_nsobject<id> notification_observer_;
// Timestamp of the last notification.
// Used to coalesce repeated notifications into a single Load() call.
base::Time last_notification_time_;
// Used to Bind() a WeakPtr to |this| for the callback passed to the
// |notification_observer_|.
base::WeakPtrFactory<PolicyLoaderIOS> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(PolicyLoaderIOS);
};
} // namespace policy
#endif // COMPONENTS_POLICY_CORE_COMMON_POLICY_LOADER_IOS_H_
| {
"content_hash": "6fff11e37f9629e8fc0f132df03042a1",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 76,
"avg_line_length": 33.61538461538461,
"alnum_prop": 0.7345537757437071,
"repo_name": "js0701/chromium-crosswalk",
"id": "29e2bd311fb38fc9aa6770ca01240f89765d830f",
"size": "1830",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "components/policy/core/common/policy_loader_ios.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
When the initial action has been dispatched, you can update your application state in your reducers as you normally would.
A common pattern for offline-friendly apps is to *optimistically update UI state*. In practice, this means that as soon as a user performs an action, we update the UI to look as if the action had already succeeded. This makes our applications resilient to network latency, and improves the perceived performance of our app.
When we optimistically update state, we need to ensure that if the action does permanently fail, the user is appropriately notified and the application state is rolled back. To allow you this opportunity, Redux Offline will fire the action you specified in `meta.offline.rollback`. The error object returned by the effects reconciler will be set as the payload.
An example of an optimistic update:
```js
const action = userId => ({
type: 'FOLLOW_USER',
payload: { userId },
meta: {
offline: {
      effect: { /* ... */ },
rollback: { type: 'FOLLOW_USER_ROLLBACK', meta: { userId }}
}
}
});
// optimistically update the state, revert on rollback
// (omit below is a helper such as lodash's omit)
const followingUsersReducer = (state, action) => {
switch(action.type) {
case 'FOLLOW_USER':
return { ...state, [action.payload.userId]: true };
case 'FOLLOW_USER_ROLLBACK':
return omit(state, [action.meta.userId]);
default:
return state;
}
}
```
| {
"content_hash": "1cec215ee81e0c0b54be5cf10267cffe",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 361,
"avg_line_length": 44.83870967741935,
"alnum_prop": 0.7230215827338129,
"repo_name": "jevakallio/redux-offline",
"id": "0022e88bdb52f28ce7b731e0cc175f8b4bc30a99",
"size": "1422",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/basics/rollback.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "139233"
}
],
"symlink_target": ""
} |
package com.netflix.spinnaker.clouddriver.titus.deploy.actions;
import com.netflix.spinnaker.clouddriver.saga.flow.SagaFlow;
import com.netflix.spinnaker.clouddriver.saga.models.Saga;
import com.netflix.spinnaker.clouddriver.titus.JobType;
import com.netflix.spinnaker.clouddriver.titus.TitusException;
import com.netflix.spinnaker.clouddriver.titus.deploy.actions.PrepareTitusDeploy.PrepareTitusDeployCommand;
import javax.annotation.Nonnull;
import org.springframework.stereotype.Component;
@Component
public class TitusServiceJobPredicate implements SagaFlow.ConditionPredicate {
@Override
public boolean test(Saga saga) {
return saga.getEvents().stream()
.filter(e -> PrepareTitusDeployCommand.class.isAssignableFrom(e.getClass()))
.findFirst()
.map(
e ->
JobType.SERVICE.isEqual(
((PrepareTitusDeployCommand) e).getDescription().getJobType()))
.orElseThrow(
() ->
new TitusException(
"Could not determine job type: No TitusDeployDescription found"));
}
@Nonnull
@Override
public String getName() {
return "titusServiceJobPredicate";
}
}
| {
"content_hash": "c4178291081acb4061702fb92f833078",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 107,
"avg_line_length": 35.1764705882353,
"alnum_prop": 0.7165551839464883,
"repo_name": "spinnaker/clouddriver",
"id": "c8521b4e2dafbc6928976e51bca475662359889f",
"size": "1790",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "clouddriver-titus/src/main/java/com/netflix/spinnaker/clouddriver/titus/deploy/actions/TitusServiceJobPredicate.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "7641380"
},
{
"name": "Java",
"bytes": "7248003"
},
{
"name": "Kotlin",
"bytes": "282069"
},
{
"name": "Shell",
"bytes": "3066"
},
{
"name": "Slim",
"bytes": "2423"
}
],
"symlink_target": ""
} |
package net.finmath.montecarlo.interestrate.products;
import java.time.LocalDate;
import java.time.Month;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import net.finmath.exception.CalculationException;
import net.finmath.marketdata.model.AnalyticModel;
import net.finmath.marketdata.model.AnalyticModelFromCurvesAndVols;
import net.finmath.marketdata.model.curves.Curve;
import net.finmath.marketdata.model.curves.CurveInterpolation;
import net.finmath.marketdata.model.curves.DiscountCurve;
import net.finmath.marketdata.model.curves.ForwardCurve;
import net.finmath.marketdata.model.curves.ForwardCurveInterpolation;
import net.finmath.montecarlo.interestrate.CalibrationProduct;
import net.finmath.montecarlo.interestrate.LIBORMarketModel;
import net.finmath.montecarlo.interestrate.LIBORModelMonteCarloSimulationModel;
import net.finmath.montecarlo.interestrate.LIBORMonteCarloSimulationFromLIBORModel;
import net.finmath.montecarlo.interestrate.models.LIBORMarketModelFromCovarianceModel;
import net.finmath.montecarlo.interestrate.models.LIBORMarketModelFromCovarianceModel.Measure;
import net.finmath.montecarlo.interestrate.models.covariance.LIBORCorrelationModelExponentialDecay;
import net.finmath.montecarlo.interestrate.models.covariance.LIBORCovarianceModelFromVolatilityAndCorrelation;
import net.finmath.montecarlo.interestrate.models.covariance.LIBORVolatilityModelFromGivenMatrix;
import net.finmath.montecarlo.process.EulerSchemeFromProcessModel;
import net.finmath.time.TimeDiscretizationFromArray;
import net.finmath.time.businessdaycalendar.BusinessdayCalendar;
import net.finmath.time.businessdaycalendar.BusinessdayCalendarExcludingTARGETHolidays;
/**
* @author Christian Fries
*/
@RunWith(Parameterized.class)
public class SimpleCappedFlooredFloatingRateBondTest {
@Parameters(name="{0}")
public static Collection<Object[]> generateData()
{
return Arrays.asList(new Object[][] {
{ Measure.SPOT }, { Measure.TERMINAL }
});
}
private final int numberOfPaths = 10000;
private final Measure measure;
public SimpleCappedFlooredFloatingRateBondTest(final Measure measure) {
// Store measure
this.measure = measure;
}
@Test
public void test() throws CalculationException {
/*
* Create Monte-Carlo model
*/
final LIBORModelMonteCarloSimulationModel model = createLIBORMarketModel(numberOfPaths, measure);
/*
* Create Product
*/
final double[] fixingDates = (new TimeDiscretizationFromArray(0.0, 9, 0.5)).getAsDoubleArray();
final double[] paymentDates = (new TimeDiscretizationFromArray(0.5, 9, 0.5)).getAsDoubleArray();
final double maturity = 0.5 + 9 * 0.5;
final double[] floors = null;
final double[] caps = null;
final double[] spreads = null;
final AbstractLIBORMonteCarloProduct product = new SimpleCappedFlooredFloatingRateBond("", fixingDates, paymentDates, spreads, floors, caps, maturity);
final double value = product.getValue(model);
System.out.println("Value of floating rate bond (measure = " + measure + "): " + value);
if(measure == Measure.SPOT) {
Assert.assertEquals("Value of floating rate bond.", 1.0, value, 1E-10);
}
if(measure == Measure.TERMINAL) {
Assert.assertEquals("Value of floating rate bond.", 1.0, value, 2E-2);
}
}
public static LIBORModelMonteCarloSimulationModel createLIBORMarketModel(final int numberOfPaths, final Measure measure) throws CalculationException {
final LocalDate referenceDate = LocalDate.of(2014, Month.AUGUST, 12);
// Create the forward curve (initial value of the LIBOR market model)
final ForwardCurve forwardCurve = ForwardCurveInterpolation.createForwardCurveFromForwards(
"forwardCurve" /* name of the curve */,
referenceDate,
"6M",
new BusinessdayCalendarExcludingTARGETHolidays(),
BusinessdayCalendar.DateRollConvention.FOLLOWING,
CurveInterpolation.InterpolationMethod.LINEAR,
CurveInterpolation.ExtrapolationMethod.CONSTANT,
CurveInterpolation.InterpolationEntity.VALUE,
ForwardCurveInterpolation.InterpolationEntityForward.FORWARD,
null,
null,
new double[] {0.5 , 1.0 , 2.0 , 5.0 , 40.0} /* fixings of the forward */,
new double[] {0.05, 0.05, 0.05, 0.05, 0.05} /* forwards */
);
// No discount curve - single curve model
final DiscountCurve discountCurve = null;
// AnalyticModel model = new AnalyticModelFromCuvesAndVols(new CurveInterface[] { forwardCurve , discountCurve });
final AnalyticModel model = new AnalyticModelFromCurvesAndVols(new Curve[] { forwardCurve });
/*
* Create the libor tenor structure and the initial values
*/
final double liborPeriodLength = 0.5;
final double liborRateTimeHorzion = 40.0;
final TimeDiscretizationFromArray liborPeriodDiscretization = new TimeDiscretizationFromArray(0.0, (int) (liborRateTimeHorzion / liborPeriodLength), liborPeriodLength);
/*
* Create a simulation time discretization
*/
final double lastTime = 40.0;
final double dt = 0.5;
final TimeDiscretizationFromArray timeDiscretizationFromArray = new TimeDiscretizationFromArray(0.0, (int) (lastTime / dt), dt);
/*
* Create a volatility structure v[i][j] = sigma_j(t_i)
*/
final double[][] volatility = new double[timeDiscretizationFromArray.getNumberOfTimeSteps()][liborPeriodDiscretization.getNumberOfTimeSteps()];
for (int timeIndex = 0; timeIndex < volatility.length; timeIndex++) {
for (int liborIndex = 0; liborIndex < volatility[timeIndex].length; liborIndex++) {
// Create a very simple volatility model here
final double time = timeDiscretizationFromArray.getTime(timeIndex);
final double maturity = liborPeriodDiscretization.getTime(liborIndex);
final double timeToMaturity = maturity - time;
double instVolatility;
if(timeToMaturity <= 0) {
instVolatility = 0; // This forward rate is already fixed, no volatility
} else {
instVolatility = 0.3 + 0.2 * Math.exp(-0.25 * timeToMaturity);
}
// Store
volatility[timeIndex][liborIndex] = instVolatility;
}
}
final LIBORVolatilityModelFromGivenMatrix volatilityModel = new LIBORVolatilityModelFromGivenMatrix(timeDiscretizationFromArray, liborPeriodDiscretization, volatility);
/*
* Create a correlation model rho_{i,j} = exp(-a * abs(T_i-T_j))
*/
final int numberOfFactors = 5;
final double correlationDecayParam = 0.2;
final LIBORCorrelationModelExponentialDecay correlationModel = new LIBORCorrelationModelExponentialDecay(
timeDiscretizationFromArray, liborPeriodDiscretization, numberOfFactors,
correlationDecayParam);
/*
* Combine volatility model and correlation model to a covariance model
*/
final LIBORCovarianceModelFromVolatilityAndCorrelation covarianceModel =
new LIBORCovarianceModelFromVolatilityAndCorrelation(timeDiscretizationFromArray,
liborPeriodDiscretization, volatilityModel, correlationModel);
// BlendedLocalVolatlityModel (future extension)
// AbstractLIBORCovarianceModel covarianceModel2 = new BlendedLocalVolatlityModel(covarianceModel, 0.00, false);
// Set model properties
final Map<String, String> properties = new HashMap<>();
// Choose the simulation measure
properties.put("measure", measure.name());
// Choose log normal model
properties.put("stateSpace", LIBORMarketModelFromCovarianceModel.StateSpace.LOGNORMAL.name());
// Empty array of calibration items - hence, model will use given covariance
final CalibrationProduct[] calibrationItems = new CalibrationProduct[0];
/*
* Create corresponding LIBOR Market Model
*/
final LIBORMarketModel liborMarketModel = new LIBORMarketModelFromCovarianceModel(
liborPeriodDiscretization, model, forwardCurve, discountCurve, covarianceModel, calibrationItems, properties);
final EulerSchemeFromProcessModel process = new EulerSchemeFromProcessModel(liborMarketModel,
new net.finmath.montecarlo.BrownianMotionLazyInit(timeDiscretizationFromArray,
numberOfFactors, numberOfPaths, 3141 /* seed */), EulerSchemeFromProcessModel.Scheme.PREDICTOR_CORRECTOR);
return new LIBORMonteCarloSimulationFromLIBORModel(liborMarketModel, process);
}
}
| {
"content_hash": "cbd180743a9c8ab72d450befdbfaf7a9",
"timestamp": "",
"source": "github",
"line_count": 206,
"max_line_length": 170,
"avg_line_length": 40.64563106796116,
"alnum_prop": 0.7808431864325809,
"repo_name": "finmath/finmath-lib",
"id": "ac75faa608c3dd203a1ba85a250314bb0b788ef3",
"size": "8488",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/java8/net/finmath/montecarlo/interestrate/products/SimpleCappedFlooredFloatingRateBondTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "119"
},
{
"name": "Java",
"bytes": "11108450"
},
{
"name": "Shell",
"bytes": "2527"
}
],
"symlink_target": ""
} |
'use strict';
const chai = require('chai');
const expect = chai.expect;
const supertest = require('supertest');
const express = require('express');
const request = require('request-promise');
const apiMonkey = require('../index');
const app = express();
const port = process.env.PORT || process.env.port || 3000;
app.use(apiMonkey());
app.get('/test', (req, res) => {
request.get({
uri: `http://localhost:${port}/nested`,
headers: req.monkeyHeaders
})
.then(data => {
res.status(200).end();
});
});
app.put('/test/:id', (req, res) => {
res.status(200).end();
});
app.get('/nested', (req, res) => {
res.status(200).end();
});
app.listen(port);
console.log(`API-Monkey Test API listening at ${port}`);
/**
* API Monkey Test Specs
*/
describe('A normal Request', () => {
it('should be successful', done => {
let startTime = new Date().getTime();
supertest(app)
.get('/test')
.expect(200)
.end((err, res) => {
if (!err && res) {
let endTime = new Date().getTime();
let executionTime = endTime - startTime;
expect(executionTime).to.be.lessThan(200);
}
done(err);
});
});
});
describe('A monkey Request', () => {
it('should be successful with none parameters', done => {
let startTime = new Date().getTime();
supertest(app)
.get('/test')
.set('Monkey_GET_test', 'none/none')
.expect(200)
.end((err, res) => {
if (!err && res) {
let endTime = new Date().getTime();
let executionTime = endTime - startTime;
expect(executionTime).to.be.lessThan(200);
}
done(err);
});
});
it('should be successfully delayed', done => {
let startTime = new Date().getTime();
supertest(app)
.get('/test')
.set('Monkey_GET_test', '1000/none')
.expect(200)
.end((err, res) => {
if (!err && res) {
let endTime = new Date().getTime();
let executionTime = endTime - startTime;
expect(executionTime).to.be.greaterThan(1000);
}
done(err);
});
});
it('should throw an error', done => {
let startTime = new Date().getTime();
supertest(app)
.get('/test')
.set('Monkey_GET_test', 'none/true')
.expect(500)
.end((err, res) => {
if (!err && res) {
let endTime = new Date().getTime();
let executionTime = endTime - startTime;
expect(executionTime).to.be.lessThan(200);
}
done(err);
});
});
it('should throw a custom error', done => {
let startTime = new Date().getTime();
supertest(app)
.get('/test')
.set('Monkey_GET_test', 'none/404')
.expect(404)
.end((err, res) => {
if (!err && res) {
let endTime = new Date().getTime();
let executionTime = endTime - startTime;
expect(executionTime).to.be.lessThan(200);
}
done(err);
});
});
it('should throw a delayed error', done => {
let startTime = new Date().getTime();
supertest(app)
.get('/test')
.set('Monkey_GET_test', '1000/true')
.expect(500)
.end((err, res) => {
if (!err && res) {
let endTime = new Date().getTime();
let executionTime = endTime - startTime;
expect(executionTime).to.be.greaterThan(1000);
}
done(err);
});
});
it('should be delayed with forwarded monkey header', done => {
let startTime = new Date().getTime();
supertest(app)
.get('/test')
.set('Monkey_GET_nested', '1000/none')
.expect(200)
.end((err, res) => {
if (!err && res) {
let endTime = new Date().getTime();
let executionTime = endTime - startTime;
expect(executionTime).to.be.greaterThan(1000);
}
done(err);
});
});
it('should throw an error on a wildcard monkey route', done => {
let startTime = new Date().getTime();
supertest(app)
.put('/test/123')
.set('Monkey_PUT_test_*', 'none/true')
.expect(500)
.end((err, res) => {
if (!err && res) {
let endTime = new Date().getTime();
let executionTime = endTime - startTime;
expect(executionTime).to.be.lessThan(200);
}
done(err);
});
});
});
| {
"content_hash": "fd174b7f7839add0ac0b280a8cb527d5",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 66,
"avg_line_length": 26.21556886227545,
"alnum_prop": 0.5294655093650068,
"repo_name": "mfressdorf/api-monkey",
"id": "3f8a8dfa08b4d4933889ddc9f480c290635d8279",
"size": "4378",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/monkeySpec.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "9981"
}
],
"symlink_target": ""
} |
<resources>
<integer name="max_question_length">70</integer>
</resources>
| {
"content_hash": "56910e3c98b2720750c8e271aa99fe3f",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 52,
"avg_line_length": 26,
"alnum_prop": 0.7051282051282052,
"repo_name": "rafaeladson/kotoba-droid",
"id": "3c30166a04818e839499fa45414db45cd25b4cd6",
"size": "78",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "kotoba/res/values-xlarge/constants.xml",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "92986"
}
],
"symlink_target": ""
} |
<?php
namespace MainBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
* Vente
*
* @ORM\Table()
* @ORM\Entity(repositoryClass="MainBundle\Entity\VenteRepository")
*/
class Vente
{
/**
* @var integer
*
* @ORM\Column(name="id", type="integer")
* @ORM\Id
* @ORM\GeneratedValue(strategy="AUTO")
*/
private $id;
/**
* @ORM\ManyToOne(targetEntity="Produit")
* @ORM\JoinColumn(name="idProduit", referencedColumnName="id")
**/
private $produit;
/**
* @ORM\ManyToOne(targetEntity="Client")
* @ORM\JoinColumn(name="idClient", referencedColumnName="id")
**/
private $client;
/**
* @ORM\ManyToOne(targetEntity="Evenement")
* @ORM\JoinColumn(name="idEvenement", referencedColumnName="id")
**/
private $evenement;
/**
* Get id
*
* @return integer
*/
public function getId()
{
return $this->id;
}
/**
* Set produit
*
* @param \MainBundle\Entity\Produit $produit
* @return Vente
*/
public function setProduit(\MainBundle\Entity\Produit $produit = null)
{
$this->produit = $produit;
return $this;
}
/**
* Get produit
*
* @return \MainBundle\Entity\Produit
*/
public function getProduit()
{
return $this->produit;
}
/**
* Set client
*
* @param \MainBundle\Entity\Client $client
* @return Vente
*/
public function setClient(\MainBundle\Entity\Client $client = null)
{
$this->client = $client;
return $this;
}
/**
* Get client
*
* @return \MainBundle\Entity\Client
*/
public function getClient()
{
return $this->client;
}
/**
* Set evenement
*
* @param \MainBundle\Entity\Evenement $evenement
* @return Vente
*/
public function setEvenement(\MainBundle\Entity\Evenement $evenement = null)
{
$this->evenement = $evenement;
return $this;
}
/**
* Get evenement
*
* @return \MainBundle\Entity\Evenement
*/
public function getEvenement()
{
return $this->evenement;
}
}
| {
"content_hash": "8bee7ec01e2b51aa79598d2054d19e96",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 80,
"avg_line_length": 18.388429752066116,
"alnum_prop": 0.5483146067415731,
"repo_name": "dpp972/attractif",
"id": "a42cf74c8e6d82bd4b2af12968e82c6fba861dac",
"size": "2225",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/MainBundle/Entity/Vente.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "120334"
},
{
"name": "JavaScript",
"bytes": "3067"
},
{
"name": "PHP",
"bytes": "157700"
},
{
"name": "Ruby",
"bytes": "851"
},
{
"name": "Shell",
"bytes": "728"
}
],
"symlink_target": ""
} |
FOUNDATION_EXPORT double ARISocketsVersionNumber;
//! Project version string for ARISockets.
FOUNDATION_EXPORT const unsigned char ARISocketsVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <ARISockets/PublicHeader.h>
// Frameworks have no bridging headers anymore. The only way to expose functions
// is to put them in the public header.
// I think the originals are not mapped because they are using varargs
FOUNDATION_EXPORT int ari_fcntlVi (int fildes, int cmd, int val);
FOUNDATION_EXPORT int ari_ioctlVip(int fildes, unsigned long request, int *val);
| {
"content_hash": "7d267983458c80c2970a30d1d03a2c59",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 135,
"avg_line_length": 45.57142857142857,
"alnum_prop": 0.7931034482758621,
"repo_name": "luosheng/SwiftSockets",
"id": "a6e3d9bcc25a46c9d4d9e766bb69e3849c20bfd9",
"size": "799",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "ARISockets/ARISockets.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "312"
},
{
"name": "Objective-C",
"bytes": "799"
},
{
"name": "Swift",
"bytes": "53721"
}
],
"symlink_target": ""
} |
How To Release
==============
Due to Maven Central's very particular requirements, the release process is a bit
elaborate and requires a good deal of local configuration. This guide should walk
you through it. It won't do anyone outside of KeepSafe any good, but the workflow
is representative of just about any project deploying via Sonatype.
We currently deploy to Maven Central (via Sonatype's OSS Nexus instance).
### Prerequisites
1. A *published* GPG code-signing key
1. A Sonatype Nexus OSS account with permission to publish in com.getkeepsafe
1. Permission to push directly to https://github.com/KeepSafe/ReLinker
### Setup
1. Add your GPG key to your GitHub profile - this is required
   for GitHub to know that your commits and tags are "verified".
1. Configure your code-signing key in ~/.gradle/gradle.properties:
```gradle
signing.keyId=<key ID of your GPG signing key>
signing.password=<your key's passphrase>
signing.secretKeyRingFile=/path/to/your/secring.gpg
```
1. Configure your Sonatype credentials in ~/.gradle/gradle.properties:
```gradle
mavenCentralUsername=<nexus username>
mavenCentralPassword=<nexus password>
SONATYPE_STAGING_PROFILE=com.getkeepsafe
```
1. Configure git with your codesigning key; make sure it's the same as the one
you use to sign binaries (i.e. it's the same one you added to gradle.properties):
```bash
# Do this for the ReLinker repo only
git config user.email "[email protected]"
git config user.signingKey "your-key-id"
```
### Pushing a build
1. Edit gradle.properties, update the VERSION property for the new version release
1. Edit changelog, add relevant changes, note the date and new version (follow the existing pattern)
1. Add new `## [Unreleased]` header for next release
1. Verify that everything works:
```bash
./gradlew clean check
```
1. Make a *signed* commit:
```bash
git commit -S -m "Release version X.Y.Z"
```
1. Make a *signed* tag:
```bash
git tag -s -a X.Y.Z
```
1. Upload binaries to Staging:
```bash
./gradlew publish
```
1. Publish to Release:
```bash
./gradlew closeAndReleaseRepository
```
1. Wait until that's done. It takes a while to publish and become available in [Maven Central](https://repo.maven.apache.org/maven2/com/getkeepsafe/). Monitor until the latest published version is visible.
1. Hooray, we're in Maven Central now!
1. Push all of our work to GitHub to make it official. Check previous [releases](https://github.com/KeepSafe/ReLinker/releases) and edit the new tag's release notes:
```bash
git push --tags origin master
```
| {
"content_hash": "4f09ec77081343b3c88c83348c875f4e",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 200,
"avg_line_length": 36.88732394366197,
"alnum_prop": 0.7258495609011073,
"repo_name": "KeepSafe/ReLinker",
"id": "e6d9f336db2a265518523aa55fa168434382acaf",
"size": "2619",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "RELEASING.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "63233"
}
],
"symlink_target": ""
} |
package jodd.mutable;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class MutableTest {
@Test
public void testMutableInteger() {
MutableInteger m = new MutableInteger();
assertTrue(m instanceof Number);
m.setValue(27);
assertEquals(27, m.intValue());
assertEquals(27, m.longValue());
assertEquals(27, m.shortValue());
assertEquals(27, m.byteValue());
assertEquals(27, m.floatValue(), 0.1);
assertEquals(27, m.doubleValue(), 0.1);
Number m2 = m.clone();
assertEquals(m2, m);
assertEquals(0, m.compareTo((MutableInteger) m2));
}
@Test
public void testMutableLong() {
MutableLong m = new MutableLong();
assertTrue(m instanceof Number);
m.setValue(27);
assertEquals(27, m.intValue());
assertEquals(27, m.longValue());
assertEquals(27, m.shortValue());
assertEquals(27, m.byteValue());
assertEquals(27, m.floatValue(), 0.1);
assertEquals(27, m.doubleValue(), 0.1);
Number m2 = m.clone();
assertEquals(m2, m);
assertEquals(0, m.compareTo((MutableLong) m2));
}
@Test
public void testMutableShort() {
MutableShort m = new MutableShort();
assertTrue(m instanceof Number);
m.setValue(27);
assertEquals(27, m.intValue());
assertEquals(27, m.longValue());
assertEquals(27, m.shortValue());
assertEquals(27, m.byteValue());
assertEquals(27, m.floatValue(), 0.1);
assertEquals(27, m.doubleValue(), 0.1);
Number m2 = m.clone();
assertEquals(m2, m);
assertEquals(0, m.compareTo((MutableShort) m2));
}
@Test
public void testMutableByte() {
MutableByte m = new MutableByte();
assertTrue(m instanceof Number);
m.setValue(27);
assertEquals(27, m.intValue());
assertEquals(27, m.longValue());
assertEquals(27, m.shortValue());
assertEquals(27, m.byteValue());
assertEquals(27, m.floatValue(), 0.1);
assertEquals(27, m.doubleValue(), 0.1);
Number m2 = m.clone();
assertEquals(m2, m);
assertEquals(0, m.compareTo((MutableByte) m2));
}
@Test
public void testMutableFloat() {
MutableFloat m = new MutableFloat();
assertTrue(m instanceof Number);
m.setValue(27);
assertEquals(27, m.intValue());
assertEquals(27, m.longValue());
assertEquals(27, m.shortValue());
assertEquals(27, m.byteValue());
assertEquals(27, m.floatValue(), 0.1);
assertEquals(27, m.doubleValue(), 0.1);
Number m2 = m.clone();
assertEquals(m2, m);
assertEquals(0, m.compareTo((MutableFloat) m2));
}
@Test
public void testMutableDouble() {
MutableDouble m = new MutableDouble();
assertTrue(m instanceof Number);
m.setValue(27);
assertEquals(27, m.intValue());
assertEquals(27, m.longValue());
assertEquals(27, m.shortValue());
assertEquals(27, m.byteValue());
assertEquals(27, m.floatValue(), 0.1);
assertEquals(27, m.doubleValue(), 0.1);
Number m2 = m.clone();
assertEquals(m2, m);
assertEquals(0, m.compareTo((MutableDouble) m2));
}
@Test
public void testMutableBoolean() {
MutableBoolean m = new MutableBoolean();
m.setValue(true);
assertEquals(true, m.getValue());
Object m2 = m.clone();
assertEquals(m2, m);
assertEquals(0, m.compareTo((MutableBoolean) m2));
}
} | {
"content_hash": "113f0aa5830aa2b7d31ee8b551bee4c0",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 52,
"avg_line_length": 23.291970802919707,
"alnum_prop": 0.6844249451582576,
"repo_name": "mtakaki/jodd",
"id": "fb88486915aa0e4617b16d16bef554e5d4cf7555",
"size": "4574",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "jodd-core/src/test/java/jodd/mutable/MutableTest.java",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Groovy",
"bytes": "3636"
},
{
"name": "HTML",
"bytes": "4130647"
},
{
"name": "Java",
"bytes": "7667501"
},
{
"name": "Lex",
"bytes": "3794"
},
{
"name": "Python",
"bytes": "29467"
},
{
"name": "Shell",
"bytes": "325"
}
],
"symlink_target": ""
} |
package org.jtuples;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
/**
* This class provides skeletal implementations of common methods for tuples.
*
* @author Andre Santos
* @author Benjamim Sonntag
*/
public abstract class AbstractTuple implements Tuple {
/**
* {@inheritDoc}
*/
@Override
public List<?> asList() {
return Collections.unmodifiableList(Arrays.asList(this.toArray()));
}
/**
* {@inheritDoc}
*/
@Override
public final boolean equals(Object obj) {
if (this == obj) {
return true;
}
else if (!(obj instanceof Tuple)) {
return false;
}
else {
return equalsTuple((Tuple) obj);
}
}
private boolean equalsTuple(Tuple other) {
if (this.arity() != other.arity()) {
return false;
}
else {
return Arrays.equals(this.toArray(), other.toArray());
}
}
/**
* {@inheritDoc}
*/
@Override
public final int hashCode() {
return this.asList().hashCode();
}
/**
* {@inheritDoc}
*/
@Override
public final String toString() {
return this.asList().stream()
.map(String::valueOf)
.collect(Collectors.joining(", ", "(", ")"));
}
}
| {
"content_hash": "c01363ccb1aef4a78b29826ba029127b",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 77,
"avg_line_length": 21.333333333333332,
"alnum_prop": 0.5426136363636364,
"repo_name": "git-afsantos/jTuples",
"id": "ac754b1255b5a6c5941cffa1a6a09dbcb2689470",
"size": "2524",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jtuples/src/main/java/org/jtuples/AbstractTuple.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "4340111"
}
],
"symlink_target": ""
} |
package org.springframework.context.support;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
/**
* Standalone XML application context, taking the context definition files
* from the class path, interpreting plain paths as class path resource names
* that include the package path (e.g. "mypackage/myresource.txt"). Useful for
* test harnesses as well as for application contexts embedded within JARs.
*
 * <p>The config location defaults can be overridden via {@link #getConfigLocations}.
* Config locations can either denote concrete files like "/myfiles/context.xml"
* or Ant-style patterns like "/myfiles/*-context.xml" (see the
* {@link org.springframework.util.AntPathMatcher} javadoc for pattern details).
*
* <p>Note: In case of multiple config locations, later bean definitions will
* override ones defined in earlier loaded files. This can be leveraged to
* deliberately override certain bean definitions via an extra XML file.
*
* <p><b>This is a simple, one-stop shop convenience ApplicationContext.
* Consider using the {@link GenericApplicationContext} class in combination
* with an {@link org.springframework.beans.factory.xml.XmlBeanDefinitionReader}
* for more flexible context setup.</b>
*
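 * <p>Illustrative usage (a minimal sketch; the configuration file names and the bean
 * name below are placeholder assumptions, not part of this class):
 * <pre>
 * ApplicationContext ctx =
 *     new ClassPathXmlApplicationContext(new String[] {"services.xml", "daos.xml"});
 * MyService myService = (MyService) ctx.getBean("myService");
 * </pre>
 *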
* @author Rod Johnson
* @author Juergen Hoeller
* @see #getResource
* @see #getResourceByPath
* @see GenericApplicationContext
*/
public class ClassPathXmlApplicationContext extends AbstractXmlApplicationContext {
private Resource[] configResources;
private String[] configLocations;
/**
* Create a new ClassPathXmlApplicationContext, loading the definitions
* from the given XML file and automatically refreshing the context.
* @param configLocation resource location
* @throws BeansException if context creation failed
*/
public ClassPathXmlApplicationContext(String configLocation) throws BeansException {
this(new String[] {configLocation}, true, null);
}
/**
* Create a new ClassPathXmlApplicationContext, loading the definitions
* from the given XML files and automatically refreshing the context.
* @param configLocations array of resource locations
* @throws BeansException if context creation failed
*/
public ClassPathXmlApplicationContext(String[] configLocations) throws BeansException {
this(configLocations, true, null);
}
/**
* Create a new ClassPathXmlApplicationContext with the given parent,
* loading the definitions from the given XML files and automatically
* refreshing the context.
* @param configLocations array of resource locations
* @param parent the parent context
* @throws BeansException if context creation failed
*/
public ClassPathXmlApplicationContext(String[] configLocations, ApplicationContext parent) throws BeansException {
this(configLocations, true, parent);
}
/**
* Create a new ClassPathXmlApplicationContext, loading the definitions
* from the given XML files.
* @param configLocations array of resource locations
* @param refresh whether to automatically refresh the context,
* loading all bean definitions and creating all singletons.
* Alternatively, call refresh manually after further configuring the context.
* @throws BeansException if context creation failed
* @see #refresh()
*/
public ClassPathXmlApplicationContext(String[] configLocations, boolean refresh) throws BeansException {
this(configLocations, refresh, null);
}
/**
* Create a new ClassPathXmlApplicationContext with the given parent,
* loading the definitions from the given XML files.
* @param configLocations array of resource locations
* @param refresh whether to automatically refresh the context,
* loading all bean definitions and creating all singletons.
* Alternatively, call refresh manually after further configuring the context.
* @param parent the parent context
* @throws BeansException if context creation failed
* @see #refresh()
*/
public ClassPathXmlApplicationContext(String[] configLocations, boolean refresh, ApplicationContext parent)
throws BeansException {
super(parent);
this.configLocations = StringUtils.trimArrayElements(configLocations);
if (refresh) {
refresh();
}
}
/**
* Create a new ClassPathXmlApplicationContext, loading the definitions
* from the given XML file and automatically refreshing the context.
* <p>This is a convenience method to load class path resources relative to a
* given Class. For full flexibility, consider using a GenericApplicationContext
* with an XmlBeanDefinitionReader and a ClassPathResource argument.
* @param path relative (or absolute) path within the class path
* @param clazz the class to load resources with (basis for the given paths)
* @throws BeansException if context creation failed
* @see org.springframework.core.io.ClassPathResource#ClassPathResource(String, Class)
* @see org.springframework.context.support.GenericApplicationContext
* @see org.springframework.beans.factory.xml.XmlBeanDefinitionReader
*/
public ClassPathXmlApplicationContext(String path, Class clazz) throws BeansException {
this(new String[] {path}, clazz);
}
/**
* Create a new ClassPathXmlApplicationContext, loading the definitions
* from the given XML files and automatically refreshing the context.
* @param paths array of relative (or absolute) paths within the class path
* @param clazz the class to load resources with (basis for the given paths)
* @throws BeansException if context creation failed
* @see org.springframework.core.io.ClassPathResource#ClassPathResource(String, Class)
* @see org.springframework.context.support.GenericApplicationContext
* @see org.springframework.beans.factory.xml.XmlBeanDefinitionReader
*/
public ClassPathXmlApplicationContext(String[] paths, Class clazz) throws BeansException {
this(paths, clazz, null);
}
/**
* Create a new ClassPathXmlApplicationContext with the given parent,
* loading the definitions from the given XML files and automatically
* refreshing the context.
* @param paths array of relative (or absolute) paths within the class path
* @param clazz the class to load resources with (basis for the given paths)
* @param parent the parent context
* @throws BeansException if context creation failed
* @see org.springframework.core.io.ClassPathResource#ClassPathResource(String, Class)
* @see org.springframework.context.support.GenericApplicationContext
* @see org.springframework.beans.factory.xml.XmlBeanDefinitionReader
*/
public ClassPathXmlApplicationContext(String[] paths, Class clazz, ApplicationContext parent)
throws BeansException {
super(parent);
Assert.notNull(paths, "Path array must not be null");
Assert.notNull(clazz, "Class argument must not be null");
this.configResources = new Resource[paths.length];
for (int i = 0; i < paths.length; i++) {
this.configResources[i] = new ClassPathResource(paths[i], clazz);
}
refresh();
}
protected Resource[] getConfigResources() {
return this.configResources;
}
protected String[] getConfigLocations() {
return this.configLocations;
}
}
| {
"content_hash": "ec5d6118f2a060dbed5c7cb05a6aedba",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 115,
"avg_line_length": 41.162921348314605,
"alnum_prop": 0.7765797734406988,
"repo_name": "mattxia/spring-2.5-analysis",
"id": "9438ddc8943b5d7b6667cb2fe3df45c15e945a6c",
"size": "7947",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/org/springframework/context/support/ClassPathXmlApplicationContext.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AspectJ",
"bytes": "18516"
},
{
"name": "CSS",
"bytes": "20368"
},
{
"name": "Groovy",
"bytes": "2361"
},
{
"name": "Java",
"bytes": "16366293"
},
{
"name": "Ruby",
"bytes": "623"
},
{
"name": "Shell",
"bytes": "684"
},
{
"name": "XSLT",
"bytes": "2674"
}
],
"symlink_target": ""
} |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef mozilla_dom_network_cfstatechangeevent_h
#define mozilla_dom_network_cfstatechangeevent_h
#include "nsIDOMCFStateChangeEvent.h"
#include "nsDOMEvent.h"
namespace mozilla {
namespace dom {
namespace network {
class CFStateChangeEvent : public nsDOMEvent,
public nsIDOMCFStateChangeEvent
{
bool mSuccess;
uint16_t mAction;
uint16_t mReason;
nsString mNumber;
uint16_t mTimeSeconds;
uint16_t mServiceClass;
public:
NS_DECL_ISUPPORTS_INHERITED
NS_FORWARD_TO_NSDOMEVENT
NS_DECL_NSIDOMCFSTATECHANGEEVENT
static already_AddRefed<CFStateChangeEvent>
Create(bool aSuccess,
uint16_t aAction,
uint16_t aReason,
nsAString& aNumber,
uint16_t aTimeSeconds,
uint16_t aServiceClass);
nsresult
Dispatch(nsIDOMEventTarget* aTarget, const nsAString& aEventType)
{
NS_ASSERTION(aTarget, "Null pointer!");
NS_ASSERTION(!aEventType.IsEmpty(), "Empty event type!");
nsresult rv = InitEvent(aEventType, false, false);
NS_ENSURE_SUCCESS(rv, rv);
SetTrusted(true);
nsDOMEvent* thisEvent = this;
bool dummy;
rv = aTarget->DispatchEvent(thisEvent, &dummy);
NS_ENSURE_SUCCESS(rv, rv);
return NS_OK;
}
private:
CFStateChangeEvent()
: nsDOMEvent(nullptr, nullptr)
{ }
~CFStateChangeEvent()
{ }
};
}
}
}
#endif // mozilla_dom_network_cfstatechangeevent_h
| {
"content_hash": "a8c85f0dbc58383751ac7e948563af4f",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 76,
"avg_line_length": 22.9,
"alnum_prop": 0.6980661260137243,
"repo_name": "sergecodd/FireFox-OS",
"id": "c377692fd47a88f3729831a03faf39d911ba4f3f",
"size": "1603",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "B2G/gecko/dom/network/src/CFStateChangeEvent.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ada",
"bytes": "443"
},
{
"name": "ApacheConf",
"bytes": "85"
},
{
"name": "Assembly",
"bytes": "5123438"
},
{
"name": "Awk",
"bytes": "46481"
},
{
"name": "Batchfile",
"bytes": "56250"
},
{
"name": "C",
"bytes": "101720951"
},
{
"name": "C#",
"bytes": "38531"
},
{
"name": "C++",
"bytes": "148896543"
},
{
"name": "CMake",
"bytes": "23541"
},
{
"name": "CSS",
"bytes": "2758664"
},
{
"name": "DIGITAL Command Language",
"bytes": "56757"
},
{
"name": "Emacs Lisp",
"bytes": "12694"
},
{
"name": "Erlang",
"bytes": "889"
},
{
"name": "FLUX",
"bytes": "34449"
},
{
"name": "GLSL",
"bytes": "26344"
},
{
"name": "Gnuplot",
"bytes": "710"
},
{
"name": "Groff",
"bytes": "447012"
},
{
"name": "HTML",
"bytes": "43343468"
},
{
"name": "IDL",
"bytes": "1455122"
},
{
"name": "Java",
"bytes": "43261012"
},
{
"name": "JavaScript",
"bytes": "46646658"
},
{
"name": "Lex",
"bytes": "38358"
},
{
"name": "Logos",
"bytes": "21054"
},
{
"name": "Makefile",
"bytes": "2733844"
},
{
"name": "Matlab",
"bytes": "67316"
},
{
"name": "Max",
"bytes": "3698"
},
{
"name": "NSIS",
"bytes": "421625"
},
{
"name": "Objective-C",
"bytes": "877657"
},
{
"name": "Objective-C++",
"bytes": "737713"
},
{
"name": "PHP",
"bytes": "17415"
},
{
"name": "Pascal",
"bytes": "6780"
},
{
"name": "Perl",
"bytes": "1153180"
},
{
"name": "Perl6",
"bytes": "1255"
},
{
"name": "PostScript",
"bytes": "1139"
},
{
"name": "PowerShell",
"bytes": "8252"
},
{
"name": "Protocol Buffer",
"bytes": "26553"
},
{
"name": "Python",
"bytes": "8453201"
},
{
"name": "Ragel in Ruby Host",
"bytes": "3481"
},
{
"name": "Ruby",
"bytes": "5116"
},
{
"name": "Scilab",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "3383832"
},
{
"name": "SourcePawn",
"bytes": "23661"
},
{
"name": "TeX",
"bytes": "879606"
},
{
"name": "WebIDL",
"bytes": "1902"
},
{
"name": "XSLT",
"bytes": "13134"
},
{
"name": "Yacc",
"bytes": "112744"
}
],
"symlink_target": ""
} |
// Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/**
* @name: S12.6.1_A13_T1;
* @section: 12.6.1, 13;
* @assertion: FunctionDeclaration within a "do-while" Block is not allowed;
* @description: Declaring function within a "do-while" loop;
* @negative;
*/
do{
function __func(){};
} while(0);
| {
"content_hash": "0a69094cd16d92d9f4df73ff296a6be2",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 75,
"avg_line_length": 27.214285714285715,
"alnum_prop": 0.6850393700787402,
"repo_name": "rapha/sputnik",
"id": "ac7bb5f8ce30ce9ee12fbc7204abc03bb65d3f09",
"size": "381",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "tests/Conformance/12_Statement/12.6_Iteration_Statements/12.6.1_The_do_while_Statement/S12.6.1_A13_T1.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "6163734"
},
{
"name": "Python",
"bytes": "13630"
}
],
"symlink_target": ""
} |
package keybase1
import (
"github.com/keybase/go-framed-msgpack-rpc/rpc"
context "golang.org/x/net/context"
"time"
)
type LogArg struct {
SessionID int `codec:"sessionID" json:"sessionID"`
Level LogLevel `codec:"level" json:"level"`
Text Text `codec:"text" json:"text"`
}
type LogUiInterface interface {
Log(context.Context, LogArg) error
}
func LogUiProtocol(i LogUiInterface) rpc.Protocol {
return rpc.Protocol{
Name: "keybase.1.logUi",
Methods: map[string]rpc.ServeHandlerDescription{
"log": {
MakeArg: func() interface{} {
var ret [1]LogArg
return &ret
},
Handler: func(ctx context.Context, args interface{}) (ret interface{}, err error) {
typedArgs, ok := args.(*[1]LogArg)
if !ok {
err = rpc.NewTypeError((*[1]LogArg)(nil), args)
return
}
err = i.Log(ctx, typedArgs[0])
return
},
},
},
}
}
type LogUiClient struct {
Cli rpc.GenericClient
}
func (c LogUiClient) Log(ctx context.Context, __arg LogArg) (err error) {
err = c.Cli.Call(ctx, "keybase.1.logUi.log", []interface{}{__arg}, nil, 0*time.Millisecond)
return
}
| {
"content_hash": "0350d3fe92a3e1846a19619b07f16bec",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 92,
"avg_line_length": 23.102040816326532,
"alnum_prop": 0.6457597173144877,
"repo_name": "keybase/client",
"id": "5739a96f28955b8b3e2deeb3163ffcb5c485468d",
"size": "1297",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "go/protocol/keybase1/log_ui.go",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "17403"
},
{
"name": "C",
"bytes": "183175"
},
{
"name": "C++",
"bytes": "26935"
},
{
"name": "CMake",
"bytes": "2524"
},
{
"name": "CSS",
"bytes": "46433"
},
{
"name": "CoffeeScript",
"bytes": "28635"
},
{
"name": "Dockerfile",
"bytes": "19841"
},
{
"name": "Go",
"bytes": "32360664"
},
{
"name": "HTML",
"bytes": "7113636"
},
{
"name": "Java",
"bytes": "144690"
},
{
"name": "JavaScript",
"bytes": "113705"
},
{
"name": "Makefile",
"bytes": "8579"
},
{
"name": "Objective-C",
"bytes": "1419995"
},
{
"name": "Objective-C++",
"bytes": "34802"
},
{
"name": "Perl",
"bytes": "2673"
},
{
"name": "Python",
"bytes": "25189"
},
{
"name": "Roff",
"bytes": "108890"
},
{
"name": "Ruby",
"bytes": "38112"
},
{
"name": "Shell",
"bytes": "186628"
},
{
"name": "Starlark",
"bytes": "1928"
},
{
"name": "Swift",
"bytes": "217"
},
{
"name": "TypeScript",
"bytes": "2493"
},
{
"name": "XSLT",
"bytes": "914"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Text;
namespace BigAtom
{
public class VoxelCube
{
int pow2Size;
int voxelWidth;
public VoxelCube(int cubePow2Size)
{
this.pow2Size = cubePow2Size;
voxelWidth = (int)Math.Pow(2, cubePow2Size);
}
}
}
| {
"content_hash": "d3945d8be21b57f83be57f8468a5befe",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 56,
"avg_line_length": 17.05,
"alnum_prop": 0.592375366568915,
"repo_name": "MatterHackers/agg-sharp",
"id": "ec9d245eadf8e46edefbb9609c6cab0835ef92c2",
"size": "343",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "examples/BigAtom/VoxelCube.cs",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C#",
"bytes": "11639559"
},
{
"name": "Smalltalk",
"bytes": "4964"
}
],
"symlink_target": ""
} |
/** Request/response streams to and from node http request/responses
*
*
*/
var http = require('http')
var util = require('util')
var lob = require("lob-enc")
var ChannelStream = require("./stream.class")
var THTP = {
Request : {
//fromHTTP : THTP_Request_fromHTTP,
toHTTP : THTP_Request_toHTTP
},
Response : {
fromHTTP : THTP_Response_fromHTTP
//toHTTP : THTP_Response_toHTTP
}
}
module.exports = THTP;
util.inherits(THTP_Request_toHTTP, http.IncomingMessage);
util.inherits(THTP_Response_fromHTTP, http.ServerResponse);
function THTP_Request_toHTTP(packet,link, stream){
// mimic http://nodejs.org/api/http.html#http_http_incomingmessage
http.IncomingMessage.call(this)
this.method = packet.json[':method'];
this.url = packet.json[':path'] || "/";
this.headers = packet.json;
this.headers['x-hashname'] = link.hashname; // for any http handler visibility
this.hashname = link.hashname;
this.connection = {
remoteAddress : link.hashname
, cork : function(){
//noop
}
}
this.on = stream.on.bind(stream)
this.read = stream.read.bind(stream)
this.pipe = stream.pipe.bind(stream)
}
function THTP_Response_fromHTTP(req, link, stream){
// mimic http://nodejs.org/api/http.html#http_http_incomingmessage
http.ServerResponse.call(this, req)
this.connection = {
remoteAddress : link.hashname,
_httpMessage : this,
cork : function(){
//console.log('res cork')
}
, uncork : function(){
//console.log("uncork")
}
}
var head = false
//this.on = stream.on.bind(stream)
this._writeRaw = stream._write.bind(stream);
this._write = stream._write.bind(stream)
this.on('pipe',function(from){
from.on('end',function(body, a2, a3){
stream.end()
})
})
this.end = function(data, enc, callback){
if (!head)
this.writeHead(200);
stream.end(data)
}
this.writeHead = function(statusCode, reasonPhrase, headers)
{
// don't double!
if(head){
//console.log('double call to thtp writeHead',this.statusCode,(new Error()).stack)
return this;
};
head = true;
// be friendly
if(!headers && typeof reasonPhrase == 'object')
{
headers = reasonPhrase;
reasonPhrase = false;
} else if (!headers ){
headers = this._headers
}
this.statusCode = parseInt(statusCode) || 500;
var json = {};
json[':status'] = this.statusCode;
if(reasonPhrase) json[':reason'] = reasonPhrase;
if(headers) Object.keys(headers).forEach(function(header){
json[header.toLowerCase()] = headers[header];
});
stream.write(lob.encode(json, false));
return this;
}
}
| {
"content_hash": "be75a5e2885c1df4483fcb0ffa9d665b",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 88,
"avg_line_length": 24.55045871559633,
"alnum_prop": 0.6434977578475336,
"repo_name": "vweevers/telehash-js",
"id": "15a2d092822f0d070b514c02e599560fe68d30ad",
"size": "2676",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ext/thtp.class.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "739"
},
{
"name": "JavaScript",
"bytes": "124469"
},
{
"name": "Makefile",
"bytes": "105"
}
],
"symlink_target": ""
} |
#include <vector>
#include <cstdlib>
#include <locale>
#include <codecvt>
#include <cctype>
namespace Xor
{
static std::wstring_convert<std::codecvt_utf8<wchar_t>> wConverter;
String::String(const std::wstring &wstr)
: m_str(wConverter.to_bytes(wstr))
{
updateView();
}
String::String(const wchar_t *wstr)
: m_str(wConverter.to_bytes(wstr))
{
updateView();
}
    String String::vformat(const char *fmt, va_list ap)
    {
        // The argument list may be consumed twice if the stack buffer is too
        // small, so keep a copy for the second pass.
        va_list apCopy;
        va_copy(apCopy, ap);
        char buffer[1024];
        int size = vsnprintf(buffer, sizeof(buffer), fmt, ap);
        if (size >= static_cast<int>(sizeof(buffer)))
        {
            // The stack buffer was too small; allocate room for the full
            // formatted output plus the null terminator and format again.
            std::vector<char> largeBuffer(size + 1);
            int wrote = vsnprintf(largeBuffer.data(), largeBuffer.size(), fmt, apCopy);
            va_end(apCopy);
            XOR_ASSERT(size == wrote, "Unexpected amount of characters written.");
            return String(largeBuffer.data(), size);
        }
        else
        {
            va_end(apCopy);
            return String(buffer);
        }
    }
String String::format(const char *fmt, ...)
{
va_list ap;
va_start(ap, fmt);
String s = vformat(fmt, ap);
va_end(ap);
return s;
}
std::wstring String::wideStr() const
{
return wConverter.from_bytes(m_str);
}
String StringView::str() const
{
return String(begin(), end());
}
std::string StringView::stdString() const
{
return std::string(begin(), end());
}
std::wstring StringView::wideStr() const
{
return wConverter.from_bytes(str().m_str);
}
String operator+(StringView a, StringView b)
{
std::string s(a.begin(), a.end());
s.append(b.data(), b.size());
return String(std::move(s));
}
String StringView::capitalize() const
{
auto s = stdString();
s[0] = std::toupper(s[0]);
return String(std::move(s));
}
String StringView::lower() const
{
auto s = stdString();
for (char &c : s) c = std::tolower(c);
return String(std::move(s));
}
std::vector<String> StringView::split(StringView separators, int maxSplit) const
{
std::vector<String> result;
splitForEach([&] (StringView s)
{
result.emplace_back(s.str());
}, separators, maxSplit);
return result;
}
std::vector<String> StringView::splitNonEmpty(StringView separators, int maxSplit) const
{
std::vector<String> result;
splitForEach([&] (StringView s)
{
if (!s.empty())
result.emplace_back(s.str());
}, separators, maxSplit);
return result;
}
std::vector<String> StringView::lines() const
{
return replace("\r", "").split("\n");
}
String StringView::strip(StringView separators, bool leftStrip, bool rightStrip) const
{
size_t start = 0;
size_t end = size();
if (leftStrip)
{
for (; start < end; ++start)
{
if (!separators.contains(operator[](start)))
break;
}
}
if (rightStrip)
{
for (; end > start; --end)
{
if (!separators.contains(operator[](end - 1)))
break;
}
}
return String(begin() + start, begin() + end);
}
String StringView::swapCase() const
{
auto s = stdString();
for (char &c : s)
{
if (std::isupper(c))
c = std::tolower(c);
else
c = std::toupper(c);
}
return String(std::move(s));
}
String StringView::upper() const
{
auto s = stdString();
for (char &c : s) c = std::toupper(c);
return String(std::move(s));
}
String StringView::leftJustify(int width, char filler) const
{
int len = std::max(width, length());
std::string result;
result.reserve(len);
result.append(begin(), end());
for (int i = length(); i < len; ++i)
result.push_back(filler);
return String(std::move(result));
}
String StringView::rightJustify(int width, char filler) const
{
int len = std::max(width, length());
std::string result;
result.reserve(len);
for (int i = length(); i < len; ++i)
result.push_back(filler);
result.append(begin(), end());
return String(std::move(result));
}
String StringView::center(int width, char filler) const
{
int len = std::max(width, length());
std::string result;
result.reserve(len);
        int fill = len - length();
int left = fill / 2;
int right = fill - left;
for (int i = 0; i < left; ++i) result.push_back(filler);
result.append(begin(), end());
for (int i = 0; i < right; ++i) result.push_back(filler);
return String(std::move(result));
}
String StringView::replace(StringView old, StringView replacement, int maxReplace) const
{
int hits = count(old);
if (hits == 0 || maxReplace == 0)
return str();
if (maxReplace < 0) maxReplace = -1;
int diff = replacement.length() - old.length();
int len = length() + hits * diff;
std::string result;
result.reserve(len);
int i = 0;
while (i < length())
{
if (from(i).startsWith(old))
{
// If maxReplace is negative, we always replace
if (maxReplace != 0)
{
result.append(replacement.begin(),
replacement.end());
--maxReplace;
}
else
{
result.append(old.begin(),
old.end());
}
i += old.length();
}
else
{
result.push_back(operator[](i));
++i;
}
}
return String(std::move(result));
}
String StringView::replace(int start, int end, StringView replacement) const
{
start = idx(start);
end = idx(end);
std::string result;
result.reserve(length() + replacement.length() + start - end);
result.insert(result.end(), begin(), begin() + start);
result.insert(result.end(), replacement.begin(), replacement.end());
result.insert(result.end(), begin() + end, this->end());
return String(std::move(result));
}
String StringView::repeat(uint count) const
{
if (count == 0)
return String();
std::string result;
std::string s = stdString();
result.reserve(count * static_cast<uint>(length()));
for (uint i = 0; i < count; ++i)
result += s;
return String(std::move(result));
}
}
| {
"content_hash": "7144da62c7674e5b1f3364932f9f807a",
"timestamp": "",
"source": "github",
"line_count": 272,
"max_line_length": 92,
"avg_line_length": 26.845588235294116,
"alnum_prop": 0.479320734045467,
"repo_name": "jknuuttila/xor-renderer",
"id": "0361753e1b95e86fe6516b1004eb4cc8a5a467d9",
"size": "7347",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Core/String.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5827"
},
{
"name": "C++",
"bytes": "788747"
},
{
"name": "HLSL",
"bytes": "70463"
},
{
"name": "Python",
"bytes": "660"
}
],
"symlink_target": ""
} |
<md-dialog aria-label="{action} + 'Container'" class="container-dialog">
<md-toolbar layout="column" layout-align="center center">
<md-subheader class="md-sticky-no-effect"><span class="action">{{action}} </span>Container <span class="short-id">{{ containerShortId }}</span></md-subheader>
</md-toolbar>
<md-content>
<md-list>
<md-item>
<div>Restart:</div>
<md-checkbox ng-model="isRestart" ng-change="change(isRestart)" aria-label="Restart"></md-checkbox>
</md-item>
<md-item>
<div>Seconds: </div><small>Number of seconds to wait before killing the container</small>
<div layout="row" flex="flex">
<md-slider layout="column" flex="80" ng-disabled="action == 'start'" ng-model="params.t" min="0" max="60" aria-label="green" id="green-slider" class="md-accent"></md-slider>
<div layout="column" layout-align="left center" flex="flex">
<input layout="column" type="number" ng-model="params.t" aria-label="green" aria-controls="green-slider" style="margin: auto 0;"/>
</div>
</div>
</md-item>
<md-item>
<p>{{ content }}</p>
</md-item>
</md-list>
</md-content>
<div layout="row" class="md-actions">
<md-button ng-click="cancel()">Cancel</md-button>
<md-button ng-click="ok()" class="md-primary">Yes</md-button>
</div>
</md-dialog> | {
"content_hash": "dbe25e65a12ba9885f59246f65d7ff8c",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 183,
"avg_line_length": 47.724137931034484,
"alnum_prop": 0.611271676300578,
"repo_name": "dockerboard/bluewhale",
"id": "8fca8ac1d8432e3591cf9241df5bba408ddfaa38",
"size": "1385",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dist/js/modules/containers/views/container.running.dialog.tpl.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3541"
},
{
"name": "HTML",
"bytes": "70197"
},
{
"name": "JavaScript",
"bytes": "45134"
}
],
"symlink_target": ""
} |
function[c] = least_coeffs(l,u,p,v,k,D,f, varargin)
% least_coeffs -- Computes basis coefficients for interpolation
%
% c = least_coeffs(l,u,p,v,k,dim,f, [ip=alpha!, connection=speye])
%
% Using the 5-tuple (l,u,p,v,k) that is output from de Boor's algorithm
% deboor_lu, this function uses the point-evaluations f to compute the
% basis coefficients necessary for interpolation. Obviously, f must
% be an N-vector, where N is the number of rows of W (or l, or p, or u).
% Unfortunately, the dimension of the problem cannot be divined from the
% 5-tuple, so the extra input dim denoting the dimension is necessary.
%
% The interpolant may be evaluated as
%
% p(x) = sum_n c(n) * p(x,n),
%
% where p(x,n) is a function that evaluates the n'th basis function at the
% location x. (e.g. speclab.monomials.multimonomial is one such function). The
% output c has length size(W,2).
%
% Note that this function is vectorized in the columns of f, meaning that if
% f is a collection of column vectors, this function returns a collection of
% column vectors containing the basis representation.
persistent invu invl dim subdim ip strict_inputs spdiag
if isempty(invu)
from labtools import spdiag
from labtools import strict_inputs
from labtools.linalg import triu_back_substitute as invu
from labtools.linalg import tril_forward_substitute as invl
%from speclab.common.tensor import space_dimension as dim
from speclab.common.tensor import polynomial_space_dimension as dim
%from speclab.common.tensor import subspace_dimension as subdim
from speclab.common.tensor import polynomial_subspace_dimension as subdim
from speclab.orthopoly.interp import monomial_ip as ip
end
opt = strict_inputs({'ip', 'connection'}, {ip, @speye}, [], varargin{:});
% These are the coefficients for the cardinal interpolants under the de Boor
% inner product. The remainder of the code translates these coefficients into
% the basis coefficients.
a = diag(diag(u))*invu(u, invl(l, p*f));
N = size(a,1);
indices = [find(diff(k)); N]; % index markers delineating degree change between rows
c = zeros([dim(D,k(end)) size(f,2)]);
row_id = 1;
v_position = 1;
v = v.';
connmat = opt.connection(dim(D,k(end)));
for q = 0:k(end);
% Determine the columns of W associated with this degree
current_dim = subdim(D, q); % The current size of k-vectors
cols = dim(D,q-1)+1;
cols = cols:(cols+current_dim-1);
% Determine the rows of W associated with this degree
rows = row_id:indices(q+1);
degree_indices = v_position:(v_position+length(rows)*current_dim-1);
degree_indices = reshape(degree_indices.', [current_dim, length(rows)]).';
Cmat = connmat(cols,cols);
% First compute inv(u)*v:
v(degree_indices) = invu(u(rows,rows), v(degree_indices));
% "Orthodox" way
%c(cols,:) = (a(rows,:).'*W(rows,cols)).';
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% About to do some crazy stuff
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%M = opt.ip(D,q);
M = Cmat*opt.ip(D,q);
%c(cols,:) = (a(rows,:).'*(v(degree_indices)*M)).';
%M = inv(connmat(cols,cols))*M;
%M = spdiag(1./diag(connmat(cols,cols)))*M;
%c = c + (a(rows,:).'*(v(degree_indices)*M*Cmat)).';
c(cols,:) = (a(rows,:).'*(v(degree_indices)*M)).';
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% I think de Boor pulled a fast one on me: the 'orthodox' computation
% generates coefficients for the scaled multimonomials alpha!*x^alpha. Here
% I've included the scaling alpha! in the coefficients to make things
% compatible with mulitmonomial
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
row_id = rows(end)+1;
v_position = degree_indices(end)+1;
end
| {
"content_hash": "8a36411607e49bdc5cfa376bcc525ea9",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 83,
"avg_line_length": 38.333333333333336,
"alnum_prop": 0.6627717391304347,
"repo_name": "cygnine/speclab",
"id": "9c96c411c46c001ed39c27d4e090a92b6bf18059",
"size": "3680",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "orthopoly/interp/least_coeffs.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Matlab",
"bytes": "704722"
}
],
"symlink_target": ""
} |
namespace _13.Office_Stuff
{
using System.Collections.Generic;
public class Company
{
public string CompanyName { get; set; }
public List<Order> Orders { get; set; }
}
}
| {
"content_hash": "eecd624494441065127d78a1c59d37bf",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 47,
"avg_line_length": 18.636363636363637,
"alnum_prop": 0.6097560975609756,
"repo_name": "MihailDobrev/SoftUni",
"id": "3f30aaa8cc879b47970d7fbca59fbf48b2d85181",
"size": "207",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "C# Fundamentals/C# Advanced/14. LINQ - Exercise/13. Office Stuff/Company.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "921"
},
{
"name": "Batchfile",
"bytes": "1673"
},
{
"name": "C#",
"bytes": "3102431"
},
{
"name": "CSS",
"bytes": "1301866"
},
{
"name": "HTML",
"bytes": "449336"
},
{
"name": "Java",
"bytes": "43616"
},
{
"name": "JavaScript",
"bytes": "739855"
},
{
"name": "PHP",
"bytes": "244170"
},
{
"name": "PLpgSQL",
"bytes": "17609"
}
],
"symlink_target": ""
} |
/*
* This is not the original file distributed by the Apache Software Foundation
* It has been modified by the Hipparchus project
*/
package org.hipparchus.optim;
import org.hipparchus.exception.MathIllegalArgumentException;
import org.junit.Assert;
import org.junit.Test;
public class SimpleValueCheckerTest {
@Test(expected=MathIllegalArgumentException.class)
public void testIterationCheckPrecondition() {
new SimpleValueChecker(1e-1, 1e-2, 0);
}
@Test
public void testIterationCheck() {
final int max = 10;
final SimpleValueChecker checker = new SimpleValueChecker(1e-1, 1e-2, max);
Assert.assertTrue(checker.converged(max, null, null));
Assert.assertTrue(checker.converged(max + 1, null, null));
}
@Test
public void testIterationCheckDisabled() {
final SimpleValueChecker checker = new SimpleValueChecker(1e-8, 1e-8);
final PointValuePair a = new PointValuePair(new double[] { 1d }, 1d);
final PointValuePair b = new PointValuePair(new double[] { 10d }, 10d);
Assert.assertFalse(checker.converged(-1, a, b));
Assert.assertFalse(checker.converged(0, a, b));
Assert.assertFalse(checker.converged(1000000, a, b));
Assert.assertTrue(checker.converged(-1, a, a));
Assert.assertTrue(checker.converged(-1, b, b));
}
}
| {
"content_hash": "6605930df79bc348372d41138ad45773",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 83,
"avg_line_length": 32.595238095238095,
"alnum_prop": 0.6888239590942293,
"repo_name": "Hipparchus-Math/hipparchus",
"id": "608903e7534f5d62f0a2e323c65f9c5a4a2774fe",
"size": "2171",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "hipparchus-optim/src/test/java/org/hipparchus/optim/SimpleValueCheckerTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "67"
},
{
"name": "Java",
"bytes": "15801055"
},
{
"name": "Python",
"bytes": "7212"
},
{
"name": "R",
"bytes": "57047"
},
{
"name": "Shell",
"bytes": "4024"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<sem:triples uri="http://www.lds.org/vrl/publications-and-documents/ces-and-scriptures/old-testament-teaching-guide-publication" xmlns:sem="http://marklogic.com/semantics">
<sem:triple>
<sem:subject>http://www.lds.org/vrl/publications-and-documents/ces-and-scriptures/old-testament-teaching-guide-publication</sem:subject>
<sem:predicate>http://www.w3.org/2004/02/skos/core#prefLabel</sem:predicate>
<sem:object datatype="xsd:string" xml:lang="eng">Old Testament Teaching Guide (Publication)</sem:object>
</sem:triple>
<sem:triple>
<sem:subject>http://www.lds.org/vrl/publications-and-documents/ces-and-scriptures/old-testament-teaching-guide-publication</sem:subject>
<sem:predicate>http://www.w3.org/2004/02/skos/core#inScheme</sem:predicate>
<sem:object datatype="sem:iri">http://www.lds.org/concept-scheme/vrl</sem:object>
</sem:triple>
<sem:triple>
<sem:subject>http://www.lds.org/vrl/publications-and-documents/ces-and-scriptures/old-testament-teaching-guide-publication</sem:subject>
<sem:predicate>http://www.lds.org/core#entityType</sem:predicate>
<sem:object datatype="sem:iri">http://www.lds.org/Topic</sem:object>
</sem:triple>
</sem:triples>
| {
"content_hash": "47ed11f44439c6a983c28b2af9a5156f",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 172,
"avg_line_length": 69,
"alnum_prop": 0.7423510466988728,
"repo_name": "freshie/ml-taxonomies",
"id": "11830bdc187079c82d87815de32b55d5e141990f",
"size": "1242",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "roxy/data/gospel-topical-explorer-v2/taxonomies/vrl/publications-and-documents/ces-and-scriptures/old-testament-teaching-guide-publication.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4422"
},
{
"name": "CSS",
"bytes": "38665"
},
{
"name": "HTML",
"bytes": "356"
},
{
"name": "JavaScript",
"bytes": "411651"
},
{
"name": "Ruby",
"bytes": "259121"
},
{
"name": "Shell",
"bytes": "7329"
},
{
"name": "XQuery",
"bytes": "857170"
},
{
"name": "XSLT",
"bytes": "13753"
}
],
"symlink_target": ""
} |
"""Testing utilities."""
# Copyright (c) 2011, 2012
# Authors: Pietro Berkes,
# Andreas Muller
# Mathieu Blondel
# Olivier Grisel
# Arnaud Joly
# Denis Engemann
# License: BSD 3 clause
import os
import inspect
import pkgutil
import warnings
import sys
import re
import platform
import scipy as sp
import scipy.io
from functools import wraps
try:
# Python 2
from urllib2 import urlopen
from urllib2 import HTTPError
except ImportError:
# Python 3+
from urllib.request import urlopen
from urllib.error import HTTPError
import sklearn
from sklearn.base import BaseEstimator
# Conveniently import all assertions in one place.
from nose.tools import assert_equal
from nose.tools import assert_not_equal
from nose.tools import assert_true
from nose.tools import assert_false
from nose.tools import assert_raises
from nose.tools import raises
from nose import SkipTest
from nose import with_setup
from numpy.testing import assert_almost_equal
from numpy.testing import assert_array_equal
from numpy.testing import assert_array_almost_equal
from numpy.testing import assert_array_less
import numpy as np
from sklearn.base import (ClassifierMixin, RegressorMixin, TransformerMixin,
ClusterMixin)
__all__ = ["assert_equal", "assert_not_equal", "assert_raises",
"assert_raises_regexp", "raises", "with_setup", "assert_true",
"assert_false", "assert_almost_equal", "assert_array_equal",
"assert_array_almost_equal", "assert_array_less",
"assert_less", "assert_less_equal",
"assert_greater", "assert_greater_equal"]
try:
from nose.tools import assert_in, assert_not_in
except ImportError:
# Nose < 1.0.0
def assert_in(x, container):
assert_true(x in container, msg="%r in %r" % (x, container))
def assert_not_in(x, container):
assert_false(x in container, msg="%r in %r" % (x, container))
try:
from nose.tools import assert_raises_regex
except ImportError:
# for Py 2.6
def assert_raises_regex(expected_exception, expected_regexp,
callable_obj=None, *args, **kwargs):
"""Helper function to check for message patterns in exceptions"""
not_raised = False
try:
callable_obj(*args, **kwargs)
not_raised = True
except Exception as e:
error_message = str(e)
if not re.compile(expected_regexp).search(error_message):
raise AssertionError("Error message should match pattern "
"%r. %r does not." %
(expected_regexp, error_message))
if not_raised:
raise AssertionError("Should have raised %r" %
expected_exception(expected_regexp))
# assert_raises_regexp is deprecated in Python 3.4 in favor of
# assert_raises_regex but lets keep the bacward compat in scikit-learn with
# the old name for now
assert_raises_regexp = assert_raises_regex
def _assert_less(a, b, msg=None):
message = "%r is not lower than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a < b, message
def _assert_greater(a, b, msg=None):
message = "%r is not greater than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a > b, message
def assert_less_equal(a, b, msg=None):
message = "%r is not lower than or equal to %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a <= b, message
def assert_greater_equal(a, b, msg=None):
message = "%r is not greater than or equal to %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a >= b, message
def assert_warns(warning_class, func, *args, **kw):
"""Test that a certain warning occurs.
Parameters
----------
warning_class : the warning class
The class to test for, e.g. UserWarning.
func : callable
        Callable object to trigger warnings.
*args : the positional arguments to `func`.
**kw : the keyword arguments to `func`
Returns
-------
result : the return value of `func`
"""
# very important to avoid uncontrolled state propagation
clean_warning_registry()
with warnings.catch_warnings(record=True) as w:
# Cause all warnings to always be triggered.
warnings.simplefilter("always")
# Trigger a warning.
result = func(*args, **kw)
if hasattr(np, 'VisibleDeprecationWarning'):
# Filter out numpy-specific warnings in numpy >= 1.9
w = [e for e in w
if not e.category is np.VisibleDeprecationWarning]
# Verify some things
if not len(w) > 0:
raise AssertionError("No warning raised when calling %s"
% func.__name__)
found = any(warning.category is warning_class for warning in w)
if not found:
raise AssertionError("%s did not give warning: %s( is %s)"
% (func.__name__, warning_class, w))
return result
def assert_warns_message(warning_class, message, func, *args, **kw):
# very important to avoid uncontrolled state propagation
"""Test that a certain warning occurs and with a certain message.
Parameters
----------
warning_class : the warning class
The class to test for, e.g. UserWarning.
message : str | callable
The entire message or a substring to test for. If callable,
it takes a string as argument and will trigger an assertion error
if it returns `False`.
func : callable
        Callable object to trigger warnings.
*args : the positional arguments to `func`.
**kw : the keyword arguments to `func`.
Returns
-------
result : the return value of `func`
"""
clean_warning_registry()
with warnings.catch_warnings(record=True) as w:
# Cause all warnings to always be triggered.
warnings.simplefilter("always")
if hasattr(np, 'VisibleDeprecationWarning'):
# Let's not catch the numpy internal DeprecationWarnings
warnings.simplefilter('ignore', np.VisibleDeprecationWarning)
# Trigger a warning.
result = func(*args, **kw)
# Verify some things
if not len(w) > 0:
raise AssertionError("No warning raised when calling %s"
% func.__name__)
if not w[0].category is warning_class:
raise AssertionError("First warning for %s is not a "
"%s( is %s)"
% (func.__name__, warning_class, w[0]))
# substring will match, the entire message with typo won't
msg = w[0].message # For Python 3 compatibility
msg = str(msg.args[0] if hasattr(msg, 'args') else msg)
if callable(message): # add support for certain tests
check_in_message = message
else:
check_in_message = lambda msg: message in msg
if not check_in_message(msg):
raise AssertionError("The message received ('%s') for <%s> is "
"not the one you expected ('%s')"
% (msg, func.__name__, message
))
return result
# To remove when we support numpy 1.7
def assert_no_warnings(func, *args, **kw):
# XXX: once we may depend on python >= 2.6, this can be replaced by the
# warnings module context manager.
# very important to avoid uncontrolled state propagation
clean_warning_registry()
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
result = func(*args, **kw)
if hasattr(np, 'VisibleDeprecationWarning'):
# Filter out numpy-specific warnings in numpy >= 1.9
w = [e for e in w
if not e.category is np.VisibleDeprecationWarning]
if len(w) > 0:
raise AssertionError("Got warnings when calling %s: %s"
% (func.__name__, w))
return result
def ignore_warnings(obj=None):
""" Context manager and decorator to ignore warnings
Note. Using this (in both variants) will clear all warnings
from all python modules loaded. In case you need to test
cross-module-warning-logging this is not your tool of choice.
Examples
--------
>>> with ignore_warnings():
... warnings.warn('buhuhuhu')
>>> def nasty_warn():
... warnings.warn('buhuhuhu')
... print(42)
>>> ignore_warnings(nasty_warn)()
42
"""
if callable(obj):
return _ignore_warnings(obj)
else:
return _IgnoreWarnings()
def _ignore_warnings(fn):
"""Decorator to catch and hide warnings without visual nesting"""
@wraps(fn)
def wrapper(*args, **kwargs):
# very important to avoid uncontrolled state propagation
clean_warning_registry()
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
return fn(*args, **kwargs)
w[:] = []
return wrapper
class _IgnoreWarnings(object):
"""Improved and simplified Python warnings context manager
Copied from Python 2.7.5 and modified as required.
"""
def __init__(self):
"""
Parameters
==========
category : warning class
The category to filter. Defaults to Warning. If None,
all categories will be muted.
"""
self._record = True
self._module = sys.modules['warnings']
self._entered = False
self.log = []
def __repr__(self):
args = []
if self._record:
args.append("record=True")
if self._module is not sys.modules['warnings']:
args.append("module=%r" % self._module)
name = type(self).__name__
return "%s(%s)" % (name, ", ".join(args))
def __enter__(self):
clean_warning_registry() # be safe and not propagate state + chaos
warnings.simplefilter('always')
if self._entered:
raise RuntimeError("Cannot enter %r twice" % self)
self._entered = True
self._filters = self._module.filters
self._module.filters = self._filters[:]
self._showwarning = self._module.showwarning
if self._record:
self.log = []
def showwarning(*args, **kwargs):
self.log.append(warnings.WarningMessage(*args, **kwargs))
self._module.showwarning = showwarning
return self.log
else:
return None
def __exit__(self, *exc_info):
if not self._entered:
raise RuntimeError("Cannot exit %r without entering first" % self)
self._module.filters = self._filters
self._module.showwarning = self._showwarning
self.log[:] = []
clean_warning_registry() # be safe and not propagate state + chaos
try:
from nose.tools import assert_less
except ImportError:
assert_less = _assert_less
try:
from nose.tools import assert_greater
except ImportError:
assert_greater = _assert_greater
def _assert_allclose(actual, desired, rtol=1e-7, atol=0,
err_msg='', verbose=True):
actual, desired = np.asanyarray(actual), np.asanyarray(desired)
if np.allclose(actual, desired, rtol=rtol, atol=atol):
return
msg = ('Array not equal to tolerance rtol=%g, atol=%g: '
'actual %s, desired %s') % (rtol, atol, actual, desired)
raise AssertionError(msg)
if hasattr(np.testing, 'assert_allclose'):
assert_allclose = np.testing.assert_allclose
else:
assert_allclose = _assert_allclose
def assert_raise_message(exception, message, function, *args, **kwargs):
"""Helper function to test error messages in exceptions"""
try:
function(*args, **kwargs)
raise AssertionError("Should have raised %r" % exception(message))
except exception as e:
error_message = str(e)
assert_in(message, error_message)
def fake_mldata(columns_dict, dataname, matfile, ordering=None):
"""Create a fake mldata data set.
Parameters
----------
columns_dict: contains data as
columns_dict[column_name] = array of data
dataname: name of data set
matfile: file-like object or file name
ordering: list of column_names, determines the ordering in the data set
Note: this function transposes all arrays, while fetch_mldata only
transposes 'data', keep that into account in the tests.
"""
datasets = dict(columns_dict)
# transpose all variables
for name in datasets:
datasets[name] = datasets[name].T
if ordering is None:
ordering = sorted(list(datasets.keys()))
# NOTE: setting up this array is tricky, because of the way Matlab
# re-packages 1D arrays
datasets['mldata_descr_ordering'] = sp.empty((1, len(ordering)),
dtype='object')
for i, name in enumerate(ordering):
datasets['mldata_descr_ordering'][0, i] = name
scipy.io.savemat(matfile, datasets, oned_as='column')
class mock_mldata_urlopen(object):
def __init__(self, mock_datasets):
"""Object that mocks the urlopen function to fake requests to mldata.
`mock_datasets` is a dictionary of {dataset_name: data_dict}, or
{dataset_name: (data_dict, ordering).
`data_dict` itself is a dictionary of {column_name: data_array},
and `ordering` is a list of column_names to determine the ordering
in the data set (see `fake_mldata` for details).
When requesting a dataset with a name that is in mock_datasets,
this object creates a fake dataset in a StringIO object and
returns it. Otherwise, it raises an HTTPError.
"""
self.mock_datasets = mock_datasets
def __call__(self, urlname):
dataset_name = urlname.split('/')[-1]
if dataset_name in self.mock_datasets:
resource_name = '_' + dataset_name
from io import BytesIO
matfile = BytesIO()
dataset = self.mock_datasets[dataset_name]
ordering = None
if isinstance(dataset, tuple):
dataset, ordering = dataset
fake_mldata(dataset, resource_name, matfile, ordering)
matfile.seek(0)
return matfile
else:
raise HTTPError(urlname, 404, dataset_name + " is not available",
[], None)
def install_mldata_mock(mock_datasets):
# Lazy import to avoid mutually recursive imports
from sklearn import datasets
datasets.mldata.urlopen = mock_mldata_urlopen(mock_datasets)
def uninstall_mldata_mock():
# Lazy import to avoid mutually recursive imports
from sklearn import datasets
datasets.mldata.urlopen = urlopen
# Meta estimators need another estimator to be instantiated.
META_ESTIMATORS = ["OneVsOneClassifier",
"OutputCodeClassifier", "OneVsRestClassifier", "RFE",
"RFECV", "BaseEnsemble"]
# estimators that there is no way to default-construct sensibly
OTHER = ["Pipeline", "FeatureUnion", "GridSearchCV", "RandomizedSearchCV"]
# some strange ones
DONT_TEST = ['SparseCoder', 'EllipticEnvelope', 'DictVectorizer',
'LabelBinarizer', 'LabelEncoder', 'MultiLabelBinarizer',
'TfidfTransformer', 'IsotonicRegression', 'OneHotEncoder',
'RandomTreesEmbedding', 'FeatureHasher', 'DummyClassifier',
'DummyRegressor', 'TruncatedSVD', 'PolynomialFeatures']
def all_estimators(include_meta_estimators=False, include_other=False,
type_filter=None, include_dont_test=False):
"""Get a list of all estimators from sklearn.
This function crawls the module and gets all classes that inherit
from BaseEstimator. Classes that are defined in test-modules are not
included.
By default meta_estimators such as GridSearchCV are also not included.
Parameters
----------
include_meta_estimators : boolean, default=False
Whether to include meta-estimators that can be constructed using
an estimator as their first argument. These are currently
BaseEnsemble, OneVsOneClassifier, OutputCodeClassifier,
OneVsRestClassifier, RFE, RFECV.
include_other : boolean, default=False
        Whether to include meta-estimators that are somehow special and can
not be default-constructed sensibly. These are currently
Pipeline, FeatureUnion and GridSearchCV
include_dont_test : boolean, default=False
Whether to include "special" label estimator or test processors.
type_filter : string or None, default=None
Which kind of estimators should be returned. If None, no filter is
applied and all estimators are returned. Possible values are
'classifier', 'regressor', 'cluster' and 'transformer' to get
estimators only of these specific types.
Returns
-------
estimators : list of tuples
List of (name, class), where ``name`` is the class name as string
        and ``class`` is the actual type of the class.
"""
def is_abstract(c):
if not(hasattr(c, '__abstractmethods__')):
return False
if not len(c.__abstractmethods__):
return False
return True
all_classes = []
# get parent folder
path = sklearn.__path__
for importer, modname, ispkg in pkgutil.walk_packages(
path=path, prefix='sklearn.', onerror=lambda x: None):
if ".tests." in modname:
continue
module = __import__(modname, fromlist="dummy")
classes = inspect.getmembers(module, inspect.isclass)
all_classes.extend(classes)
all_classes = set(all_classes)
estimators = [c for c in all_classes
if (issubclass(c[1], BaseEstimator)
and c[0] != 'BaseEstimator')]
# get rid of abstract base classes
estimators = [c for c in estimators if not is_abstract(c[1])]
if not include_dont_test:
estimators = [c for c in estimators if not c[0] in DONT_TEST]
if not include_other:
estimators = [c for c in estimators if not c[0] in OTHER]
# possibly get rid of meta estimators
if not include_meta_estimators:
estimators = [c for c in estimators if not c[0] in META_ESTIMATORS]
if type_filter == 'classifier':
estimators = [est for est in estimators
if issubclass(est[1], ClassifierMixin)]
elif type_filter == 'regressor':
estimators = [est for est in estimators
if issubclass(est[1], RegressorMixin)]
elif type_filter == 'transformer':
estimators = [est for est in estimators
if issubclass(est[1], TransformerMixin)]
elif type_filter == 'cluster':
estimators = [est for est in estimators
if issubclass(est[1], ClusterMixin)]
elif type_filter is not None:
raise ValueError("Parameter type_filter must be 'classifier', "
"'regressor', 'transformer', 'cluster' or None, got"
" %s." % repr(type_filter))
# We sort in order to have reproducible test failures
return sorted(estimators)
def set_random_state(estimator, random_state=0):
if "random_state" in estimator.get_params().keys():
estimator.set_params(random_state=random_state)
def if_matplotlib(func):
"""Test decorator that skips test if matplotlib not installed. """
@wraps(func)
def run_test(*args, **kwargs):
try:
import matplotlib
matplotlib.use('Agg', warn=False)
# this fails if no $DISPLAY specified
matplotlib.pylab.figure()
except:
raise SkipTest('Matplotlib not available.')
else:
return func(*args, **kwargs)
return run_test
def if_not_mac_os(versions=('10.7', '10.8', '10.9'),
message='Multi-process bug in Mac OS X >= 10.7 '
'(see issue #636)'):
"""Test decorator that skips test if OS is Mac OS X and its
major version is one of ``versions``.
"""
mac_version, _, _ = platform.mac_ver()
skip = '.'.join(mac_version.split('.')[:2]) in versions
def decorator(func):
if skip:
@wraps(func)
def func(*args, **kwargs):
raise SkipTest(message)
return func
return decorator
def clean_warning_registry():
"""Safe way to reset warnings """
warnings.resetwarnings()
reg = "__warningregistry__"
for mod_name, mod in list(sys.modules.items()):
if 'six.moves' in mod_name:
continue
if hasattr(mod, reg):
getattr(mod, reg).clear()
def check_skip_network():
if int(os.environ.get('SKLEARN_SKIP_NETWORK_TESTS', 0)):
raise SkipTest("Text tutorial requires large dataset download")
def check_skip_travis():
"""Skip test if being run on Travis."""
if os.environ.get('TRAVIS') == "true":
raise SkipTest("This test needs to be skipped on Travis")
with_network = with_setup(check_skip_network)
with_travis = with_setup(check_skip_travis)
| {
"content_hash": "c917c2c8cd809ff78ff32d0d7755cff3",
"timestamp": "",
"source": "github",
"line_count": 643,
"max_line_length": 78,
"avg_line_length": 33.50077760497667,
"alnum_prop": 0.6133884220788264,
"repo_name": "evgchz/scikit-learn",
"id": "ee0cc4b65666b502d8dd390867d28074ff576854",
"size": "21541",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "sklearn/utils/testing.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "18469430"
},
{
"name": "C++",
"bytes": "1808975"
},
{
"name": "JavaScript",
"bytes": "22298"
},
{
"name": "Makefile",
"bytes": "4901"
},
{
"name": "PowerShell",
"bytes": "13427"
},
{
"name": "Python",
"bytes": "5642425"
},
{
"name": "Shell",
"bytes": "8730"
}
],
"symlink_target": ""
} |
<?php
namespace Cake\Test\TestCase\TestSuite;
use Cake\Console\Shell;
use Cake\Core\Configure;
use Cake\TestSuite\ConsoleIntegrationTestCase;
class ConsoleIntegrationTestCaseTest extends ConsoleIntegrationTestCase
{
/**
* setUp
*
* @return void
*/
public function setUp()
{
parent::setUp();
Configure::write('App.namespace', 'TestApp');
}
/**
* tests exec when using the command runner
*
* @return void
*/
public function testExecWithCommandRunner()
{
$this->useCommandRunner();
$this->exec('routes');
$this->assertExitCode(Shell::CODE_SUCCESS);
}
/**
* tests exec
*
* @return void
*/
public function testExec()
{
$this->exec('');
$this->assertOutputContains('Welcome to CakePHP');
$this->assertExitCode(Shell::CODE_ERROR);
}
/**
* tests that exec catches a StopException
*
* @return void
*/
public function testExecShellWithStopException()
{
$this->exec('integration abort_shell');
$this->assertExitCode(Shell::CODE_ERROR);
$this->assertErrorContains('Shell aborted');
}
/**
* tests that exec catches a StopException
*
* @return void
*/
public function testExecCommandWithStopException()
{
$this->useCommandRunner();
$this->exec('abort_command');
$this->assertExitCode(127);
$this->assertErrorContains('Command aborted');
}
/**
* tests a valid core command
*
* @return void
*/
public function testExecCoreCommand()
{
$this->exec('routes');
$this->assertExitCode(Shell::CODE_SUCCESS);
}
/**
* tests exec with an arg and an option
*
* @return void
*/
public function testExecWithArgsAndOption()
{
$this->exec('integration args_and_options arg --opt="some string"');
$this->assertErrorEmpty();
$this->assertOutputContains('arg: arg');
$this->assertOutputContains('opt: some string');
$this->assertExitCode(Shell::CODE_SUCCESS);
}
/**
* tests exec with missing required argument
*
* @return void
*/
public function testExecWithMissingRequiredArg()
{
$this->exec('integration args_and_options');
$this->assertOutputEmpty();
$this->assertErrorContains('Missing required arguments');
$this->assertErrorContains('arg is required');
$this->assertExitCode(Shell::CODE_ERROR);
}
/**
* tests exec with input
*
* @return void
*/
public function testExecWithInput()
{
$this->exec('integration bridge', ['javascript']);
$this->assertErrorContains('No!');
$this->assertExitCode(Shell::CODE_ERROR);
}
/**
* tests exec with multiple inputs
*
* @return void
*/
public function testExecWithMultipleInput()
{
$this->exec('integration bridge', ['cake', 'blue']);
$this->assertOutputContains('You may pass');
$this->assertExitCode(Shell::CODE_SUCCESS);
}
/**
* tests assertOutputRegExp assertion
*
* @return void
*/
public function testAssertOutputRegExp()
{
$this->exec('routes');
$this->assertOutputRegExp('/^\+[\-\+]+\+$/m');
}
/**
* tests assertErrorRegExp assertion
*
* @return void
*/
public function testAssertErrorRegExp()
{
$this->exec('integration args_and_options');
$this->assertErrorRegExp('/\<error\>(.+)\<\/error\>/');
}
/**
* tests _commandStringToArgs
*
* @return void
*/
public function testCommandStringToArgs()
{
$result = $this->_commandStringToArgs('command --something=nothing --with-spaces="quote me on that" \'quoted \"arg\"\'');
$expected = [
'command',
'--something=nothing',
'--with-spaces=quote me on that',
'quoted \"arg\"',
];
$this->assertSame($expected, $result);
$json = json_encode(['key' => '"val"', 'this' => true]);
$result = $this->_commandStringToArgs(" --json='$json'");
$expected = [
'--json=' . $json
];
$this->assertSame($expected, $result);
}
}
| {
"content_hash": "12db4361255589c2efca1bc8e5c00a23",
"timestamp": "",
"source": "github",
"line_count": 191,
"max_line_length": 129,
"avg_line_length": 23.005235602094242,
"alnum_prop": 0.5612198452435139,
"repo_name": "davidyell/cakephp",
"id": "682d1e21768fbdb376d6238e00e8b242ce526d72",
"size": "4920",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/TestCase/TestSuite/ConsoleIntegrationTestCaseTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "15859"
},
{
"name": "JavaScript",
"bytes": "171"
},
{
"name": "Makefile",
"bytes": "6407"
},
{
"name": "PHP",
"bytes": "10377029"
},
{
"name": "Shell",
"bytes": "777"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "068e1e388becee1f443c92d61b0882d4",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "1e9e5f6b2c5b9d8e1c41e4a00591ff2772de1f24",
"size": "189",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Laurales/Lauraceae/Rhodostemonodaphne/Rhodostemonodaphne negrensis/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package mpicbg.imglib.interpolation.linear;
import mpicbg.imglib.image.Image;
import mpicbg.imglib.interpolation.InterpolatorFactory;
import mpicbg.imglib.outofbounds.OutOfBoundsStrategyFactory;
import mpicbg.imglib.type.numeric.RealType;
public class LinearInterpolatorFactory<T extends RealType<T>> extends InterpolatorFactory<T>
{
public LinearInterpolatorFactory( final OutOfBoundsStrategyFactory<T> outOfBoundsStrategyFactory )
{
super(outOfBoundsStrategyFactory);
}
@SuppressWarnings("unchecked")
@Override
public LinearInterpolator<T> createInterpolator( final Image<T> img )
{
if ( img.getNumDimensions() == 1 )
{
return new LinearInterpolator1D<T>( img, this, outOfBoundsStrategyFactory );
}
else if ( img.getNumDimensions() == 2 )
{
return new LinearInterpolator2D<T>( img, this, outOfBoundsStrategyFactory );
}
else if ( img.getNumDimensions() == 3 )
{
if ( RealType.class.isInstance( img.createType() ))
/* inconvertible types due to javac bug 6548436: return (LinearInterpolator<T>)new LinearInterpolator3DFloat( (Image<FloatType>)img, (LinearInterpolatorFactory<FloatType>)this, (OutOfBoundsStrategyFactory<FloatType>)outOfBoundsStrategyFactory ); */
return (LinearInterpolator<T>)new LinearInterpolator3DRealType( (Image)img, (LinearInterpolatorFactory)this, (OutOfBoundsStrategyFactory)outOfBoundsStrategyFactory );
else
return new LinearInterpolator3D<T>( img, this, outOfBoundsStrategyFactory );
}
else
{
return new LinearInterpolator<T>( img, this, outOfBoundsStrategyFactory );
}
}
}
| {
"content_hash": "db804c23c988966213f535bb054a5706",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 252,
"avg_line_length": 38.292682926829265,
"alnum_prop": 0.7770700636942676,
"repo_name": "tomka/imglib",
"id": "500a4661e3512845c30358a486ab2f9b34dc09ab",
"size": "3203",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mpicbg/imglib/interpolation/linear/LinearInterpolatorFactory.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "1095368"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html class="minimal">
<title>Canvas test: 2d.strokeRect.zero.1</title>
<script src="../tests.js"></script>
<link rel="stylesheet" href="../tests.css">
<link rel="prev" href="minimal.2d.strokeRect.path.html" title="2d.strokeRect.path">
<link rel="next" href="minimal.2d.strokeRect.zero.2.html" title="2d.strokeRect.zero.2">
<body>
<p id="passtext">Pass</p>
<p id="failtext">Fail</p>
<!-- TODO: handle "script did not run" case -->
<p class="output">These images should be identical:</p>
<canvas id="c" class="output" width="100" height="50"><p class="fallback">FAIL (fallback content)</p></canvas>
<p class="output expectedtext">Expected output:<p><img src="clear-100x50.png" class="output expected" id="expected" alt="">
<ul id="d"></ul>
<script>
_addTest(function(canvas, ctx) {
ctx.strokeStyle = '#f00';
ctx.lineWidth = 250;
ctx.strokeRect(50, 25, 0, 0);
_assertPixel(canvas, 50,25, 0,0,0,0, "50,25", "0,0,0,0");
});
</script>
| {
"content_hash": "1214c2b56f5f64374f4db709cdf61c5b",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 123,
"avg_line_length": 35.18518518518518,
"alnum_prop": 0.6768421052631579,
"repo_name": "gameclosure/webgl-2d",
"id": "e01491f3e561b4319c94777b78865a40edfba059",
"size": "950",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "test/philip.html5.org/tests/minimal.2d.strokeRect.zero.1.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "300971"
}
],
"symlink_target": ""
} |
<?php
defined('BASEPATH') OR exit('No direct script access allowed');
class Dashboard extends CI_Controller {
public function __construct()
{
parent::__construct();
//Do your magic here
$this->load->library(array('template'));
$this->load->model(array('User'));
}
public function index()
{
if ($this->session->userdata('login')=='1') {
if ($this->session->userdata('slug')=='admin') {
redirect('dashboard/admin','refresh');
}elseif ($this->session->userdata('slug')=='dosen') {
redirect('dashboard/dosen','refresh');
}elseif ($this->session->userdata('slug')=='mhs') {
redirect('dashboard/mhs','refresh');
}else{
$this->User->logout();
}
}else{
$this->User->logout();
}
}
public function admin()
{
$this->User->allowUsers('admin');
$data = array(
'title' => 'Dashboard Admin',
);
$this->template->admin('admin/dashAdmin',$data);
}
public function dosen()
{
$this->User->allowUsers('dosen');
$data = array(
'title' => 'Dashboard Dosen',
);
$this->template->dosen('dosen/dashDosen',$data);
}
public function mhs()
{
$this->User->allowUsers('mhs');
$data = array(
'title' => 'Dashboard Mahasiswa',
);
$this->template->mhs('mhs/dashMhs',$data);
}
public function logout()
{
$this->User->logout();
}
}
/* End of file Dashboard.php */
/* Location: ./application/controllers/Dashboard.php */ | {
"content_hash": "27fa4f61259f719d4321cb1fd92e2b94",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 63,
"avg_line_length": 21.895522388059703,
"alnum_prop": 0.5835037491479209,
"repo_name": "agusedyc/krs-an",
"id": "ec24ed468aa7f234ad5963a732af84ebdc44344d",
"size": "1467",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "application/controllers/Dashboard.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "240"
},
{
"name": "HTML",
"bytes": "5633"
},
{
"name": "PHP",
"bytes": "6248762"
}
],
"symlink_target": ""
} |

## Click's Hexagonal Domain-Driven Architecture
Cheddar is a Java framework for enterprise applications on Amazon Web Services (AWS) using _domain-driven design_ (DDD). Bounded contexts are implemented as microservices, which are integrated using an _event-driven architecture_ and expose a REST API. Cheddar has full AWS integration using native services such as [SQS](http://aws.amazon.com/sqs/), [SNS](http://aws.amazon.com/sns/), [DynamoDB](http://aws.amazon.com/dynamodb/) and [CloudSearch](http://aws.amazon.com/cloudsearch/).
It's worth subscribing to our [blog](http://www.clicktravel.com/engineering-blog) for handy tips on how to get the best out of Cheddar, and for all the latest news.
## Domain-Driven Design
Domain-Driven Design (DDD) is an approach for developing software that closely aligns the implementation to evolving business concepts.
DDD promotes focus on the subjects most important to the business problem at hand, identifying these as _core domains_. Complex systems are decomposed to several orthogonal domains using _strategic modelling_, avoiding cross-contamination and enabling modelling of relationships between domains.
To talk about the domain, a _domain model_ and supporting [_ubiquitous language_](http://martinfowler.com/bliki/UbiquitousLanguage.html) is used. _Domain experts_ use the ubiquitous language every day when talking about the domain. The ubiquitous language and domain model form the basis for a solution that addresses the domain, known as a _bounded context_. A [rich domain model](http://www.martinfowler.com/bliki/AnemicDomainModel.html) encapsulates all its domain (business) logic.
Domain models publish _domain events_ when something of potential interest occurs in the model. Domain events may be consumed by the local or (after mapping by an _anti-corruption layer_) foreign bounded contexts. This application of the [observer pattern](http://en.wikipedia.org/wiki/Observer_pattern) promotes decoupling of the domains.
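To make the pattern concrete, the sketch below shows a domain event raised by an aggregate and delivered to subscribers through a simple observer. The `Booking`, `BookingConfirmed` and `DomainEventPublisher` names are hypothetical illustrations, not Cheddar's actual API.

```java
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Consumer;

// A domain event: an immutable record of something that happened in the model.
final class BookingConfirmed {
    final String bookingId;
    BookingConfirmed(String bookingId) { this.bookingId = bookingId; }
}

// Observer-style publisher: the aggregate announces the fact, subscribers react,
// and neither side depends on the other's internals.
final class DomainEventPublisher {
    private static final List<Consumer<BookingConfirmed>> subscribers = new CopyOnWriteArrayList<>();

    static void subscribe(Consumer<BookingConfirmed> subscriber) { subscribers.add(subscriber); }

    static void publish(BookingConfirmed event) { subscribers.forEach(s -> s.accept(event)); }
}

// Aggregate root in the domain model; all business rules live here.
final class Booking {
    private final String id;
    Booking(String id) { this.id = id; }

    void confirm() {
        // ... enforce invariants ...
        DomainEventPublisher.publish(new BookingConfirmed(id));
    }
}
```

A consumer in the same bounded context subscribes directly; a foreign context would instead receive the event after translation by its anti-corruption layer.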
Practical concerns irrelevant to the domain but vital for a working implementation are kept out of the domain, such as persistence, security and transactions.
By modelling the business in software, DDD enables building of flexible, scalable solutions tightly aligned with business goals.
### More resources
#### Online articles
- [Wikipedia article on Domain-Driven Design](http://en.wikipedia.org/wiki/Domain-driven_design)
- [Vaughn Vernon's blog](http://vaughnvernon.co/)
- [Martin Fowler's blog articles on Domain-Driven Design](http://martinfowler.com/tags/domain%20driven%20design.html)
- [DDD Community](http://dddcommunity.org/)
- [Short intro to DDD](http://domainlanguage.com/ddd/)
- [Domain-Driven Design presentation](http://www.slideshare.net/panesofglass/domain-driven-design)
- [An Introduction To Domain-Driven Design](http://msdn.microsoft.com/en-us/magazine/dd419654.aspx)
#### Books
- [Vaughn Vernon - Implementing Domain-Driven Design](https://vaughnvernon.co/?page_id=168)
- [Eric Evans - Domain-driven Design: Tackling Complexity in the Heart of Software](http://www.amazon.co.uk/Domain-driven-Design-Tackling-Complexity-Software/dp/0321125215)
- [InfoQ free book - Domain Driven Design Quickly](http://www.infoq.com/minibooks/domain-driven-design-quickly)
## Cheddar applications
Cheddar provides a well-defined Java project structure for implementing each bounded context as a REST HTTP application hosted on AWS.
### Hexagonal architecture
Cheddar uses a [_hexagonal architecture_](http://alistair.cockburn.us/Hexagonal+architecture) to house each bounded context, meaning an _application layer_ and an _adapter layer_ surround the domain model.

#### Domain model
Central to the implementation is the domain model, containing rich domain objects, repositories and supporting domain services. All domain logic belongs in the domain model.
#### Application layer
The application layer is responsible for co-ordination of operations performed on the domain model, application of security and transaction boundaries. The public interfaces for the application layer form the API for the bounded context. This API satisfies the use cases for the bounded context. No domain logic resides in the application layer.
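As a rough sketch of that division of responsibility (hypothetical names, reusing the `Booking` aggregate from the earlier sketch — not Cheddar's actual API), an application service only loads the aggregate, invokes the domain operation and persists the result; transaction and security boundaries would be applied at this layer:

```java
// Port into the domain model; the adapter layer would supply the persistence-backed implementation.
interface BookingRepository {
    Booking read(String bookingId);
    void update(Booking booking);
}

// Application service: coordination only, no business rules.
class BookingApplicationService {
    private final BookingRepository bookingRepository;

    BookingApplicationService(BookingRepository bookingRepository) {
        this.bookingRepository = bookingRepository;
    }

    void confirmBooking(String bookingId) {
        Booking booking = bookingRepository.read(bookingId); // load the aggregate
        booking.confirm();                                   // domain logic stays in the model
        bookingRepository.update(booking);                   // persist through the repository port
    }
}
```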
#### Adapter layer
The adapter layer adapts all communication for the bounded context, both inbound and outbound. It abstracts technical detail for various communication forms:
* Messaging using [SQS](http://aws.amazon.com/sqs/)
* Persistence using [DynamoDB](http://aws.amazon.com/dynamodb/)
* Multiple data versions and formats
* RESTful web services
The adapter layer is also responsible for mapping between foreign concepts outside the bounded context and the native ubiquitous language inside.
The adapter layer maps between data types present in the APIs and a _canonical data model_ (CDM). The CDM is shared by all bounded contexts and is authored using [XML Schema](http://www.w3schools.com/schema/). The REST resource representations are defined using types defined in the CDM.
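A minimal sketch of such a mapping follows; the canonical document shape and the native command are invented for illustration and do not come from Cheddar or any real CDM.

```java
// Shape taken from the shared canonical data model (in practice generated from XML Schema).
final class CanonicalBookingDocument {
    String reference;
    String travellerFullName;
}

// Native concept expressed in this bounded context's ubiquitous language.
final class ConfirmBookingCommand {
    final String bookingId;
    final String leadPassengerName;

    ConfirmBookingCommand(String bookingId, String leadPassengerName) {
        this.bookingId = bookingId;
        this.leadPassengerName = leadPassengerName;
    }
}

// Adapter-layer mapper: keeps foreign vocabulary out of the domain model.
final class InboundBookingMapper {
    ConfirmBookingCommand toCommand(CanonicalBookingDocument document) {
        return new ConfirmBookingCommand(document.reference, document.travellerFullName);
    }
}
```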
### Event-driven architecture

The bounded contexts are integrated using a loosely coupled event-driven architecture. The adapter layer supports this integration by implementing event messaging via SQS and SNS.
### Cheddar software stack
Each bounded context is packaged as a standalone Java application. It can be deployed on any number of [AWS EC2](http://aws.amazon.com/ec2/) instances according to scaling needs. [Grizzly](https://grizzly.java.net/) and [Jersey](https://jersey.java.net/) are used as the basis for the REST HTTP server application. Any client capable of consuming REST services can easily work with Cheddar applications.
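For illustration, a bounded context's REST API built on such a stack might expose plain JAX-RS resources like the hypothetical one below (not taken from Cheddar itself); Jersey serves the resource and Grizzly hosts the HTTP server.

```java
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

// A JAX-RS resource; in a real service the representation type would be
// generated from the canonical data model rather than built by hand.
@Path("bookings")
public class BookingResource {

    @GET
    @Path("{bookingId}")
    @Produces(MediaType.APPLICATION_JSON)
    public String getBooking(@PathParam("bookingId") String bookingId) {
        // Delegate to the application layer in a real service; return a stub here.
        return "{\"bookingId\":\"" + bookingId + "\"}";
    }
}
```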
### Testing

The [_Test Pyramid_](http://martinfowler.com/bliki/TestPyramid.html) shows categories of automated tests for a system. As we move up the pyramid, the tests increase in scope but decrease in number as each test has a higher maintenance cost. Typically [Junit](http://junit.org/) or [TestNG](http://testng.org/) is used for unit testing of individual or small groups of classes. For service and end-to-end (e2e) tests, acceptance test tools such as [JBehave](http://jbehave.org/) or [Concordion](http://concordion.org/) would be used. Service tests focus on a single bounded context, whereas e2e tests are executed against the system as a whole.
#### Service testing support
Cheddar has explicit support for in-process [service testing](http://martinfowler.com/articles/microservice-testing/). All services external to the service under test are replaced by [test doubles](http://www.martinfowler.com/bliki/TestDouble.html) which serve to isolate the service under test. Cheddar includes test doubles of common AWS services; DynamoDB, SQS, SNS, S3 and CloudSearch. These are simple in-process implementations which act as substitutes for the real external services. Tests have in-process access to the state of the test doubles for easy state set-up and test verification. Service tests are executed against the application directly, which is configured using [Spring Test](http://docs.spring.io/spring/docs/current/spring-framework-reference/html/testing.html) in a [Jersey Test](https://jersey.java.net/documentation/latest/test-framework.html) container.
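A stripped-down illustration of the idea is shown below, using hypothetical `InMemoryQueue` and `OrderService` classes; Cheddar's real test doubles for SQS, SNS, DynamoDB, S3 and CloudSearch are considerably richer than this.

```java
import static org.junit.Assert.assertEquals;

import java.util.ArrayDeque;
import java.util.Queue;

import org.junit.Test;

public class OrderServiceTest {

    // In-process double standing in for an external queue; its state is directly inspectable.
    static final class InMemoryQueue {
        final Queue<String> messages = new ArrayDeque<>();
        void send(String message) { messages.add(message); }
    }

    // Minimal "service under test" wired against the double instead of real infrastructure.
    static final class OrderService {
        private final InMemoryQueue queue;
        OrderService(InMemoryQueue queue) { this.queue = queue; }
        void placeOrder(String orderId) { queue.send("OrderPlaced:" + orderId); }
    }

    @Test
    public void placingAnOrderPublishesAnEvent() {
        InMemoryQueue queue = new InMemoryQueue();
        OrderService service = new OrderService(queue);

        service.placeOrder("order-1");

        assertEquals("OrderPlaced:order-1", queue.messages.peek()); // verify the double's state
    }
}
```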
| {
"content_hash": "27ca5b2fe6eb0ad6251e083e5fc1cf61",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 882,
"avg_line_length": 96.34177215189874,
"alnum_prop": 0.7942451714623571,
"repo_name": "clicktravel-james/Cheddar",
"id": "28d121cf3987a47e8659dc7569fe535b845d494f",
"size": "7622",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1926357"
},
{
"name": "XSLT",
"bytes": "7641"
}
],
"symlink_target": ""
} |
Werkzeug
========
*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff")
Werkzeug is a comprehensive `WSGI`_ web application library. It began as
a simple collection of various utilities for WSGI applications and has
become one of the most advanced WSGI utility libraries.
It includes:
- An interactive debugger that allows inspecting stack traces and
source code in the browser with an interactive interpreter for any
frame in the stack.
- A full-featured request object with objects to interact with
headers, query args, form data, files, and cookies.
- A response object that can wrap other WSGI applications and handle
streaming data.
- A routing system for matching URLs to endpoints and generating URLs
for endpoints, with an extensible system for capturing variables
from URLs.
- HTTP utilities to handle entity tags, cache control, dates, user
agents, cookies, files, and more.
- A threaded WSGI server for use while developing applications
locally.
- A test client for simulating HTTP requests during testing without
requiring running a server.
Werkzeug doesn't enforce any dependencies. It is up to the developer to
choose a template engine, database adapter, and even how to handle
requests. It can be used to build all sorts of end user applications
such as blogs, wikis, or bulletin boards.
`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while
providing more structure and patterns for defining powerful
applications.
.. _WSGI: https://wsgi.readthedocs.io/en/latest/
.. _Flask: https://www.palletsprojects.com/p/flask/
Installing
----------
Install and update using `pip`_:
.. code-block:: text
pip install -U Werkzeug
.. _pip: https://pip.pypa.io/en/stable/getting-started/
A Simple Example
----------------
.. code-block:: python
from werkzeug.wrappers import Request, Response
@Request.application
def application(request):
return Response('Hello, World!')
if __name__ == '__main__':
from werkzeug.serving import run_simple
run_simple('localhost', 4000, application)
Donate
------
The Pallets organization develops and supports Werkzeug and other
popular packages. In order to grow the community of contributors and
users, and allow the maintainers to devote more time to the projects,
`please donate today`_.
.. _please donate today: https://palletsprojects.com/donate
Links
-----
- Documentation: https://werkzeug.palletsprojects.com/
- Changes: https://werkzeug.palletsprojects.com/changes/
- PyPI Releases: https://pypi.org/project/Werkzeug/
- Source Code: https://github.com/pallets/werkzeug/
- Issue Tracker: https://github.com/pallets/werkzeug/issues/
- Website: https://palletsprojects.com/p/werkzeug/
- Twitter: https://twitter.com/PalletsTeam
- Chat: https://discord.gg/pallets
| {
"content_hash": "b8e659009e739af94435701d586a3785",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 76,
"avg_line_length": 31.63736263736264,
"alnum_prop": 0.732198680097256,
"repo_name": "fkazimierczak/werkzeug",
"id": "f1592a5699e0cb7675346bb80e946dbda08b9357",
"size": "2879",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "README.rst",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "6705"
},
{
"name": "HTML",
"bytes": "124"
},
{
"name": "JavaScript",
"bytes": "10524"
},
{
"name": "Python",
"bytes": "1136488"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_65) on Thu Jan 22 19:02:37 GMT 2015 -->
<meta http-equiv="Content-Type" content="text/html" charset="UTF-8">
<title>Uses of Class org.apache.taverna.scufl2.validation.correctness.report.EmptyIterationStrategyTopNodeProblem (Apache Taverna Language APIs (Scufl2, Databundle) 0.16.1-incubating-SNAPSHOT API)</title>
<meta name="date" content="2015-01-22">
<link rel="stylesheet" type="text/css" href="../../../../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.taverna.scufl2.validation.correctness.report.EmptyIterationStrategyTopNodeProblem (Apache Taverna Language APIs (Scufl2, Databundle) 0.16.1-incubating-SNAPSHOT API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../../org/apache/taverna/scufl2/validation/correctness/report/EmptyIterationStrategyTopNodeProblem.html" title="class in org.apache.taverna.scufl2.validation.correctness.report">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../../index.html?org/apache/taverna/scufl2/validation/correctness/report/class-use/EmptyIterationStrategyTopNodeProblem.html" target="_top">Frames</a></li>
<li><a href="EmptyIterationStrategyTopNodeProblem.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.apache.taverna.scufl2.validation.correctness.report.EmptyIterationStrategyTopNodeProblem" class="title">Uses of Class<br>org.apache.taverna.scufl2.validation.correctness.report.EmptyIterationStrategyTopNodeProblem</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../../../org/apache/taverna/scufl2/validation/correctness/report/EmptyIterationStrategyTopNodeProblem.html" title="class in org.apache.taverna.scufl2.validation.correctness.report">EmptyIterationStrategyTopNodeProblem</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.apache.taverna.scufl2.validation.correctness">org.apache.taverna.scufl2.validation.correctness</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.apache.taverna.scufl2.validation.correctness">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../../../org/apache/taverna/scufl2/validation/correctness/report/EmptyIterationStrategyTopNodeProblem.html" title="class in org.apache.taverna.scufl2.validation.correctness.report">EmptyIterationStrategyTopNodeProblem</a> in <a href="../../../../../../../../org/apache/taverna/scufl2/validation/correctness/package-summary.html">org.apache.taverna.scufl2.validation.correctness</a></h3>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../../../org/apache/taverna/scufl2/validation/correctness/package-summary.html">org.apache.taverna.scufl2.validation.correctness</a> that return types with arguments of type <a href="../../../../../../../../org/apache/taverna/scufl2/validation/correctness/report/EmptyIterationStrategyTopNodeProblem.html" title="class in org.apache.taverna.scufl2.validation.correctness.report">EmptyIterationStrategyTopNodeProblem</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/HashSet.html?is-external=true" title="class or interface in java.util">HashSet</a><<a href="../../../../../../../../org/apache/taverna/scufl2/validation/correctness/report/EmptyIterationStrategyTopNodeProblem.html" title="class in org.apache.taverna.scufl2.validation.correctness.report">EmptyIterationStrategyTopNodeProblem</a>></code></td>
<td class="colLast"><span class="strong">ReportCorrectnessValidationListener.</span><code><strong><a href="../../../../../../../../org/apache/taverna/scufl2/validation/correctness/ReportCorrectnessValidationListener.html#getEmptyIterationStrategyTopNodeProblems()">getEmptyIterationStrategyTopNodeProblems</a></strong>()</code> </td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../../org/apache/taverna/scufl2/validation/correctness/report/EmptyIterationStrategyTopNodeProblem.html" title="class in org.apache.taverna.scufl2.validation.correctness.report">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../../index.html?org/apache/taverna/scufl2/validation/correctness/report/class-use/EmptyIterationStrategyTopNodeProblem.html" target="_top">Frames</a></li>
<li><a href="EmptyIterationStrategyTopNodeProblem.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2015 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
</body>
</html>
| {
"content_hash": "bbe5fe25a34c0d51995356c6064cc24d",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 520,
"avg_line_length": 51.92993630573248,
"alnum_prop": 0.6742303446584079,
"repo_name": "stain/incubator-taverna-site",
"id": "09147bb5464f455ce6083c34bbab5cfb638a7032",
"size": "8153",
"binary": false,
"copies": "1",
"ref": "refs/heads/trunk",
"path": "src/site/resources/javadoc/taverna-language/org/apache/taverna/scufl2/validation/correctness/report/class-use/EmptyIterationStrategyTopNodeProblem.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "3813"
},
{
"name": "CSS",
"bytes": "10003"
},
{
"name": "Clojure",
"bytes": "49413"
},
{
"name": "HTML",
"bytes": "82818"
},
{
"name": "M",
"bytes": "1705"
},
{
"name": "Perl",
"bytes": "3148"
},
{
"name": "Python",
"bytes": "31455"
},
{
"name": "Shell",
"bytes": "374"
}
],
"symlink_target": ""
} |
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="Dashboard app">
<meta name="author" content="Dan Esparza">
<title>NLogReader</title>
<script type="text/javascript" src="config.js"></script>
<!-- Latest compiled and minified CSS -->
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.2/css/bootstrap.min.css">
<!-- Optional theme -->
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.2/css/bootstrap-theme.min.css">
<!-- Styles for the date range picker -->
<link href="css/daterangepicker-bs3.css" rel="stylesheet" type='text/css'>
<!-- Styles for the react datagrid -->
<link href="css/react-datagrid.css" rel="stylesheet" type='text/css'>
<!-- Custom styles for this template -->
<link href="css/app.css" rel="stylesheet" type='text/css'>
<!-- HTML5 shim and Respond.js for IE8 support of HTML5 elements and media queries -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div id="logreaderdapp"></div>
<!-- jQuery (necessary for Ajax & Bootstrap's JavaScript plugins) -->
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<!-- Latest compiled and minified Bootstrap js -->
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.2/js/bootstrap.min.js"></script>
<!-- App bundle -->
<script src="js/bundle.js" type='text/javascript'></script>
</body>
</html> | {
"content_hash": "4ad6d7a6062642afa42e70b70105500c",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 110,
"avg_line_length": 37.42857142857143,
"alnum_prop": 0.6461286804798255,
"repo_name": "danesparza/NLogReader",
"id": "37c375ed26565926c93c8a29821f4ee5c0e76bbf",
"size": "1836",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "NLogReader.SPA/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "111"
},
{
"name": "C#",
"bytes": "176100"
},
{
"name": "CSS",
"bytes": "12613"
},
{
"name": "HTML",
"bytes": "8496"
},
{
"name": "JavaScript",
"bytes": "84136"
},
{
"name": "PLpgSQL",
"bytes": "1040"
}
],
"symlink_target": ""
} |
/* This thing should be set up to do byteordering correctly. But... */
#include "as.h"
#include "subsegs.h"
#include "obstack.h"
#include "output-file.h"
#include "dwarf2dbg.h"
#include "libbfd.h"
#include "compress-debug.h"
#ifndef TC_ADJUST_RELOC_COUNT
#define TC_ADJUST_RELOC_COUNT(FIX, COUNT)
#endif
#ifndef TC_FORCE_RELOCATION
#define TC_FORCE_RELOCATION(FIX) \
(generic_force_reloc (FIX))
#endif
#ifndef TC_FORCE_RELOCATION_ABS
#define TC_FORCE_RELOCATION_ABS(FIX) \
(TC_FORCE_RELOCATION (FIX))
#endif
#ifndef TC_FORCE_RELOCATION_LOCAL
#define TC_FORCE_RELOCATION_LOCAL(FIX) \
(!(FIX)->fx_pcrel \
|| TC_FORCE_RELOCATION (FIX))
#endif
#ifndef TC_FORCE_RELOCATION_SUB_SAME
#define TC_FORCE_RELOCATION_SUB_SAME(FIX, SEG) \
(! SEG_NORMAL (SEG))
#endif
#ifndef md_register_arithmetic
# define md_register_arithmetic 1
#endif
#ifndef TC_FORCE_RELOCATION_SUB_ABS
#define TC_FORCE_RELOCATION_SUB_ABS(FIX, SEG) \
(!md_register_arithmetic && (SEG) == reg_section)
#endif
#ifndef TC_FORCE_RELOCATION_SUB_LOCAL
#ifdef DIFF_EXPR_OK
#define TC_FORCE_RELOCATION_SUB_LOCAL(FIX, SEG) \
(!md_register_arithmetic && (SEG) == reg_section)
#else
#define TC_FORCE_RELOCATION_SUB_LOCAL(FIX, SEG) 1
#endif
#endif
#ifndef TC_VALIDATE_FIX_SUB
#ifdef UNDEFINED_DIFFERENCE_OK
/* The PA needs this for PIC code generation. */
#define TC_VALIDATE_FIX_SUB(FIX, SEG) \
(md_register_arithmetic || (SEG) != reg_section)
#else
#define TC_VALIDATE_FIX_SUB(FIX, SEG) \
((md_register_arithmetic || (SEG) != reg_section) \
&& ((FIX)->fx_r_type == BFD_RELOC_GPREL32 \
|| (FIX)->fx_r_type == BFD_RELOC_GPREL16))
#endif
#endif
#ifndef TC_LINKRELAX_FIXUP
#define TC_LINKRELAX_FIXUP(SEG) 1
#endif
#ifndef MD_APPLY_SYM_VALUE
#define MD_APPLY_SYM_VALUE(FIX) 1
#endif
#ifndef TC_FINALIZE_SYMS_BEFORE_SIZE_SEG
#define TC_FINALIZE_SYMS_BEFORE_SIZE_SEG 1
#endif
#ifndef MD_PCREL_FROM_SECTION
#define MD_PCREL_FROM_SECTION(FIX, SEC) md_pcrel_from (FIX)
#endif
#ifndef TC_FAKE_LABEL
#define TC_FAKE_LABEL(NAME) (strcmp ((NAME), FAKE_LABEL_NAME) == 0)
#endif
/* Positive values of TC_FX_SIZE_SLACK allow a target to define
   fixups that extend far past the end of a frag.  Having such fixups
   is of course most likely a bug in setting fx_size correctly.
A negative value disables the fixup check entirely, which is
appropriate for something like the Renesas / SuperH SH_COUNT
reloc. */
#ifndef TC_FX_SIZE_SLACK
#define TC_FX_SIZE_SLACK(FIX) 0
#endif
/* Used to control final evaluation of expressions. */
int finalize_syms = 0;
int symbol_table_frozen;
symbolS *abs_section_sym;
/* Remember the value of dot when parsing expressions. */
addressT dot_value;
/* Relocs generated by ".reloc" pseudo. */
struct reloc_list* reloc_list;
void print_fixup (fixS *);
/* We generally attach relocs to frag chains. However, after we have
chained these all together into a segment, any relocs we add after
that must be attached to a segment. This will include relocs added
in md_estimate_size_for_relax, for example. */
static int frags_chained = 0;
static int n_fixups;
#define RELOC_ENUM enum bfd_reloc_code_real
/* Create a fixS in obstack 'notes'. */
static fixS *
fix_new_internal (fragS *frag, /* Which frag? */
int where, /* Where in that frag? */
int size, /* 1, 2, or 4 usually. */
symbolS *add_symbol, /* X_add_symbol. */
symbolS *sub_symbol, /* X_op_symbol. */
offsetT offset, /* X_add_number. */
int pcrel, /* TRUE if PC-relative relocation. */
RELOC_ENUM r_type /* Relocation type. */,
int at_beginning) /* Add to the start of the list? */
{
fixS *fixP;
n_fixups++;
fixP = (fixS *) obstack_alloc (¬es, sizeof (fixS));
fixP->fx_frag = frag;
fixP->fx_where = where;
fixP->fx_size = size;
/* We've made fx_size a narrow field; check that it's wide enough. */
if (fixP->fx_size != size)
{
as_bad (_("field fx_size too small to hold %d"), size);
abort ();
}
fixP->fx_addsy = add_symbol;
fixP->fx_subsy = sub_symbol;
fixP->fx_offset = offset;
fixP->fx_dot_value = dot_value;
fixP->fx_pcrel = pcrel;
fixP->fx_r_type = r_type;
fixP->fx_im_disp = 0;
fixP->fx_pcrel_adjust = 0;
fixP->fx_bit_fixP = 0;
fixP->fx_addnumber = 0;
fixP->fx_tcbit = 0;
fixP->fx_tcbit2 = 0;
fixP->fx_done = 0;
fixP->fx_no_overflow = 0;
fixP->fx_signed = 0;
#ifdef USING_CGEN
fixP->fx_cgen.insn = NULL;
fixP->fx_cgen.opinfo = 0;
#endif
#ifdef TC_FIX_TYPE
TC_INIT_FIX_DATA (fixP);
#endif
as_where (&fixP->fx_file, &fixP->fx_line);
{
fixS **seg_fix_rootP = (frags_chained
? &seg_info (now_seg)->fix_root
: &frchain_now->fix_root);
fixS **seg_fix_tailP = (frags_chained
? &seg_info (now_seg)->fix_tail
: &frchain_now->fix_tail);
if (at_beginning)
{
fixP->fx_next = *seg_fix_rootP;
*seg_fix_rootP = fixP;
if (fixP->fx_next == NULL)
*seg_fix_tailP = fixP;
}
else
{
fixP->fx_next = NULL;
if (*seg_fix_tailP)
(*seg_fix_tailP)->fx_next = fixP;
else
*seg_fix_rootP = fixP;
*seg_fix_tailP = fixP;
}
}
return fixP;
}
/* Create a fixup relative to a symbol (plus a constant). */
fixS *
fix_new (fragS *frag, /* Which frag? */
int where, /* Where in that frag? */
int size, /* 1, 2, or 4 usually. */
symbolS *add_symbol, /* X_add_symbol. */
offsetT offset, /* X_add_number. */
int pcrel, /* TRUE if PC-relative relocation. */
RELOC_ENUM r_type /* Relocation type. */)
{
return fix_new_internal (frag, where, size, add_symbol,
(symbolS *) NULL, offset, pcrel, r_type, FALSE);
}
/* Create a fixup for an expression. Currently we only support fixups
for difference expressions. That is itself more than most object
file formats support anyhow. */
fixS *
fix_new_exp (fragS *frag, /* Which frag? */
int where, /* Where in that frag? */
int size, /* 1, 2, or 4 usually. */
expressionS *exp, /* Expression. */
int pcrel, /* TRUE if PC-relative relocation. */
RELOC_ENUM r_type /* Relocation type. */)
{
symbolS *add = NULL;
symbolS *sub = NULL;
offsetT off = 0;
switch (exp->X_op)
{
case O_absent:
break;
case O_register:
as_bad (_("register value used as expression"));
break;
case O_add:
/* This comes up when _GLOBAL_OFFSET_TABLE_+(.-L0) is read, if
the difference expression cannot immediately be reduced. */
{
symbolS *stmp = make_expr_symbol (exp);
exp->X_op = O_symbol;
exp->X_op_symbol = 0;
exp->X_add_symbol = stmp;
exp->X_add_number = 0;
return fix_new_exp (frag, where, size, exp, pcrel, r_type);
}
case O_symbol_rva:
add = exp->X_add_symbol;
off = exp->X_add_number;
r_type = BFD_RELOC_RVA;
break;
case O_uminus:
sub = exp->X_add_symbol;
off = exp->X_add_number;
break;
case O_subtract:
sub = exp->X_op_symbol;
/* Fall through. */
case O_symbol:
add = exp->X_add_symbol;
/* Fall through. */
case O_constant:
off = exp->X_add_number;
break;
default:
add = make_expr_symbol (exp);
break;
}
return fix_new_internal (frag, where, size, add, sub, off, pcrel,
r_type, FALSE);
}
/* Create a fixup at the beginning of FRAG. The arguments are the same
as for fix_new, except that WHERE is implicitly 0. */
fixS *
fix_at_start (fragS *frag, int size, symbolS *add_symbol,
offsetT offset, int pcrel, RELOC_ENUM r_type)
{
return fix_new_internal (frag, 0, size, add_symbol,
(symbolS *) NULL, offset, pcrel, r_type, TRUE);
}
/* Generic function to determine whether a fixup requires a relocation. */
int
generic_force_reloc (fixS *fix)
{
if (fix->fx_r_type == BFD_RELOC_VTABLE_INHERIT
|| fix->fx_r_type == BFD_RELOC_VTABLE_ENTRY)
return 1;
if (fix->fx_addsy == NULL)
return 0;
return S_FORCE_RELOC (fix->fx_addsy, fix->fx_subsy == NULL);
}
/* Append a string onto another string, bumping the pointer along. */
void
append (char **charPP, char *fromP, unsigned long length)
{
/* Don't trust memcpy() of 0 chars. */
if (length == 0)
return;
memcpy (*charPP, fromP, length);
*charPP += length;
}
/* This routine records the largest alignment seen for each segment.
If the beginning of the segment is aligned on the worst-case
boundary, all of the other alignments within it will work. At
least one object format really uses this info. */
void
record_alignment (/* Segment to which alignment pertains. */
segT seg,
/* Alignment, as a power of 2 (e.g., 1 => 2-byte
boundary, 2 => 4-byte boundary, etc.) */
int align)
{
if (seg == absolute_section)
return;
if ((unsigned int) align > bfd_get_section_alignment (stdoutput, seg))
bfd_set_section_alignment (stdoutput, seg, align);
}
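/* Return the alignment previously recorded for SEG, as a power of 2.  */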
int
get_recorded_alignment (segT seg)
{
if (seg == absolute_section)
return 0;
return bfd_get_section_alignment (stdoutput, seg);
}
/* Reset the section indices after removing the gas created sections. */
static void
renumber_sections (bfd *abfd ATTRIBUTE_UNUSED, asection *sec, void *countparg)
{
int *countp = (int *) countparg;
sec->index = *countp;
++*countp;
}
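/* Chain the frags of every frchain belonging to SECTION into a single
   list linked through fr_next, and collect their fixups onto the
   section's fix list.  Returns the last frag of the combined chain.  */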
static fragS *
chain_frchains_together_1 (segT section, struct frchain *frchp)
{
fragS dummy, *prev_frag = &dummy;
fixS fix_dummy, *prev_fix = &fix_dummy;
for (; frchp; frchp = frchp->frch_next)
{
prev_frag->fr_next = frchp->frch_root;
prev_frag = frchp->frch_last;
gas_assert (prev_frag->fr_type != 0);
if (frchp->fix_root != (fixS *) NULL)
{
if (seg_info (section)->fix_root == (fixS *) NULL)
seg_info (section)->fix_root = frchp->fix_root;
prev_fix->fx_next = frchp->fix_root;
seg_info (section)->fix_tail = frchp->fix_tail;
prev_fix = frchp->fix_tail;
}
}
gas_assert (prev_frag->fr_type != 0);
gas_assert (prev_frag != &dummy);
prev_frag->fr_next = 0;
return prev_frag;
}
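/* bfd_map_over_sections callback: chain all of SECTION's frchains
   together into one frag chain.  */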
static void
chain_frchains_together (bfd *abfd ATTRIBUTE_UNUSED,
segT section,
void *xxx ATTRIBUTE_UNUSED)
{
segment_info_type *info;
/* BFD may have introduced its own sections without using
subseg_new, so it is possible that seg_info is NULL. */
info = seg_info (section);
if (info != (segment_info_type *) NULL)
info->frchainP->frch_last
= chain_frchains_together_1 (section, info->frchainP);
/* Now that we've chained the frags together, we must add new fixups
to the segment, not to the frag chain. */
frags_chained = 1;
}
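/* Convert a variant frag (alignment, org, space, leb128, cfa, dwarf2,
   machine dependent or broken word) into a plain rs_fill frag whose
   contents and size are now fixed.  */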
static void
cvt_frag_to_fill (segT sec ATTRIBUTE_UNUSED, fragS *fragP)
{
switch (fragP->fr_type)
{
case rs_align:
case rs_align_code:
case rs_align_test:
case rs_org:
case rs_space:
#ifdef HANDLE_ALIGN
HANDLE_ALIGN (fragP);
#endif
know (fragP->fr_next != NULL);
fragP->fr_offset = (fragP->fr_next->fr_address
- fragP->fr_address
- fragP->fr_fix) / fragP->fr_var;
if (fragP->fr_offset < 0)
{
as_bad_where (fragP->fr_file, fragP->fr_line,
_("attempt to .org/.space backwards? (%ld)"),
(long) fragP->fr_offset);
fragP->fr_offset = 0;
}
fragP->fr_type = rs_fill;
break;
case rs_fill:
break;
case rs_leb128:
{
valueT value = S_GET_VALUE (fragP->fr_symbol);
int size;
size = output_leb128 (fragP->fr_literal + fragP->fr_fix, value,
fragP->fr_subtype);
fragP->fr_fix += size;
fragP->fr_type = rs_fill;
fragP->fr_var = 0;
fragP->fr_offset = 0;
fragP->fr_symbol = NULL;
}
break;
case rs_cfa:
eh_frame_convert_frag (fragP);
break;
case rs_dwarf2dbg:
dwarf2dbg_convert_frag (fragP);
break;
case rs_machine_dependent:
md_convert_frag (stdoutput, sec, fragP);
      if (fragP->fr_next != NULL)
	printf ("rs Machine dep fr_next address %u fr_address %u fr_fix %u\n",
		(unsigned) fragP->fr_next->fr_address,
		(unsigned) fragP->fr_address, (unsigned) fragP->fr_fix);
gas_assert (fragP->fr_next == NULL
|| ((offsetT) (fragP->fr_next->fr_address - fragP->fr_address)
== fragP->fr_fix));
/* After md_convert_frag, we make the frag into a ".space 0".
md_convert_frag() should set up any fixSs and constants
required. */
frag_wane (fragP);
break;
#ifndef WORKING_DOT_WORD
case rs_broken_word:
{
struct broken_word *lie;
if (fragP->fr_subtype)
{
fragP->fr_fix += md_short_jump_size;
for (lie = (struct broken_word *) (fragP->fr_symbol);
lie && lie->dispfrag == fragP;
lie = lie->next_broken_word)
if (lie->added == 1)
fragP->fr_fix += md_long_jump_size;
}
frag_wane (fragP);
}
break;
#endif
default:
BAD_CASE (fragP->fr_type);
break;
}
#ifdef md_frag_check
md_frag_check (fragP);
#endif
}
struct relax_seg_info
{
int pass;
int changed;
};
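/* bfd_map_over_sections callback: run one relaxation pass over SEC's
   frag chain and note in the relax_seg_info whether anything moved.  */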
static void
relax_seg (bfd *abfd ATTRIBUTE_UNUSED, asection *sec, void *xxx)
{
segment_info_type *seginfo = seg_info (sec);
struct relax_seg_info *info = (struct relax_seg_info *) xxx;
if (seginfo && seginfo->frchainP
&& relax_segment (seginfo->frchainP->frch_root, sec, info->pass))
info->changed = 1;
}
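/* Finalize the frags of SEC, compute the section's size, round it up
   to the required alignment, and pad the last frag accordingly.  */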
static void
size_seg (bfd *abfd, asection *sec, void *xxx ATTRIBUTE_UNUSED)
{
flagword flags;
fragS *fragp;
segment_info_type *seginfo;
int x;
valueT size, newsize;
subseg_change (sec, 0);
seginfo = seg_info (sec);
if (seginfo && seginfo->frchainP)
{
for (fragp = seginfo->frchainP->frch_root; fragp; fragp = fragp->fr_next)
cvt_frag_to_fill (sec, fragp);
for (fragp = seginfo->frchainP->frch_root;
fragp->fr_next;
fragp = fragp->fr_next)
/* Walk to last elt. */
;
size = fragp->fr_address + fragp->fr_fix;
}
else
size = 0;
flags = bfd_get_section_flags (abfd, sec);
if (size == 0 && bfd_get_section_size (sec) != 0 &&
(flags & SEC_HAS_CONTENTS) != 0)
return;
if (size > 0 && ! seginfo->bss)
flags |= SEC_HAS_CONTENTS;
flags &= ~SEC_RELOC;
x = bfd_set_section_flags (abfd, sec, flags);
gas_assert (x);
newsize = md_section_align (sec, size);
x = bfd_set_section_size (abfd, sec, newsize);
gas_assert (x);
/* If the size had to be rounded up, add some padding in the last
non-empty frag. */
gas_assert (newsize >= size);
if (size != newsize)
{
fragS *last = seginfo->frchainP->frch_last;
fragp = seginfo->frchainP->frch_root;
while (fragp->fr_next != last)
fragp = fragp->fr_next;
last->fr_address = size;
if ((newsize - size) % fragp->fr_var == 0)
fragp->fr_offset += (newsize - size) / fragp->fr_var;
else
/* If we hit this abort, it's likely due to subsegs_finish not
providing sufficient alignment on the last frag, and the
machine dependent code using alignment frags with fr_var
greater than 1. */
abort ();
}
#ifdef tc_frob_section
tc_frob_section (sec);
#endif
#ifdef obj_frob_section
obj_frob_section (sec);
#endif
}
#ifdef DEBUG2
static void
dump_section_relocs (bfd *abfd ATTRIBUTE_UNUSED, asection *sec, FILE *stream)
{
segment_info_type *seginfo = seg_info (sec);
fixS *fixp = seginfo->fix_root;
if (!fixp)
return;
fprintf (stream, "sec %s relocs:\n", sec->name);
while (fixp)
{
symbolS *s = fixp->fx_addsy;
fprintf (stream, " %08lx: type %d ", (unsigned long) fixp,
(int) fixp->fx_r_type);
if (s == NULL)
fprintf (stream, "no sym\n");
else
{
print_symbol_value_1 (stream, s);
fprintf (stream, "\n");
}
fixp = fixp->fx_next;
}
}
#else
#define dump_section_relocs(ABFD,SEC,STREAM) ((void) 0)
#endif
#ifndef EMIT_SECTION_SYMBOLS
#define EMIT_SECTION_SYMBOLS 1
#endif
/* Resolve U.A.OFFSET_SYM and U.A.SYM fields of RELOC_LIST entries,
and check for validity. Convert RELOC_LIST from using U.A fields
to U.B fields. */
static void
resolve_reloc_expr_symbols (void)
{
bfd_vma addr_mask = 1;
struct reloc_list *r;
/* Avoid a shift by the width of type. */
addr_mask <<= bfd_arch_bits_per_address (stdoutput) - 1;
addr_mask <<= 1;
addr_mask -= 1;
for (r = reloc_list; r; r = r->next)
{
reloc_howto_type *howto = r->u.a.howto;
expressionS *symval;
symbolS *sym;
bfd_vma offset, addend;
asection *sec;
resolve_symbol_value (r->u.a.offset_sym);
symval = symbol_get_value_expression (r->u.a.offset_sym);
offset = 0;
sym = NULL;
if (symval->X_op == O_constant)
sym = r->u.a.offset_sym;
else if (symval->X_op == O_symbol)
{
sym = symval->X_add_symbol;
offset = symval->X_add_number;
symval = symbol_get_value_expression (symval->X_add_symbol);
}
if (sym == NULL
|| symval->X_op != O_constant
|| (sec = S_GET_SEGMENT (sym)) == NULL
|| !SEG_NORMAL (sec))
{
as_bad_where (r->file, r->line, _("invalid offset expression"));
sec = NULL;
}
else
offset += S_GET_VALUE (sym);
sym = NULL;
addend = r->u.a.addend;
if (r->u.a.sym != NULL)
{
resolve_symbol_value (r->u.a.sym);
symval = symbol_get_value_expression (r->u.a.sym);
if (symval->X_op == O_constant)
sym = r->u.a.sym;
else if (symval->X_op == O_symbol)
{
sym = symval->X_add_symbol;
addend += symval->X_add_number;
symval = symbol_get_value_expression (symval->X_add_symbol);
}
if (symval->X_op != O_constant)
{
as_bad_where (r->file, r->line, _("invalid reloc expression"));
sec = NULL;
}
else if (sym != NULL)
{
/* Convert relocs against local symbols to refer to the
corresponding section symbol plus offset instead. Keep
PC-relative relocs of the REL variety intact though to
prevent the offset from overflowing the relocated field,
unless it has enough bits to cover the whole address
space. */
if (S_IS_LOCAL (sym) && !symbol_section_p (sym)
&& !(howto->partial_inplace
&& howto->pc_relative
&& howto->src_mask != addr_mask))
{
asection *symsec = S_GET_SEGMENT (sym);
if (!(((symsec->flags & SEC_MERGE) != 0
&& addend != 0)
|| (symsec->flags & SEC_THREAD_LOCAL) != 0))
{
addend += S_GET_VALUE (sym);
sym = section_symbol (symsec);
}
}
symbol_mark_used_in_reloc (sym);
}
}
if (sym == NULL)
{
if (abs_section_sym == NULL)
abs_section_sym = section_symbol (absolute_section);
sym = abs_section_sym;
}
r->u.b.sec = sec;
r->u.b.s = symbol_get_bfdsym (sym);
r->u.b.r.sym_ptr_ptr = &r->u.b.s;
r->u.b.r.address = offset;
r->u.b.r.addend = addend;
r->u.b.r.howto = howto;
}
}
/* This pass over fixups decides whether symbols can be replaced with
section symbols. */
static void
adjust_reloc_syms (bfd *abfd ATTRIBUTE_UNUSED,
asection *sec,
void *xxx ATTRIBUTE_UNUSED)
{
segment_info_type *seginfo = seg_info (sec);
fixS *fixp;
if (seginfo == NULL)
return;
dump_section_relocs (abfd, sec, stderr);
for (fixp = seginfo->fix_root; fixp; fixp = fixp->fx_next)
if (fixp->fx_done)
/* Ignore it. */
;
else if (fixp->fx_addsy)
{
symbolS *sym;
asection *symsec;
#ifdef DEBUG5
fprintf (stderr, "\n\nadjusting fixup:\n");
print_fixup (fixp);
#endif
sym = fixp->fx_addsy;
/* All symbols should have already been resolved at this
point. It is possible to see unresolved expression
symbols, though, since they are not in the regular symbol
table. */
resolve_symbol_value (sym);
if (fixp->fx_subsy != NULL)
resolve_symbol_value (fixp->fx_subsy);
/* If this symbol is equated to an undefined or common symbol,
convert the fixup to being against that symbol. */
while (symbol_equated_reloc_p (sym)
|| S_IS_WEAKREFR (sym))
{
symbolS *newsym = symbol_get_value_expression (sym)->X_add_symbol;
if (sym == newsym)
break;
fixp->fx_offset += symbol_get_value_expression (sym)->X_add_number;
fixp->fx_addsy = newsym;
sym = newsym;
}
if (symbol_mri_common_p (sym))
{
fixp->fx_offset += S_GET_VALUE (sym);
fixp->fx_addsy = symbol_get_value_expression (sym)->X_add_symbol;
continue;
}
/* If the symbol is undefined, common, weak, or global (ELF
shared libs), we can't replace it with the section symbol. */
if (S_FORCE_RELOC (fixp->fx_addsy, 1))
continue;
/* Is there some other (target cpu dependent) reason we can't adjust
this one? (E.g. relocations involving function addresses on
	 the PA.)  */
#ifdef tc_fix_adjustable
if (! tc_fix_adjustable (fixp))
continue;
#endif
/* Since we're reducing to section symbols, don't attempt to reduce
anything that's already using one. */
if (symbol_section_p (sym))
continue;
symsec = S_GET_SEGMENT (sym);
if (symsec == NULL)
abort ();
if (bfd_is_abs_section (symsec))
{
/* The fixup_segment routine normally will not use this
symbol in a relocation. */
continue;
}
/* Don't try to reduce relocs which refer to non-local symbols
in .linkonce sections. It can lead to confusion when a
debugging section refers to a .linkonce section. I hope
this will always be correct. */
if (symsec != sec && ! S_IS_LOCAL (sym))
{
if ((symsec->flags & SEC_LINK_ONCE) != 0
|| (IS_ELF
/* The GNU toolchain uses an extension for ELF: a
section beginning with the magic string
.gnu.linkonce is a linkonce section. */
&& strncmp (segment_name (symsec), ".gnu.linkonce",
sizeof ".gnu.linkonce" - 1) == 0))
continue;
}
/* Never adjust a reloc against local symbol in a merge section
with non-zero addend. */
if ((symsec->flags & SEC_MERGE) != 0
&& (fixp->fx_offset != 0 || fixp->fx_subsy != NULL))
continue;
/* Never adjust a reloc against TLS local symbol. */
if ((symsec->flags & SEC_THREAD_LOCAL) != 0)
continue;
/* We refetch the segment when calling section_symbol, rather
than using symsec, because S_GET_VALUE may wind up changing
the section when it calls resolve_symbol_value. */
fixp->fx_offset += S_GET_VALUE (sym);
fixp->fx_addsy = section_symbol (S_GET_SEGMENT (sym));
#ifdef DEBUG5
fprintf (stderr, "\nadjusted fixup:\n");
print_fixup (fixp);
#endif
}
dump_section_relocs (abfd, sec, stderr);
}
/* fixup_segment()
Go through all the fixS's in a segment and see which ones can be
handled now. (These consist of fixS where we have since discovered
the value of a symbol, or the address of the frag involved.)
For each one, call md_apply_fix to put the fix into the frag data.
Result is a count of how many relocation structs will be needed to
handle the remaining fixS's that we couldn't completely handle here.
These will be output later by emit_relocations(). */
static long
fixup_segment (fixS *fixP, segT this_segment)
{
long seg_reloc_count = 0;
valueT add_number;
fragS *fragP;
segT add_symbol_segment = absolute_section;
if (fixP != NULL && abs_section_sym == NULL)
abs_section_sym = section_symbol (absolute_section);
/* If the linker is doing the relaxing, we must not do any fixups.
Well, strictly speaking that's not true -- we could do any that
are PC-relative and don't cross regions that could change size.
And for the i960 we might be able to turn callx/callj into bal
anyways in cases where we know the maximum displacement. */
if (linkrelax && TC_LINKRELAX_FIXUP (this_segment))
{
for (; fixP; fixP = fixP->fx_next)
if (!fixP->fx_done)
{
if (fixP->fx_addsy == NULL)
{
/* There was no symbol required by this relocation.
However, BFD doesn't really handle relocations
without symbols well. So fake up a local symbol in
the absolute section. */
fixP->fx_addsy = abs_section_sym;
}
symbol_mark_used_in_reloc (fixP->fx_addsy);
if (fixP->fx_subsy != NULL)
symbol_mark_used_in_reloc (fixP->fx_subsy);
seg_reloc_count++;
}
TC_ADJUST_RELOC_COUNT (fixP, seg_reloc_count);
return seg_reloc_count;
}
for (; fixP; fixP = fixP->fx_next)
{
#ifdef DEBUG5
fprintf (stderr, "\nprocessing fixup:\n");
print_fixup (fixP);
#endif
fragP = fixP->fx_frag;
know (fragP);
#ifdef TC_VALIDATE_FIX
TC_VALIDATE_FIX (fixP, this_segment, skip);
#endif
add_number = fixP->fx_offset;
if (fixP->fx_addsy != NULL)
add_symbol_segment = S_GET_SEGMENT (fixP->fx_addsy);
if (fixP->fx_subsy != NULL)
{
segT sub_symbol_segment;
resolve_symbol_value (fixP->fx_subsy);
sub_symbol_segment = S_GET_SEGMENT (fixP->fx_subsy);
if (fixP->fx_addsy != NULL
&& sub_symbol_segment == add_symbol_segment
&& !S_FORCE_RELOC (fixP->fx_addsy, 0)
&& !S_FORCE_RELOC (fixP->fx_subsy, 0)
&& !TC_FORCE_RELOCATION_SUB_SAME (fixP, add_symbol_segment))
{
add_number += S_GET_VALUE (fixP->fx_addsy);
add_number -= S_GET_VALUE (fixP->fx_subsy);
fixP->fx_offset = add_number;
fixP->fx_addsy = NULL;
fixP->fx_subsy = NULL;
#ifdef TC_M68K
/* See the comment below about 68k weirdness. */
fixP->fx_pcrel = 0;
#endif
}
else if (sub_symbol_segment == absolute_section
&& !S_FORCE_RELOC (fixP->fx_subsy, 0)
&& !TC_FORCE_RELOCATION_SUB_ABS (fixP, add_symbol_segment))
{
add_number -= S_GET_VALUE (fixP->fx_subsy);
fixP->fx_offset = add_number;
fixP->fx_subsy = NULL;
}
else if (sub_symbol_segment == this_segment
&& !S_FORCE_RELOC (fixP->fx_subsy, 0)
&& !TC_FORCE_RELOCATION_SUB_LOCAL (fixP, add_symbol_segment))
{
add_number -= S_GET_VALUE (fixP->fx_subsy);
fixP->fx_offset = (add_number + fixP->fx_dot_value
+ fixP->fx_frag->fr_address);
/* Make it pc-relative. If the back-end code has not
selected a pc-relative reloc, cancel the adjustment
we do later on all pc-relative relocs. */
if (0
#ifdef TC_M68K
/* Do this for m68k even if it's already described
as pc-relative. On the m68k, an operand of
"pc@(foo-.-2)" should address "foo" in a
pc-relative mode. */
|| 1
#endif
|| !fixP->fx_pcrel)
add_number += MD_PCREL_FROM_SECTION (fixP, this_segment);
fixP->fx_subsy = NULL;
fixP->fx_pcrel = 1;
}
else if (!TC_VALIDATE_FIX_SUB (fixP, add_symbol_segment))
{
if (!md_register_arithmetic
&& (add_symbol_segment == reg_section
|| sub_symbol_segment == reg_section))
as_bad_where (fixP->fx_file, fixP->fx_line,
_("register value used as expression"));
else
as_bad_where (fixP->fx_file, fixP->fx_line,
_("can't resolve `%s' {%s section} - `%s' {%s section}"),
fixP->fx_addsy ? S_GET_NAME (fixP->fx_addsy) : "0",
segment_name (add_symbol_segment),
S_GET_NAME (fixP->fx_subsy),
segment_name (sub_symbol_segment));
}
else if (sub_symbol_segment != undefined_section
&& ! bfd_is_com_section (sub_symbol_segment)
&& MD_APPLY_SYM_VALUE (fixP))
add_number -= S_GET_VALUE (fixP->fx_subsy);
}
if (fixP->fx_addsy)
{
if (add_symbol_segment == this_segment
&& !S_FORCE_RELOC (fixP->fx_addsy, 0)
&& !TC_FORCE_RELOCATION_LOCAL (fixP))
{
/* This fixup was made when the symbol's segment was
SEG_UNKNOWN, but it is now in the local segment.
So we know how to do the address without relocation. */
add_number += S_GET_VALUE (fixP->fx_addsy);
fixP->fx_offset = add_number;
if (fixP->fx_pcrel)
add_number -= MD_PCREL_FROM_SECTION (fixP, this_segment);
fixP->fx_addsy = NULL;
fixP->fx_pcrel = 0;
}
else if (add_symbol_segment == absolute_section
&& !S_FORCE_RELOC (fixP->fx_addsy, 0)
&& !TC_FORCE_RELOCATION_ABS (fixP))
{
add_number += S_GET_VALUE (fixP->fx_addsy);
fixP->fx_offset = add_number;
fixP->fx_addsy = NULL;
}
else if (add_symbol_segment != undefined_section
&& ! bfd_is_com_section (add_symbol_segment)
&& MD_APPLY_SYM_VALUE (fixP))
add_number += S_GET_VALUE (fixP->fx_addsy);
}
if (fixP->fx_pcrel)
{
add_number -= MD_PCREL_FROM_SECTION (fixP, this_segment);
if (!fixP->fx_done && fixP->fx_addsy == NULL)
{
/* There was no symbol required by this relocation.
However, BFD doesn't really handle relocations
without symbols well. So fake up a local symbol in
the absolute section. */
fixP->fx_addsy = abs_section_sym;
}
}
if (!fixP->fx_done)
md_apply_fix (fixP, &add_number, this_segment);
if (!fixP->fx_done)
{
++seg_reloc_count;
if (fixP->fx_addsy == NULL)
fixP->fx_addsy = abs_section_sym;
symbol_mark_used_in_reloc (fixP->fx_addsy);
if (fixP->fx_subsy != NULL)
symbol_mark_used_in_reloc (fixP->fx_subsy);
}
if (!fixP->fx_bit_fixP && !fixP->fx_no_overflow && fixP->fx_size != 0)
{
if (fixP->fx_size < sizeof (valueT))
{
valueT mask;
mask = 0;
mask--; /* Set all bits to one. */
mask <<= fixP->fx_size * 8 - (fixP->fx_signed ? 1 : 0);
if ((add_number & mask) != 0 && (add_number & mask) != mask)
{
char buf[50], buf2[50];
sprint_value (buf, fragP->fr_address + fixP->fx_where);
if (add_number > 1000)
sprint_value (buf2, add_number);
else
sprintf (buf2, "%ld", (long) add_number);
as_bad_where (fixP->fx_file, fixP->fx_line,
_("value of %s too large for field of %d bytes at %s"),
buf2, fixP->fx_size, buf);
} /* Generic error checking. */
}
#ifdef WARN_SIGNED_OVERFLOW_WORD
/* Warn if a .word value is too large when treated as a signed
number. We already know it is not too negative. This is to
catch over-large switches generated by gcc on the 68k. */
if (!flag_signed_overflow_ok
&& fixP->fx_size == 2
&& add_number > 0x7fff)
as_bad_where (fixP->fx_file, fixP->fx_line,
_("signed .word overflow; switch may be too large; %ld at 0x%lx"),
(long) add_number,
(long) (fragP->fr_address + fixP->fx_where));
#endif
} /* Not a bit fix. */
#ifdef TC_VALIDATE_FIX
skip: ATTRIBUTE_UNUSED_LABEL
;
#endif
#ifdef DEBUG5
fprintf (stderr, "result:\n");
print_fixup (fixP);
#endif
} /* For each fixS in this segment. */
TC_ADJUST_RELOC_COUNT (fixP, seg_reloc_count);
return seg_reloc_count;
}
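/* bfd_map_over_sections callback: apply fixup_segment to the fixups
   recorded for SEC.  */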
static void
fix_segment (bfd *abfd ATTRIBUTE_UNUSED,
asection *sec,
void *xxx ATTRIBUTE_UNUSED)
{
segment_info_type *seginfo = seg_info (sec);
fixup_segment (seginfo->fix_root, sec);
}
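/* Install RELOC into the contents of FRAGP via bfd_install_relocation,
   diagnosing symbols that will not be output and any BFD failure
   against FILE:LINE.  */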
static void
install_reloc (asection *sec, arelent *reloc, fragS *fragp,
char *file, unsigned int line)
{
char *err;
bfd_reloc_status_type s;
asymbol *sym;
if (reloc->sym_ptr_ptr != NULL
&& (sym = *reloc->sym_ptr_ptr) != NULL
&& (sym->flags & BSF_KEEP) == 0
&& ((sym->flags & BSF_SECTION_SYM) == 0
|| (EMIT_SECTION_SYMBOLS
&& !bfd_is_abs_section (sym->section))))
as_bad_where (file, line, _("redefined symbol cannot be used on reloc"));
s = bfd_install_relocation (stdoutput, reloc,
fragp->fr_literal, fragp->fr_address,
sec, &err);
switch (s)
{
case bfd_reloc_ok:
break;
case bfd_reloc_overflow:
as_bad_where (file, line, _("relocation overflow"));
break;
case bfd_reloc_outofrange:
as_bad_where (file, line, _("relocation out of range"));
break;
default:
as_fatal (_("%s:%u: bad return from bfd_install_relocation: %x"),
file, line, s);
}
}
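/* Return the frag in SEGINFO's chain whose fixed part contains the
   address of reloc R, starting the search at LAST_FRAG.  Returns NULL
   (with an error) if no frag covers that address.  */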
static fragS *
get_frag_for_reloc (fragS *last_frag,
const segment_info_type *seginfo,
const struct reloc_list *r)
{
fragS *f;
for (f = last_frag; f != NULL; f = f->fr_next)
if (f->fr_address <= r->u.b.r.address
&& r->u.b.r.address < f->fr_address + f->fr_fix)
return f;
for (f = seginfo->frchainP->frch_root; f != NULL; f = f->fr_next)
if (f->fr_address <= r->u.b.r.address
&& r->u.b.r.address < f->fr_address + f->fr_fix)
return f;
as_bad_where (r->file, r->line,
_("reloc not within (fixed part of) section"));
return NULL;
}
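/* Build the relocation array for SEC from its remaining fixups and any
   ".reloc" entries, install each relocation into the frag contents,
   and hand the array to BFD.  */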
static void
write_relocs (bfd *abfd, asection *sec, void *xxx ATTRIBUTE_UNUSED)
{
segment_info_type *seginfo = seg_info (sec);
unsigned int n;
struct reloc_list *my_reloc_list, **rp, *r;
arelent **relocs;
fixS *fixp;
fragS *last_frag;
/* If seginfo is NULL, we did not create this section; don't do
anything with it. */
if (seginfo == NULL)
return;
n = 0;
for (fixp = seginfo->fix_root; fixp; fixp = fixp->fx_next)
if (!fixp->fx_done)
n++;
#ifdef RELOC_EXPANSION_POSSIBLE
n *= MAX_RELOC_EXPANSION;
#endif
/* Extract relocs for this section from reloc_list. */
rp = &reloc_list;
my_reloc_list = NULL;
while ((r = *rp) != NULL)
{
if (r->u.b.sec == sec)
{
*rp = r->next;
r->next = my_reloc_list;
my_reloc_list = r;
n++;
}
else
rp = &r->next;
}
relocs = (arelent **) xcalloc (n, sizeof (arelent *));
n = 0;
r = my_reloc_list;
last_frag = NULL;
for (fixp = seginfo->fix_root; fixp != (fixS *) NULL; fixp = fixp->fx_next)
{
int fx_size, slack;
offsetT loc;
arelent **reloc;
#ifndef RELOC_EXPANSION_POSSIBLE
arelent *rel;
reloc = &rel;
#endif
if (fixp->fx_done)
continue;
fx_size = fixp->fx_size;
slack = TC_FX_SIZE_SLACK (fixp);
if (slack > 0)
fx_size = fx_size > slack ? fx_size - slack : 0;
loc = fixp->fx_where + fx_size;
#if 0
      as_warn_where (fixp->fx_file, fixp->fx_line,
		     "write_relocs: slack %d, loc %ld, fr_fix %ld, size %d, where %ld\n",
		     slack, loc, fixp->fx_frag->fr_fix, fx_size, fixp->fx_where);
#endif
if (slack >= 0 && loc > fixp->fx_frag->fr_fix)
as_bad_where (fixp->fx_file, fixp->fx_line,
_("internal error: fixup not contained within frag"));
#ifndef RELOC_EXPANSION_POSSIBLE
*reloc = tc_gen_reloc (sec, fixp);
#else
reloc = tc_gen_reloc (sec, fixp);
#endif
while (*reloc)
{
while (r != NULL && r->u.b.r.address < (*reloc)->address)
{
fragS *f = get_frag_for_reloc (last_frag, seginfo, r);
if (f != NULL)
{
last_frag = f;
relocs[n++] = &r->u.b.r;
install_reloc (sec, &r->u.b.r, f, r->file, r->line);
}
r = r->next;
}
relocs[n++] = *reloc;
install_reloc (sec, *reloc, fixp->fx_frag,
fixp->fx_file, fixp->fx_line);
#ifndef RELOC_EXPANSION_POSSIBLE
break;
#else
reloc++;
#endif
}
}
while (r != NULL)
{
fragS *f = get_frag_for_reloc (last_frag, seginfo, r);
if (f != NULL)
{
last_frag = f;
relocs[n++] = &r->u.b.r;
install_reloc (sec, &r->u.b.r, f, r->file, r->line);
}
r = r->next;
}
#ifdef DEBUG4
{
unsigned int k, j, nsyms;
asymbol **sympp;
sympp = bfd_get_outsymbols (stdoutput);
nsyms = bfd_get_symcount (stdoutput);
for (k = 0; k < n; k++)
if (((*relocs[k]->sym_ptr_ptr)->flags & BSF_SECTION_SYM) == 0)
{
for (j = 0; j < nsyms; j++)
if (sympp[j] == *relocs[k]->sym_ptr_ptr)
break;
if (j == nsyms)
abort ();
}
}
#endif
if (n)
{
flagword flags = bfd_get_section_flags (abfd, sec);
flags |= SEC_RELOC;
bfd_set_section_flags (abfd, sec, flags);
bfd_set_reloc (stdoutput, sec, relocs, n);
}
#ifdef SET_SECTION_RELOCS
SET_SECTION_RELOCS (sec, relocs, n);
#endif
#ifdef DEBUG3
{
unsigned int k;
fprintf (stderr, "relocs for sec %s\n", sec->name);
for (k = 0; k < n; k++)
{
arelent *rel = relocs[k];
asymbol *s = *rel->sym_ptr_ptr;
fprintf (stderr, " reloc %2d @%p off %4lx : sym %-10s addend %lx\n",
k, rel, (unsigned long)rel->address, s->name,
(unsigned long)rel->addend);
}
}
#endif
}
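/* Compress IN_SIZE bytes at CONTENTS, appending the output to new
   rs_fill frags allocated from OB and chained after *LAST_NEWF.
   Returns the number of compressed bytes produced, or -1 on error.  */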
static int
compress_frag (struct z_stream_s *strm, const char *contents, int in_size,
fragS **last_newf, struct obstack *ob)
{
int out_size;
int total_out_size = 0;
fragS *f = *last_newf;
char *next_out;
int avail_out;
/* Call the compression routine repeatedly until it has finished
processing the frag. */
while (in_size > 0)
{
/* Reserve all the space available in the current chunk.
If none is available, start a new frag. */
avail_out = obstack_room (ob);
if (avail_out <= 0)
{
obstack_finish (ob);
f = frag_alloc (ob);
f->fr_type = rs_fill;
(*last_newf)->fr_next = f;
*last_newf = f;
avail_out = obstack_room (ob);
}
if (avail_out <= 0)
as_fatal (_("can't extend frag"));
next_out = obstack_next_free (ob);
obstack_blank_fast (ob, avail_out);
out_size = compress_data (strm, &contents, &in_size,
&next_out, &avail_out);
if (out_size < 0)
return -1;
f->fr_fix += out_size;
total_out_size += out_size;
/* Return unused space. */
if (avail_out > 0)
obstack_blank_fast (ob, -avail_out);
}
return total_out_size;
}
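/* If SEC is a .debug_* section large enough to be worth compressing,
   replace its frag chain with a zlib-compressed copy preceded by a
   12-byte "ZLIB" header, and rename the section from .debug_* to
   .zdebug_*.  */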
static void
compress_debug (bfd *abfd, asection *sec, void *xxx ATTRIBUTE_UNUSED)
{
segment_info_type *seginfo = seg_info (sec);
fragS *f;
fragS *first_newf;
fragS *last_newf;
struct obstack *ob = &seginfo->frchainP->frch_obstack;
bfd_size_type uncompressed_size = (bfd_size_type) sec->size;
bfd_size_type compressed_size;
const char *section_name;
char *compressed_name;
char *header;
struct z_stream_s *strm;
int x;
flagword flags = bfd_get_section_flags (abfd, sec);
if (seginfo == NULL
|| sec->size < 32
|| (flags & (SEC_ALLOC | SEC_HAS_CONTENTS)) == SEC_ALLOC)
return;
section_name = bfd_get_section_name (stdoutput, sec);
if (strncmp (section_name, ".debug_", 7) != 0)
return;
strm = compress_init ();
if (strm == NULL)
return;
/* Create a new frag to contain the "ZLIB" header. */
first_newf = frag_alloc (ob);
if (obstack_room (ob) < 12)
first_newf = frag_alloc (ob);
if (obstack_room (ob) < 12)
as_fatal (_("can't extend frag %u chars"), 12);
last_newf = first_newf;
obstack_blank_fast (ob, 12);
last_newf->fr_type = rs_fill;
last_newf->fr_fix = 12;
header = last_newf->fr_literal;
memcpy (header, "ZLIB", 4);
header[11] = uncompressed_size; uncompressed_size >>= 8;
header[10] = uncompressed_size; uncompressed_size >>= 8;
header[9] = uncompressed_size; uncompressed_size >>= 8;
header[8] = uncompressed_size; uncompressed_size >>= 8;
header[7] = uncompressed_size; uncompressed_size >>= 8;
header[6] = uncompressed_size; uncompressed_size >>= 8;
header[5] = uncompressed_size; uncompressed_size >>= 8;
header[4] = uncompressed_size;
compressed_size = 12;
/* Stream the frags through the compression engine, adding new frags
     as necessary to accommodate the compressed output.  */
for (f = seginfo->frchainP->frch_root;
f;
f = f->fr_next)
{
offsetT fill_size;
char *fill_literal;
offsetT count;
int out_size;
gas_assert (f->fr_type == rs_fill);
if (f->fr_fix)
{
out_size = compress_frag (strm, f->fr_literal, f->fr_fix,
&last_newf, ob);
if (out_size < 0)
return;
compressed_size += out_size;
}
fill_literal = f->fr_literal + f->fr_fix;
fill_size = f->fr_var;
count = f->fr_offset;
gas_assert (count >= 0);
if (fill_size && count)
{
while (count--)
{
out_size = compress_frag (strm, fill_literal, (int) fill_size,
&last_newf, ob);
if (out_size < 0)
return;
compressed_size += out_size;
}
}
}
/* Flush the compression state. */
for (;;)
{
int avail_out;
char *next_out;
int out_size;
/* Reserve all the space available in the current chunk.
If none is available, start a new frag. */
avail_out = obstack_room (ob);
if (avail_out <= 0)
{
fragS *newf;
obstack_finish (ob);
newf = frag_alloc (ob);
newf->fr_type = rs_fill;
last_newf->fr_next = newf;
last_newf = newf;
avail_out = obstack_room (ob);
}
if (avail_out <= 0)
as_fatal (_("can't extend frag"));
next_out = obstack_next_free (ob);
obstack_blank_fast (ob, avail_out);
x = compress_finish (strm, &next_out, &avail_out, &out_size);
if (x < 0)
return;
last_newf->fr_fix += out_size;
compressed_size += out_size;
/* Return unused space. */
if (avail_out > 0)
obstack_blank_fast (ob, -avail_out);
if (x == 0)
break;
}
/* Replace the uncompressed frag list with the compressed frag list. */
seginfo->frchainP->frch_root = first_newf;
seginfo->frchainP->frch_last = last_newf;
/* Update the section size and its name. */
x = bfd_set_section_size (abfd, sec, compressed_size);
gas_assert (x);
compressed_name = (char *) xmalloc (strlen (section_name) + 2);
compressed_name[0] = '.';
compressed_name[1] = 'z';
strcpy (compressed_name + 2, section_name + 1);
bfd_section_name (stdoutput, sec) = compressed_name;
}
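/* Write the fixed contents and fill patterns of every frag in SEC to
   the output BFD.  */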
static void
write_contents (bfd *abfd ATTRIBUTE_UNUSED,
asection *sec,
void *xxx ATTRIBUTE_UNUSED)
{
segment_info_type *seginfo = seg_info (sec);
addressT offset = 0;
fragS *f;
/* Write out the frags. */
if (seginfo == NULL
|| !(bfd_get_section_flags (abfd, sec) & SEC_HAS_CONTENTS))
return;
for (f = seginfo->frchainP->frch_root;
f;
f = f->fr_next)
{
int x;
addressT fill_size;
char *fill_literal;
offsetT count;
gas_assert (f->fr_type == rs_fill);
if (f->fr_fix)
{
x = bfd_set_section_contents (stdoutput, sec,
f->fr_literal, (file_ptr) offset,
(bfd_size_type) f->fr_fix);
if (!x)
as_fatal (_("can't write %s: %s"), stdoutput->filename,
bfd_errmsg (bfd_get_error ()));
offset += f->fr_fix;
}
fill_literal = f->fr_literal + f->fr_fix;
fill_size = f->fr_var;
count = f->fr_offset;
gas_assert (count >= 0);
if (fill_size && count)
{
char buf[256];
if (fill_size > sizeof (buf))
{
/* Do it the old way. Can this ever happen? */
while (count--)
{
x = bfd_set_section_contents (stdoutput, sec,
fill_literal,
(file_ptr) offset,
(bfd_size_type) fill_size);
if (!x)
as_fatal (_("can't write %s: %s"), stdoutput->filename,
bfd_errmsg (bfd_get_error ()));
offset += fill_size;
}
}
else
{
/* Build a buffer full of fill objects and output it as
often as necessary. This saves on the overhead of
potentially lots of bfd_set_section_contents calls. */
int n_per_buf, i;
if (fill_size == 1)
{
n_per_buf = sizeof (buf);
memset (buf, *fill_literal, n_per_buf);
}
else
{
char *bufp;
n_per_buf = sizeof (buf) / fill_size;
for (i = n_per_buf, bufp = buf; i; i--, bufp += fill_size)
memcpy (bufp, fill_literal, fill_size);
}
for (; count > 0; count -= n_per_buf)
{
n_per_buf = n_per_buf > count ? count : n_per_buf;
x = bfd_set_section_contents
(stdoutput, sec, buf, (file_ptr) offset,
(bfd_size_type) n_per_buf * fill_size);
if (!x)
as_fatal (_("cannot write to output file '%s': %s"),
stdoutput->filename,
bfd_errmsg (bfd_get_error ()));
offset += n_per_buf * fill_size;
}
}
}
}
}
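/* Splice the data section's frag chain onto the end of the text
   section's chain, for -R (read-only data in text).  */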
static void
merge_data_into_text (void)
{
seg_info (text_section)->frchainP->frch_last->fr_next =
seg_info (data_section)->frchainP->frch_root;
seg_info (text_section)->frchainP->frch_last =
seg_info (data_section)->frchainP->frch_last;
seg_info (data_section)->frchainP = 0;
}
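/* Count the symbols to be written, build the asymbol array, hand it to
   BFD with bfd_set_symtab, and freeze the symbol table.  */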
static void
set_symtab (void)
{
int nsyms;
asymbol **asympp;
symbolS *symp;
bfd_boolean result;
/* Count symbols. We can't rely on a count made by the loop in
write_object_file, because *_frob_file may add a new symbol or
two. */
nsyms = 0;
for (symp = symbol_rootP; symp; symp = symbol_next (symp))
nsyms++;
if (nsyms)
{
int i;
bfd_size_type amt = (bfd_size_type) nsyms * sizeof (asymbol *);
asympp = (asymbol **) bfd_alloc (stdoutput, amt);
symp = symbol_rootP;
for (i = 0; i < nsyms; i++, symp = symbol_next (symp))
{
asympp[i] = symbol_get_bfdsym (symp);
if (asympp[i]->flags != BSF_SECTION_SYM
|| !(bfd_is_const_section (asympp[i]->section)
&& asympp[i]->section->symbol == asympp[i]))
asympp[i]->flags |= BSF_KEEP;
symbol_mark_written (symp);
}
}
else
asympp = 0;
result = bfd_set_symtab (stdoutput, asympp, nsyms);
gas_assert (result);
symbol_table_frozen = 1;
}
/* Finish the subsegments. After every sub-segment, we fake an
".align ...". This conforms to BSD4.2 brane-damage. We then fake
".fill 0" because that is the kind of frag that requires least
thought. ".align" frags like to have a following frag since that
makes calculating their intended length trivial. */
#ifndef SUB_SEGMENT_ALIGN
#ifdef HANDLE_ALIGN
/* The last subsegment gets an alignment corresponding to the alignment
of the section. This allows proper nop-filling at the end of
code-bearing sections. */
#define SUB_SEGMENT_ALIGN(SEG, FRCHAIN) \
(!(FRCHAIN)->frch_next ? get_recorded_alignment (SEG) : 0)
#else
#define SUB_SEGMENT_ALIGN(SEG, FRCHAIN) 0
#endif
#endif
void
subsegs_finish (void)
{
struct frchain *frchainP;
asection *s;
for (s = stdoutput->sections; s; s = s->next)
{
segment_info_type *seginfo = seg_info (s);
if (!seginfo)
continue;
for (frchainP = seginfo->frchainP;
frchainP != NULL;
frchainP = frchainP->frch_next)
{
int alignment = 0;
subseg_set (s, frchainP->frch_subseg);
/* This now gets called even if we had errors. In that case,
any alignment is meaningless, and, moreover, will look weird
if we are generating a listing. */
if (!had_errors ())
{
alignment = SUB_SEGMENT_ALIGN (now_seg, frchainP);
if ((bfd_get_section_flags (now_seg->owner, now_seg) & SEC_MERGE)
&& now_seg->entsize)
{
unsigned int entsize = now_seg->entsize;
int entalign = 0;
while ((entsize & 1) == 0)
{
++entalign;
entsize >>= 1;
}
if (entalign > alignment)
alignment = entalign;
}
}
if (subseg_text_p (now_seg))
frag_align_code (alignment, 0);
else
frag_align (alignment, 0, 0);
/* frag_align will have left a new frag.
Use this last frag for an empty ".fill".
For this segment ...
Create a last frag. Do not leave a "being filled in frag". */
frag_wane (frag_now);
frag_now->fr_fix = 0;
know (frag_now->fr_next == NULL);
}
}
}
/* Write the object file. */
void
write_object_file (void)
{
struct relax_seg_info rsi;
#ifndef WORKING_DOT_WORD
fragS *fragP; /* Track along all frags. */
#endif
#ifdef md_pre_output_hook
md_pre_output_hook;
#endif
/* Do we really want to write it? */
{
int n_warns, n_errs;
n_warns = had_warnings ();
n_errs = had_errors ();
/* The -Z flag indicates that an object file should be generated,
regardless of warnings and errors. */
if (flag_always_generate_output)
{
if (n_warns || n_errs)
as_warn (_("%d error%s, %d warning%s, generating bad object file"),
n_errs, n_errs == 1 ? "" : "s",
n_warns, n_warns == 1 ? "" : "s");
}
else
{
if (n_errs)
as_fatal (_("%d error%s, %d warning%s, no object file generated"),
n_errs, n_errs == 1 ? "" : "s",
n_warns, n_warns == 1 ? "" : "s");
}
}
#ifdef md_pre_relax_hook
md_pre_relax_hook;
#endif
/* From now on, we don't care about sub-segments. Build one frag chain
for each segment. Linked thru fr_next. */
/* Remove the sections created by gas for its own purposes. */
{
int i;
bfd_section_list_remove (stdoutput, reg_section);
bfd_section_list_remove (stdoutput, expr_section);
stdoutput->section_count -= 2;
i = 0;
bfd_map_over_sections (stdoutput, renumber_sections, &i);
}
bfd_map_over_sections (stdoutput, chain_frchains_together, (char *) 0);
/* We have two segments. If user gave -R flag, then we must put the
data frags into the text segment. Do this before relaxing so
we know to take advantage of -R and make shorter addresses. */
if (flag_readonly_data_in_text)
{
merge_data_into_text ();
}
rsi.pass = 0;
while (1)
{
#ifndef WORKING_DOT_WORD
/* We need to reset the markers in the broken word list and
associated frags between calls to relax_segment (via
relax_seg). Since the broken word list is global, we do it
once per round, rather than locally in relax_segment for each
segment. */
struct broken_word *brokp;
for (brokp = broken_words;
brokp != (struct broken_word *) NULL;
brokp = brokp->next_broken_word)
{
brokp->added = 0;
if (brokp->dispfrag != (fragS *) NULL
&& brokp->dispfrag->fr_type == rs_broken_word)
brokp->dispfrag->fr_subtype = 0;
}
#endif
rsi.changed = 0;
bfd_map_over_sections (stdoutput, relax_seg, &rsi);
rsi.pass++;
if (!rsi.changed)
break;
}
/* Note - Most ports will use the default value of
     TC_FINALIZE_SYMS_BEFORE_SIZE_SEG, which is 1.  This will force
local symbols to be resolved, removing their frag information.
Some ports however, will not have finished relaxing all of
their frags and will still need the local symbol frag
information. These ports can set
TC_FINALIZE_SYMS_BEFORE_SIZE_SEG to 0. */
finalize_syms = TC_FINALIZE_SYMS_BEFORE_SIZE_SEG;
bfd_map_over_sections (stdoutput, size_seg, (char *) 0);
/* Relaxation has completed. Freeze all syms. */
finalize_syms = 1;
#ifdef md_post_relax_hook
md_post_relax_hook;
#endif
#ifndef WORKING_DOT_WORD
{
struct broken_word *lie;
struct broken_word **prevP;
prevP = &broken_words;
for (lie = broken_words; lie; lie = lie->next_broken_word)
if (!lie->added)
{
expressionS exp;
subseg_change (lie->seg, lie->subseg);
exp.X_op = O_subtract;
exp.X_add_symbol = lie->add;
exp.X_op_symbol = lie->sub;
exp.X_add_number = lie->addnum;
#ifdef TC_CONS_FIX_NEW
TC_CONS_FIX_NEW (lie->frag,
lie->word_goes_here - lie->frag->fr_literal,
2, &exp);
#else
fix_new_exp (lie->frag,
lie->word_goes_here - lie->frag->fr_literal,
2, &exp, 0, BFD_RELOC_16);
#endif
*prevP = lie->next_broken_word;
}
else
prevP = &(lie->next_broken_word);
for (lie = broken_words; lie;)
{
struct broken_word *untruth;
char *table_ptr;
addressT table_addr;
addressT from_addr, to_addr;
int n, m;
subseg_change (lie->seg, lie->subseg);
fragP = lie->dispfrag;
/* Find out how many broken_words go here. */
n = 0;
for (untruth = lie;
untruth && untruth->dispfrag == fragP;
untruth = untruth->next_broken_word)
if (untruth->added == 1)
n++;
table_ptr = lie->dispfrag->fr_opcode;
table_addr = (lie->dispfrag->fr_address
+ (table_ptr - lie->dispfrag->fr_literal));
/* Create the jump around the long jumps. This is a short
jump from table_ptr+0 to table_ptr+n*long_jump_size. */
from_addr = table_addr;
to_addr = table_addr + md_short_jump_size + n * md_long_jump_size;
md_create_short_jump (table_ptr, from_addr, to_addr, lie->dispfrag,
lie->add);
table_ptr += md_short_jump_size;
table_addr += md_short_jump_size;
for (m = 0;
lie && lie->dispfrag == fragP;
m++, lie = lie->next_broken_word)
{
if (lie->added == 2)
continue;
/* Patch the jump table. */
for (untruth = (struct broken_word *) (fragP->fr_symbol);
untruth && untruth->dispfrag == fragP;
untruth = untruth->next_broken_word)
{
if (untruth->use_jump == lie)
{
/* This is the offset from ??? to table_ptr+0.
The target is the same for all users of this
md_long_jump, but the "sub" bases (and hence the
offsets) may be different. */
addressT to_word = table_addr - S_GET_VALUE (untruth->sub);
#ifdef TC_CHECK_ADJUSTED_BROKEN_DOT_WORD
TC_CHECK_ADJUSTED_BROKEN_DOT_WORD (to_word, untruth);
#endif
md_number_to_chars (untruth->word_goes_here, to_word, 2);
}
}
/* Install the long jump. */
/* This is a long jump from table_ptr+0 to the final target. */
from_addr = table_addr;
to_addr = S_GET_VALUE (lie->add) + lie->addnum;
md_create_long_jump (table_ptr, from_addr, to_addr, lie->dispfrag,
lie->add);
table_ptr += md_long_jump_size;
table_addr += md_long_jump_size;
}
}
}
#endif /* not WORKING_DOT_WORD */
/* Resolve symbol values. This needs to be done before processing
the relocations. */
if (symbol_rootP)
{
symbolS *symp;
for (symp = symbol_rootP; symp; symp = symbol_next (symp))
resolve_symbol_value (symp);
}
resolve_local_symbol_values ();
resolve_reloc_expr_symbols ();
PROGRESS (1);
#ifdef tc_frob_file_before_adjust
tc_frob_file_before_adjust ();
#endif
#ifdef obj_frob_file_before_adjust
obj_frob_file_before_adjust ();
#endif
bfd_map_over_sections (stdoutput, adjust_reloc_syms, (char *) 0);
#ifdef tc_frob_file_before_fix
tc_frob_file_before_fix ();
#endif
#ifdef obj_frob_file_before_fix
obj_frob_file_before_fix ();
#endif
bfd_map_over_sections (stdoutput, fix_segment, (char *) 0);
/* Set up symbol table, and write it out. */
if (symbol_rootP)
{
symbolS *symp;
bfd_boolean skip_next_symbol = FALSE;
for (symp = symbol_rootP; symp; symp = symbol_next (symp))
{
int punt = 0;
const char *name;
if (skip_next_symbol)
{
/* Don't do anything besides moving the value of the
symbol from the GAS value-field to the BFD value-field. */
symbol_get_bfdsym (symp)->value = S_GET_VALUE (symp);
skip_next_symbol = FALSE;
continue;
}
if (symbol_mri_common_p (symp))
{
if (S_IS_EXTERNAL (symp))
as_bad (_("%s: global symbols not supported in common sections"),
S_GET_NAME (symp));
symbol_remove (symp, &symbol_rootP, &symbol_lastP);
continue;
}
name = S_GET_NAME (symp);
if (name)
{
const char *name2 =
decode_local_label_name ((char *) S_GET_NAME (symp));
/* They only differ if `name' is a fb or dollar local
label name. */
if (name2 != name && ! S_IS_DEFINED (symp))
as_bad (_("local label `%s' is not defined"), name2);
}
/* Do it again, because adjust_reloc_syms might introduce
more symbols. They'll probably only be section symbols,
but they'll still need to have the values computed. */
resolve_symbol_value (symp);
/* Skip symbols which were equated to undefined or common
symbols. */
if (symbol_equated_reloc_p (symp)
|| S_IS_WEAKREFR (symp))
{
const char *sname = S_GET_NAME (symp);
if (S_IS_COMMON (symp)
&& !TC_FAKE_LABEL (sname)
&& !S_IS_WEAKREFR (symp)
&& (!S_IS_EXTERNAL (symp) || S_IS_LOCAL (symp)))
{
expressionS *e = symbol_get_value_expression (symp);
as_bad (_("Local symbol `%s' can't be equated to common symbol `%s'"),
sname, S_GET_NAME (e->X_add_symbol));
}
if (S_GET_SEGMENT (symp) == reg_section)
{
/* Report error only if we know the symbol name. */
if (S_GET_NAME (symp) != reg_section->name)
as_bad (_("can't make global register symbol `%s'"),
sname);
}
symbol_remove (symp, &symbol_rootP, &symbol_lastP);
continue;
}
#ifdef obj_frob_symbol
obj_frob_symbol (symp, punt);
#endif
#ifdef tc_frob_symbol
if (! punt || symbol_used_in_reloc_p (symp))
tc_frob_symbol (symp, punt);
#endif
/* If we don't want to keep this symbol, splice it out of
the chain now. If EMIT_SECTION_SYMBOLS is 0, we never
want section symbols. Otherwise, we skip local symbols
and symbols that the frob_symbol macros told us to punt,
but we keep such symbols if they are used in relocs. */
if (symp == abs_section_sym
|| (! EMIT_SECTION_SYMBOLS
&& symbol_section_p (symp))
/* Note that S_IS_EXTERNAL and S_IS_LOCAL are not always
opposites. Sometimes the former checks flags and the
latter examines the name... */
|| (!S_IS_EXTERNAL (symp)
&& (punt || S_IS_LOCAL (symp) ||
(S_IS_WEAKREFD (symp) && ! symbol_used_p (symp)))
&& ! symbol_used_in_reloc_p (symp)))
{
symbol_remove (symp, &symbol_rootP, &symbol_lastP);
/* After symbol_remove, symbol_next(symp) still returns
the one that came after it in the chain. So we don't
need to do any extra cleanup work here. */
continue;
}
/* Make sure we really got a value for the symbol. */
if (! symbol_resolved_p (symp))
{
as_bad (_("can't resolve value for symbol `%s'"),
S_GET_NAME (symp));
symbol_mark_resolved (symp);
}
/* Set the value into the BFD symbol. Up til now the value
has only been kept in the gas symbolS struct. */
symbol_get_bfdsym (symp)->value = S_GET_VALUE (symp);
/* A warning construct is a warning symbol followed by the
symbol warned about. Don't let anything object-format or
target-specific muck with it; it's ready for output. */
if (symbol_get_bfdsym (symp)->flags & BSF_WARNING)
skip_next_symbol = TRUE;
}
}
PROGRESS (1);
/* Now do any format-specific adjustments to the symbol table, such
as adding file symbols. */
#ifdef tc_adjust_symtab
tc_adjust_symtab ();
#endif
#ifdef obj_adjust_symtab
obj_adjust_symtab ();
#endif
/* Stop if there is an error. */
if (had_errors ())
return;
/* Now that all the sizes are known, and contents correct, we can
start writing to the file. */
set_symtab ();
/* If *_frob_file changes the symbol value at this point, it is
responsible for moving the changed value into symp->bsym->value
as well. Hopefully all symbol value changing can be done in
*_frob_symbol. */
#ifdef tc_frob_file
tc_frob_file ();
#endif
#ifdef obj_frob_file
obj_frob_file ();
#endif
#ifdef obj_coff_generate_pdata
obj_coff_generate_pdata ();
#endif
bfd_map_over_sections (stdoutput, write_relocs, (char *) 0);
#ifdef tc_frob_file_after_relocs
tc_frob_file_after_relocs ();
#endif
#ifdef obj_frob_file_after_relocs
obj_frob_file_after_relocs ();
#endif
/* Once all relocations have been written, we can compress the
contents of the debug sections. This needs to be done before
we start writing any sections, because it will affect the file
layout, which is fixed once we start writing contents. */
if (flag_compress_debug)
bfd_map_over_sections (stdoutput, compress_debug, (char *) 0);
bfd_map_over_sections (stdoutput, write_contents, (char *) 0);
}
#ifdef TC_GENERIC_RELAX_TABLE
/* Relax a fragment by scanning TC_GENERIC_RELAX_TABLE. */
long
relax_frag (segT segment, fragS *fragP, long stretch)
{
const relax_typeS *this_type;
const relax_typeS *start_type;
relax_substateT next_state;
relax_substateT this_state;
offsetT growth;
offsetT aim;
addressT target;
addressT address;
symbolS *symbolP;
const relax_typeS *table;
target = fragP->fr_offset;
address = fragP->fr_address;
table = TC_GENERIC_RELAX_TABLE;
this_state = fragP->fr_subtype;
start_type = this_type = table + this_state;
symbolP = fragP->fr_symbol;
if (symbolP)
{
fragS *sym_frag;
sym_frag = symbol_get_frag (symbolP);
#ifndef DIFF_EXPR_OK
know (sym_frag != NULL);
#endif
know (S_GET_SEGMENT (symbolP) != absolute_section
|| sym_frag == &zero_address_frag);
target += S_GET_VALUE (symbolP);
/* If SYM_FRAG has yet to be reached on this pass, assume it
will move by STRETCH just as we did, unless there is an
alignment frag between here and SYM_FRAG. An alignment may
well absorb any STRETCH, and we don't want to choose a larger
branch insn by overestimating the needed reach of this
         branch.  It isn't critical to calculate TARGET exactly; we
know we'll be doing another pass if STRETCH is non-zero. */
if (stretch != 0
&& sym_frag->relax_marker != fragP->relax_marker
&& S_GET_SEGMENT (symbolP) == segment)
{
if (stretch < 0
|| sym_frag->region == fragP->region)
target += stretch;
/* If we get here we know we have a forward branch. This
relax pass may have stretched previous instructions so
far that omitting STRETCH would make the branch
negative. Don't allow this in case the negative reach is
large enough to require a larger branch instruction. */
else if (target < address)
target = fragP->fr_next->fr_address + stretch;
}
}
aim = target - address - fragP->fr_fix;
#ifdef TC_PCREL_ADJUST
/* Currently only the ns32k family needs this. */
aim += TC_PCREL_ADJUST (fragP);
#endif
#ifdef md_prepare_relax_scan
/* Formerly called M68K_AIM_KLUDGE. */
md_prepare_relax_scan (fragP, address, aim, this_state, this_type);
#endif
if (aim < 0)
{
/* Look backwards. */
for (next_state = this_type->rlx_more; next_state;)
if (aim >= this_type->rlx_backward)
next_state = 0;
else
{
/* Grow to next state. */
this_state = next_state;
this_type = table + this_state;
next_state = this_type->rlx_more;
}
}
else
{
/* Look forwards. */
for (next_state = this_type->rlx_more; next_state;)
if (aim <= this_type->rlx_forward)
next_state = 0;
else
{
/* Grow to next state. */
this_state = next_state;
this_type = table + this_state;
next_state = this_type->rlx_more;
}
}
growth = this_type->rlx_length - start_type->rlx_length;
if (growth != 0)
fragP->fr_subtype = this_state;
return growth;
}
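/* Worked example of the table scan above (illustrative only; the state
   numbers, reaches and lengths are hypothetical, not taken from any real
   TC_GENERIC_RELAX_TABLE).  Suppose a branch has two variants:
       state 1: rlx_forward = 127,   rlx_backward = -128,   rlx_length = 2, rlx_more = 2
       state 2: rlx_forward = 32767, rlx_backward = -32768, rlx_length = 4, rlx_more = 0
   Starting from fr_subtype == 1 with aim == 200, the forward scan sees
   200 > 127 and grows to state 2, whose reach covers the aim.  The
   returned growth is 4 - 2 = 2 and fr_subtype is updated to 2.  */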
#endif /* defined (TC_GENERIC_RELAX_TABLE) */
/* Relax_align. Advance location counter to next address that has 'alignment'
lowest order bits all 0s, return size of adjustment made. */
static relax_addressT
relax_align (register relax_addressT address, /* Address now. */
register int alignment /* Alignment (binary). */)
{
relax_addressT mask;
relax_addressT new_address;
mask = ~((~0) << alignment);
new_address = (address + mask) & (~mask);
#ifdef LINKER_RELAXING_SHRINKS_ONLY
if (linkrelax)
/* We must provide lots of padding, so the linker can discard it
when needed. The linker will not add extra space, ever. */
new_address += (1 << alignment);
#endif
return (new_address - address);
}
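/* Worked example (not part of the original source): relax_align (0x1003, 2)
   computes mask = 3 and new_address = (0x1003 + 3) & ~3 = 0x1004, so one
   byte of padding is returned.  An address already on the boundary, such
   as 0x1004, yields 0 (plus a further 1 << alignment only when
   LINKER_RELAXING_SHRINKS_ONLY is defined and linkrelax is set).  */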
/* Now we have a segment, not a crowd of sub-segments, we can make
fr_address values.
Relax the frags.
After this, all frags in this segment have addresses that are correct
within the segment. Since segments live in different file addresses,
these frag addresses may not be the same as final object-file
addresses. */
int
relax_segment (struct frag *segment_frag_root, segT segment, int pass)
{
unsigned long frag_count;
struct frag *fragP;
relax_addressT address;
int region;
int ret;
/* In case md_estimate_size_before_relax() wants to make fixSs. */
subseg_change (segment, 0);
/* For each frag in segment: count and store (a 1st guess of)
fr_address. */
address = 0;
region = 0;
for (frag_count = 0, fragP = segment_frag_root;
fragP;
fragP = fragP->fr_next, frag_count ++)
{
fragP->region = region;
fragP->relax_marker = 0;
fragP->fr_address = address;
address += fragP->fr_fix;
switch (fragP->fr_type)
{
case rs_fill:
address += fragP->fr_offset * fragP->fr_var;
break;
case rs_align:
case rs_align_code:
case rs_align_test:
{
addressT offset = relax_align (address, (int) fragP->fr_offset);
if (fragP->fr_subtype != 0 && offset > fragP->fr_subtype)
offset = 0;
if (offset % fragP->fr_var != 0)
{
as_bad_where (fragP->fr_file, fragP->fr_line,
_("alignment padding (%lu bytes) not a multiple of %ld"),
(unsigned long) offset, (long) fragP->fr_var);
offset -= (offset % fragP->fr_var);
}
address += offset;
region += 1;
}
break;
case rs_org:
/* Assume .org is nugatory. It will grow with 1st relax. */
region += 1;
break;
case rs_space:
break;
case rs_machine_dependent:
/* If fr_symbol is an expression, this call to
resolve_symbol_value sets up the correct segment, which will
likely be needed in md_estimate_size_before_relax. */
if (fragP->fr_symbol)
resolve_symbol_value (fragP->fr_symbol);
address += md_estimate_size_before_relax (fragP, segment);
break;
#ifndef WORKING_DOT_WORD
/* Broken words don't concern us yet. */
case rs_broken_word:
break;
#endif
case rs_leb128:
/* Initial guess is always 1; doing otherwise can result in
stable solutions that are larger than the minimum. */
address += fragP->fr_offset = 1;
break;
case rs_cfa:
address += eh_frame_estimate_size_before_relax (fragP);
break;
case rs_dwarf2dbg:
address += dwarf2dbg_estimate_size_before_relax (fragP);
break;
default:
BAD_CASE (fragP->fr_type);
break;
}
}
/* Do relax(). */
{
unsigned long max_iterations;
/* Cumulative address adjustment. */
offsetT stretch;
/* Have we made any adjustment this pass? We can't just test
stretch because one piece of code may have grown and another
shrank. */
int stretched;
/* Most horrible, but gcc may give us some exception data that
is impossible to assemble, of the form
.align 4
.byte 0, 0
.uleb128 end - start
start:
.space 128*128 - 1
.align 4
end:
If the leb128 is two bytes in size, then end-start is 128*128,
which requires a three byte leb128. If the leb128 is three
bytes in size, then end-start is 128*128-1, which requires a
two byte leb128. We work around this dilemma by inserting
an extra 4 bytes of alignment just after the .align. This
works because the data after the align is accessed relative to
the end label.
This counter is used in a tiny state machine to detect
whether a leb128 followed by an align is impossible to
relax. */
int rs_leb128_fudge = 0;
/* We want to prevent going into an infinite loop where one frag grows
depending upon the location of a symbol which is in turn moved by
the growing frag. eg:
foo = .
.org foo+16
foo = .
So we dictate that this algorithm can be at most O2. */
max_iterations = frag_count * frag_count;
/* Check for overflow. */
if (max_iterations < frag_count)
max_iterations = frag_count;
ret = 0;
do
{
stretch = 0;
stretched = 0;
for (fragP = segment_frag_root; fragP; fragP = fragP->fr_next)
{
offsetT growth = 0;
addressT was_address;
offsetT offset;
symbolS *symbolP;
fragP->relax_marker ^= 1;
was_address = fragP->fr_address;
address = fragP->fr_address += stretch;
symbolP = fragP->fr_symbol;
offset = fragP->fr_offset;
switch (fragP->fr_type)
{
case rs_fill: /* .fill never relaxes. */
growth = 0;
break;
#ifndef WORKING_DOT_WORD
/* JF: This is RMS's idea. I do *NOT* want to be blamed
                   for it; I do not want to write it.  I do not want to have
anything to do with it. This is not the proper way to
implement this misfeature. */
case rs_broken_word:
{
struct broken_word *lie;
struct broken_word *untruth;
/* Yes this is ugly (storing the broken_word pointer
in the symbol slot). Still, this whole chunk of
code is ugly, and I don't feel like doing anything
about it. Think of it as stubbornness in action. */
growth = 0;
for (lie = (struct broken_word *) (fragP->fr_symbol);
lie && lie->dispfrag == fragP;
lie = lie->next_broken_word)
{
if (lie->added)
continue;
offset = (S_GET_VALUE (lie->add)
+ lie->addnum
- S_GET_VALUE (lie->sub));
if (offset <= -32768 || offset >= 32767)
{
if (flag_warn_displacement)
{
char buf[50];
sprint_value (buf, (addressT) lie->addnum);
as_warn_where (fragP->fr_file, fragP->fr_line,
_(".word %s-%s+%s didn't fit"),
S_GET_NAME (lie->add),
S_GET_NAME (lie->sub),
buf);
}
if (fragP->fr_subtype == 0)
{
fragP->fr_subtype++;
growth += md_short_jump_size;
}
/* Redirect *all* words of this table with the same
target, lest we have to handle the case where the
                             same target but with an offset that fits on this
round overflows at the next relaxation round. */
for (untruth = (struct broken_word *) (fragP->fr_symbol);
untruth && untruth->dispfrag == lie->dispfrag;
untruth = untruth->next_broken_word)
if ((symbol_get_frag (untruth->add)
== symbol_get_frag (lie->add))
&& (S_GET_VALUE (untruth->add)
== S_GET_VALUE (lie->add)))
{
untruth->added = 2;
untruth->use_jump = lie;
}
lie->added = 1;
growth += md_long_jump_size;
}
}
break;
} /* case rs_broken_word */
#endif
case rs_align:
case rs_align_code:
case rs_align_test:
{
addressT oldoff, newoff;
oldoff = relax_align (was_address + fragP->fr_fix,
(int) offset);
newoff = relax_align (address + fragP->fr_fix,
(int) offset);
if (fragP->fr_subtype != 0)
{
if (oldoff > fragP->fr_subtype)
oldoff = 0;
if (newoff > fragP->fr_subtype)
newoff = 0;
}
growth = newoff - oldoff;
/* If this align happens to follow a leb128 and
we have determined that the leb128 is bouncing
in size, then break the cycle by inserting an
extra alignment. */
if (growth < 0
&& (rs_leb128_fudge & 16) != 0
&& (rs_leb128_fudge & 15) >= 2)
{
segment_info_type *seginfo = seg_info (segment);
struct obstack *ob = &seginfo->frchainP->frch_obstack;
struct frag *newf;
newf = frag_alloc (ob);
obstack_blank_fast (ob, fragP->fr_var);
obstack_finish (ob);
memcpy (newf, fragP, SIZEOF_STRUCT_FRAG);
memcpy (newf->fr_literal,
fragP->fr_literal + fragP->fr_fix,
fragP->fr_var);
newf->fr_type = rs_fill;
newf->fr_address = address + fragP->fr_fix + newoff;
newf->fr_fix = 0;
newf->fr_offset = (((offsetT) 1 << fragP->fr_offset)
/ fragP->fr_var);
if (newf->fr_offset * newf->fr_var
!= (offsetT) 1 << fragP->fr_offset)
{
newf->fr_offset = (offsetT) 1 << fragP->fr_offset;
newf->fr_var = 1;
}
/* Include size of new frag in GROWTH. */
growth += newf->fr_offset * newf->fr_var;
/* Adjust the new frag address for the amount
we'll add when we process the new frag. */
newf->fr_address -= stretch + growth;
newf->relax_marker ^= 1;
fragP->fr_next = newf;
#ifdef DEBUG
as_warn (_("padding added"));
#endif
}
}
break;
case rs_org:
{
addressT target = offset;
addressT after;
if (symbolP)
{
/* Convert from an actual address to an octet offset
into the section. Here it is assumed that the
                       section's VMA is zero, so we can omit subtracting it
from the symbol's value to get the address offset. */
know (S_GET_SEGMENT (symbolP)->vma == 0);
target += S_GET_VALUE (symbolP) * OCTETS_PER_BYTE;
}
know (fragP->fr_next);
after = fragP->fr_next->fr_address + stretch;
growth = target - after;
if (growth < 0)
{
growth = 0;
/* Don't error on first few frag relax passes.
The symbol might be an expression involving
symbol values from other sections. If those
sections have not yet been processed their
frags will all have zero addresses, so we
will calculate incorrect values for them. The
number of passes we allow before giving an
error is somewhat arbitrary. It should be at
least one, with larger values requiring
increasingly contrived dependencies between
frags to trigger a false error. */
if (pass < 2)
{
/* Force another pass. */
ret = 1;
break;
}
/* Growth may be negative, but variable part of frag
cannot have fewer than 0 chars. That is, we can't
.org backwards. */
as_bad_where (fragP->fr_file, fragP->fr_line,
_("attempt to move .org backwards"));
/* We've issued an error message. Change the
frag to avoid cascading errors. */
fragP->fr_type = rs_align;
fragP->fr_subtype = 0;
fragP->fr_offset = 0;
fragP->fr_fix = after - address;
}
}
break;
case rs_space:
growth = 0;
if (symbolP)
{
offsetT amount;
amount = S_GET_VALUE (symbolP);
if (S_GET_SEGMENT (symbolP) != absolute_section
|| S_IS_COMMON (symbolP)
|| ! S_IS_DEFINED (symbolP))
{
as_bad_where (fragP->fr_file, fragP->fr_line,
_(".space specifies non-absolute value"));
/* Prevent repeat of this error message. */
fragP->fr_symbol = 0;
}
else if (amount < 0)
{
/* Don't error on first few frag relax passes.
See rs_org comment for a longer explanation. */
if (pass < 2)
{
ret = 1;
break;
}
as_warn_where (fragP->fr_file, fragP->fr_line,
_(".space or .fill with negative value, ignored"));
fragP->fr_symbol = 0;
}
else
growth = (was_address + fragP->fr_fix + amount
- fragP->fr_next->fr_address);
}
break;
case rs_machine_dependent:
#ifdef md_relax_frag
growth = md_relax_frag (segment, fragP, stretch);
#else
#ifdef TC_GENERIC_RELAX_TABLE
/* The default way to relax a frag is to look through
TC_GENERIC_RELAX_TABLE. */
growth = relax_frag (segment, fragP, stretch);
#endif /* TC_GENERIC_RELAX_TABLE */
#endif
break;
case rs_leb128:
{
valueT value;
offsetT size;
value = resolve_symbol_value (fragP->fr_symbol);
size = sizeof_leb128 (value, fragP->fr_subtype);
growth = size - fragP->fr_offset;
fragP->fr_offset = size;
}
break;
case rs_cfa:
growth = eh_frame_relax_frag (fragP);
break;
case rs_dwarf2dbg:
growth = dwarf2dbg_relax_frag (fragP);
break;
default:
BAD_CASE (fragP->fr_type);
break;
}
if (growth)
{
stretch += growth;
stretched = 1;
if (fragP->fr_type == rs_leb128)
rs_leb128_fudge += 16;
else if (fragP->fr_type == rs_align
&& (rs_leb128_fudge & 16) != 0
&& stretch == 0)
rs_leb128_fudge += 16;
else
rs_leb128_fudge = 0;
}
}
if (stretch == 0
&& (rs_leb128_fudge & 16) == 0
&& (rs_leb128_fudge & -16) != 0)
rs_leb128_fudge += 1;
else
rs_leb128_fudge = 0;
}
/* Until nothing further to relax. */
while (stretched && -- max_iterations);
if (stretched)
as_fatal (_("Infinite loop encountered whilst attempting to compute the addresses of symbols in section %s"),
segment_name (segment));
}
for (fragP = segment_frag_root; fragP; fragP = fragP->fr_next)
if (fragP->last_fr_address != fragP->fr_address)
{
fragP->last_fr_address = fragP->fr_address;
ret = 1;
}
return ret;
}
void
number_to_chars_bigendian (char *buf, valueT val, int n)
{
if (n <= 0)
abort ();
while (n--)
{
buf[n] = val & 0xff;
val >>= 8;
}
}
void
number_to_chars_littleendian (char *buf, valueT val, int n)
{
if (n <= 0)
abort ();
while (n--)
{
*buf++ = val & 0xff;
val >>= 8;
}
}
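/* Example (illustrative): with val == 0x1234 and n == 2,
   number_to_chars_bigendian stores { 0x12, 0x34 } while
   number_to_chars_littleendian stores { 0x34, 0x12 }.  Values wider than
   N bytes are silently truncated to their low-order N bytes.  */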
void
write_print_statistics (FILE *file)
{
fprintf (file, "fixups: %d\n", n_fixups);
}
/* For debugging. */
extern int indent_level;
void
print_fixup (fixS *fixp)
{
indent_level = 1;
fprintf (stderr, "fix ");
fprintf_vma (stderr, (bfd_vma)((bfd_hostptr_t) fixp));
fprintf (stderr, " %s:%d",fixp->fx_file, fixp->fx_line);
if (fixp->fx_pcrel)
fprintf (stderr, " pcrel");
if (fixp->fx_pcrel_adjust)
fprintf (stderr, " pcrel_adjust=%d", fixp->fx_pcrel_adjust);
if (fixp->fx_im_disp)
{
#ifdef TC_NS32K
fprintf (stderr, " im_disp=%d", fixp->fx_im_disp);
#else
fprintf (stderr, " im_disp");
#endif
}
if (fixp->fx_tcbit)
fprintf (stderr, " tcbit");
if (fixp->fx_done)
fprintf (stderr, " done");
fprintf (stderr, "\n size=%d frag=", fixp->fx_size);
fprintf_vma (stderr, (bfd_vma) ((bfd_hostptr_t) fixp->fx_frag));
fprintf (stderr, " where=%ld offset=%lx addnumber=%lx",
(long) fixp->fx_where,
(unsigned long) fixp->fx_offset,
(unsigned long) fixp->fx_addnumber);
fprintf (stderr, "\n %s (%d)", bfd_get_reloc_code_name (fixp->fx_r_type),
fixp->fx_r_type);
if (fixp->fx_addsy)
{
fprintf (stderr, "\n +<");
print_symbol_value_1 (stderr, fixp->fx_addsy);
fprintf (stderr, ">");
}
if (fixp->fx_subsy)
{
fprintf (stderr, "\n -<");
print_symbol_value_1 (stderr, fixp->fx_subsy);
fprintf (stderr, ">");
}
fprintf (stderr, "\n");
#ifdef TC_FIX_DATA_PRINT
TC_FIX_DATA_PRINT (stderr, fixp);
#endif
}
| {
"content_hash": "1e67a811f40f94f7d6ea0015dc35863c",
"timestamp": "",
"source": "github",
"line_count": 2861,
"max_line_length": 141,
"avg_line_length": 27.21321216357917,
"alnum_prop": 0.6068561593690998,
"repo_name": "alvieboy/xtc-base",
"id": "41fd9030734d6215eb944fcad24c6b89e432939e",
"size": "78828",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "binutils-2.23.1/gas/write.c",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "14022177"
},
{
"name": "Awk",
"bytes": "1218"
},
{
"name": "C",
"bytes": "85314980"
},
{
"name": "C++",
"bytes": "7845704"
},
{
"name": "Common Lisp",
"bytes": "38866"
},
{
"name": "D",
"bytes": "13746885"
},
{
"name": "DIGITAL Command Language",
"bytes": "27922"
},
{
"name": "DTrace",
"bytes": "4262126"
},
{
"name": "E",
"bytes": "1645"
},
{
"name": "Eiffel",
"bytes": "1161"
},
{
"name": "Elixir",
"bytes": "157"
},
{
"name": "Emacs Lisp",
"bytes": "27586"
},
{
"name": "GAP",
"bytes": "11624"
},
{
"name": "GDScript",
"bytes": "53491"
},
{
"name": "Groff",
"bytes": "584049"
},
{
"name": "HTML",
"bytes": "802"
},
{
"name": "Lex",
"bytes": "338783"
},
{
"name": "Logos",
"bytes": "14359"
},
{
"name": "Makefile",
"bytes": "3700494"
},
{
"name": "Mathematica",
"bytes": "356"
},
{
"name": "Matlab",
"bytes": "7741"
},
{
"name": "Objective-C",
"bytes": "140723"
},
{
"name": "Perl",
"bytes": "151937"
},
{
"name": "Perl6",
"bytes": "41829"
},
{
"name": "PicoLisp",
"bytes": "15997"
},
{
"name": "Pure Data",
"bytes": "20780"
},
{
"name": "R",
"bytes": "400799"
},
{
"name": "Rebol",
"bytes": "25794"
},
{
"name": "Scheme",
"bytes": "2172768"
},
{
"name": "Shell",
"bytes": "753644"
},
{
"name": "SuperCollider",
"bytes": "393798"
},
{
"name": "Tcl",
"bytes": "1297"
},
{
"name": "TeX",
"bytes": "706553"
},
{
"name": "VHDL",
"bytes": "515432"
},
{
"name": "Yacc",
"bytes": "418455"
}
],
"symlink_target": ""
} |
/**
* Licensed under Apache License v2. See LICENSE for more information.
*/
package org.inaetics.remote.itest.util;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.service.remoteserviceadmin.EndpointDescription;
import org.osgi.service.remoteserviceadmin.EndpointListener;
/**
* Self-registering utility that uses an {@link EndpointListener} to await an added or removed callback
* for a specific {@link EndpointDescription}.
*
* @author <a href="mailto:[email protected]">Amdatu Project Team</a>
*/
@SuppressWarnings("deprecation")
public final class BlockingEndpointListener extends AbstractBlockingEndpointListener<EndpointListener> {
private final String[] m_objectClass = new String[] { EndpointListener.class.getName() };
private final Object m_listenerInstance = new InternalEndpointListener();
public BlockingEndpointListener(final BundleContext context, final EndpointDescription description) {
this(context, description, "(" + Constants.OBJECTCLASS + "=*)");
}
public BlockingEndpointListener(final BundleContext context, final EndpointDescription description,
final String scopeFilter) {
super(context, scopeFilter, description);
}
@Override
protected Object getListener() {
return m_listenerInstance;
}
private class InternalEndpointListener implements EndpointListener {
@Override
public void endpointAdded(EndpointDescription endpoint, String matchedFilter) {
BlockingEndpointListener.this.endpointAdded(endpoint);
}
@Override
public void endpointRemoved(EndpointDescription endpoint, String matchedFilter) {
BlockingEndpointListener.this.endpointRemoved(endpoint);
}
}
@Override
protected String getScopeKey() {
return EndpointListener.ENDPOINT_LISTENER_SCOPE;
}
@Override
protected String[] getObjectClass() {
return m_objectClass;
}
} | {
"content_hash": "be4b4b84c59ffceeb23242b20730ee33",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 105,
"avg_line_length": 33.61666666666667,
"alnum_prop": 0.7337630143777888,
"repo_name": "INAETICS/node-wiring-secure",
"id": "917e1a978d3da7483bbe1f9982e1676c3389f10d",
"size": "2017",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "node-wiring-secure-java/org.inaetics.remote.itest/src/org/inaetics/remote/itest/util/BlockingEndpointListener.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "665760"
},
{
"name": "C++",
"bytes": "36572"
},
{
"name": "CMake",
"bytes": "16393"
},
{
"name": "Java",
"bytes": "501330"
},
{
"name": "Objective-C",
"bytes": "7011"
},
{
"name": "Shell",
"bytes": "11273"
}
],
"symlink_target": ""
} |
"""Provides functionality to interact with fans."""
from datetime import timedelta
import functools as ft
import logging
from typing import List, Optional
import voluptuous as vol
from homeassistant.const import (
SERVICE_TOGGLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.loader import bind_hass
from homeassistant.util.percentage import (
ordered_list_item_to_percentage,
percentage_to_ordered_list_item,
)
_LOGGER = logging.getLogger(__name__)
DOMAIN = "fan"
SCAN_INTERVAL = timedelta(seconds=30)
ENTITY_ID_FORMAT = DOMAIN + ".{}"
# Bitfield of features supported by the fan entity
SUPPORT_SET_SPEED = 1
SUPPORT_OSCILLATE = 2
SUPPORT_DIRECTION = 4
SUPPORT_PRESET_MODE = 8
SERVICE_SET_SPEED = "set_speed"
SERVICE_OSCILLATE = "oscillate"
SERVICE_SET_DIRECTION = "set_direction"
SERVICE_SET_PERCENTAGE = "set_percentage"
SERVICE_SET_PRESET_MODE = "set_preset_mode"
SPEED_OFF = "off"
SPEED_LOW = "low"
SPEED_MEDIUM = "medium"
SPEED_HIGH = "high"
DIRECTION_FORWARD = "forward"
DIRECTION_REVERSE = "reverse"
ATTR_SPEED = "speed"
ATTR_PERCENTAGE = "percentage"
ATTR_SPEED_LIST = "speed_list"
ATTR_OSCILLATING = "oscillating"
ATTR_DIRECTION = "direction"
ATTR_PRESET_MODE = "preset_mode"
ATTR_PRESET_MODES = "preset_modes"
# Invalid speeds do not conform to the entity model, but have crept
# into core integrations at some point so we are temporarily
# accommodating them in the transition to percentages.
_NOT_SPEED_OFF = "off"
_NOT_SPEED_ON = "on"
_NOT_SPEED_AUTO = "auto"
_NOT_SPEED_SMART = "smart"
_NOT_SPEED_INTERVAL = "interval"
_NOT_SPEED_IDLE = "idle"
_NOT_SPEED_FAVORITE = "favorite"
_NOT_SPEED_SLEEP = "sleep"
_NOT_SPEEDS_FILTER = {
_NOT_SPEED_OFF,
_NOT_SPEED_ON,
_NOT_SPEED_AUTO,
_NOT_SPEED_SMART,
_NOT_SPEED_INTERVAL,
_NOT_SPEED_IDLE,
_NOT_SPEED_SLEEP,
_NOT_SPEED_FAVORITE,
}
_FAN_NATIVE = "_fan_native"
OFF_SPEED_VALUES = [SPEED_OFF, None]
LEGACY_SPEED_LIST = [SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]
class NoValidSpeedsError(ValueError):
"""Exception class when there are no valid speeds."""
class NotValidSpeedError(ValueError):
"""Exception class when the speed in not in the speed list."""
class NotValidPresetModeError(ValueError):
"""Exception class when the preset_mode in not in the preset_modes list."""
@bind_hass
def is_on(hass, entity_id: str) -> bool:
"""Return if the fans are on based on the statemachine."""
state = hass.states.get(entity_id)
if ATTR_SPEED in state.attributes:
return state.attributes[ATTR_SPEED] not in OFF_SPEED_VALUES
return state.state == STATE_ON
async def async_setup(hass, config: dict):
"""Expose fan control via statemachine and services."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
# After the transition to percentage and preset_modes concludes,
# switch this back to async_turn_on and remove async_turn_on_compat
component.async_register_entity_service(
SERVICE_TURN_ON,
{
vol.Optional(ATTR_SPEED): cv.string,
vol.Optional(ATTR_PERCENTAGE): vol.All(
vol.Coerce(int), vol.Range(min=0, max=100)
),
vol.Optional(ATTR_PRESET_MODE): cv.string,
},
"async_turn_on_compat",
)
component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off")
component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle")
# After the transition to percentage and preset_modes concludes,
# remove this service
component.async_register_entity_service(
SERVICE_SET_SPEED,
{vol.Required(ATTR_SPEED): cv.string},
"async_set_speed_deprecated",
[SUPPORT_SET_SPEED],
)
component.async_register_entity_service(
SERVICE_OSCILLATE,
{vol.Required(ATTR_OSCILLATING): cv.boolean},
"async_oscillate",
[SUPPORT_OSCILLATE],
)
component.async_register_entity_service(
SERVICE_SET_DIRECTION,
{vol.Optional(ATTR_DIRECTION): cv.string},
"async_set_direction",
[SUPPORT_DIRECTION],
)
component.async_register_entity_service(
SERVICE_SET_PERCENTAGE,
{
vol.Required(ATTR_PERCENTAGE): vol.All(
vol.Coerce(int), vol.Range(min=0, max=100)
)
},
"async_set_percentage",
[SUPPORT_SET_SPEED],
)
component.async_register_entity_service(
SERVICE_SET_PRESET_MODE,
{vol.Required(ATTR_PRESET_MODE): cv.string},
"async_set_preset_mode",
[SUPPORT_SET_SPEED, SUPPORT_PRESET_MODE],
)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
def _fan_native(method):
"""Native fan method not overridden."""
setattr(method, _FAN_NATIVE, True)
return method
class FanEntity(ToggleEntity):
"""Representation of a fan."""
@_fan_native
def set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
raise NotImplementedError()
async def async_set_speed_deprecated(self, speed: str):
"""Set the speed of the fan."""
_LOGGER.warning(
"fan.set_speed is deprecated, use fan.set_percentage or fan.set_preset_mode instead."
)
await self.async_set_speed(speed)
@_fan_native
async def async_set_speed(self, speed: str):
"""Set the speed of the fan."""
if speed == SPEED_OFF:
await self.async_turn_off()
return
if speed in self.preset_modes:
if not hasattr(self.async_set_preset_mode, _FAN_NATIVE):
await self.async_set_preset_mode(speed)
return
if not hasattr(self.set_preset_mode, _FAN_NATIVE):
await self.hass.async_add_executor_job(self.set_preset_mode, speed)
return
else:
if not hasattr(self.async_set_percentage, _FAN_NATIVE):
await self.async_set_percentage(self.speed_to_percentage(speed))
return
if not hasattr(self.set_percentage, _FAN_NATIVE):
await self.hass.async_add_executor_job(
self.set_percentage, self.speed_to_percentage(speed)
)
return
await self.hass.async_add_executor_job(self.set_speed, speed)
@_fan_native
def set_percentage(self, percentage: int) -> None:
"""Set the speed of the fan, as a percentage."""
raise NotImplementedError()
@_fan_native
async def async_set_percentage(self, percentage: int) -> None:
"""Set the speed of the fan, as a percentage."""
if percentage == 0:
await self.async_turn_off()
elif not hasattr(self.set_percentage, _FAN_NATIVE):
await self.hass.async_add_executor_job(self.set_percentage, percentage)
else:
await self.async_set_speed(self.percentage_to_speed(percentage))
@_fan_native
def set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
self._valid_preset_mode_or_raise(preset_mode)
self.set_speed(preset_mode)
@_fan_native
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
if not hasattr(self.set_preset_mode, _FAN_NATIVE):
await self.hass.async_add_executor_job(self.set_preset_mode, preset_mode)
return
self._valid_preset_mode_or_raise(preset_mode)
await self.async_set_speed(preset_mode)
def _valid_preset_mode_or_raise(self, preset_mode):
"""Raise NotValidPresetModeError on invalid preset_mode."""
preset_modes = self.preset_modes
if preset_mode not in preset_modes:
raise NotValidPresetModeError(
f"The preset_mode {preset_mode} is not a valid preset_mode: {preset_modes}"
)
def set_direction(self, direction: str) -> None:
"""Set the direction of the fan."""
raise NotImplementedError()
async def async_set_direction(self, direction: str):
"""Set the direction of the fan."""
await self.hass.async_add_executor_job(self.set_direction, direction)
# pylint: disable=arguments-differ
def turn_on(
self,
speed: Optional[str] = None,
percentage: Optional[int] = None,
preset_mode: Optional[str] = None,
**kwargs,
) -> None:
"""Turn on the fan."""
raise NotImplementedError()
# pylint: disable=arguments-differ
async def async_turn_on_compat(
self,
speed: Optional[str] = None,
percentage: Optional[int] = None,
preset_mode: Optional[str] = None,
**kwargs,
) -> None:
"""Turn on the fan.
This _compat version wraps async_turn_on with
backwards and forward compatibility.
After the transition to percentage and preset_modes concludes, it
should be removed.
"""
if preset_mode is not None:
self._valid_preset_mode_or_raise(preset_mode)
speed = preset_mode
percentage = None
elif speed is not None:
_LOGGER.warning(
"Calling fan.turn_on with the speed argument is deprecated, use percentage or preset_mode instead."
)
if speed in self.preset_modes:
preset_mode = speed
percentage = None
else:
percentage = self.speed_to_percentage(speed)
elif percentage is not None:
speed = self.percentage_to_speed(percentage)
await self.async_turn_on(
speed=speed,
percentage=percentage,
preset_mode=preset_mode,
**kwargs,
)
# pylint: disable=arguments-differ
async def async_turn_on(
self,
speed: Optional[str] = None,
percentage: Optional[int] = None,
preset_mode: Optional[str] = None,
**kwargs,
) -> None:
"""Turn on the fan."""
if speed == SPEED_OFF:
await self.async_turn_off()
else:
await self.hass.async_add_executor_job(
ft.partial(
self.turn_on,
speed=speed,
percentage=percentage,
preset_mode=preset_mode,
**kwargs,
)
)
def oscillate(self, oscillating: bool) -> None:
"""Oscillate the fan."""
raise NotImplementedError()
async def async_oscillate(self, oscillating: bool):
"""Oscillate the fan."""
await self.hass.async_add_executor_job(self.oscillate, oscillating)
@property
def is_on(self):
"""Return true if the entity is on."""
return self.speed not in [SPEED_OFF, None]
@property
def _implemented_percentage(self):
"""Return true if percentage has been implemented."""
return not hasattr(self.set_percentage, _FAN_NATIVE) or not hasattr(
self.async_set_percentage, _FAN_NATIVE
)
@property
def _implemented_preset_mode(self):
"""Return true if preset_mode has been implemented."""
return not hasattr(self.set_preset_mode, _FAN_NATIVE) or not hasattr(
self.async_set_preset_mode, _FAN_NATIVE
)
@property
def _implemented_speed(self):
"""Return true if speed has been implemented."""
return not hasattr(self.set_speed, _FAN_NATIVE) or not hasattr(
self.async_set_speed, _FAN_NATIVE
)
@property
def speed(self) -> Optional[str]:
"""Return the current speed."""
if self._implemented_preset_mode:
preset_mode = self.preset_mode
if preset_mode:
return preset_mode
if self._implemented_percentage:
percentage = self.percentage
if percentage is None:
return None
return self.percentage_to_speed(percentage)
return None
@property
def percentage(self) -> Optional[int]:
"""Return the current speed as a percentage."""
if not self._implemented_preset_mode:
if self.speed in self.preset_modes:
return None
if not self._implemented_percentage:
return self.speed_to_percentage(self.speed)
return 0
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
speeds = []
if self._implemented_percentage:
speeds += [SPEED_OFF, *LEGACY_SPEED_LIST]
if self._implemented_preset_mode:
speeds += self.preset_modes
return speeds
@property
def current_direction(self) -> Optional[str]:
"""Return the current direction of the fan."""
return None
@property
def oscillating(self):
"""Return whether or not the fan is currently oscillating."""
return None
@property
def capability_attributes(self):
"""Return capability attributes."""
attrs = {}
if self.supported_features & SUPPORT_SET_SPEED:
attrs[ATTR_SPEED_LIST] = self.speed_list
if (
self.supported_features & SUPPORT_SET_SPEED
or self.supported_features & SUPPORT_PRESET_MODE
):
attrs[ATTR_PRESET_MODES] = self.preset_modes
return attrs
@property
def _speed_list_without_preset_modes(self) -> list:
"""Return the speed list without preset modes.
This property provides forward and backwards
compatibility for conversion to percentage speeds.
"""
if not self._implemented_speed:
return LEGACY_SPEED_LIST
return speed_list_without_preset_modes(self.speed_list)
def speed_to_percentage(self, speed: str) -> int:
"""
Map a speed to a percentage.
Officially this should only have to deal with the 4 pre-defined speeds:
return {
SPEED_OFF: 0,
SPEED_LOW: 33,
SPEED_MEDIUM: 66,
SPEED_HIGH: 100,
}[speed]
Unfortunately lots of fans make up their own speeds. So the default
mapping is more dynamic.
"""
if speed in OFF_SPEED_VALUES:
return 0
speed_list = self._speed_list_without_preset_modes
if speed_list and speed not in speed_list:
raise NotValidSpeedError(f"The speed {speed} is not a valid speed.")
try:
return ordered_list_item_to_percentage(speed_list, speed)
except ValueError as ex:
raise NoValidSpeedsError(
f"The speed_list {speed_list} does not contain any valid speeds."
) from ex
def percentage_to_speed(self, percentage: int) -> str:
"""
Map a percentage onto self.speed_list.
Officially, this should only have to deal with 4 pre-defined speeds.
if value == 0:
return SPEED_OFF
elif value <= 33:
return SPEED_LOW
elif value <= 66:
return SPEED_MEDIUM
else:
return SPEED_HIGH
Unfortunately there is currently a high degree of non-conformancy.
Until fans have been corrected a more complicated and dynamic
mapping is used.
"""
if percentage == 0:
return SPEED_OFF
speed_list = self._speed_list_without_preset_modes
try:
return percentage_to_ordered_list_item(speed_list, percentage)
except ValueError as ex:
raise NoValidSpeedsError(
f"The speed_list {speed_list} does not contain any valid speeds."
) from ex
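    # Illustrative sketch (not part of the original module), assuming the
    # percentage helpers split the range evenly over the ordered list:
    # with the default LEGACY_SPEED_LIST ["low", "medium", "high"],
    # speed_to_percentage maps "low" -> 33, "medium" -> 66, "high" -> 100,
    # and percentage_to_speed maps 1-33 -> "low", 34-66 -> "medium",
    # 67-100 -> "high"; 0 and SPEED_OFF map to each other.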
@property
def state_attributes(self) -> dict:
"""Return optional state attributes."""
data = {}
supported_features = self.supported_features
if supported_features & SUPPORT_DIRECTION:
data[ATTR_DIRECTION] = self.current_direction
if supported_features & SUPPORT_OSCILLATE:
data[ATTR_OSCILLATING] = self.oscillating
if supported_features & SUPPORT_SET_SPEED:
data[ATTR_SPEED] = self.speed
data[ATTR_PERCENTAGE] = self.percentage
if (
supported_features & SUPPORT_PRESET_MODE
or supported_features & SUPPORT_SET_SPEED
):
data[ATTR_PRESET_MODE] = self.preset_mode
return data
@property
def supported_features(self) -> int:
"""Flag supported features."""
return 0
@property
def preset_mode(self) -> Optional[str]:
"""Return the current preset mode, e.g., auto, smart, interval, favorite.
Requires SUPPORT_SET_SPEED.
"""
speed = self.speed
if speed in self.preset_modes:
return speed
return None
@property
def preset_modes(self) -> Optional[List[str]]:
"""Return a list of available preset modes.
Requires SUPPORT_SET_SPEED.
"""
return preset_modes_from_speed_list(self.speed_list)
def speed_list_without_preset_modes(speed_list: List):
"""Filter out non-speeds from the speed list.
The goal is to get the speeds in a list from lowest to
highest by removing speeds that are not valid or out of order
so we can map them to percentages.
Examples:
input: ["off", "low", "low-medium", "medium", "medium-high", "high", "auto"]
output: ["low", "low-medium", "medium", "medium-high", "high"]
input: ["off", "auto", "low", "medium", "high"]
output: ["low", "medium", "high"]
input: ["off", "1", "2", "3", "4", "5", "6", "7", "smart"]
output: ["1", "2", "3", "4", "5", "6", "7"]
input: ["Auto", "Silent", "Favorite", "Idle", "Medium", "High", "Strong"]
output: ["Silent", "Medium", "High", "Strong"]
"""
return [speed for speed in speed_list if speed.lower() not in _NOT_SPEEDS_FILTER]
def preset_modes_from_speed_list(speed_list: List):
"""Filter out non-preset modes from the speed list.
The goal is to return only preset modes.
Examples:
input: ["off", "low", "low-medium", "medium", "medium-high", "high", "auto"]
output: ["auto"]
input: ["off", "auto", "low", "medium", "high"]
output: ["auto"]
input: ["off", "1", "2", "3", "4", "5", "6", "7", "smart"]
output: ["smart"]
input: ["Auto", "Silent", "Favorite", "Idle", "Medium", "High", "Strong"]
output: ["Auto", "Favorite", "Idle"]
"""
return [
speed
for speed in speed_list
if speed.lower() in _NOT_SPEEDS_FILTER and speed.lower() != SPEED_OFF
]
| {
"content_hash": "2aa324f96bf155718ba3f743e22deab6",
"timestamp": "",
"source": "github",
"line_count": 615,
"max_line_length": 115,
"avg_line_length": 31.53008130081301,
"alnum_prop": 0.6069826208034655,
"repo_name": "turbokongen/home-assistant",
"id": "8d6fcbea2c965805bf540df36b10c7553aa5625a",
"size": "19391",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/fan/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "30405146"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html><head><title></title>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<link rel="stylesheet" type="text/css" href="search.css"/>
<script type="text/javascript" src="enumvalues_b.js"></script>
<script type="text/javascript" src="search.js"></script>
</head>
<body class="SRPage">
<div id="SRIndex">
<div class="SRStatus" id="Loading">Loading...</div>
<div id="SRResults"></div>
<script type="text/javascript"><!--
createResults();
--></script>
<div class="SRStatus" id="Searching">Searching...</div>
<div class="SRStatus" id="NoMatches">No Matches</div>
<script type="text/javascript"><!--
document.getElementById("Loading").style.display="none";
document.getElementById("NoMatches").style.display="none";
var searchResults = new SearchResults("searchResults");
searchResults.Search();
--></script>
</div>
</body>
</html>
| {
"content_hash": "bb1ef81ae3b554bbf160f0c6ad5b2641",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 121,
"avg_line_length": 39.23076923076923,
"alnum_prop": 0.7068627450980393,
"repo_name": "lucasbrsa/OpenCV-3.2",
"id": "cdb29e71fc37db83b8882fda56f7994de9cfba2b",
"size": "1020",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "docs/3.2/search/enumvalues_b.html",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "320592"
},
{
"name": "C#",
"bytes": "12756"
},
{
"name": "C++",
"bytes": "499322"
},
{
"name": "CMake",
"bytes": "244871"
},
{
"name": "Makefile",
"bytes": "344335"
},
{
"name": "Python",
"bytes": "7735"
},
{
"name": "Visual Basic",
"bytes": "13139"
}
],
"symlink_target": ""
} |
notes-and-todos
===============
## meta-go
- [awesome-go](http://0x434d53.github.io/awesome-go/)
- [libs.club/go](http://libs.club/golang/)
## notes about stuff
- [go patterns](https://sites.google.com/site/gopatterns/)
- [go concurrency](http://www.nada.kth.se/~snilsson/concurrency)
- [learning CS w/ Go](https://github.com/gyuho/learn#contents)
- [libmill: go-style concurrency in C](http://libmill.org/)
## VMs implementations
- [ktap](https://github.com/ktap/ktap)
- [agora](https://github.com/PuerkitoBio/agora)
- [potion](https://github.com/perl11/potion/blob/master/core/vm.c)
- [lua](http://www.lua.org/source/5.2/lopcodes.h.html)
- [lua-5.0](http://www.lua.org/doc/jucs05.pdf)
- [lua-vm](http://luaforge.net/docman/83/98/ANoFrillsIntroToLua51VMInstructions.pdf)
- [lune: pure Go impl. of Lua VM](https://github.com/PuerkitoBio/lune)
- [gopher-lua: another go impl. of Lua VM](https://github.com/yuin/gopher-lua)
- [shopify's pure-Go Lua impl](www.shopify.com/technology/17605604-announcing-go-lua)
- [neko](http://nekovm.org/lua)
- [golightly](https://github.com/feyeleanor/GoLightly)
- [zerovm](https://github.com/zerovm/zerovm)
- [py-byterun](https://github.com/nedbat/byterun)
- [go-twik](http://blog.labix.org/2013/07/16/twik-a-tiny-language-for-go)
- [go-lisp](https://github.com/bobappleyard/golisp)
- [go-ssa-interpreter](https://gist.github.com/elliott5/7578605)
- [embedded go-lisp (kakapo)](https://github.com/bytbox/kakapo)
- [go-rvm](https://github.com/jteeuwen/rvm)
- [mini-llvm](https://github.com/intelfx/Homework_2011)
- [arogue: Go->R REPL](https://github.com/glycerine/arogue)
- [anko](https://github.com/mattn/anko)
- [golog](https://github.com/mndrix/golog)
- [gelo](https://code.google.com/p/gelo/)
- [carp](https://github.com/tekknolagi/carp)
- [toycompiler](https://github.com/geraldstanje/toycompiler)
- [grubby](https://github.com/grubby/grubby)
- [dyg: dynamic go (built on top of otto)](https://github.com/glycerine/dyg)
- [asm: jit'ing asm](https://github.com/crawshaw/asm)
- [gothon: python in go](https://github.com/flowlo/gothon)
- [goconsole: an interactive go interpreter](https://github.com/davidthomas426/goconsole)
- [jvm.go: JVM in Go](https://github.com/zxh0/jvm.go)
- [bpfjit: JIT for Berkeley Packet Filter](https://github.com/alnsn/bpfjit)
- [vident: an interpreter in Go (tuto)](https://github.com/felixangell/vident)
- [ark: a compiled systems programming language written in Go using the LLVM framework](https://github.com/ark-lang/ark)
- [rthornton128/vm: a Go VM for teaching](https://github.com/rthornton128/vm)
- [zygomys: embedded scripting language for Go](https://github.com/glycerine/zygomys)
## go-fabric
- [gotask](https://github.com/sbinet/gotask)
- [geto](https://github.com/bgmerrell/geto)
- [loom - a go fabric](https://github.com/wingedpig/loom)
- [slex: fabric for go](https://github.com/crosbymichael/slex)
## terminal/line-edit
- [linenoise](https://github.com/antirez/linenoise)
- [liner](https://github.com/sbinet/liner)
- [gobs/cmd](https://github.com/gobs/cmd)
- [go-cui](https://github.com/jroimartin/gocui)
- [termshare](https://github.com/progrium/termshare/blob/master/termshare.go)
- [goline](https://github.com/nemith/go-goline)
- [python prompt toolkit](https://github.com/jonathanslenders/python-prompt-toolkit)
- [bowery/prompt](https://github.com/Bowery/prompt)
- [termui: termbox-based tui](https://github.com/gizak/termui)
- [termloop: terminal-based game engine (on top of termbox)](https://github.com/JoelOtter/termloop)
- [chzyer/readline: pure-Go readline implementation (MIT)](https://github.com/chzyer/readline)
- [gotty: share your terminal as a web-app](https://github.com/yudai/gotty)
## build systems
- [ninja](https://github.com/martine/ninja)
- [GYP](https://code.google.com/p/gyp/)
- [tup](https://github.com/gittup/tup)
- [go build](https://code.google.com/p/go/source/browse/src/cmd/go/build.go)
- [gogo](https://github.com/davecheney/gogo)
- [fubsy](http://fubsy.gerg.ca/)
- [repobuild](https://github.com/chrisvana/repobuild)
- [gobot](https://github.com/kr/gobot)
- [makex (make in go)](https://github.com/sourcegraph/makex)
- [go build/install for c/c++: qo](https://github.com/andlabs/qo)
## serialization
- [binc](https://github.com/ugorji/go/tree/master/codec)
- [msgpack](https://github.com/msgpack/msgpack-go)
- [nanomsg](https://github.com/op/go-nanomsg)
- [drillbit](https://github.com/JohannesEbke/drillbit)
- [capnproto](http://kentonv.github.io/capnproto/)
- [go-capnproto](https://github.com/jmckaskill/go-capnproto)
- [protobuf](https://code.google.com/p/protobuf/)
- [gobin](https://code.google.com/p/gobin/)
- [parquet](https://github.com/parquet/parquet-format)
- [c-gob](https://code.google.com/p/libgob/)
- [ffjson](https://github.com/pquerna/ffjson)
- [goser: serialization benchmarks](https://github.com/cloudflare/goser)
- [flatbuffers (c++)](https://github.com/google/flatbuffers)
- [godec](https://github.com/zond/godec)
- [sereal](https://github.com/Sereal/Sereal)
- [gobdb](https://github.com/dasmithii/GobDB)
- [msg: codec generator for msgpack](https://github.com/philhofer/msgp)
- [serialization benchmarks](https://github.com/alecthomas/go_serialization_benchmarks)
- [struc: pack/unpack types like encoding/binary](https://github.com/lunixbochs/struc)
- [goavro: pure-Go library (de|en)coding Avro](https://github.com/linkedin/goavro)
- [xz: pure-Go library for decoding xz streams and files](https://github.com/xi2/xz)
- [compress: SSE-optimized stdlib packages](https://github.com/klauspost/compress)
- [cdb](https://github.com/colinmarc/cdb)
- [decompilation for LLVM](https://github.com/decomp/decompilation)
- [block-based compressor (like pytables?)](https://github.com/monkeybutter/goblock_compressor)
- [capnproto2 generator for go](https://github.com/zombiezen/go-capnproto2/)
- [shuffle: a blosc-like shuffling package](https://github.com/opennota/shuffle)
- [bzip2 enc/dec in Go](https://godoc.org/github.com/dsnet/compress/bzip2)
- [restruct: rich binary (de)serialization](https://github.com/go-restruct/restruct)
## checkpointing, VMs, containers
- [dmtcp](http://dmtcp.sourceforge.net/)
- [criu](http://criu.org/Main_Page)
- [autodock](https://github.com/cholcombe973/autodock)
- [docker](http://www.docker.io/)
- [go-docker client](https://github.com/fsouza/go-dockerclient)
- [dockerclient](https://github.com/samalba/dockerclient)
- [docker+openstack](http://www.sebastien-han.fr/blog/2013/10/31/build-a-paas-zone-within-your-openstack-cloud)
- [docker-devenv](https://github.com/relateiq/docker_public)
- [docker-cluster](https://github.com/tsuru/docker-cluster)
- [docker-cloud](https://github.com/brendandburns/docker-cloud)
- [dockit](https://github.com/benschw/dockit)
- [docker-codecube](http://hmarr.com/2013/oct/16/codecube-runnable-gists/)
- [nix-docker](https://github.com/zefhemel/nix-docker)
- [golab](https://github.com/mb0/lab)
- [dbg: a go debugger in go](https://github.com/derekparker/dbg)
- [godbg: another debugger in go](https://github.com/sirnewton01/godbg)
- [ceph+juju](http://ceph.com/dev-notes/deploying-ceph-with-juju/)
- [private docker registry](http://www.activestate.com/blog/2014/01/deploying-your-own-private-docker-registry)
- [docker-gaudi (appliance)](https://github.com/marmelab/gaudi)
- [juju-docker](https://github.com/bcsaller/juju-docker)
- [boot2docker (go CLI)](https://github.com/boot2docker/boot2docker-cli)
- [frenzy - vagrant for docker](https://github.com/stevedomin/frenzy)
- [kubernets: cluster mgt for containers](https://github.com/GoogleCloudPlatform/kubernetes)
- [dockersh: shell with/for docker](https://github.com/Yelp/dockersh)
- [gojenkins: manage jenkins jobs](https://github.com/bndr/gojenkins)
- [docker-volumes: manage docker volumes](https://github.com/cpuguy83/docker-volumes)
- [dev: overlay docker containers for development](https://github.com/Xe/dev)
- [docker-bastion: ssh access to containers](https://github.com/gophergala/docker-bastion)
- [docker + jenkins](http://www.catosplace.net/blog/2015/02/11/running-jenkins-in-docker-containers/)
- [go+mesos](http://java.dzone.com/articles/building-massively-scalable)
- [docker + gui/tui apps](https://blog.jessfraz.com/posts/docker-containers-on-the-desktop.html)
- [dockerception: dockers building dockers](https://github.com/jamiemccrindle/dockerception)
- [images: vm/dkr-images manager](https://github.com/fatih/images)
- [libretto: create VMs from Go](https://github.com/apcera/libretto)
## parsers
- [ql](https://github.com/cznic/ql)
- [otto (parser for js)](https://github.com/robertkrimen/otto)
- [ragel](https://github.com/antage/ragel-go/tree/golang-6/ragel)
- [ebnf](https://github.com/cznic/ebnf)
- [ebnf2y](https://github.com/cznic/ebnf2y)
- [ebnfutil](https://github.com/cznic/ebnfutil)
- [walkngo](https://github.com/raff/walkngo)
- [pigeon: generates parsers from PEG grammar](https://github.com/PuerkitoBio/pigeon)
- [decomp: decompile LLVM->Go](https://github.com/decomp)
- [sh: a shell parser and formatter in Go](https://github.com/mvdan/sh)
## C/S
- [neural-go](https://github.com/schuyler/neural-go)
- [neural-nwk](http://natureofcode.com/book/chapter-10-neural-networks/)
- [c/s in real life](https://github.com/jcb/CSIRL)
- [data science](http://blog.zipfianacademy.com/post/46864003608/a-practical-intro-to-data-science)
- [navier stokes](http://lorenabarba.com/blog/cfd-python-12-steps-to-navier-stokes/)
- [go svm, forests](https://github.com/ryanbressler/CloudForest)
- [deep learning](http://markus.com/deep-learning-101/)
- [python MC](http://www.chrisstucchio.com/blog/2013/basic_income_vs_basic_job.html)
- [static libs](http://eli.thegreenplace.net/2012/08/13/how-statically-linked-programs-run-on-linux/)
- [.so relocations](http://www.mindfruit.co.uk/2012/06/relocations-relocations.html)
- [PIC .so](http://eli.thegreenplace.net/2011/11/03/position-independent-code-pic-in-shared-libraries/)
- [c-plugins](http://eli.thegreenplace.net/2012/08/24/plugins-in-c/)
- [ELF map](https://code.google.com/p/corkami/wiki/ELF101)
- [go-c-code w/o cgo](http://dave.cheney.net/2013/09/07/how-to-include-c-code-in-your-go-package)
- [intro to neural nets](http://www.theprojectspot.com/tutorial-post/introduction-to-artificial-neural-networks-part-1/7/?)
- [self-describing data formats](http://inamidst.com/whits/2014/formats)
- [go machine learning](http://biosphere.cc/software-engineering/go-machine-learning-nlp-libraries/)
- [go linear regr. (zettalm)](https://github.com/glycerine/zettalm)
- [miridius - AI](https://github.com/miridius/miridius-ai)
- [distributed computing course](http://cseweb.ucsd.edu/classes/sp14/cse223B-a/index.html)
- [conway's game of life](https://bitbucket.org/teknico/a_life)
- [neural network + deep learning](http://neuralnetworksanddeeplearning.com/chap2.html)
- [digital signal processing](http://www.dspguide.com/pdfbook.htm)
- [intro to signal processing](http://www.ece.rutgers.edu/~orfanidi/intro2sp/)
- [mlgo: machine learning library in go](https://code.google.com/p/mlgo/)
- [or-tools: python linear solver](https://code.google.com/p/or-tools/)
- [data mining book](http://www.mmds.org/#ver21)
- [go-cool: compiler for cool - coursera compiler lectures](https://github.com/zellyn/gocool)
- [atlas s/w openmp tuto](https://indico.cern.ch/event/352961/other-view?view=standard)
- [go-algorithms](https://github.com/arnauddri/algorithms)
- [linux perf](http://www.brendangregg.com/perf.html)
- [goweave: aspect oriented programming in Go](https://deferpanic.com/blog/compile-time-code-weaving-in-go)
- [go-neural](https://github.com/NOX73/go-neural)
- [drago: feed fwd NN](https://github.com/aaparella/Drago)
- [NN & deep learning book](http://neuralnetworksanddeeplearning.com/)
- [super tiny compiler in Go](https://github.com/hazbo/the-super-tiny-compiler)
## go libs
- [req http](https://github.com/franela/goreq)
- [megajson](https://github.com/benbjohnson/megajson) (for the ast generation)
- [pipe](https://launchpad.net/pipe)
- [passwd](https://github.com/seehuhn/password)
- [tomb](https://launchpad.net/tomb)
- [fatchan](https://github.com/kylelemons/fatchan)
- [gobots (robotics)](http://gobot.io/)
- [gobots (battle game)](https://github.com/tanema/botbattle)
- [cobra (cli)](https://github.com/spf13/cobra)
- [webapp](https://github.com/Unknwon/build-web-application-with-golang_EN)
- [uwsgi gccgo plugin](https://github.com/unbit/uwsgi/tree/master/plugins/gccgo)
- [ipython/go kernel](https://github.com/takluyver/igo)
- [go-plugins](https://github.com/progrium/go-plugins)
- [gox (crosscompile)](https://github.com/mitchellh/gox)
- [go-zappy](https://github.com/cznic/zappy)
- [go-snappy](https://code.google.com/p/snappy-go/)
- [go-ggplot](https://github.com/vdobler/plot)
- [go-kv database](https://github.com/cznic/kv)
- [go tiedot NoSQL db](https://github.com/HouzuoGuo/tiedot)
- [go someutils (POSIX-like utils)](https://github.com/laher/someutils)
- [go-shlex](https://github.com/flynn/go-shlex)
- [go-coroutine](https://github.com/PuerkitoBio/gocoro)
- [go-shh (proc/sys)](https://github.com/freeformz/shh)
- [go-mesos](https://github.com/mesosphere/mesos-go)
- [go-metrix](https://github.com/dynport/metrix)
- [godoc2md](https://github.com/davecheney/godoc2md)
- [davecheney/poller: Poll selectable Reader/Writers](https://github.com/davecheney/poller)
- [npat-efault/poller: An epoll(7)-based file-descriptor multiplexer.](https://github.com/npat-efault/poller)
- [wishful (monads)](https://github.com/SimonRichardson/wishful)
- [ar](https://github.com/nightlyone/ar)
- [gopark](https://github.com/mijia/gopark)
- [goloop](http://godoc.org/git.tideland.biz/goas/loop)
- [multitick](https://github.com/VividCortex/multitick)
- [fn](https://github.com/tobia/fn)
- [go-pprof](https://github.com/remyoudompheng/go-misc/tree/master/pprof)
- [go-service](https://bitbucket.org/kardianos/service/src)
- [go-respawn,go-forever](https://github.com/gwoo/goforever)
- [go-channels (multiplex, tee, pipe channels)](https://github.com/eapache/channels)
- [termbox](https://github.com/nsf/termbox-go)
- [go-expect](https://github.com/ThomasRooney/gexpect)
- [tirion (monitoring)](https://github.com/zimmski/tirion)
- [tachyon](https://github.com/vektra/tachyon)
- [logging](https://github.com/koding/logging)
- [go-im: go IMDb](https://github.com/BurntSushi/goim)
- [go-coprocess](https://github.com/kevinwallace/coprocess)
- [azul-3d](https://code.google.com/p/azul3d/)
- [go-dsl](https://github.com/leeview/godsl)
- [ptrace](https://github.com/eaburns/ptrace)
- [bolt](https://github.com/boltdb/bolt)
- [mount](https://github.com/jsgilmore/mount)
- [shm](https://github.com/jsgilmore/shm)
- [trace](https://github.com/jimrobinson/trace/)
- [oh (shell in go)](https://github.com/michaelmacinnis/oh)
- [go-update](https://github.com/inconshreveable/go-update)
- [go-jit](https://github.com/nelhage/gojit)
- [go-asm](https://github.com/crawshaw/asm)
- [gengen: generics generator](https://github.com/joeshaw/gengen)
- [go-mysql (pure go)](https://github.com/go-sql-driver/mysql/)
- [gobot.io](http://gobot.io/)
- [gosgl: opengl+gpu](https://github.com/phaikawl/gosgl)
- [embd: go for GPIO, RPi, embedded](https://github.com/kidoman/embd)
- [occult: go-based MapReduce fwk](https://github.com/akualab/occult)
- [goq: Sun Grid Engine-like job queue](https://github.com/glycerine/goq)
- [keyboard: binding keys to actions](https://github.com/jteeuwen/keyboard)
- [libchan: channels over the network](https://github.com/docker/libchan)
- [async](https://github.com/egonelbre/async)
- [go-update](https://github.com/inconshreveable/go-update)
- [go-bindata](https://github.com/jteeuwen/go-bindata)
- [go-runsit](https://github.com/bradfitz/runsit)
- [jobmux: job multiplexer](https://github.com/kaicheng/jobmux)
- [goast-viewer](https://github.com/yuroyoro/goast-viewer)
- [geno: a generics generator](https://github.com/champioj/geno)
- [go-lzma](https://code.google.com/p/lzma/)
- [go-stream - inspired from labix/pipe](https://github.com/ghemawat/stream)
- [edi - an editor, in Go](https://github.com/satran/edi)
- [daemon - to daemonize a go program](https://github.com/takama/daemon)
- [vitess' sqlparser](https://github.com/youtube/vitess/tree/master/go/vt/sqlparser)
- [srclib: sourcegraph's code groker](https://srclib.org/)
- [find unused functions (go-oracle-based)](https://github.com/3rf/go-unused-funcs)
- [impl: generate stubs from an interface](https://github.com/josharian/impl)
- [chidley: xml-to-go-struct generator](https://github.com/gnewton/chidley)
- [stacko: fetching stack traces](https://github.com/hallas/stacko)
- [go-netchan: channels over the network](https://github.com/OneOfOne/netchan)
- [cmemory: profiling+tools cgo](https://github.com/emilymaier/cmemory)
- [table: interface to sql.DB](https://bitbucket.org/kardianos/table)
- [dot: graphviz writer](https://github.com/tmc/dot)
- [gographviz: graphviz writer](https://code.google.com/p/gographviz/)
- [slex: fabric for go](https://github.com/crosbymichael/slex)
- [xsd: generate go-xml structs from a XSD spec](https://github.com/metaleap/go-xsd)
- [nex: lexer generating go code](https://crypto.stanford.edu/~blynn/nex/)
- [afero: a FS abstraction](https://github.com/spf13/afero)
- [tmass: tmux session manager](https://github.com/fzerorubigd/tmass)
- [dotsql: go library for SQL](https://github.com/gchaincl/dotsql)
- [sflag: cmd-line struct-based flags](https://github.com/teamldcs/sflag)
- [go-observer: a publish/subscribe library](https://github.com/imkira/go-observer)
- [ivy: an APL-like calculator](https://github.com/robpike/ivy)
- [mailhog: mailer+ui](https://github.com/mailhog/MailHog)
- [vigo: vi in go](https://github.com/kisielk/vigo)
- [tabler: go-generate structs from SQL schema](https://github.com/tristanwietsma/tabler)
- [c2go: translate C to Go](https://github.com/rsc/c2go)
- [pt: a path tracer](https://github.com/fogleman/pt)
- [go datastructures](https://github.com/Workiva/go-datastructures)
- [gometalinter](https://github.com/alecthomas/gometalinter)
- [websocketd](http://websocketd.com/)
- [syncthing: file sync](https://github.com/syncthing/syncthing)
- [pong w/ opengl](https://github.com/LaurenceGA/Pong)
- [go-react-example (js+duktape)](https://github.com/olebedev/go-react-example)
- [datagen](https://github.com/aybabtme/datagen)
- [runlocal: run remote commands locally over ssh-x11](https://github.com/pwaller/runlocal)
- [go-package-store: update packages in GOPATH](https://github.com/shurcooL/Go-Package-Store)
- [bampf: 3D arcade-style game (opengl+openal)](https://github.com/gazed/bampf)
- [vu: Virtual Universe](https://github.com/gazed/vu)
- [cc: a C compiler in Go](https://github.com/andrewchambers/cc)
- [lz4: pure-go LZ4 (de)compressor](https://github.com/pierrec/lz4)
- [cluefs: a FUSE-based fs to monitor I/O](https://github.com/airnandez/cluefs)
- [pry: go REPL](https://github.com/d4l3k/go-pry)
- [buffer: composable buffers](https://github.com/djherbis/buffer)
- [nio: concurrent buffered i/o](https://github.com/djherbis/nio)
- [pcgr: PCG random number generator](https://github.com/dgryski/go-pcgr)
- [ergo: errors with stack+context](https://github.com/gima/ergo)
- [monkey: monkey-patching in go](http://bouk.co/blog/monkey-patching-in-go/)
- [llgoi: go interpreter on top of llgo](http://llvm.org/klaus/llgo/tree/master/cmd/llgoi)
- [gorewrite: AST rewrite](https://github.com/tsuna/gorewrite)
- [parallel: OpenMP-like](https://github.com/wangkuiyi/parallel)
- [shellwords: parse lines as shell words](https://github.com/mattn/go-shellwords)
- [shlex: lexing shell-like lines](https://github.com/flynn/go-shlex)
- [killable: actors, pipelines and graph of (killable) goroutines](https://github.com/icholy/killable)
- [freehold: open-source Dropbox-like](http://tshannon.bitbucket.org/freehold/)
- [gzran: gzip indexer for random access into compressed files](https://github.com/coreos/gzran)
- [tar-split: expose archive/tar TAR raw bytes](https://github.com/vbatts/tar-split)
- [go-rat: tar extension for random access](https://github.com/mcuadros/go-rat)
- [pingo: go plugins](https://github.com/dullgiulio/pingo)
- [pie: go plugins](https://github.com/natefinch/pie)
- [sigil: standalone string processor and interpolator](https://github.com/gliderlabs/sigil)
- [go-hardware: directory of hw related libs](https://github.com/rakyll/go-hardware)
- [melody: minimalist websocket fwk](https://github.com/olahol/melody)
- [xmlgen](https://github.com/dutchcoders/xmlgen)
- [dupl: source code duplication finder](https://github.com/mibk/dupl)
- [gocp: Go concurrency primitives exposed to C/C++](https://github.com/fiorix/gocp)
- [deputy: wrapper around os/exec.Command](https://github.com/juju/deputy)
- [garbler: generate passwords](https://github.com/michaelbironneau/garbler)
- [taowm: a tiling X11-window manager in Go](https://github.com/nigeltao/taowm)
- [streamtools: a graphical toolkit to deal with streams of data](http://blog.nytlabs.com/streamtools/)
- [process: bkg+group-leader process mgmt](https://github.com/nightlyone/process)
- [llir/llvm: pure Go library to process LLVM IR](https://github.com/llir/llvm)
- [go-outdated: find outdated packages in GOPATH](https://github.com/firstrow/go-outdated)
- [xsocket: os/exec over websockets](https://github.com/tidwall/xsocket)
- [check, aligncheck, pahole-like, ...: set of utilities to check Go code](https://github.com/opennota/check)
- [maligned: tool to detect holes in structs (like pahole)](https://github.com/mdempsky/maligned)
- [gensimd: generate SIMD-ized packages](https://github.com/bjwbell/gensimd)
- [xorshift: a fast PRNG](https://github.com/lazybeaver/xorshift)
- [wm: a terminal based window manager](https://github.com/cznic/wm)
- [readahead](https://github.com/klauspost/readahead)
- [interfacer: a linter that suggests interface types](https://github.com/mvdan/interfacer)
- [hashicorp/go-plugin: Go plugins over RPC](https://github.com/hashicorp/go-plugin)
- [go-structlayout: pahole,maligned-like tool to display structs layouts](https://github.com/dominikh/go-structlayout)
- [depscheck: analyze packages for unwarranted dependencies](https://github.com/divan/depscheck)
- [stm: software transactional memory in Go](https://github.com/lukechampine/stm)
- [gam: Akka actors in Go](https://github.com/rogeralsing/gam)
- [POSIX semaphores in Go](https://github.com/shubhros/drunkendeluge/blob/master/semaphore/semaphore.go)
- [goiot/devices: devices for IoT](https://github.com/goiot/devices)
- [gx: a package management tool](https://github.com/whyrusleeping/gx)
- [find: High-precision indoor positioning framework for most wifi-enabled devices](https://github.com/schollz/find)
- [watson: Go (golang) SDK for IBM Watson services](https://github.com/liviosoares/go-watson-sdk)
- [katydid: a toolkit for trees](http://katydid.github.io/)
## sci-libs
- [dataframe](https://github.com/akualab/dataframe)
- [py-d3](http://jakevdp.github.io/blog/2013/12/19/a-d3-viewer-for-matplotlib/)
- [py-vincent-vega](https://github.com/wrobstory/vincent)
- [go-3d](https://github.com/ungerik/go3d)
- [go-rand (64b+Mersenne-Twister)](https://bitbucket.org/MaVo159/rand/)
- [go-math32](https://github.com/AE9RB/math32)
- [go+cuda](https://fosdem.org/2014/schedule/event/hpc_devroom_go/)
- [go-stats](https://github.com/GaryBoone/GoStats)
- [go-glpk (linear programming kit)](https://github.com/lukpank/go-glpk)
- [go-learn: machine learning library](https://github.com/sjwhitworth/golearn)
- [zettalm: linear fits](https://github.com/glycerine/zettalm)
- [scimark2 - scientific benchmarks (C/Java)](http://math.nist.gov/scimark2/index.html)
- [arrgh: a Go<->R layer](https://github.com/kortschak/arrgh)
- [tesseradata - deep analysis of large complex data](http://tesseradata.org/)
- [libsvm-go: support vector machine in go](https://github.com/ewalker544/libsvm-go)
- [ad: automatic differentiation in go](https://github.com/xoba/ad)
- [ode: ordinary differential equations](https://github.com/sj14/ode)
- [go+montecarlo+concurrency](http://www.soroushjp.com/2015/02/07/go-concurrency-is-not-parallelism-real-world-lessons-with-monte-carlo-simulations/)
- [blond: c++/python lib for Beam Longitudinal Dynamics code](http://blond.web.cern.ch/)
- [geoindex: k-mean clusters](https://github.com/hailocab/go-geoindex)
- [narray: float64-ndarray](https://github.com/akualab/narray)
- [lp: linear programming (simplex)](https://github.com/jvlmdr/golp)
- [golinear: bindings to liblinear (SVM)](https://github.com/danieldk/golinear)
- [goml: golang machine learning](https://github.com/cdipaolo/goml)
- [biblexer: a bibTeX lexer in Go](https://github.com/meling/biblexer)
- [machine learning libraries in Go](http://www.fodop.com/ar-1002)
- [krisalder/ml: machine learning toolkit](https://github.com/krisalder/ml)
- [stevenmiller888/go-mind: machine learning library](https://github.com/stevenmiller888/go-mind)
## h5/ui
- [go-polymer](https://github.com/treeder/go-polymer)
- [thrust: UI based on chromium/blink/v8](https://github.com/miketheprogrammer/go-thrust)
- [seven5](http://seven5.github.io)
- [godesktopgui: a go-html5-webapp example](https://github.com/peterhoward42/godesktopgui)
- [gobenchui: a go+html webapp + highcharts.js](https://github.com/divan/gobenchui)
## editors in Go
- [driusan/de](https://github.com/driusan/de)
- [eaburns/T](https://github.com/eaburns/T)
- [sigint.ca/graphics/editor](https://github.com/jnjackins/graphics/tree/master/editor)
## tools
- [github forks](http://forked.yannick.io/)
- [gh](http://owenou.com/2013/12/22/fast-github-command-line-client-written-in-go.html)
- [git-manager](https://github.com/etola/git-manager)
- [gostatus](https://github.com/shurcooL/gostatus)
- [call go from c](http://stackoverflow.com/questions/6125683/call-go-functions-from-c)
- [trace-viewer: view traces in browser](https://github.com/google/trace-viewer)
## tutorials
- [go ssh](http://kukuruku.co/hub/golang/ssh-commands-execution-on-hundreds-of-servers-via-go)
- [programming problems in go](https://github.com/PrikPrak/Go_101)
- [distributed computing with mesos+go](http://blog.fmpwizard.com/blog/web-crawler-using-mesos-and-golang)
- [go-runtime process walkthrough](http://blog.matttproud.com/2015/02/exploring-gos-runtime-how-process.html)
- [go concurrency primitives from C](https://github.com/fiorix/gocp)
## games/ai
- [gorobots](https://github.com/frasergraham/gorobots)
## packaging
- [guix](http://www.gnu.org/software/guix/manual/guix.pdf)
## Quantum Computing
- http://iqim.caltech.edu/
| {
"content_hash": "f92c2840aa87f145f7f67ac8e09c6cf9",
"timestamp": "",
"source": "github",
"line_count": 447,
"max_line_length": 149,
"avg_line_length": 59.31543624161074,
"alnum_prop": 0.7363656935958361,
"repo_name": "sbinet/notes-and-todos",
"id": "01e3611a3058e69f0b7330d0ab13388e57c1f54f",
"size": "26514",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
package com.hearthsim.test.minion;
import com.hearthsim.card.Card;
import com.hearthsim.card.CharacterIndex;
import com.hearthsim.card.basic.minion.BoulderfistOgre;
import com.hearthsim.card.basic.minion.DragonlingMechanic;
import com.hearthsim.card.basic.minion.RaidLeader;
import com.hearthsim.exception.HSException;
import com.hearthsim.model.BoardModel;
import com.hearthsim.model.PlayerModel;
import com.hearthsim.model.PlayerSide;
import com.hearthsim.util.tree.HearthTreeNode;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
public class TestDragonlingMechanic {
private HearthTreeNode board;
private PlayerModel currentPlayer;
private PlayerModel waitingPlayer;
@Before
public void setup() throws HSException {
board = new HearthTreeNode(new BoardModel());
currentPlayer = board.data_.getCurrentPlayer();
waitingPlayer = board.data_.getWaitingPlayer();
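        // Both sides get a Raid Leader and a Boulderfist Ogre; the Dragonling Mechanic starts in the current player's hand and both heroes have 8 mana.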
board.data_.placeMinion(PlayerSide.CURRENT_PLAYER, new RaidLeader());
board.data_.placeMinion(PlayerSide.CURRENT_PLAYER, new BoulderfistOgre());
board.data_.placeMinion(PlayerSide.WAITING_PLAYER, new RaidLeader());
board.data_.placeMinion(PlayerSide.WAITING_PLAYER, new BoulderfistOgre());
Card fb = new DragonlingMechanic();
currentPlayer.placeCardHand(fb);
currentPlayer.setMana((byte) 8);
waitingPlayer.setMana((byte) 8);
}
@Test
public void test0() throws HSException {
Card theCard = currentPlayer.getHand().get(0);
HearthTreeNode ret = theCard.useOn(PlayerSide.WAITING_PLAYER, CharacterIndex.HERO, board);
assertNull(ret);
assertEquals(currentPlayer.getHand().size(), 1);
assertEquals(currentPlayer.getNumMinions(), 2);
assertEquals(waitingPlayer.getNumMinions(), 2);
assertEquals(currentPlayer.getMana(), 8);
assertEquals(waitingPlayer.getMana(), 8);
assertEquals(currentPlayer.getHero().getHealth(), 30);
assertEquals(waitingPlayer.getHero().getHealth(), 30);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 2);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getHealth(), 7);
assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 2);
assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getHealth(), 7);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 2);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 7);
assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 2);
assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 7);
}
@Test
public void test2() throws HSException {
Card theCard = currentPlayer.getHand().get(0);
HearthTreeNode ret = theCard.useOn(PlayerSide.CURRENT_PLAYER, CharacterIndex.MINION_1, board);
assertNotNull(ret);
currentPlayer = ret.data_.getCurrentPlayer();
waitingPlayer = ret.data_.getWaitingPlayer();
assertEquals(currentPlayer.getHand().size(), 0);
assertEquals(currentPlayer.getNumMinions(), 4);
assertEquals(waitingPlayer.getNumMinions(), 2);
assertEquals(currentPlayer.getMana(), 4);
assertEquals(waitingPlayer.getMana(), 8);
assertEquals(currentPlayer.getHero().getHealth(), 30);
assertEquals(waitingPlayer.getHero().getHealth(), 30);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 2);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getHealth(), 4);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_3).getHealth(), 1);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_4).getHealth(), 7);
assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 2);
assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getHealth(), 7);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 2);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 3);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_3).getTotalAttack(), 3);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_4).getTotalAttack(), 7);
assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 2);
assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 7);
}
@Test
public void testBattlecryFizzlesOnFullBoard() throws HSException {
board.data_.placeMinion(PlayerSide.CURRENT_PLAYER, new BoulderfistOgre());
board.data_.placeMinion(PlayerSide.CURRENT_PLAYER, new BoulderfistOgre());
board.data_.placeMinion(PlayerSide.CURRENT_PLAYER, new BoulderfistOgre());
board.data_.placeMinion(PlayerSide.CURRENT_PLAYER, new BoulderfistOgre());
Card theCard = currentPlayer.getHand().get(0);
HearthTreeNode ret = theCard.useOn(PlayerSide.CURRENT_PLAYER, CharacterIndex.MINION_3, board);
assertNotNull(ret);
currentPlayer = ret.data_.getCurrentPlayer();
waitingPlayer = ret.data_.getWaitingPlayer();
assertEquals(currentPlayer.getHand().size(), 0);
assertEquals(currentPlayer.getNumMinions(), 7);
assertEquals(currentPlayer.getMana(), 4);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_4).getTotalAttack(), 3);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_4).getTotalHealth(), 4);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_5).getTotalAttack(), 7);
assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_5).getTotalHealth(), 7);
}
}
| {
"content_hash": "3a69acea77e2c444c7f132632e7a3cda",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 102,
"avg_line_length": 48.76229508196721,
"alnum_prop": 0.7281896116994453,
"repo_name": "slaymaker1907/HearthSim",
"id": "f92e1acc72d73156e1dfa33aee3c58b197f2df58",
"size": "5949",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/test/java/com/hearthsim/test/minion/TestDragonlingMechanic.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Groovy",
"bytes": "477994"
},
{
"name": "Java",
"bytes": "2321266"
},
{
"name": "Python",
"bytes": "7208"
}
],
"symlink_target": ""
} |
package com.twitter.finagle.validate
import com.twitter.finagle.load.{Event, LoadGenerator}
import com.twitter.finagle.service.TimeoutFilter
import com.twitter.util.{Duration, Future, MockTimer, Time, Try}
import com.twitter.conversions.time.longToTimeableNumber
import java.io.{BufferedReader, InputStreamReader}
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
@RunWith(classOf[JUnitRunner])
class ValidateNaiveTimeoutFilter extends FunSuite {
def getRealData(n: Int): Iterator[Long] = {
val cl: ClassLoader = getClass().getClassLoader();
val input: InputStreamReader = new InputStreamReader(
cl.getResourceAsStream("resources/real_latencies.data"));
val reader: BufferedReader = new BufferedReader(input);
reader.mark(n)
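    // Endless line iterator: on EOF, rewind to the mark and keep reading, so take(n) below always yields n latency samples.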
new Iterator[String] {
def hasNext = true
def next(): String = {
Option(reader.readLine()) getOrElse {
reader.reset()
reader.readLine()
}
}
} filter (_.nonEmpty) map (opt => (1000 * opt.toDouble).toLong) take (n)
}
test("Timeout kills everything over timeout") {
val now = Time.now
val timer = new MockTimer
val total = 10000
var num = 0
var success = 0
var failure = 0
val data = getRealData(total).toSeq
val median = data.sorted.apply(total / 2)
val timeout = median.milliseconds - 1.nanosecond
val filter = new TimeoutFilter[Event[Boolean, Boolean], Boolean](timeout, timer)
val gen =
new LoadGenerator(
data.map { latency =>
new Event(now, latency.milliseconds, true, { b: Boolean => Try(true) })
},
{
case (duration: Duration, f: Future[Boolean]) => f onSuccess { _ =>
assert(duration <= timeout)
} onFailure { _ =>
assert(duration > timeout)
} ensure {
num += 1
}
}: (Duration, Future[Boolean]) => Unit,
filter,
timer
)
gen.execute()
assert(num == total)
}
}
| {
"content_hash": "e68f90f3ed43da3a9651ec044396959f",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 84,
"avg_line_length": 32,
"alnum_prop": 0.6491477272727273,
"repo_name": "a-manumohan/finagle",
"id": "b19f6f1c0932743f0408e172cc03b3e4d393909c",
"size": "2112",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "finagle-validate/src/test/scala/com/twitter/finagle/validate/ValidateNaiveTimeoutFilter.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "4834"
},
{
"name": "Java",
"bytes": "705768"
},
{
"name": "Python",
"bytes": "37701"
},
{
"name": "Ruby",
"bytes": "24870"
},
{
"name": "Scala",
"bytes": "3815752"
},
{
"name": "Shell",
"bytes": "10481"
},
{
"name": "Thrift",
"bytes": "20351"
}
],
"symlink_target": ""
} |
function updateHC2(arg){
if (arguments.length==0) arg='%'; // TODO: Set default to Raw Coal (?)
var _pit = alasql("SELECT KNAME,SUM(TOTAL) AS TOTAL FROM ? WHERE MAKTX LIKE '"+arg+"%' GROUP BY KNAME ORDER BY KNAME",[actual]);
var act_pit = [];
var act_pit_dd = [];
var drilldown = [];
var temp = [];
var currentID;
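  // Query per-contractor, per-month totals and split the flat rows into one [month, total] series per contractor for the drilldown.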
alasql("SELECT KNAME,SUM(TOTAL) AS TOTAL,MONTHS FROM ? "+
"WHERE MAKTX LIKE '"+arg+"%' GROUP BY KNAME, MONTHS ORDER BY KNAME, MONTHS",[actual]).forEach(function(item,index){
if(index==0) currentID=item.KNAME;
if(item.KNAME!=currentID){
currentID=item.KNAME;
drilldown.push(temp);
temp=[];
      }
      temp.push([item.MONTHS,item.TOTAL]);
    });
    drilldown.push(temp);
_pit.forEach(function(item, index){
act_pit.push({'name':item.KNAME,'y':item.TOTAL,'drilldown':(index+1)});
});
drilldown.forEach(function(item,index){
act_pit_dd.push({'name':_pit[index].KNAME,'id':(index+1),'data':drilldown[index]});
});
$('#container-chart2').highcharts({
chart: { type: 'column' },
title: { text: 'Actual Production per Contractor. In 1000 MT<br>January, 2015 to December, 2015' },
subtitle: { text: 'Click the columns to view detailed data.' },
xAxis: { type: 'category' },
yAxis: {
title: { text: 'Total Production' }
},
legend: { enabled: false },
plotOptions: {
column: { minPointLength: 10 },
series: {
borderWidth: 0,
dataLabels: {
enabled: true,
format: '{point.y:,.2f}'
}
}
},
tooltip: {
headerFormat: '<span style="font-size:11px">{series.name}</span><br>',
pointFormat: '<span style="color:{point.color}">{point.name}</span>: <b>{point.y:,.2f}</b><br/>'
},
series: [{
name: 'Pit',
colorByPoint: true,
data: act_pit
}],
drilldown: {
drillUpButton:{
position:{
x: 0, y:-50
}
},
series: act_pit_dd
}
});
}
$(document).ready(function(){
updateHC2();
}); | {
"content_hash": "0cd489d3b0b8e3255057d55de06ee247",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 129,
"avg_line_length": 29.015384615384615,
"alnum_prop": 0.6166489925768823,
"repo_name": "Izzur/mis-bc",
"id": "bacbe3471cf6977fb883bb4854874658bb629ca9",
"size": "1886",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "assets/js/chart2.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "420"
},
{
"name": "CSS",
"bytes": "985553"
},
{
"name": "HTML",
"bytes": "405909"
},
{
"name": "JavaScript",
"bytes": "8710771"
},
{
"name": "PHP",
"bytes": "1853829"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html><head><title></title>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.18"/>
<link rel="stylesheet" type="text/css" href="search.css"/>
<script type="text/javascript" src="files_3.js"></script>
<script type="text/javascript" src="search.js"></script>
</head>
<body class="SRPage">
<div id="SRIndex">
<div class="SRStatus" id="Loading">Loading...</div>
<div id="SRResults"></div>
<script type="text/javascript"><!--
/* @license magnet:?xt=urn:btih:cf05388f2679ee054f2beb29a391d25f4e673ac3&dn=gpl-2.0.txt GPL-v2 */
createResults();
/* @license-end */
--></script>
<div class="SRStatus" id="Searching">Searching...</div>
<div class="SRStatus" id="NoMatches">No Matches</div>
<script type="text/javascript"><!--
/* @license magnet:?xt=urn:btih:cf05388f2679ee054f2beb29a391d25f4e673ac3&dn=gpl-2.0.txt GPL-v2 */
document.getElementById("Loading").style.display="none";
document.getElementById("NoMatches").style.display="none";
var searchResults = new SearchResults("searchResults");
searchResults.Search();
window.addEventListener("message", function(event) {
if (event.data == "take_focus") {
var elem = searchResults.NavNext(0);
if (elem) elem.focus();
}
});
/* @license-end */
--></script>
</div>
</body>
</html>
| {
"content_hash": "7c9c800936ed565c1025b493dc9b8070",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 122,
"avg_line_length": 39.55555555555556,
"alnum_prop": 0.7036516853932584,
"repo_name": "MikeHeiber/KMC_Lattice",
"id": "d1b79b97ace2c16b2bcdecda286f7e788e483b47",
"size": "1424",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "docs/search/files_3.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "202583"
},
{
"name": "Makefile",
"bytes": "3194"
},
{
"name": "TeX",
"bytes": "4915"
}
],
"symlink_target": ""
} |
package org.openntf.red.design;
/**
* <i>Initial code borrowed from OpenNTF Domino API.</i><br>
* @author jgallagher
*
*/
public interface XPage extends XspResource {
}
| {
"content_hash": "9c64989f14b7d8ec3306bc8954a725e9",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 60,
"avg_line_length": 17.6,
"alnum_prop": 0.6988636363636364,
"repo_name": "hyarthi/project-red",
"id": "5330438b0cce8661fbdae412aca27ccfdac78d3a",
"size": "176",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/java/org.openntf.red.main/src/org/openntf/red/design/XPage.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "3608"
},
{
"name": "C++",
"bytes": "164206"
},
{
"name": "Java",
"bytes": "1056227"
},
{
"name": "Objective-C",
"bytes": "308"
},
{
"name": "Protocol Buffer",
"bytes": "1736"
}
],
"symlink_target": ""
} |
/* ========================================================================
Component: media-queries
========================================================================== */
/* ========================================================================
Component: layout
========================================================================== */
body,
.wrapper > section {
background-color: #f5f7fa; }
.wrapper > .aside {
background-color: #3a3f51; }
/* ========================================================================
Component: top-navbar
========================================================================== */
.topnavbar {
background-color: #fff; }
.topnavbar .navbar-header {
background-color: transparent;
background-image: -webkit-linear-gradient(left, #564aa3 0%, #7266ba 100%);
background-image: -o-linear-gradient(left, #564aa3 0%, #7266ba 100%);
background-image: linear-gradient(to right, #564aa3 0%, #7266ba 100%);
background-repeat: repeat-x;
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#FF564AA3', endColorstr='#FF7266BA', GradientType=1); }
@media only screen and (min-width: 768px) {
.topnavbar .navbar-header {
background-image: none; } }
.topnavbar .navbar-nav > li > a,
.topnavbar .navbar-nav > .open > a {
color: #564aa3; }
.topnavbar .navbar-nav > li > a:hover,
.topnavbar .navbar-nav > li > a:focus,
.topnavbar .navbar-nav > .open > a:hover, .topnavbar .navbar-nav > .open > a:focus {
color: #312a5d; }
.topnavbar .navbar-nav > .active > a,
.topnavbar .navbar-nav > .active > a:hover,
.topnavbar .navbar-nav > .active > a:focus,
.topnavbar .navbar-nav > .open > a, .topnavbar .navbar-nav > .open > a:hover, .topnavbar .navbar-nav > .open > a:focus {
background-color: transparent; }
.topnavbar .navbar-nav > li > [data-toggle='navbar-search'] {
color: #fff; }
.topnavbar .nav-wrapper {
background-color: #564aa3;
background-image: -webkit-linear-gradient(left, #564aa3 0%, #7266ba 100%);
background-image: -o-linear-gradient(left, #564aa3 0%, #7266ba 100%);
background-image: linear-gradient(to right, #564aa3 0%, #7266ba 100%);
background-repeat: repeat-x;
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#FF564AA3', endColorstr='#FF7266BA', GradientType=1); }
@media only screen and (min-width: 768px) {
.topnavbar {
background-color: #564aa3;
background-image: -webkit-linear-gradient(left, #564aa3 0%, #7266ba 100%);
background-image: -o-linear-gradient(left, #564aa3 0%, #7266ba 100%);
background-image: linear-gradient(to right, #564aa3 0%, #7266ba 100%);
background-repeat: repeat-x;
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#FF564AA3', endColorstr='#FF7266BA', GradientType=1); }
.topnavbar .navbar-nav > .open > a, .topnavbar .navbar-nav > .open > a:hover, .topnavbar .navbar-nav > .open > a:focus {
box-shadow: 0 -3px 0 rgba(255, 255, 255, 0.5) inset; }
.topnavbar .navbar-nav > li > a,
.topnavbar .navbar-nav > .open > a {
color: #fff; }
.topnavbar .navbar-nav > li > a:hover,
.topnavbar .navbar-nav > li > a:focus,
.topnavbar .navbar-nav > .open > a:hover, .topnavbar .navbar-nav > .open > a:focus {
color: #312a5d; } }
/* ========================================================================
Component: sidebar
========================================================================== */
.sidebar {
background-color: #3a3f51; }
.sidebar .nav-heading {
color: #919DA8; }
.sidebar .nav > li > a,
.sidebar .nav > li > .nav-item {
color: #e1e2e3; }
.sidebar .nav > li > a:focus,
.sidebar .nav > li > a:hover,
.sidebar .nav > li > .nav-item:focus, .sidebar .nav > li > .nav-item:hover {
color: #9289ca; }
.sidebar .nav > li > a > em,
.sidebar .nav > li > .nav-item > em {
    color: inherit; }
.sidebar .nav > li.active,
.sidebar .nav > li.active > a,
.sidebar .nav > li.active .nav,
.sidebar .nav > li.open,
.sidebar .nav > li.open > a,
.sidebar .nav > li.open .nav {
background-color: #383d4e;
color: #9289ca; }
.sidebar .nav > li.active > a > em,
.sidebar .nav > li.open > a > em {
color: #9289ca; }
.sidebar .nav > li.active {
border-left-color: #9289ca; }
.sidebar-subnav {
background-color: #3a3f51; }
.sidebar-subnav > .sidebar-subnav-header {
color: #e1e2e3; }
.sidebar-subnav > li > a,
.sidebar-subnav > li > .nav-item {
color: #e1e2e3; }
.sidebar-subnav > li > a:focus,
.sidebar-subnav > li > a:hover,
.sidebar-subnav > li > .nav-item:focus, .sidebar-subnav > li > .nav-item:hover {
color: #9289ca; }
.sidebar-subnav > li.active > a,
.sidebar-subnav > li.active > .nav-item {
color: #9289ca; }
.sidebar-subnav > li.active > a:after,
.sidebar-subnav > li.active > .nav-item:after {
border-color: #9289ca;
background-color: #9289ca; }
/* ========================================================================
Component: offsidebar
========================================================================== */
.offsidebar {
  border-left: 1px solid #cccccc;
background-color: #fff;
color: #515253; }
| {
"content_hash": "4ebbeae33d66c24d914bd0ea912d58be",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 125,
"avg_line_length": 42.456,
"alnum_prop": 0.5423026191822121,
"repo_name": "amorwilliams/gst",
"id": "b99c457eeaac52abca5d65c9804ccc1f431986ef",
"size": "5307",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "client/app/css/theme-g.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "434853"
},
{
"name": "HTML",
"bytes": "37691"
},
{
"name": "JavaScript",
"bytes": "2276457"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "48281"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<script src="../../../../media/media-file.js"></script>
<script src="../../../../media/video-paint-test.js"></script>
<script src="../../../../media/media-controls.js"></script>
<script src="../../../../media/overflow-menu.js"></script>
<meta name="color-scheme" content="light dark">
<body style="padding-top: 200px; padding-left: 100px">
<video controls></video>
<script>
var video = document.querySelector("video");
enableTestMode(video);
video.onloadeddata = function () {
var overflowMenu = getOverflowMenuButton(video);
var coords = elementCoordinates(overflowMenu);
clickAtCoordinates(coords[0], coords[1]);
// Simulate a 'tab' keypress to validate :focus-visible styles.
eventSender.keyDown('\u0009');
};
setVideoSrcAndWaitForFirstFrame("../../../../media/content/test.ogv");
</script>
</body>
| {
"content_hash": "cfcab5a23d9ad1274622d5bf1b9591b1",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 78,
"avg_line_length": 40.52173913043478,
"alnum_prop": 0.6083690987124464,
"repo_name": "chromium/chromium",
"id": "141fbef70f9c16bb2a493ec7442f6df4a8478219",
"size": "932",
"binary": false,
"copies": "6",
"ref": "refs/heads/main",
"path": "third_party/blink/web_tests/fast/forms/color-scheme/media/video-overlay-menu.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
class CudaDriverError(Exception):
pass
class CudaRuntimeError(Exception):
pass
class CudaSupportError(ImportError):
pass
class NvvmError(Exception):
def __str__(self):
return '\n'.join(map(str, self.args))
class NvvmSupportError(ImportError):
pass
| {
"content_hash": "ff4ba802b87ec93fa04897712e6c185d",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 45,
"avg_line_length": 14.947368421052632,
"alnum_prop": 0.6936619718309859,
"repo_name": "gmarkall/numba",
"id": "c82cebd545364a8bc95900d0ffb2dbee6c625f82",
"size": "284",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "numba/cuda/cudadrv/error.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "6761"
},
{
"name": "C",
"bytes": "625527"
},
{
"name": "C++",
"bytes": "85627"
},
{
"name": "Cuda",
"bytes": "214"
},
{
"name": "GDB",
"bytes": "101"
},
{
"name": "HTML",
"bytes": "3464"
},
{
"name": "Python",
"bytes": "8467098"
},
{
"name": "Shell",
"bytes": "8286"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
namespace work.bacome.imapclient
{
internal partial class cBytesCursor
{
internal bool GetRFC822Domain(out string rDomain)
{
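            // Accept either a domain-literal, or a dot-separated sequence of atoms with optional CFWS around the dots.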
if (GetDomainLiteral(out rDomain)) return true;
var lBookmark = Position;
List<string> lParts = new List<string>();
string lPart;
SkipRFC822CFWS();
if (!GetRFC822Atom(out lPart)) { Position = lBookmark; rDomain = null; return false; }
lParts.Add(lPart);
while (true)
{
SkipRFC822CFWS();
lBookmark = Position;
if (!SkipByte(cASCII.DOT)) break;
SkipRFC822CFWS();
if (!GetRFC822Atom(out lPart)) { Position = lBookmark; break; }
lParts.Add(lPart);
}
rDomain = string.Join(".", lParts);
return true;
}
}
} | {
"content_hash": "f324fbbfe0d31c32a350768f15e8a794",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 98,
"avg_line_length": 27.314285714285713,
"alnum_prop": 0.5251046025104602,
"repo_name": "bacome/imapclient",
"id": "0a148a434ac3f1132e96cd5c475cee6a350743e2",
"size": "958",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "imapclient/imapclient/support/bytescursor/rfc822/getdomain.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "35"
},
{
"name": "C#",
"bytes": "2089852"
}
],
"symlink_target": ""
} |
"""Simple textbox editing widget with Emacs-like keybindings."""
import curses, ascii
def rectangle(win, uly, ulx, lry, lrx):
"""Draw a rectangle with corners at the provided upper-left
and lower-right coordinates.
"""
win.vline(uly+1, ulx, curses.ACS_VLINE, lry - uly - 1)
win.hline(uly, ulx+1, curses.ACS_HLINE, lrx - ulx - 1)
win.hline(lry, ulx+1, curses.ACS_HLINE, lrx - ulx - 1)
win.vline(uly+1, lrx, curses.ACS_VLINE, lry - uly - 1)
win.addch(uly, ulx, curses.ACS_ULCORNER)
win.addch(uly, lrx, curses.ACS_URCORNER)
win.addch(lry, lrx, curses.ACS_LRCORNER)
win.addch(lry, ulx, curses.ACS_LLCORNER)
class Textbox:
"""Editing widget using the interior of a window object.
Supports the following Emacs-like key bindings:
Ctrl-A Go to left edge of window.
Ctrl-B Cursor left, wrapping to previous line if appropriate.
Ctrl-D Delete character under cursor.
Ctrl-E Go to right edge (stripspaces off) or end of line (stripspaces on).
Ctrl-F Cursor right, wrapping to next line when appropriate.
Ctrl-G Terminate, returning the window contents.
Ctrl-H Delete character backward.
Ctrl-J Terminate if the window is 1 line, otherwise insert newline.
Ctrl-K If line is blank, delete it, otherwise clear to end of line.
Ctrl-L Refresh screen.
Ctrl-N Cursor down; move down one line.
Ctrl-O Insert a blank line at cursor location.
Ctrl-P Cursor up; move up one line.
Move operations do nothing if the cursor is at an edge where the movement
is not possible. The following synonyms are supported where possible:
KEY_LEFT = Ctrl-B, KEY_RIGHT = Ctrl-F, KEY_UP = Ctrl-P, KEY_DOWN = Ctrl-N
KEY_BACKSPACE = Ctrl-h
"""
def __init__(self, win):
self.win = win
(self.maxy, self.maxx) = win.getmaxyx()
self.maxy = self.maxy - 1
self.maxx = self.maxx - 1
self.stripspaces = 1
self.lastcmd = None
win.keypad(1)
def _end_of_line(self, y):
"Go to the location of the first blank on the given line."
last = self.maxx
while 1:
if ascii.ascii(self.win.inch(y, last)) != ascii.SP:
last = last + 1
break
elif last == 0:
break
last = last - 1
return last
def do_command(self, ch):
"Process a single editing command."
(y, x) = self.win.getyx()
self.lastcmd = ch
if ascii.isprint(ch):
if y < self.maxy or x < self.maxx:
# The try-catch ignores the error we trigger from some curses
# versions by trying to write into the lowest-rightmost spot
# in the window.
try:
self.win.addch(ch)
except curses.error:
pass
elif ch == ascii.SOH: # ^a
self.win.move(y, 0)
elif ch in (ascii.STX,curses.KEY_LEFT, ascii.BS,curses.KEY_BACKSPACE):
if x > 0:
self.win.move(y, x-1)
elif y == 0:
pass
elif self.stripspaces:
self.win.move(y-1, self._end_of_line(y-1))
else:
self.win.move(y-1, self.maxx)
if ch in (ascii.BS, curses.KEY_BACKSPACE):
self.win.delch()
elif ch == ascii.EOT: # ^d
self.win.delch()
elif ch == ascii.ENQ: # ^e
if self.stripspaces:
self.win.move(y, self._end_of_line(y))
else:
self.win.move(y, self.maxx)
elif ch in (ascii.ACK, curses.KEY_RIGHT): # ^f
if x < self.maxx:
self.win.move(y, x+1)
elif y == self.maxy:
pass
else:
self.win.move(y+1, 0)
elif ch == ascii.BEL: # ^g
return 0
elif ch == ascii.NL: # ^j
if self.maxy == 0:
return 0
elif y < self.maxy:
self.win.move(y+1, 0)
elif ch == ascii.VT: # ^k
if x == 0 and self._end_of_line(y) == 0:
self.win.deleteln()
else:
# first undo the effect of self._end_of_line
self.win.move(y, x)
self.win.clrtoeol()
elif ch == ascii.FF: # ^l
self.win.refresh()
elif ch in (ascii.SO, curses.KEY_DOWN): # ^n
if y < self.maxy:
self.win.move(y+1, x)
if x > self._end_of_line(y+1):
self.win.move(y+1, self._end_of_line(y+1))
elif ch == ascii.SI: # ^o
self.win.insertln()
elif ch in (ascii.DLE, curses.KEY_UP): # ^p
if y > 0:
self.win.move(y-1, x)
if x > self._end_of_line(y-1):
self.win.move(y-1, self._end_of_line(y-1))
return 1
def gather(self):
"Collect and return the contents of the window."
result = ""
for y in range(self.maxy+1):
self.win.move(y, 0)
stop = self._end_of_line(y)
if stop == 0 and self.stripspaces:
continue
for x in range(self.maxx+1):
if self.stripspaces and x == stop:
break
result = result + chr(ascii.ascii(self.win.inch(y, x)))
if self.maxy > 0:
result = result + "\n"
return result
def edit(self, validate=None):
"Edit in the widget window and collect the results."
while 1:
ch = self.win.getch()
if validate:
ch = validate(ch)
if not ch:
continue
if not self.do_command(ch):
break
self.win.refresh()
return self.gather()
if __name__ == '__main__':
def test_editbox(stdscr):
ncols, nlines = 9, 4
uly, ulx = 15, 20
stdscr.addstr(uly-2, ulx, "Use Ctrl-G to end editing.")
win = curses.newwin(nlines, ncols, uly, ulx)
rectangle(stdscr, uly-1, ulx-1, uly + nlines, ulx + ncols)
stdscr.refresh()
return Textbox(win).edit()
str = curses.wrapper(test_editbox)
print 'Contents of text box:', repr(str)
| {
"content_hash": "4e1db8ad18018077697c103dd26c50ec",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 83,
"avg_line_length": 38.225433526011564,
"alnum_prop": 0.5024950854377741,
"repo_name": "MalloyPower/parsing-python",
"id": "28d78dd5cd382cae86f8b0b94b6820555a800681",
"size": "6613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "front-end/testsuite-python-lib/Python-2.4/Lib/curses/textpad.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1963"
},
{
"name": "Lex",
"bytes": "238458"
},
{
"name": "Makefile",
"bytes": "4513"
},
{
"name": "OCaml",
"bytes": "412695"
},
{
"name": "Python",
"bytes": "17319"
},
{
"name": "Rascal",
"bytes": "523063"
},
{
"name": "Yacc",
"bytes": "429659"
}
],
"symlink_target": ""
} |
package io.dropwizard.jersey.optional;
import io.dropwizard.jersey.AbstractJerseyTest;
import io.dropwizard.jersey.DropwizardResourceConfig;
import org.junit.jupiter.api.Test;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Application;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.OptionalDouble;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assertions.failBecauseExceptionWasNotThrown;
public class OptionalDoubleMessageBodyWriterTest extends AbstractJerseyTest {
@Override
protected Application configure() {
return DropwizardResourceConfig.forTesting()
.register(new EmptyOptionalExceptionMapper())
.register(OptionalDoubleReturnResource.class);
}
@Test
public void presentOptionalsReturnTheirValue() throws Exception {
assertThat(target("optional-return")
.queryParam("id", "1").request()
.get(Double.class))
.isEqualTo(1);
}
@Test
public void presentOptionalsReturnTheirValueWithResponse() throws Exception {
assertThat(target("optional-return/response-wrapped")
.queryParam("id", "1").request()
.get(Double.class))
.isEqualTo(1);
}
@Test
public void absentOptionalsThrowANotFound() throws Exception {
try {
target("optional-return").request().get(Double.class);
failBecauseExceptionWasNotThrown(WebApplicationException.class);
} catch (WebApplicationException e) {
assertThat(e.getResponse().getStatus()).isEqualTo(404);
}
}
@Test
public void valueSetIgnoresDefault() {
assertThat(target("optional-return/default").queryParam("id", "1").request().get(Double.class))
.isEqualTo(target("optional-return/double/default").queryParam("id", "1").request().get(Double.class))
.isEqualTo(1);
}
@Test
public void valueNotSetReturnsDefault() {
assertThat(target("optional-return/default").request().get(Double.class))
.isEqualTo(target("optional-return/double/default").request().get(Double.class))
.isEqualTo(0);
}
@Test
public void valueEmptyReturnsDefault() {
assertThat(target("optional-return/default").queryParam("id", "").request().get(Double.class))
.isEqualTo(target("optional-return/double/default").queryParam("id", "").request().get(Double.class))
.isEqualTo(0);
}
@Test
public void valueInvalidReturns404() {
assertThatThrownBy(() -> target("optional-return/default").queryParam("id", "invalid").request().get(Double.class))
.isInstanceOf(NotFoundException.class);
assertThatThrownBy(() -> target("optional-return/double/default").queryParam("id", "invalid").request().get(Double.class))
.isInstanceOf(NotFoundException.class);
}
@Path("optional-return")
@Produces(MediaType.TEXT_PLAIN)
public static class OptionalDoubleReturnResource {
@GET
public OptionalDouble showWithQueryParam(@QueryParam("id") OptionalDouble id) {
return id;
}
@POST
public OptionalDouble showWithFormParam(@FormParam("id") OptionalDouble id) {
return id;
}
@Path("response-wrapped")
@GET
public Response showWithQueryParamResponse(@QueryParam("id") OptionalDouble id) {
return Response.ok(id).build();
}
@Path("default")
@GET
public OptionalDouble showWithQueryParamAndDefaultValue(@QueryParam("id") @DefaultValue("0") OptionalDouble id) {
return id;
}
@Path("double/default")
@GET
public Double showWithLongQueryParamAndDefaultValue(@QueryParam("id") @DefaultValue("0") Double id) {
return id;
}
}
}
| {
"content_hash": "941b94c70547f1653c772257dfa62655",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 130,
"avg_line_length": 35.71666666666667,
"alnum_prop": 0.6630891273915073,
"repo_name": "mosoft521/dropwizard",
"id": "3decd90e08affb294369bc4d8c91d5096906c0c8",
"size": "4286",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "dropwizard-jersey/src/test/java/io/dropwizard/jersey/optional/OptionalDoubleMessageBodyWriterTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "FreeMarker",
"bytes": "992"
},
{
"name": "HTML",
"bytes": "680"
},
{
"name": "Java",
"bytes": "2607539"
},
{
"name": "Shell",
"bytes": "6865"
}
],
"symlink_target": ""
} |