text
stringlengths 2
99.9k
| meta
dict |
---|---|
<div class="apiDetail">
<div>
<h2><span>Function(event, treeId, treeNode)</span><span class="path">setting.callback.</span>onMouseUp</h2>
<h3>概述<span class="h3_info">[ 依赖 <span class="highlight_green">jquery.ztree.core</span> 核心 js ]</span></h3>
<div class="desc">
<p></p>
<div class="longdesc">
<p>用于捕获 zTree 上鼠标按键松开后的事件回调函数</p>
<p class="highlight_red">如果设置了 setting.callback.beforeMouseUp 方法,且返回 false,将无法触发 onMouseUp 事件回调函数。</p>
<p>默认值:null</p>
</div>
</div>
<h3>Function 参数说明</h3>
<div class="desc">
<h4><b>event</b><span>js event 对象</span></h4>
<p>标准的 js event 对象</p>
<h4 class="topLine"><b>treeId</b><span>String</span></h4>
<p>对应 zTree 的 <b class="highlight_red">treeId</b>,便于用户操控</p>
<h4 class="topLine"><b>treeNode</b><span>JSON</span></h4>
<p>鼠标按键松开时所在节点的 JSON 数据对象</p>
<p class="highlight_red">如果不在节点上,则返回 null</p>
</div>
<h3>setting & function 举例</h3>
<h4>1. 每次鼠标按键松开后, 弹出鼠标所在节点的 tId、name 的信息</h4>
<pre xmlns=""><code>function zTreeOnMouseUp(event, treeId, treeNode) {
alert(treeNode ? treeNode.tId + ", " + treeNode.name : "isRoot");
};
var setting = {
callback: {
onMouseUp: zTreeOnMouseUp
}
};
......</code></pre>
</div>
</div> | {
"pile_set_name": "Github"
} |
package io.github.detekt.psi
import org.jetbrains.kotlin.com.intellij.psi.PsiFile
import java.io.File
import java.nio.file.Path
import java.nio.file.Paths
// File-name suffixes recognized for Kotlin sources and Kotlin scripts.
const val KOTLIN_SUFFIX = ".kt"
const val KOTLIN_SCRIPT_SUFFIX = ".kts"

/** The plain file name of this [PsiFile]: everything after the last path separator in [PsiFile.getName]. */
val PsiFile.fileName: String
    get() = name.substringAfterLast(File.separatorChar)
/**
 * Returns this file's name with a trailing Kotlin suffix removed.
 *
 * `.kts` is checked before `.kt` because every `.kts` name also ends in a
 * substring match for `.kt` handling; a name with neither suffix is returned unchanged.
 */
fun PsiFile.fileNameWithoutSuffix(): String {
    val name = this.fileName
    return when {
        name.endsWith(KOTLIN_SCRIPT_SUFFIX) -> name.removeSuffix(KOTLIN_SCRIPT_SUFFIX)
        else -> name.removeSuffix(KOTLIN_SUFFIX)
    }
}
/** The path of this file as reported by [PsiFile.getName], wrapped as a [Path]. */
fun PsiFile.absolutePath(): Path = Paths.get(name)
/**
 * The relative path previously attached to this file via the [RELATIVE_PATH] user-data key.
 *
 * @throws IllegalStateException if no relative path was stored on this file.
 */
fun PsiFile.relativePath(): Path {
    val value = getUserData(RELATIVE_PATH)
    // Fixed grammar in the failure message ("an relative" -> "a relative").
    checkNotNull(value) { "KtFile '$name' expected to have a relative path." }
    return Paths.get(value)
}
| {
"pile_set_name": "Github"
} |
/*
* (c) Copyright 2018 Palantir Technologies Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.nexus.db.sql;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Pattern;
import javax.annotation.concurrent.GuardedBy;
import org.apache.commons.lang3.Validate;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
import com.palantir.common.exception.PalantirRuntimeException;
import com.palantir.exception.PalantirSqlException;
import com.palantir.logsafe.Preconditions;
import com.palantir.nexus.db.DBType;
import com.palantir.nexus.db.SqlClause;
// class extended by other projects
@SuppressWarnings("WeakerAccess")
public class SQLString extends BasicSQLString {
private static final Pattern ALL_WORD_CHARS_REGEX = Pattern.compile("^[a-zA-Z_0-9\\.\\-]*$"); //$NON-NLS-1$
private static final String UNREGISTERED_SQL_COMMENT = "/* UnregisteredSQLString */";
/**
* Callers changing the value of cachedUnregistered and
* cachedKeyed should be synchronized on this lock. Readers
* do not need to - the values of those maps are not guaranteed
* to be in sync with each other.
*/
private static final Object cacheLock = new Object();
//TODO (DCohen): Combine cachedKeyed and cachedUnregistered maps into one.
/**
* Rewritten unregistered queries.
* Key: String with all whitespace removed
* Value: the new SQLString to run instead.
*/
@GuardedBy("cacheLock")
private static volatile ImmutableMap<String, FinalSQLString> cachedUnregistered = ImmutableMap.of();
/** Rewritten registered queries. */
@GuardedBy("cacheLock")
private static volatile ImmutableMap<String, FinalSQLString> cachedKeyed = ImmutableMap.of();
/** All registered queries. */
protected static final ConcurrentMap<String, FinalSQLString> registeredValues = new ConcurrentHashMap<String, FinalSQLString>();
/** DB-specific registered queries. */
protected static final ConcurrentMap<String, ConcurrentMap<DBType, FinalSQLString>> registeredValuesOverride =
new ConcurrentHashMap<>();
/** Hook invoked whenever a cached/rewritten query is actually used; see {@code callbackOnUse}. */
protected interface OnUseCallback {
    /** Called with the SQLString that was just looked up and is about to be used. */
    void noteUse(SQLString used);
}
//by default, no callback. This is set in OverridableSQLString
protected static OnUseCallback callbackOnUse = used -> {
//do nothing
};
/** Like {@link java.util.concurrent.Callable} but with a typed checked exception. */
protected interface CallableCheckedException<T, E extends Exception> {
    /** Computes a result, possibly throwing the declared checked exception type. */
    T call() throws E;
}
/**
 * Runs the provided callable while holding the lock for the override caches.
 * Callers replacing the caches should hold this lock.
 *
 * @param callable the work to perform under {@code cacheLock}
 * @return the callable's result
 * @throws E whatever checked exception the callable declares
 */
protected static <T, E extends Exception> T runWithCacheLock(CallableCheckedException<T, E> callable) throws E {
    synchronized (cacheLock) {
        return callable.call();
    }
}
/**
 * Stores a query under the given key for later lookup via {@code getByKey}.
 *
 * @param key Unique identifier for this query
 * @param sql The query that will be stored
 * @return a handle exposing the key of the registered query
 */
public static RegisteredSQLString registerQuery(String key, String sql) {
    SQLString candidate = new SQLString(key, sql, null);
    FinalSQLString wrapped = new FinalSQLString(candidate);
    FinalSQLString previous = registeredValues.put(key, wrapped);
    // Re-registration is tolerated only if the SQL is unchanged.
    assert previous == null || previous.delegate.equals(wrapped.delegate)
            : "newVal: " + wrapped + " oldVal: " + previous; //$NON-NLS-1$ //$NON-NLS-2$
    return new RegisteredSQLString(candidate);
}
/**
 * Same as the overloaded registerQuery, but overrides the query for a specific DBType.
 * @param key Unique identifier representing this query
 * @param sql The query that will be stored
 * @param dbTypes Override the query for this list of DBTypes. These are not allowed to be null.
 */
public static void registerQuery(String key, String sql, DBType... dbTypes) {
    Validate.notEmpty(dbTypes, "DbType list may not be empty"); //$NON-NLS-1$
    // Register the same SQL once per database type; nulls are rejected because a
    // null dbType would silently fall back to the general registration.
    for (DBType type : dbTypes) {
        Preconditions.checkNotNull(type, "dbType must not be null"); //$NON-NLS-1$
        registerQuery(key, sql, type);
    }
}
/**
 * Same as the overloaded registerQuery, but overrides the query for a specific DBType.
 * @param key Unique identifier representing this query
 * @param sql The query that will be stored
 * @param dbType Override the query for this DBType.
 * If this value is null, it is the same as <code>registerQuery(key, sql)</code>
 * @return a handle exposing the key of the registered query
 */
public static RegisteredSQLString registerQuery(String key, String sql, DBType dbType) {
    if (dbType == null) {
        return registerQuery(key, sql);
    }
    SQLString sqlString = new SQLString(key, sql, dbType);
    // computeIfAbsent replaces the manual putIfAbsent/null-check dance and avoids
    // allocating a throwaway map when an override table already exists for this key.
    ConcurrentMap<DBType, FinalSQLString> dbTypeHash =
            registeredValuesOverride.computeIfAbsent(key, unused -> new ConcurrentHashMap<>());
    FinalSQLString newVal = new FinalSQLString(sqlString);
    FinalSQLString oldVal = dbTypeHash.put(dbType, newVal);
    // Re-registration is tolerated only if the SQL for this (key, dbType) is unchanged.
    assert null == oldVal || newVal.delegate.equals(oldVal.delegate) :
            "newVal: " + newVal + " oldVal: " + oldVal; //$NON-NLS-1$ //$NON-NLS-2$
    return new RegisteredSQLString(sqlString);
}
/** Returns true if a general (non-dbType-specific) query was registered under {@code key}. */
public static boolean isQueryRegistered(String key) {
    return registeredValues.containsKey(key);
}
/**
 * A query that has been registered with <code>registerQuery</code> can be looked up by its key. This factory
 * returns a SQLString object representing the registered query. The stored query may have been overridden in the
 * database and the object returned will reflect that. If the query is not overridden in the Database, we will check
 * the dbType override first, then use the general registered query This factory is used by <code>SQL</code> to find
 * a registered query.
 *
 * @param key The key that was passed to <code>registerQuery</code>
 * @param dbType Look for queries registered with this override first
 * @return a SQLString object representing the stored query
 */
@SuppressWarnings("GuardedByChecker")
static FinalSQLString getByKey(final String key, DBType dbType) {
    assert isValidKey(key) : "Keys only consist of word characters"; //$NON-NLS-1$
    assert registeredValues.containsKey(key) || registeredValuesOverride.containsKey(key) :
            "Couldn't find SQLString key: " + key + ", dbtype " + dbType; //$NON-NLS-1$ //$NON-NLS-2$
    // Fast path: a rewritten/overridden version of this keyed query was cached.
    FinalSQLString cached = cachedKeyed.get(key);
    if (null != cached) {
        callbackOnUse.noteUse((SQLString) cached.delegate);
        return cached;
    }
    // Next precedence: a DB-type-specific registration, if one exists for this key.
    ConcurrentMap<DBType, FinalSQLString> dbTypeHash = registeredValuesOverride.get(key);
    if (null != dbTypeHash) {
        FinalSQLString dbOverride = dbTypeHash.get(dbType);
        if (null != dbOverride) {
            return dbOverride;
        }
    }
    // Fall back to the general registration.
    FinalSQLString valueForKey = registeredValues.get(key);
    if (valueForKey == null) {
        // NullSQLString defers the failure: it throws only when getQuery() is called.
        return new FinalSQLString(new NullSQLString(key));
    }
    return valueForKey;
}
/** Convenience overload: derives the DBType from the connection, then delegates to {@link #getByKey(String, DBType)}. */
static FinalSQLString getByKey(String key, Connection connection) throws PalantirSqlException {
    DBType type = DBType.getTypeFromConnection(connection);
    return getByKey(key, type);
}
/** Returns true if {@code key} consists only of word characters, dots, and dashes (see ALL_WORD_CHARS_REGEX). */
public static boolean isValidKey(final String key) {
    return ALL_WORD_CHARS_REGEX.matcher(key).matches();
}
/**
 * A Factory used by the SQL class to turn a string sql query into an SQLString object.
 * This may just contain the sql given, or the given SQL may be overriden in the database and the object returned
 * will reflect that new SQL from the DB.
 *
 * @param sql The string to be used in a query
 * @return a SQLString object representing the given SQL
 */
@SuppressWarnings("GuardedByChecker")
static FinalSQLString getUnregisteredQuery(String sql) {
    assert !isValidKey(sql) : "Unregistered Queries should not look like keys"; //$NON-NLS-1$
    // The unregistered-rewrite cache is keyed on the whitespace-stripped canonical form.
    FinalSQLString cached = cachedUnregistered.get(canonicalizeStringAndRemoveWhitespaceEntirely(sql));
    if (null != cached) {
        callbackOnUse.noteUse((SQLString) cached.delegate);
        return cached;
    }
    return new FinalSQLString(new SQLString(sql));
}
/**
 * Constructor for unregistered (dynamic) SQL; prefixes the SQL with an identifying comment.
 * @param sql The string to be used in a query
 */
private SQLString(String sql) {
    super(null, makeCommentString(null, null) + sql);
}
/**
 * Constructor for registered SQL; prefixes the SQL with a comment naming the key.
 * @param key The query key
 * @param sql The string to be used in a query
 * @param dbType This is only used in making the SQL comment
 */
protected SQLString(String key, String sql, DBType dbType) {
    super(key, makeCommentString(key, dbType) + sql);
}
/**
 * Creates an appropriate comment string for the beginning of a SQL statement.
 * @param keyString Identifier for the SQL; will be null if the SQL is unregistered
 * @param dbType The database type; null when not dbType-specific
 * @return a comment of the form "/* ... *&#47; " to prepend to the SQL
 */
private static String makeCommentString(String keyString, DBType dbType) {
    final String registrationState = (keyString != null)
            ? "SQLString Identifier: " + keyString //$NON-NLS-1$
            : "UnregisteredSQLString"; //$NON-NLS-1$
    final String dbTypeString = (dbType != null)
            ? " dbType: " + dbType //$NON-NLS-1$
            : ""; //$NON-NLS-1$
    return "/* " + registrationState + dbTypeString + " */ "; //$NON-NLS-1$ //$NON-NLS-2$
}
/** Canonicalizes {@code sql} and strips ALL whitespace; used as the unregistered-cache key. */
@VisibleForTesting
static String canonicalizeStringAndRemoveWhitespaceEntirely(String sql) {
    return canonicalizeString(sql, true);
}
/**
 * Cleans up whitespace, any trailing semicolons, and prefixed comments that a string is
 * unregistered, in order to come up with a canonical representation of this sql string.
 * Note that for backwards compatibility, this method condenses contiguous whitespace
 * into a single space. For example, "foo\t \nbar;" becomes "foo bar".
 *
 * @param sql the raw SQL text
 * @return the canonical form with whitespace condensed (not removed)
 */
public static String canonicalizeString(String sql) {
    return canonicalizeString(sql, false);
}
/**
 * Single-pass canonicalizer: copies {@code original} into a working buffer while
 * (1) skipping every occurrence of the UnregisteredSQLString comment,
 * (2) condensing runs of whitespace to one space (or dropping them entirely when
 *     {@code removeAllWhitespaceEntirely} is true), and
 * (3) trimming trailing whitespace and trailing semicolons.
 * The buffer is written in place: cleanedIdx always trails originalIdx, so reusing
 * the original text as the initial buffer contents is safe.
 */
private static String canonicalizeString(String original, boolean removeAllWhitespaceEntirely) {
    StringBuilder cleanedString = new StringBuilder(original);
    int originalIdx = 0;
    int cleanedIdx = 0;
    int firstUnregisteredIdx = cleanedString.indexOf(UNREGISTERED_SQL_COMMENT);
    while (originalIdx < original.length()) {
        char originalChar = original.charAt(originalIdx);
        if (originalIdx == firstUnregisteredIdx) {
            // Skip the marker comment wholesale and look for the next occurrence.
            originalIdx += UNREGISTERED_SQL_COMMENT.length();
            firstUnregisteredIdx = original.indexOf(UNREGISTERED_SQL_COMMENT, originalIdx);
        } else if (Character.isWhitespace(originalChar)) {
            // Emit at most one space per run, and never at the very start of the output.
            if (cleanedIdx != 0
                    && !Character.isWhitespace(cleanedString.charAt(cleanedIdx - 1))
                    && !removeAllWhitespaceEntirely) {
                cleanedString.setCharAt(cleanedIdx, ' ');
                ++cleanedIdx;
            }
            ++originalIdx;
        } else {
            // Ordinary character: copy through.
            cleanedString.setCharAt(cleanedIdx, originalChar);
            ++cleanedIdx;
            ++originalIdx;
        }
    }
    // Trim one trailing space (whitespace runs can leave exactly one).
    if (cleanedIdx > 0 && Character.isWhitespace(cleanedString.charAt(cleanedIdx - 1))) {
        --cleanedIdx;
    }
    // Strip trailing semicolons, each possibly preceded by a single condensed space.
    while (cleanedIdx > 0 && cleanedString.charAt(cleanedIdx - 1) == ';') {
        --cleanedIdx;
        if (cleanedIdx > 0 && Character.isWhitespace(cleanedString.charAt(cleanedIdx - 1))) {
            --cleanedIdx;
        }
    }
    return cleanedString.substring(0, cleanedIdx);
}
/**
 * Placeholder returned by getByKey when no query is registered under a key.
 * Construction succeeds; the failure is deferred until getQuery() is called.
 */
static class NullSQLString extends SQLString {
    /** The key that failed to resolve, reported in the exception message. */
    final String key;
    NullSQLString(String key) {
        super(""); //$NON-NLS-1$
        this.key = key;
    }
    @Override
    public String getQuery() {
        throw new PalantirRuntimeException("Could not find any registered query value for key: " + //$NON-NLS-1$
                key + "\nThe key is potentially an unregistered query."); //$NON-NLS-1$
    }
}
/** Routine for registering all the possible combinations of queries
 * given a set of keyed clauses. This is used to build up a map in
 * {@code map} from which clients can decode the queries string to use for
 * the clauses they want. The clauses will always occur in the generated
 * queries in the order listed the {@code clauses} array.
 *
 * @param baseKey the basic type of search
 * @param map a mapping from a set of restrictive clause names and the base
 * key to a distinguishing query name.
 * @param sqlFormat format string which takes one argument which is the
 * conjunction of clauses (from <code>clauses</code>) which modify the
 * query variant
 * @param type database type the search is for, null for all DBs
 * @param clauses clauses (in the same order as their keys) which can narrow
 * the search
 */
public static void registerQueryVariants(String baseKey,
        Map<Set<String>, String> map, String sqlFormat,
        DBType type, List<SqlClause> clauses) {
    Validate.noNullElements(clauses);
    // Enumerate every subset of clause indices via Guava's powerSet.
    Set<Integer> indexes = new HashSet<Integer>();
    for (int i = 0; i < clauses.size(); i++) {
        indexes.add(i);
    }
    Set<Set<Integer>> variants = Sets.powerSet(indexes);
    for (Set<Integer> variantSet : variants) {
        // Sort so clause order in the key/WHERE clause matches the input order.
        List<Integer> variant = new ArrayList<Integer>(variantSet);
        Collections.sort(variant);
        StringBuilder key = new StringBuilder(baseKey);
        StringBuilder whereClause = new StringBuilder();
        Set<String> keySet = new HashSet<String>();
        for (int i : variant) {
            SqlClause clause = clauses.get(i);
            keySet.add(clause.getKey());
            key.append("_").append(clause.getKey()); //$NON-NLS-1$
            whereClause.append(" AND ").append(clause.getClause()); //$NON-NLS-1$
        }
        keySet.add(baseKey);
        // The variant SQL is the format string with all chosen clauses ANDed in.
        String sql = String.format(sqlFormat, whereClause);
        String keyString = key.toString();
        registerQuery(keyString, sql, type);
        map.put(keySet, keyString);
    }
}
/**
 * Object returned when a query is registered.
 * Its only method is getKey(), because we can't actually rely on the SQL itself inside this
 * object (since it might be overridden).
 * @author dcohen
 *
 */
public static class RegisteredSQLString {
    private final BasicSQLString delegate;
    /**
     * Should only be called inside SQLString because this class essentially verifies that we've
     * checked for updates.
     */
    private RegisteredSQLString(BasicSQLString sqlstring) {
        this.delegate = sqlstring;
    }
    @Override
    public String toString() {
        return "RegisteredSQLString [delegate=" + delegate + "]"; //$NON-NLS-1$ //$NON-NLS-2$
    }
    /** Returns the registration key of the wrapped query. */
    public String getKey() {
        return delegate.getKey();
    }
}
/** Wraps an already-resolved FinalSQLString's delegate as a RegisteredSQLString handle. */
public static RegisteredSQLString getRegisteredQueryByKey(FinalSQLString key) {
    return new RegisteredSQLString(key.delegate);
}
/** Snapshot of the unregistered-rewrite cache; reads are allowed without cacheLock (see cacheLock docs). */
@SuppressWarnings("GuardedByChecker")
protected static ImmutableMap<String, FinalSQLString> getCachedUnregistered() {
    return cachedUnregistered;
}
/** Replaces the unregistered-rewrite cache; takes cacheLock as required for writers. */
protected static void setCachedUnregistered(ImmutableMap<String, FinalSQLString> cachedUnregistered) {
    synchronized (cacheLock) {
        SQLString.cachedUnregistered = cachedUnregistered;
    }
}
/** Snapshot of the keyed-rewrite cache; reads are allowed without cacheLock (see cacheLock docs). */
@SuppressWarnings("GuardedByChecker")
protected static ImmutableMap<String, FinalSQLString> getCachedKeyed() {
    return cachedKeyed;
}
/** Replaces the keyed-rewrite cache; takes cacheLock as required for writers. */
protected static void setCachedKeyed(ImmutableMap<String, FinalSQLString> cachedKeyed) {
    synchronized (cacheLock) {
        SQLString.cachedKeyed = cachedKeyed;
    }
}
}
| {
"pile_set_name": "Github"
} |
/******************************************************************************
*
* Copyright (C) 2014 - 2015 Xilinx, Inc. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* Use of the Software is limited solely to applications:
* (a) running on a Xilinx device, or
* (b) that interact with a Xilinx device through a bus or interconnect.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* XILINX BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
* OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
* Except as contained in this notice, the name of the Xilinx shall not be used
* in advertising or otherwise to promote the sale, use or other dealings in
* this Software without prior written authorization from Xilinx.
*
******************************************************************************/
/*****************************************************************************/
/**
*
* @file xil_cache.h
*
* @addtogroup a53_64_cache_apis Cortex A53 64bit Processor Cache Functions
*
* Cache functions provide access to cache related operations such as flush
* and invalidate for instruction and data caches. It gives option to perform
* the cache operations on a single cacheline, a range of memory and an entire
* cache.
*
* @{
*
* <pre>
* MODIFICATION HISTORY:
*
* Ver Who Date Changes
* ----- ---- -------- -----------------------------------------------
* 5.00 pkp 05/29/14 First release
* </pre>
*
******************************************************************************/
#ifndef XIL_CACHE_H
#define XIL_CACHE_H
#include "xil_types.h"
#ifdef __cplusplus
extern "C" {
#endif
/************************** Constant Definitions *****************************/
/* Mask and shift for the L1 data prefetch control bit-field (bits 15:13).
 * NOTE(review): the exact control register this targets is not visible here —
 * confirm against the Cortex-A53 TRM / Xil_ConfigureL1Prefetch implementation. */
#define L1_DATA_PREFETCH_CONTROL_MASK 0xE000
#define L1_DATA_PREFETCH_CONTROL_SHIFT 13
/************************** Function Prototypes ******************************/
/* Data-cache maintenance: enable/disable, and invalidate/flush for the whole
 * cache, an address range, or a single line (adr/len are byte addresses/lengths). */
void Xil_DCacheEnable(void);
void Xil_DCacheDisable(void);
void Xil_DCacheInvalidate(void);
void Xil_DCacheInvalidateRange(INTPTR adr, INTPTR len);
void Xil_DCacheInvalidateLine(INTPTR adr);
void Xil_DCacheFlush(void);
void Xil_DCacheFlushRange(INTPTR adr, INTPTR len);
void Xil_DCacheFlushLine(INTPTR adr);
/* Instruction-cache maintenance: enable/disable and invalidate variants. */
void Xil_ICacheEnable(void);
void Xil_ICacheDisable(void);
void Xil_ICacheInvalidate(void);
void Xil_ICacheInvalidateRange(INTPTR adr, INTPTR len);
void Xil_ICacheInvalidateLine(INTPTR adr);
/* Configures the L1 data prefetch depth via the field masked above. */
void Xil_ConfigureL1Prefetch(u8 num);
#ifdef __cplusplus
}
#endif
#endif
/**
* @} End of "addtogroup a53_64_cache_apis".
*/
| {
"pile_set_name": "Github"
} |
// AMD module exporting the Todos list component. This is pre-compiled JSX
// output (plain React.createElement calls); edit the JSX source, not this file.
define(function (require, exports, module) {
    var React = require('react')
    var Todo = require('./todo')
    // Renders <ul id="todo-list"> containing one <Todo> per entry in props.todos.
    var Todos = React.createClass({displayName: "Todos",
        render: function() {
            return (
                React.createElement("ul", {id: "todo-list"},
                    this.props.todos.map(function(todo) {
                        // Spreads each todo's fields as props and forwards the
                        // update/remove callbacks unchanged.
                        // NOTE(review): no `key` prop is set on the list items;
                        // React will warn — confirm todos have a stable id to use.
                        return React.createElement(Todo, React.__spread({}, todo, {updateTodo: this.props.updateTodo, removeTodo: this.props.removeTodo}))
                    }.bind(this))
                )
            )
        }
    })
    module.exports = Todos
});
"pile_set_name": "Github"
} |
/*
*
* cblas_dsyr2.c
* This program is a C interface to dsyr2.
* Written by Keita Teranishi
* 4/6/1998
*
*/
#include "cblas.h"
#include "cblas_f77.h"
/* C interface to the BLAS symmetric rank-2 update DSYR2:
 *   A := alpha*x*y' + alpha*y*x' + A, with A an N-by-N symmetric matrix.
 * Row-major input is handled by flipping the Uplo flag before calling the
 * column-major Fortran routine (a symmetric matrix's row-major upper triangle
 * is its column-major lower triangle, and vice versa). */
void cblas_dsyr2(const CBLAS_LAYOUT layout, const CBLAS_UPLO Uplo,
        const int N, const double alpha, const double *X,
        const int incX, const double *Y, const int incY, double *A,
        const int lda)
{
    char UL;
/* F77_CHAR / F77_INT adapt argument types to the Fortran ABI when defined;
 * otherwise the macros pass the C values straight through. */
#ifdef F77_CHAR
    F77_CHAR F77_UL;
#else
#define F77_UL &UL
#endif
#ifdef F77_INT
    F77_INT F77_N=N, F77_incX=incX, F77_incY=incY, F77_lda=lda;
#else
#define F77_N N
#define F77_incX incX
#define F77_incY incY
#define F77_lda lda
#endif
    /* Globals used by cblas_xerbla to report the caller's layout context. */
    extern int CBLAS_CallFromC;
    extern int RowMajorStrg;
    RowMajorStrg = 0;
    CBLAS_CallFromC = 1;
    if (layout == CblasColMajor)
    {
        /* Column-major: pass Uplo through unchanged. */
        if (Uplo == CblasLower) UL = 'L';
        else if (Uplo == CblasUpper) UL = 'U';
        else
        {
            cblas_xerbla(2, "cblas_dsyr2","Illegal Uplo setting, %d\n",Uplo );
            CBLAS_CallFromC = 0;
            RowMajorStrg = 0;
            return;
        }
#ifdef F77_CHAR
        F77_UL = C2F_CHAR(&UL);
#endif
        F77_dsyr2(F77_UL, &F77_N, &alpha, X, &F77_incX, Y, &F77_incY, A,
                &F77_lda);
    } else if (layout == CblasRowMajor)
    {
        RowMajorStrg = 1;
        /* Row-major: swap the triangle so the Fortran routine sees the
         * equivalent column-major storage. */
        if (Uplo == CblasLower) UL = 'U';
        else if (Uplo == CblasUpper) UL = 'L';
        else
        {
            cblas_xerbla(2, "cblas_dsyr2","Illegal Uplo setting, %d\n",Uplo );
            CBLAS_CallFromC = 0;
            RowMajorStrg = 0;
            return;
        }
#ifdef F77_CHAR
        F77_UL = C2F_CHAR(&UL);
#endif
        F77_dsyr2(F77_UL, &F77_N, &alpha, X, &F77_incX, Y, &F77_incY, A,
                &F77_lda);
    } else cblas_xerbla(1, "cblas_dsyr2", "Illegal layout setting, %d\n", layout);
    CBLAS_CallFromC = 0;
    RowMajorStrg = 0;
    return;
}
| {
"pile_set_name": "Github"
} |
using System;
using HA4IoT.Contracts.Areas;
using HA4IoT.Contracts.Sensors;
namespace HA4IoT.Sensors.MotionDetectors
{
    /// <summary>
    /// Extension methods for resolving motion detector components from an area.
    /// </summary>
    public static class MotionDetectorExtensions
    {
        /// <summary>
        /// Looks up the <see cref="IMotionDetector"/> component registered on
        /// <paramref name="area"/> under the id "&lt;areaId&gt;.&lt;id&gt;".
        /// </summary>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="area"/> is null.</exception>
        public static IMotionDetector GetMotionDetector(this IArea area, Enum id)
        {
            if (area == null) throw new ArgumentNullException(nameof(area));
            return area.GetComponent<IMotionDetector>($"{area.Id}.{id}");
        }
    }
}
| {
"pile_set_name": "Github"
} |
/**
* Copyright 2016 SmartBear Software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.swagger.sample.exception;
/**
 * Exception carrying a numeric status code alongside the detail message.
 */
public class ApiException extends Exception {
    private final int code;

    /**
     * @param code status code describing the failure
     * @param msg human-readable detail message (exposed via {@link #getMessage()})
     */
    public ApiException(int code, String msg) {
        super(msg);
        this.code = code;
    }

    /**
     * Returns the status code supplied at construction time.
     * (Previously the field was write-only, making it unreadable by handlers.)
     */
    public int getCode() {
        return code;
    }
}
| {
"pile_set_name": "Github"
} |
/* Copyright (C) 2008-2016 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.variable
import cc.factorie.la._
import cc.factorie.util.{DoubleSeq, SparseDoubleSeq}
import scala.util.Random
/** A Tensor containing only non-negative entries. These are also the basis for Proportions.
    @author Andrew McCallum */
trait Masses extends Tensor {
  // Total of all entries; implementations keep this incrementally updated.
  def massTotal: Double
  override def sum = massTotal
  /** Get a normalized entry in this Masses, which can be interpreted as a probability. */
  def pr(index:Int): Double = {
    val mt = massTotal
    // With zero total mass, fall back to a uniform distribution.
    if (mt == 0.0) 1.0 / length else apply(index) / mt
  }
  def logpr(index:Int) = math.log(pr(index))
  override def sampleIndex(implicit r:Random): Int = sampleIndex(massTotal)(r)
  override def stringPrefix = "Masses"
  // Cap toString output to keep large tensors readable.
  def maxToStringLength = 10
  override def toString = this.asSeq.take(maxToStringLength).mkString(stringPrefix+"(", ",", if (length > 10) "...)" else ")")
}
// TODO Should we get rid of all these combinations and make users extend the combinations themselves? -akm
trait Masses1 extends Tensor1 with Masses
trait Masses2 extends Tensor2 with Masses
trait Masses3 extends Tensor3 with Masses
trait Masses4 extends Tensor4 with Masses
/** A Masses that provides a protected var for holding the massTotal.
    @author Andrew McCallum */
trait MassesWithTotal extends Masses {
  // Incrementally-maintained total; subclasses must keep it consistent with the entries.
  protected var _massTotal: Double = 0.0
  def massTotal = _massTotal
  final override def sum = _massTotal
  // Direct assignment is forbidden so the running total cannot silently drift.
  override def update(i:Int, v:Double): Unit = throw new Error("Masses cannot be modified by update; use += instead.")
}
/** A class for arbitrary tensors to become Masses. E.g.: GrowableSparseHashMasses1
    All mutators keep _massTotal consistent with the wrapped tensor's contents.
    @author Dirk Weissenborn */
trait WrappedTensorMasses[A <: Tensor] extends WrappedTensor[A] with MassesWithTotal {
  // initialize massTotal; masses must be non-negative
  require(tensor.forallActiveElements { case (_:Int,v:Double) => v >= 0 } )
  _massTotal = tensor.sum
  final override def zero(): Unit = { tensor.zero(); _massTotal = 0.0 } //this might be a little slow
  final override def +=(i:Int, v:Double): Unit = { _massTotal += v; tensor.+=(i,v)/*; assert(_massTotal >= 0.0); assert(tensor(i) >= 0.0)*/ }
  final override def update(i: Int, v: Double): Unit = {this += (i,v - this(i))}
  final override def *=(d:Double): Unit = { _massTotal *= d; tensor*=d}
  final override def *=(ds:DoubleSeq): Unit = { tensor*=ds;_massTotal=tensor.sum}
  final override def /=(ds:DoubleSeq): Unit = { tensor/=ds;_massTotal=tensor.sum}
  // BUG FIX: the tensor gains f*ds, so the total must grow by ds.sum * f, not ds.sum
  // (matches DenseMassesWithTotal, which adds v*f per element).
  final override def +=(ds:DoubleSeq, f:Double): Unit = { tensor.+=(ds,f); _massTotal += ds.sum * f }
  final override def :=(v:Double): Unit = { tensor.:=(v); _massTotal = v * tensor.activeDomainSize }
  // BUG FIX: := replaces the contents, so the total is reassigned, not accumulated
  // (the old `+=` double-counted the previous total).
  final override def :=(ds:DoubleSeq): Unit = { tensor.:=(ds); _massTotal = tensor.sum}
  final override def :=(ds: Array[Double]) : Unit = { this := new DenseTensor1(ds) }
}
class WrappedTensorMasses1[A <: Tensor1](val tensor:A) extends WrappedTensorMasses[A] with WrappedTensor1[A] with Masses1
class WrappedTensorMasses2[A <: Tensor2](val tensor:A) extends WrappedTensorMasses[A] with WrappedTensor2[A] with Masses2
class WrappedTensorMasses3[A <: Tensor3](val tensor:A) extends WrappedTensorMasses[A] with WrappedTensor3[A] with Masses3
class WrappedTensorMasses4[A <: Tensor4](val tensor:A) extends WrappedTensorMasses[A] with WrappedTensor4[A] with Masses4
/** A DenseTensor Masses that provides a protected var for holding the massTotal.
    Every mutator updates _massTotal in lockstep with the dense _values array.
    @author Andrew McCallum */
trait DenseMassesWithTotal extends DenseTensor with MassesWithTotal {
  final override def zero(): Unit = { super.zero(); _massTotal = 0.0 }
  final override def +=(i:Int, v:Double): Unit = { _massTotal += v; _values(i) += v; assert(_massTotal >= 0.0); assert(_values(i) >= 0.0) }
  // update(i,v) is expressed as an increment so the non-negativity asserts in += apply.
  final override def update(i: Int, v: Double): Unit = {this += (i,v - this(i))}
  // The scaling/assignment ops recompute _massTotal from scratch during the single pass.
  final override def *=(d:Double): Unit = { _massTotal = 0.0; val l = length; var i = 0; var v = 0.0; while (i < l) { v = _values(i)*d; _massTotal += v; _values(i) = v; i += 1 }}
  final override def *=(ds:DoubleSeq): Unit = { _massTotal = 0.0; val l = length; var i = 0; var v = 0.0; while (i < l) { v = _values(i)*ds(i); _massTotal += v; _values(i) = v; i += 1 }}
  final override def /=(ds:DoubleSeq): Unit = { _massTotal = 0.0; val l = length; var i = 0; var v = 0.0; while (i < l) { v = _values(i)/ds(i); _massTotal += v; _values(i) = v; i += 1 }}
  final override def +=(ds:DoubleSeq, f:Double): Unit = ds.foreachElement((i,v) => { _values(i) += v*f; _massTotal += v*f })
  final override def :=(v:Double): Unit = { java.util.Arrays.fill(_values, v); _massTotal = v * length }
  final override def :=(ds:DoubleSeq): Unit = { _massTotal = 0.0; val l = length; var v = 0.0; var i = 0; while (i < l) { v = ds(i); assert(v >= 0.0); _values(i) = v; _massTotal += v; i += 1 } }
  final override def :=(ds: Array[Double]) : Unit = { this := new DenseTensor1(ds) }
}
//trait DenseMasses extends ... (gather += in here, but we need a DenseTensor class also)
// Dense 1-dimensional Masses; optional constructor fills every entry with uniformValue.
class DenseMasses1(val dim1:Int) extends DenseTensorLike1 with Masses1 with DenseMassesWithTotal {
  def this(dim1:Int, uniformValue:Double) = { this(dim1); this := uniformValue }
  override def copy: DenseMasses1 = { val c = new DenseMasses1(dim1); c := this; c }
}
// Dense 2-dimensional Masses; the (i,j) increment keeps _massTotal in sync like the 1-d case.
class DenseMasses2(val dim1:Int, val dim2:Int) extends DenseTensorLike2 with Masses2 with DenseMassesWithTotal {
  override def +=(i:Int, j:Int, v:Double): Unit = { _massTotal += v; val index = singleIndex(i, j); _values(index) += v; assert(_massTotal >= 0.0); assert(_values(index) >= 0.0) }
  override def copy: DenseMasses2 = { val c = new DenseMasses2(dim1, dim2); c := this; c }
}
// Dense 3-dimensional Masses.
class DenseMasses3(val dim1:Int, val dim2:Int, val dim3:Int) extends DenseTensorLike3 with Masses3 with DenseMassesWithTotal {
  override def +=(i:Int, j:Int, k:Int, v:Double): Unit = { _massTotal += v; val index = singleIndex(i, j, k); _values(index) += v; assert(_massTotal >= 0.0); assert(_values(index) >= 0.0) }
  override def copy: DenseMasses3 = { val c = new DenseMasses3(dim1, dim2, dim3); c := this; c }
}
// Dense 4-dimensional Masses.
class DenseMasses4(val dim1:Int, val dim2:Int, val dim3:Int, val dim4:Int) extends DenseTensorLike4 with Masses4 with DenseMassesWithTotal {
  override def +=(i:Int, j:Int, k:Int, l:Int, v:Double): Unit = { _massTotal += v; val index = singleIndex(i, j, k, l); _values(index) += v; assert(_massTotal >= 0.0); assert(_values(index) >= 0.0) }
  override def copy: DenseMasses4 = { val c = new DenseMasses4(dim1, dim2, dim3, dim4); c := this; c }
}
// All entries equal, so sampling is uniform regardless of the mass total.
class UniformMasses1(dim1:Int, uniformValue:Double) extends UniformTensor1(dim1, uniformValue) with Masses1 with UniformTensor {
  override val massTotal = dim1 * uniformValue
  override def sampleIndex(massTotal:Double)(implicit r:Random): Int = r.nextInt(dim1)
}
// All mass sits at a single index, so sampling is deterministic.
class SingletonMasses1(dim1:Int, singleIndex:Int, singleValue:Double) extends SingletonTensor1(dim1, singleIndex, singleValue) with Masses1 {
  def massTotal = singleValue
  override def sampleIndex(massTotal:Double)(implicit r:Random): Int = singleIndex
}
// 2-d singleton masses: all mass at (singleIndex1, singleIndex2).
// NOTE(review): sampleIndex returns `singleIndex`, which must be a member inherited
// from SingletonTensor2 (the flattened index) — confirm it exists and is the intent.
class SingletonMasses2(dim1:Int, dim2: Int, singleIndex1:Int, singleIndex2: Int, singleValue:Double) extends SingletonTensor2(dim1, dim2, singleIndex1, singleIndex2, singleValue) with Masses2 {
  def massTotal = singleValue
  override def sampleIndex(massTotal:Double)(implicit r:Random): Int = singleIndex
}
// Growable dense masses; += also maintains _massTotal and asserts non-negativity.
class GrowableDenseMasses1(sizeProxy:Iterable[Any]) extends ProxyGrowableDenseTensor1(sizeProxy) with Masses1 with MassesWithTotal {
  override def +=(i:Int, v:Double): Unit = { _massTotal += v; super.+=(i, v); assert(_massTotal >= 0.0, "_masstotal is negative: "+_massTotal); assert(_values(i) >= 0.0, "Negative value " + i + " " + _values(i)) }
}
// Uniform masses whose dimension tracks the size of an external proxy collection.
class GrowableUniformMasses1(val sizeProxy:Iterable[Any], val uniformValue:Double) extends Masses1 with UniformTensor /*Like1 with Masses1*/ {
  def activeDomain = new cc.factorie.util.RangeIntSeq(0, dim1)
  // dim1 and massTotal are recomputed from the proxy on every call, so they grow with it.
  def dim1 = sizeProxy.size
  def massTotal = sizeProxy.size * uniformValue
  override def sampleIndex(massTotal:Double)(implicit r:Random): Int = r.nextInt(dim1)
}
/** Masses1 backed by a sorted sparse collection of integer counts.
    Efficient when only a few of the dim1 indices hold mass.  All
    increments must be whole numbers, since the underlying storage
    holds Int counts.
    @author Andrew McCallum */
class SortedSparseCountsMasses1(val dim1:Int) extends SparseDoubleSeq with Masses1 {
  val sparseCounts = new cc.factorie.util.SortedSparseCounts(dim1, 4, false)
  def activeDomainSize = sparseCounts.activeIndices.length
  def dot(t: DoubleSeq): Double = throw new Error("No efficient dot for " + this.getClass.getName)
  def isDense = false
  override def foreachActiveElement(f: (Int, Double) => Unit) { sparseCounts.activeIndices.foreach(i => f(i, this(i))) }
  def activeDomain = sparseCounts.activeIndices
  //def activeDomain = activeDomain1
  /** Mass at index, or 0.0 when no counts have been added yet. */
  def apply(index:Int): Double = {
    if (sparseCounts.countsTotal == 0) 0.0
    else sparseCounts.countOfIndex(index).toDouble
  }
  /** Add incr to index; incr must be a whole number because counts are Ints. */
  override def +=(index:Int, incr:Double): Unit = {
    assert(incr.floor == incr)
    sparseCounts.incrementCountAtIndex(index, incr.toInt)
  }
  def update(i: Int, v: Double) = this += (i, v - this(i))
  override def zero(): Unit = sparseCounts.clear()
  def massTotal = sparseCounts.countsTotal.toDouble
  /** Sample an index with probability proportional to its count.  Falls back
      to a uniform draw over [0,dim1) when no counts are present.  The
      massTotal argument is ignored; the integer countsTotal is used instead. */
  override def sampleIndex(massTotal:Double)(implicit r:Random): Int = {
    if (sparseCounts.countsTotal == 0) r.nextInt(dim1) // If there are no counts, use a uniform distribution
    else {
      // Draw sampledMass uniformly from [0, countsTotal) and return the first
      // position whose cumulative count strictly exceeds it.
      val sampledMass = r.nextInt(sparseCounts.countsTotal)
      var i = 0; var sum = sparseCounts.countAtPosition(0)
      // Bug fix: the condition was previously (sum < sampledMass), which can
      // never advance past an exact cumulative-count boundary -- e.g. with
      // counts [1,1] position 1 was never sampled.  The cumulative count must
      // strictly exceed the drawn value before we stop.
      while (sum <= sampledMass) {
        i += 1
        sum += sparseCounts.countAtPosition(i)
      }
      sparseCounts.indexAtPosition(i)
    }
  }
}
// Masses Variables
///** The domain type for MassesVar. No real functionality, just used as a marker.
// @author Andrew McCallum */
//trait MassesDomain extends TensorDomain with Domain[Masses]
//
///** The domain for MassesVar. No real functionality, just used as a marker.
// @author Andrew McCallum */
//object MassesDomain extends MassesDomain
/** An abstract variable with value Masses.
    @author Andrew McCallum */
trait MassesVar extends TensorVar {
  // Refine TensorVar's value type so that this variable's value is always a Masses.
  type Value <: Masses
  def value: Value
  //def domain: MassesDomain
}
/** An abstract mutable variable with value Masses.
    @author Andrew McCallum */
trait MutableMassesVar extends MutableTensorVar with MassesVar {
  // Mutable counterpart of MassesVar; the value is still constrained to Masses.
  type Value <: Masses
}
/** A variable with value Masses.
    @author Andrew McCallum */
class MassesVariable extends MutableMassesVar {
  //def domain = MassesDomain
  type Value = Masses
  /** Construct with an initial value; set is performed without a DiffList (null). */
  def this(initialValue:Masses) = { this(); set(initialValue)(null) }
}
/** Convenience methods for constructing MassesVariables with a Masses1 of various types.
    @author Andrew McCallum */
object MassesVariable {
  /** A MassesVariable over a fixed-size dense Masses1, initialized to zero mass. */
  def dense(dim1:Int) = new MassesVariable(new DenseMasses1(dim1))
  /** A MassesVariable over a fixed-size dense Masses1 with every entry set to uniformValue. */
  def dense(dim1:Int, uniformValue:Double) = new MassesVariable(new DenseMasses1(dim1, uniformValue))
  /** A MassesVariable whose dimensionality grows with sizeProxy. */
  def growableDense(sizeProxy:Iterable[Any]) = new MassesVariable(new GrowableDenseMasses1(sizeProxy))
  /** A MassesVariable over growable uniform-valued masses. */
  def growableUniform(sizeProxy:Iterable[Any], uniformValue:Double) = new MassesVariable(new GrowableUniformMasses1(sizeProxy, uniformValue))
  /** A MassesVariable backed by sparse integer counts; efficient when few indices hold mass. */
  def sortedSparseCounts(dim1:Int) = new MassesVariable(new SortedSparseCountsMasses1(dim1))
  // Explicit result types added to the implicit conversions: implicits
  // without a declared result type are fragile under separate compilation
  // and are deprecated/disallowed in newer Scala versions.  The inferred
  // types are unchanged, so this is source- and binary-compatible.
  implicit def toMasses1(tensor:Tensor1): WrappedTensorMasses1 = new WrappedTensorMasses1(tensor)
  implicit def toMasses2(tensor:Tensor2): WrappedTensorMasses2 = new WrappedTensorMasses2(tensor)
  implicit def toMasses3(tensor:Tensor3): WrappedTensorMasses3 = new WrappedTensorMasses3(tensor)
  implicit def toMasses4(tensor:Tensor4): WrappedTensorMasses4 = new WrappedTensorMasses4(tensor)
}
| {
"pile_set_name": "Github"
} |
// Webpack build configuration (webpack 3-era API).
// NOTE(review): devServer.contentBase and webpack.optimize.UglifyJsPlugin were
// removed in webpack 4+ / webpack-dev-server 4 -- confirm the pinned webpack
// version before upgrading dependencies.
const { resolve } = require('path');
const webpack = require('webpack');
module.exports = {
  context: __dirname,
  entry: [
    // 'react-hot-loader/patch',
    // 'webpack/hot/only-dev-server',
    './src/index.tsx'
  ],
  output: {
    path: resolve(__dirname, 'build'), // directory where the bundled output is written
    filename: "react-dragger-layout.js", // name of the emitted bundle file
    publicPath: "/"
  },
  devServer: {
    contentBase: resolve(__dirname, 'build'),
    hot: true,
    publicPath: '/',
  },
  resolve: {
    // Add '.ts' and '.tsx' as resolvable extensions.
    extensions: [".ts", ".tsx", ".js", ".json"]
  },
  module: {
    rules: [
      {
        test: /\.jsx?$/,
        use: [
          'babel-loader',
        ],
        exclude: /node_modules/
      },
      {
        test: /\.css$/,
        use: [
          'style-loader', 'css-loader'
        ],
        exclude: /node_modules/
      },
      // All files with a '.ts' or '.tsx' extension will be handled by 'awesome-typescript-loader'.
      { test: /\.tsx?$/, loader: "awesome-typescript-loader" },
      // All output '.js' files will have any sourcemaps re-processed by 'source-map-loader'.
      { enforce: "pre", test: /\.js$/, loader: "source-map-loader" }
    ],
  },
  plugins: [
    // new webpack.HotModuleReplacementPlugin(),
    // new webpack.NamedModulesPlugin(),
    new webpack.optimize.UglifyJsPlugin({
      compress: {
        warnings: false,
        comparisons: false,
      },
      output: {
        comments: false,
        // Turned on because emoji and regex is not minified properly using default
        // https://github.com/facebookincubator/create-react-app/issues/2488
        ascii_only: true,
      },
    }),
    new webpack.DefinePlugin({
      'process.env': {
        NODE_ENV: JSON.stringify(process.env.NODE_ENV),
      },
    }),
  ],
  // devtool: "cheap-eval-source-map",
};
"pile_set_name": "Github"
} |
# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild
# Author: Pablo Escobar Lopez
# Swiss Institute of Bioinformatics
# Biozentrum - University of Basel
easyblock = 'ConfigureMake'
name = 'NLopt'
version = '2.4.2'
homepage = 'http://ab-initio.mit.edu/wiki/index.php/NLopt'
description = """ NLopt is a free/open-source library for nonlinear optimization,
providing a common interface for a number of different free optimization routines
available online as well as original implementations of various other algorithms. """
toolchain = {'name': 'GCCcore', 'version': '7.3.0'}
toolchainopts = {'pic': True}  # position-independent code, needed for the shared library
# NOTE(review): the ab-initio.mit.edu URLs are no longer maintained; NLopt now
# lives at https://github.com/stevengj/nlopt -- verify this download still resolves.
source_urls = ['http://ab-initio.mit.edu/nlopt/']
sources = [SOURCELOWER_TAR_GZ]
checksums = ['8099633de9d71cbc06cd435da993eb424bbcdbded8f803cdaa9fb8c6e09c8e89']
# binutils is only needed at build time when using the GCCcore toolchain
builddependencies = [('binutils', '2.30')]
# build the shared library; skip the MATLAB/Octave bindings
configopts = '--enable-shared --without-matlab --without-octave'
sanity_check_paths = {
    'files': ['lib/libnlopt.a', 'lib/libnlopt.%s' % SHLIB_EXT, 'include/nlopt.h'],
    'dirs': ['lib/pkgconfig'],
}
moduleclass = 'numlib'
| {
"pile_set_name": "Github"
} |
/** @file
This file contains the IGD OpRegion/Software ACPI Reference
Code.
It defines the methods to enable/disable output switching,
store display switching and LCD brightness BIOS control
and return valid addresses for all display device encoders
present in the system, etc.
Copyright (c) 2018 - 2019, Intel Corporation. All rights reserved.<BR>
SPDX-License-Identifier: BSD-2-Clause-Patent
**/
External(\ECST, MethodObj)
External(\PBCL, MethodObj)
External(HDOS, MethodObj)
External(\ECON, IntObj)
External(\PNHM, IntObj)
External(OSYS, IntObj)
External(CPSC)
External(\GUAM, MethodObj)
External(DSEN)
External(S0ID)
Name(TMP1,Package() {0xFFFFFFFF})
Name(TMP2,Package() {0xFFFFFFFF, 0xFFFFFFFF})
Name(TMP3,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF})
Name(TMP4,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF})
Name(TMP5,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF})
Name(TMP6,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF})
Name(TMP7,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF})
Name(TMP8,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF})
Name(TMP9,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF})
Name(TMPA,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF })
Name(TMPB,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF})
Name(TMPC,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF})
Name(TMPD,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF})
Name(TMPE,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF})
Name(TMPF,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF})
Name(TMPG,Package() {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
0xFFFFFFFF, 0xFFFFFFFF})
// Enable/Disable Output Switching. In WIN2K/WINXP, _DOS = 0 will
// get called during initialization to prepare for an ACPI Display
// Switch Event. During an ACPI Display Switch, the OS will call
// _DOS = 2 immediately after a Notify=0x80 to temporarily disable
// all Display Switching. After ACPI Display Switching is complete,
// the OS will call _DOS = 0 to re-enable ACPI Display Switching.
//
// Arg0 bits [2:0] select the display-switch / brightness control policy;
// they are cached in DSEN for later use.
Method(_DOS,1)
{
  //
  // Store Display Switching and LCD brightness BIOS control bit
  //
  Store(And(Arg0,7),DSEN)
  // When BIOS-controlled switching is selected (_DOS[1:0] == 0), invoke the
  // platform hook HDOS if the platform provides one.
  If(LEqual(And(Arg0, 0x3), 0)) // If _DOS[1:0]=0
  {
    If(CondRefOf(HDOS))
    {
      HDOS()
    }
  }
}
//
// Enumerate the Display Environment. This method will return
// valid addresses for all display device encoders present in the
// system. The Miniport Driver will reject the addresses for every
// encoder that does not have an attached display device. After
// enumeration is complete, the OS will call the _DGS methods
// during a display switch only for the addresses accepted by the
// Miniport Driver. For hot-insertion and removal of display
// devices, a re-enumeration notification will be required so the
// address of the newly present display device will be accepted by
// the Miniport Driver.
//
Method(_DOD,0)
{
If (LEqual(IPTP,1)) {
//
// Increment number of devices if IPU is enabled
//
Store(1, NDID)
} Else {
Store(0, NDID)
}
If(LNotEqual(DIDL, Zero))
{
Store(SDDL(DIDL),DID1)
}
If(LNotEqual(DDL2, Zero))
{
Store(SDDL(DDL2),DID2)
}
If(LNotEqual(DDL3, Zero))
{
Store(SDDL(DDL3),DID3)
}
If(LNotEqual(DDL4, Zero))
{
Store(SDDL(DDL4),DID4)
}
If(LNotEqual(DDL5, Zero))
{
Store(SDDL(DDL5),DID5)
}
If(LNotEqual(DDL6, Zero))
{
Store(SDDL(DDL6),DID6)
}
If(LNotEqual(DDL7, Zero))
{
Store(SDDL(DDL7),DID7)
}
If(LNotEqual(DDL8, Zero))
{
Store(SDDL(DDL8),DID8)
}
If(LNotEqual(DDL9, Zero))
{
Store(SDDL(DDL9),DID9)
}
If(LNotEqual(DD10, Zero))
{
Store(SDDL(DD10),DIDA)
}
If(LNotEqual(DD11, Zero))
{
Store(SDDL(DD11),DIDB)
}
If(LNotEqual(DD12, Zero))
{
Store(SDDL(DD12),DIDC)
}
If(LNotEqual(DD13, Zero))
{
Store(SDDL(DD13),DIDD)
}
If(LNotEqual(DD14, Zero))
{
Store(SDDL(DD14),DIDE)
}
If(LNotEqual(DD15, Zero))
{
Store(SDDL(DD15),DIDF)
}
//
// Enumerate the encoders. Note that for
// current silicon, the maximum number of encoders
// possible is 15.
//
If(LEqual(NDID,1))
{
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMP1,0))
} Else {
Store(Or(0x10000,DID1),Index(TMP1,0))
}
Return(TMP1)
}
If(LEqual(NDID,2))
{
Store(Or(0x10000,DID1),Index(TMP2,0))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMP2,1))
} Else {
Store(Or(0x10000,DID2),Index(TMP2,1))
}
Return(TMP2)
}
If(LEqual(NDID,3))
{
Store(Or(0x10000,DID1),Index(TMP3,0))
Store(Or(0x10000,DID2),Index(TMP3,1))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMP3,2))
} Else {
Store(Or(0x10000,DID3),Index(TMP3,2))
}
Return(TMP3)
}
If(LEqual(NDID,4))
{
Store(Or(0x10000,DID1),Index(TMP4,0))
Store(Or(0x10000,DID2),Index(TMP4,1))
Store(Or(0x10000,DID3),Index(TMP4,2))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMP4,3))
} Else {
Store(Or(0x10000,DID4),Index(TMP4,3))
}
Return(TMP4)
}
If(LEqual(NDID,5))
{
Store(Or(0x10000,DID1),Index(TMP5,0))
Store(Or(0x10000,DID2),Index(TMP5,1))
Store(Or(0x10000,DID3),Index(TMP5,2))
Store(Or(0x10000,DID4),Index(TMP5,3))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMP5,4))
} Else {
Store(Or(0x10000,DID5),Index(TMP5,4))
}
Return(TMP5)
}
If(LEqual(NDID,6))
{
Store(Or(0x10000,DID1),Index(TMP6,0))
Store(Or(0x10000,DID2),Index(TMP6,1))
Store(Or(0x10000,DID3),Index(TMP6,2))
Store(Or(0x10000,DID4),Index(TMP6,3))
Store(Or(0x10000,DID5),Index(TMP6,4))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMP6,5))
} Else {
Store(Or(0x10000,DID6),Index(TMP6,5))
}
Return(TMP6)
}
If(LEqual(NDID,7))
{
Store(Or(0x10000,DID1),Index(TMP7,0))
Store(Or(0x10000,DID2),Index(TMP7,1))
Store(Or(0x10000,DID3),Index(TMP7,2))
Store(Or(0x10000,DID4),Index(TMP7,3))
Store(Or(0x10000,DID5),Index(TMP7,4))
Store(Or(0x10000,DID6),Index(TMP7,5))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMP7,6))
} Else {
Store(Or(0x10000,DID7),Index(TMP7,6))
}
Return(TMP7)
}
If(LEqual(NDID,8))
{
Store(Or(0x10000,DID1),Index(TMP8,0))
Store(Or(0x10000,DID2),Index(TMP8,1))
Store(Or(0x10000,DID3),Index(TMP8,2))
Store(Or(0x10000,DID4),Index(TMP8,3))
Store(Or(0x10000,DID5),Index(TMP8,4))
Store(Or(0x10000,DID6),Index(TMP8,5))
Store(Or(0x10000,DID7),Index(TMP8,6))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMP8,7))
} Else {
Store(Or(0x10000,DID8),Index(TMP8,7))
}
Return(TMP8)
}
If(LEqual(NDID,9))
{
Store(Or(0x10000,DID1),Index(TMP9,0))
Store(Or(0x10000,DID2),Index(TMP9,1))
Store(Or(0x10000,DID3),Index(TMP9,2))
Store(Or(0x10000,DID4),Index(TMP9,3))
Store(Or(0x10000,DID5),Index(TMP9,4))
Store(Or(0x10000,DID6),Index(TMP9,5))
Store(Or(0x10000,DID7),Index(TMP9,6))
Store(Or(0x10000,DID8),Index(TMP9,7))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMP9,8))
} Else {
Store(Or(0x10000,DID9),Index(TMP9,8))
}
Return(TMP9)
}
If(LEqual(NDID,0x0A))
{
Store(Or(0x10000,DID1),Index(TMPA,0))
Store(Or(0x10000,DID2),Index(TMPA,1))
Store(Or(0x10000,DID3),Index(TMPA,2))
Store(Or(0x10000,DID4),Index(TMPA,3))
Store(Or(0x10000,DID5),Index(TMPA,4))
Store(Or(0x10000,DID6),Index(TMPA,5))
Store(Or(0x10000,DID7),Index(TMPA,6))
Store(Or(0x10000,DID8),Index(TMPA,7))
Store(Or(0x10000,DID9),Index(TMPA,8))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMPA,9))
} Else {
Store(Or(0x10000,DIDA),Index(TMPA,9))
}
Return(TMPA)
}
If(LEqual(NDID,0x0B))
{
Store(Or(0x10000,DID1),Index(TMPB,0))
Store(Or(0x10000,DID2),Index(TMPB,1))
Store(Or(0x10000,DID3),Index(TMPB,2))
Store(Or(0x10000,DID4),Index(TMPB,3))
Store(Or(0x10000,DID5),Index(TMPB,4))
Store(Or(0x10000,DID6),Index(TMPB,5))
Store(Or(0x10000,DID7),Index(TMPB,6))
Store(Or(0x10000,DID8),Index(TMPB,7))
Store(Or(0x10000,DID9),Index(TMPB,8))
Store(Or(0x10000,DIDA),Index(TMPB,9))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMPB,10))
} Else {
Store(Or(0x10000,DIDB),Index(TMPB,10))
}
Return(TMPB)
}
If(LEqual(NDID,0x0C))
{
Store(Or(0x10000,DID1),Index(TMPC,0))
Store(Or(0x10000,DID2),Index(TMPC,1))
Store(Or(0x10000,DID3),Index(TMPC,2))
Store(Or(0x10000,DID4),Index(TMPC,3))
Store(Or(0x10000,DID5),Index(TMPC,4))
Store(Or(0x10000,DID6),Index(TMPC,5))
Store(Or(0x10000,DID7),Index(TMPC,6))
Store(Or(0x10000,DID8),Index(TMPC,7))
Store(Or(0x10000,DID9),Index(TMPC,8))
Store(Or(0x10000,DIDA),Index(TMPC,9))
Store(Or(0x10000,DIDB),Index(TMPC,10))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMPC,11))
} Else {
Store(Or(0x10000,DIDC),Index(TMPC,11))
}
Return(TMPC)
}
If(LEqual(NDID,0x0D))
{
Store(Or(0x10000,DID1),Index(TMPD,0))
Store(Or(0x10000,DID2),Index(TMPD,1))
Store(Or(0x10000,DID3),Index(TMPD,2))
Store(Or(0x10000,DID4),Index(TMPD,3))
Store(Or(0x10000,DID5),Index(TMPD,4))
Store(Or(0x10000,DID6),Index(TMPD,5))
Store(Or(0x10000,DID7),Index(TMPD,6))
Store(Or(0x10000,DID8),Index(TMPD,7))
Store(Or(0x10000,DID9),Index(TMPD,8))
Store(Or(0x10000,DIDA),Index(TMPD,9))
Store(Or(0x10000,DIDB),Index(TMPD,10))
Store(Or(0x10000,DIDC),Index(TMPD,11))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMPD,12))
} Else {
Store(Or(0x10000,DIDD),Index(TMPD,12))
}
Return(TMPD)
}
If(LEqual(NDID,0x0E))
{
Store(Or(0x10000,DID1),Index(TMPE,0))
Store(Or(0x10000,DID2),Index(TMPE,1))
Store(Or(0x10000,DID3),Index(TMPE,2))
Store(Or(0x10000,DID4),Index(TMPE,3))
Store(Or(0x10000,DID5),Index(TMPE,4))
Store(Or(0x10000,DID6),Index(TMPE,5))
Store(Or(0x10000,DID7),Index(TMPE,6))
Store(Or(0x10000,DID8),Index(TMPE,7))
Store(Or(0x10000,DID9),Index(TMPE,8))
Store(Or(0x10000,DIDA),Index(TMPE,9))
Store(Or(0x10000,DIDB),Index(TMPE,10))
Store(Or(0x10000,DIDC),Index(TMPE,11))
Store(Or(0x10000,DIDD),Index(TMPE,12))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMPE,13))
} Else {
Store(Or(0x10000,DIDE),Index(TMPE,13))
}
Return(TMPE)
}
If(LEqual(NDID,0x0F))
{
Store(Or(0x10000,DID1),Index(TMPF,0))
Store(Or(0x10000,DID2),Index(TMPF,1))
Store(Or(0x10000,DID3),Index(TMPF,2))
Store(Or(0x10000,DID4),Index(TMPF,3))
Store(Or(0x10000,DID5),Index(TMPF,4))
Store(Or(0x10000,DID6),Index(TMPF,5))
Store(Or(0x10000,DID7),Index(TMPF,6))
Store(Or(0x10000,DID8),Index(TMPF,7))
Store(Or(0x10000,DID9),Index(TMPF,8))
Store(Or(0x10000,DIDA),Index(TMPF,9))
Store(Or(0x10000,DIDB),Index(TMPF,10))
Store(Or(0x10000,DIDC),Index(TMPF,11))
Store(Or(0x10000,DIDD),Index(TMPF,12))
Store(Or(0x10000,DIDE),Index(TMPF,13))
If (LEqual(IPTP,1)) {
//
// IGFX need report IPUA as GFX0 child
//
Store(0x00023480,Index(TMPF,14))
} Else {
Store(Or(0x10000,DIDF),Index(TMPF,14))
}
Return(TMPF)
}
If(LEqual(NDID,0x10))
{
Store(Or(0x10000,DID1),Index(TMPG,0))
Store(Or(0x10000,DID2),Index(TMPG,1))
Store(Or(0x10000,DID3),Index(TMPG,2))
Store(Or(0x10000,DID4),Index(TMPG,3))
Store(Or(0x10000,DID5),Index(TMPG,4))
Store(Or(0x10000,DID6),Index(TMPG,5))
Store(Or(0x10000,DID7),Index(TMPG,6))
Store(Or(0x10000,DID8),Index(TMPG,7))
Store(Or(0x10000,DID9),Index(TMPG,8))
Store(Or(0x10000,DIDA),Index(TMPG,9))
Store(Or(0x10000,DIDB),Index(TMPG,10))
Store(Or(0x10000,DIDC),Index(TMPG,11))
Store(Or(0x10000,DIDD),Index(TMPG,12))
Store(Or(0x10000,DIDE),Index(TMPG,13))
Store(Or(0x10000,DIDF),Index(TMPG,14))
//
// IGFX need report IPUA as GFX0 child
// NDID can only be 0x10 if IPU is enabled
//
Store(0x00023480,Index(TMPG,15))
Return(TMPG)
}
//
// If nothing else, return Unknown LFP.
// (Prevents compiler warning.)
//
Return(Package() {0x00000400})
}
//
// Display device 1 of up to 15 enumerated by _DOD.  DD02..DD0F follow this
// same template with their own DIDn/NXDn fields.
//
Device(DD01)
{
  //
  // Return Unique ID.  If this device's type nibble is 0x400 (eDP), latch
  // its slot, next-state and id into EDPV/NXDX/DIDX for the DD1F eDP device.
  //
  Method(_ADR,0,Serialized)
  {
    If(LEqual(And(0x0F00,DID1),0x400))
    {
      Store(0x1, EDPV)
      Store(NXD1, NXDX)
      Store(DID1, DIDX)
      Return(1)
    }
    If(LEqual(DID1,0))
    {
      Return(1)    // No id assigned yet: return the slot number.
    }
    Else
    {
      Return(And(0xFFFF,DID1))
    }
  }
  //
  // Return the Current Status (via the shared CDDS helper).
  //
  Method(_DCS,0)
  {
    Return(CDDS(DID1))
  }
  //
  // Query Graphics State (active or inactive).
  //
  Method(_DGS,0)
  {
    // NOTE(review): SGMD bit 0 appears to select switchable-graphics mode,
    // in which the next state comes from NXD1 instead of the
    // next-active-display list -- confirm against the SGMD definition.
    If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
    {
      Return (NXD1)
    }
    Return(NDDS(DID1))
  }
  //
  // Device Set State.
  //
  Method(_DSS,1)
  {
    DSST(Arg0)
  }
}
Device(DD02)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(And(0x0F00,DID2),0x400))
{
Store(0x2, EDPV)
Store(NXD2, NXDX)
Store(DID2, DIDX)
Return(2)
}
If(LEqual(DID2,0))
{
Return(2)
}
Else
{
Return(And(0xFFFF,DID2))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(LIDS,0))
{
Return(0x0)
}
Return(CDDS(DID2))
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
//
// Return the Next State.
//
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXD2)
}
Return(NDDS(DID2))
}
//
// Device Set State.
//
Method(_DSS,1)
{
DSST(Arg0)
}
}
Device(DD03)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(And(0x0F00,DID3),0x400))
{
Store(0x3, EDPV)
Store(NXD3, NXDX)
Store(DID3, DIDX)
Return(3)
}
If(LEqual(DID3,0))
{
Return(3)
}
Else
{
Return(And(0xFFFF,DID3))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(DID3,0))
{
Return(0x0B)
}
Else
{
Return(CDDS(DID3))
}
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXD3)
}
Return(NDDS(DID3))
}
//
// Device Set State.
//
Method(_DSS,1)
{
DSST(Arg0)
}
}
Device(DD04)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(And(0x0F00,DID4),0x400))
{
Store(0x4, EDPV)
Store(NXD4, NXDX)
Store(DID4, DIDX)
Return(4)
}
If(LEqual(DID4,0))
{
Return(4)
}
Else
{
Return(And(0xFFFF,DID4))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(DID4,0))
{
Return(0x0B)
}
Else
{
Return(CDDS(DID4))
}
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXD4)
}
Return(NDDS(DID4))
}
//
// Device Set State. (See table above.)
//
Method(_DSS,1)
{
DSST(Arg0)
}
}
Device(DD05)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(And(0x0F00,DID5),0x400))
{
Store(0x5, EDPV)
Store(NXD5, NXDX)
Store(DID5, DIDX)
Return(5)
}
If(LEqual(DID5,0))
{
Return(5)
}
Else
{
Return(And(0xFFFF,DID5))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(DID5,0))
{
Return(0x0B)
}
Else
{
Return(CDDS(DID5))
}
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXD5)
}
Return(NDDS(DID5))
}
//
// Device Set State.
//
Method(_DSS,1)
{
DSST(Arg0)
}
}
Device(DD06)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(And(0x0F00,DID6),0x400))
{
Store(0x6, EDPV)
Store(NXD6, NXDX)
Store(DID6, DIDX)
Return(6)
}
If(LEqual(DID6,0))
{
Return(6)
}
Else
{
Return(And(0xFFFF,DID6))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(DID6,0))
{
Return(0x0B)
}
Else
{
Return(CDDS(DID6))
}
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXD6)
}
Return(NDDS(DID6))
}
//
// Device Set State.
//
Method(_DSS,1)
{
DSST(Arg0)
}
}
Device(DD07)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(And(0x0F00,DID7),0x400))
{
Store(0x7, EDPV)
Store(NXD7, NXDX)
Store(DID7, DIDX)
Return(7)
}
If(LEqual(DID7,0))
{
Return(7)
}
Else
{
Return(And(0xFFFF,DID7))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(DID7,0))
{
Return(0x0B)
}
Else
{
Return(CDDS(DID7))
}
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXD7)
}
Return(NDDS(DID7))
}
//
// Device Set State.
//
Method(_DSS,1)
{
DSST(Arg0)
}
}
Device(DD08)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(And(0x0F00,DID8),0x400))
{
Store(0x8, EDPV)
Store(NXD8, NXDX)
Store(DID8, DIDX)
Return(8)
}
If(LEqual(DID8,0))
{
Return(8)
}
Else
{
Return(And(0xFFFF,DID8))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(DID8,0))
{
Return(0x0B)
}
Else
{
Return(CDDS(DID8))
}
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXD8)
}
Return(NDDS(DID8))
}
//
// Device Set State.
//
Method(_DSS,1)
{
DSST(Arg0)
}
}
Device(DD09)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(And(0x0F00,DID9),0x400))
{
Store(0x9, EDPV)
Store(NXD8, NXDX)
Store(DID9, DIDX)
Return(9)
}
If(LEqual(DID9,0))
{
Return(9)
}
Else
{
Return(And(0xFFFF,DID9))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(DID9,0))
{
Return(0x0B)
}
Else
{
Return(CDDS(DID9))
}
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXD8)
}
Return(NDDS(DID9))
}
//
// Device Set State.
//
Method(_DSS,1)
{
DSST(Arg0)
}
}
Device(DD0A)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(And(0x0F00,DIDA),0x400))
{
Store(0xA, EDPV)
Store(NXD8, NXDX)
Store(DIDA, DIDX)
Return(0x0A)
}
If(LEqual(DIDA,0))
{
Return(0x0A)
}
Else
{
Return(And(0xFFFF,DIDA))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(DIDA,0))
{
Return(0x0B)
}
Else
{
Return(CDDS(DIDA))
}
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXD8)
}
Return(NDDS(DIDA))
}
//
// Device Set State.
//
Method(_DSS,1)
{
DSST(Arg0)
}
}
Device(DD0B)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(And(0x0F00,DIDB),0x400))
{
Store(0xB, EDPV)
Store(NXD8, NXDX)
Store(DIDB, DIDX)
Return(0X0B)
}
If(LEqual(DIDB,0))
{
Return(0x0B)
}
Else
{
Return(And(0xFFFF,DIDB))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(DIDB,0))
{
Return(0x0B)
}
Else
{
Return(CDDS(DIDB))
}
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXD8)
}
Return(NDDS(DIDB))
}
//
// Device Set State.
//
Method(_DSS,1)
{
DSST(Arg0)
}
}
Device(DD0C)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(And(0x0F00,DIDC),0x400))
{
Store(0xC, EDPV)
Store(NXD8, NXDX)
Store(DIDC, DIDX)
Return(0X0C)
}
If(LEqual(DIDC,0))
{
Return(0x0C)
}
Else
{
Return(And(0xFFFF,DIDC))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(DIDC,0))
{
Return(0x0C)
}
Else
{
Return(CDDS(DIDC))
}
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXD8)
}
Return(NDDS(DIDC))
}
//
// Device Set State.
//
Method(_DSS,1)
{
DSST(Arg0)
}
}
Device(DD0D)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(And(0x0F00,DIDD),0x400))
{
Store(0xD, EDPV)
Store(NXD8, NXDX)
Store(DIDD, DIDX)
Return(0X0D)
}
If(LEqual(DIDD,0))
{
Return(0x0D)
}
Else
{
Return(And(0xFFFF,DIDD))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(DIDD,0))
{
Return(0x0D)
}
Else
{
Return(CDDS(DIDD))
}
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXD8)
}
Return(NDDS(DIDD))
}
//
// Device Set State.
//
Method(_DSS,1)
{
DSST(Arg0)
}
}
Device(DD0E)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(And(0x0F00,DIDE),0x400))
{
Store(0xE, EDPV)
Store(NXD8, NXDX)
Store(DIDE, DIDX)
Return(0X0E)
}
If(LEqual(DIDE,0))
{
Return(0x0E)
}
Else
{
Return(And(0xFFFF,DIDE))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(DIDE,0))
{
Return(0x0E)
}
Else
{
Return(CDDS(DIDE))
}
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXD8)
}
Return(NDDS(DIDE))
}
//
// Device Set State.
//
Method(_DSS,1)
{
DSST(Arg0)
}
}
//
// Display device 15.  Follows the same template as DD01..DD0E.
//
Device(DD0F)
{
  //
  // Return Unique ID.  If this device's type nibble is 0x400 (eDP), latch
  // its slot, next-state and id into EDPV/NXDX/DIDX for the DD1F eDP device.
  //
  Method(_ADR,0,Serialized)
  {
    If(LEqual(And(0x0F00,DIDF),0x400))
    {
      Store(0xF, EDPV)
      Store(NXD8, NXDX)   // Only NXD1..NXD8 exist; devices past 8 share NXD8.
      Store(DIDF, DIDX)
      Return(0X0F)
    }
    If(LEqual(DIDF,0))
    {
      Return(0x0F)        // No id assigned yet: return the slot number.
    }
    Else
    {
      Return(And(0xFFFF,DIDF))
    }
  }
  //
  // Return the Current Status.
  //
  Method(_DCS,0)
  {
    // Bug fix: this method previously tested DIDC (device 12's id) and
    // returned 0x0F.  Test this device's own id (DIDF), and report 0x0B
    // for an unpopulated device, consistent with DD03..DD0B.
    If(LEqual(DIDF,0))
    {
      Return(0x0B)
    }
    Else
    {
      Return(CDDS(DIDF))
    }
  }
  //
  // Query Graphics State (active or inactive).
  //
  Method(_DGS,0)
  {
    If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
    {
      Return (NXD8)
    }
    Return(NDDS(DIDF))
  }
  //
  // Device Set State.
  //
  Method(_DSS,1)
  {
    DSST(Arg0)
  }
}
//
//Device for eDP
//
Device(DD1F)
{
//
// Return Unique ID.
//
Method(_ADR,0,Serialized)
{
If(LEqual(EDPV, 0x0))
{
Return(0x1F)
}
Else
{
Return(And(0xFFFF,DIDX))
}
}
//
// Return the Current Status.
//
Method(_DCS,0)
{
If(LEqual(EDPV, 0x0))
{
Return(0x00)
}
Else
{
Return(CDDS(DIDX))
}
}
//
// Query Graphics State (active or inactive).
//
Method(_DGS,0)
{
If(LAnd(LEqual(And(SGMD,0x7F),0x01),CondRefOf(SNXD)))
{
Return (NXDX)
}
Return(NDDS(DIDX))
}
//
// Device Set State.
//
Method(_DSS,1)
{
DSST(Arg0)
}
//
// Query List of Brightness Control Levels Supported.
//
Method(_BCL,0)
{
//
// List of supported brightness levels in the following sequence.
// Level when machine has full power.
// Level when machine is on batteries.
// Other supported levels.
//
If(CondRefOf(\PBCL)) {
Return (PBCL())
} Else {
Return(Package(){80, 50, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100})
}
}
//
// Set the Brightness Level.
//
Method (_BCM,1)
{
//
// Set the requested level if it is between 0 and 100%.
//
If(LAnd(LGreaterEqual(Arg0,0),LLessEqual(Arg0,100)))
{
\_SB.PCI0.GFX0.AINT(1, Arg0)
Store(Arg0,BRTL) // Store Brightness Level.
}
}
//
// Brightness Query Current level.
//
Method (_BQC,0)
{
Return(BRTL)
}
}
Method(SDDL,1)
{
Increment(NDID)
Store(And(Arg0,0xF0F),Local0)
Or(0x80000000,Local0, Local1)
If(LEqual(DIDL,Local0))
{
Return(Local1)
}
If(LEqual(DDL2,Local0))
{
Return(Local1)
}
If(LEqual(DDL3,Local0))
{
Return(Local1)
}
If(LEqual(DDL4,Local0))
{
Return(Local1)
}
If(LEqual(DDL5,Local0))
{
Return(Local1)
}
If(LEqual(DDL6,Local0))
{
Return(Local1)
}
If(LEqual(DDL7,Local0))
{
Return(Local1)
}
If(LEqual(DDL8,Local0))
{
Return(Local1)
}
If(LEqual(DDL9,Local0))
{
Return(Local1)
}
If(LEqual(DD10,Local0))
{
Return(Local1)
}
If(LEqual(DD11,Local0))
{
Return(Local1)
}
If(LEqual(DD12,Local0))
{
Return(Local1)
}
If(LEqual(DD13,Local0))
{
Return(Local1)
}
If(LEqual(DD14,Local0))
{
Return(Local1)
}
If(LEqual(DD15,Local0))
{
Return(Local1)
}
Return(0)
}
//
// CDDS (Current Display Device Status).
// Arg0 = display device id; only its type/index nibbles (mask 0xF0F) are
// compared against the currently-active display list CADL/CAL2..CAL8.
// Returns 0x1F (connected, enabled and active) when found, else 0x1D
// (present but not active) -- see ACPI spec, Appendix B, _DCS return codes.
//
Method(CDDS,1)
{
  Store(And(Arg0,0xF0F),Local0)
  If(LEqual(0, Local0))
  {
    Return(0x1D)    // Id 0 = no device: report present-but-inactive.
  }
  If(LEqual(CADL, Local0))
  {
    Return(0x1F)
  }
  If(LEqual(CAL2, Local0))
  {
    Return(0x1F)
  }
  If(LEqual(CAL3, Local0))
  {
    Return(0x1F)
  }
  If(LEqual(CAL4, Local0))
  {
    Return(0x1F)
  }
  If(LEqual(CAL5, Local0))
  {
    Return(0x1F)
  }
  If(LEqual(CAL6, Local0))
  {
    Return(0x1F)
  }
  If(LEqual(CAL7, Local0))
  {
    Return(0x1F)
  }
  If(LEqual(CAL8, Local0))
  {
    Return(0x1F)
  }
  Return(0x1D)      // Not in the active list.
}
//
// NDDS (Next Display Device State).
// Arg0 = display device id; its type/index nibbles (mask 0xF0F) are compared
// against the next-active display list NADL/NDL2..NDL8.
// Returns 1 if the device should be active after the next display switch,
// 0 otherwise (used by the per-device _DGS methods).
//
Method(NDDS,1)
{
  Store(And(Arg0,0xF0F),Local0)
  If(LEqual(0, Local0))
  {
    Return(0)       // Id 0 = no device.
  }
  If(LEqual(NADL, Local0))
  {
    Return(1)
  }
  If(LEqual(NDL2, Local0))
  {
    Return(1)
  }
  If(LEqual(NDL3, Local0))
  {
    Return(1)
  }
  If(LEqual(NDL4, Local0))
  {
    Return(1)
  }
  If(LEqual(NDL5, Local0))
  {
    Return(1)
  }
  If(LEqual(NDL6, Local0))
  {
    Return(1)
  }
  If(LEqual(NDL7, Local0))
  {
    Return(1)
  }
  If(LEqual(NDL8, Local0))
  {
    Return(1)
  }
  Return(0)         // Not scheduled to be active.
}
//
// Device Set State Table
// BIT31 BIT30 Execution
// 0 0 Don't implement.
// 0 1 Cache change. Nothing to Implement.
// 1 0 Don't Implement.
// 1 1 Display Switch Complete. Implement.
//
// DSST: shared _DSS handler for all DDxx devices.  Arg0 is the _DSS
// argument from the OS; only the BIT31|BIT30 == 1|1 case requires action.
//
Method(DSST,1)
{
  If(LEqual(And(Arg0,0xC0000000),0xC0000000))
  {
    //
    // State change was performed by the
    // Video Drivers. Simply update the
    // New State.
    //
    Store(NSTE,CSTE)
  }
}
//
// Include IGD OpRegion/Software SCI interrupt handler/DSM which is used by
// the graphics drivers to request data from system BIOS.
//
include ("IgfxOpRn.asl")
include ("IgfxDsm.asl")
| {
"pile_set_name": "Github"
} |
import pytest
from mlagents.torch_utils import torch
from mlagents.trainers.policy.torch_policy import TorchPolicy
from mlagents.trainers.tests import mock_brain as mb
from mlagents.trainers.settings import TrainerSettings, NetworkSettings
from mlagents.trainers.torch.utils import ModelUtils
VECTOR_ACTION_SPACE = 2
VECTOR_OBS_SPACE = 8
DISCRETE_ACTION_SPACE = [3, 3, 3, 2]
BUFFER_INIT_SAMPLES = 32
NUM_AGENTS = 12
EPSILON = 1e-7
def create_policy_mock(
    dummy_config: TrainerSettings,
    use_rnn: bool = False,
    use_discrete: bool = True,
    use_visual: bool = False,
    seed: int = 0,
) -> TorchPolicy:
    """Build a TorchPolicy over a mock behavior spec for testing.

    NOTE(review): mutates the passed-in ``dummy_config`` in place
    (keep_checkpoints, network_settings.memory).  Callers in this module
    always pass a fresh ``TrainerSettings()``, but sharing one config across
    calls would leak settings between tests -- confirm before reusing.
    """
    mock_spec = mb.setup_test_behavior_specs(
        use_discrete,
        use_visual,
        vector_action_space=DISCRETE_ACTION_SPACE
        if use_discrete
        else VECTOR_ACTION_SPACE,
        vector_obs_space=VECTOR_OBS_SPACE,
    )
    trainer_settings = dummy_config
    trainer_settings.keep_checkpoints = 3
    # Attach default memory settings only when an RNN policy is requested.
    trainer_settings.network_settings.memory = (
        NetworkSettings.MemorySettings() if use_rnn else None
    )
    policy = TorchPolicy(seed, mock_spec, trainer_settings)
    return policy
@pytest.mark.parametrize("discrete", [True, False], ids=["discrete", "continuous"])
@pytest.mark.parametrize("visual", [True, False], ids=["visual", "vector"])
@pytest.mark.parametrize("rnn", [True, False], ids=["rnn", "no_rnn"])
def test_policy_evaluate(rnn, visual, discrete):
    """Policy.evaluate should emit an action batch of the expected shape
    for every combination of rnn/visual/discrete."""
    policy = create_policy_mock(
        TrainerSettings(), use_rnn=rnn, use_discrete=discrete, use_visual=visual
    )
    decision_step, terminal_step = mb.create_steps_from_behavior_spec(
        policy.behavior_spec, num_agents=NUM_AGENTS
    )
    run_out = policy.evaluate(decision_step, list(decision_step.agent_id))
    if discrete:
        # Bug fix: the comparison result was previously discarded (missing
        # `assert`), so the discrete case never actually checked the shape.
        assert run_out["action"].shape == (NUM_AGENTS, len(DISCRETE_ACTION_SPACE))
    else:
        assert run_out["action"].shape == (NUM_AGENTS, VECTOR_ACTION_SPACE)
@pytest.mark.parametrize("discrete", [True, False], ids=["discrete", "continuous"])
@pytest.mark.parametrize("visual", [True, False], ids=["visual", "vector"])
@pytest.mark.parametrize("rnn", [True, False], ids=["rnn", "no_rnn"])
def test_evaluate_actions(rnn, visual, discrete):
    """Check that evaluate_actions returns log-probs, entropies and value
    estimates with the expected shapes for a simulated 64-step rollout."""
    policy = create_policy_mock(
        TrainerSettings(), use_rnn=rnn, use_discrete=discrete, use_visual=visual
    )
    buffer = mb.simulate_rollout(64, policy.behavior_spec, memory_size=policy.m_size)
    vec_obs = [ModelUtils.list_to_tensor(buffer["vector_obs"])]
    act_masks = ModelUtils.list_to_tensor(buffer["action_mask"])
    # Continuous actions gain a trailing axis; discrete ones become int64.
    if policy.use_continuous_act:
        actions = ModelUtils.list_to_tensor(buffer["actions"]).unsqueeze(-1)
    else:
        actions = ModelUtils.list_to_tensor(buffer["actions"], dtype=torch.long)
    vis_obs = [
        ModelUtils.list_to_tensor(buffer["visual_obs%d" % index])
        for index in range(len(policy.actor_critic.network_body.visual_processors))
    ]
    # One memory snapshot per sequence, stacked into a single batch axis.
    memories = [
        ModelUtils.list_to_tensor(buffer["memory"][start])
        for start in range(0, len(buffer["memory"]), policy.sequence_length)
    ]
    if memories:
        memories = torch.stack(memories).unsqueeze(0)
    log_probs, entropy, values = policy.evaluate_actions(
        vec_obs,
        vis_obs,
        masks=act_masks,
        actions=actions,
        memories=memories,
        seq_len=policy.sequence_length,
    )
    expected_shape = (64, policy.behavior_spec.action_size)
    assert log_probs.shape == expected_shape
    assert entropy.shape == expected_shape
    for val in values.values():
        assert val.shape == (64,)
@pytest.mark.parametrize("discrete", [True, False], ids=["discrete", "continuous"])
@pytest.mark.parametrize("visual", [True, False], ids=["visual", "vector"])
@pytest.mark.parametrize("rnn", [True, False], ids=["rnn", "no_rnn"])
def test_sample_actions(rnn, visual, discrete):
    """Check the shapes of log-probs, entropies and memories produced when
    sampling actions from a policy over a simulated 64-step rollout."""
    policy = create_policy_mock(
        TrainerSettings(), use_rnn=rnn, use_discrete=discrete, use_visual=visual
    )
    buffer = mb.simulate_rollout(64, policy.behavior_spec, memory_size=policy.m_size)
    vec_obs = [ModelUtils.list_to_tensor(buffer["vector_obs"])]
    act_masks = ModelUtils.list_to_tensor(buffer["action_mask"])
    vis_obs = [
        ModelUtils.list_to_tensor(buffer["visual_obs%d" % index])
        for index in range(len(policy.actor_critic.network_body.visual_processors))
    ]
    # One memory snapshot per sequence, stacked into a single batch axis.
    memories = [
        ModelUtils.list_to_tensor(buffer["memory"][start])
        for start in range(0, len(buffer["memory"]), policy.sequence_length)
    ]
    if memories:
        memories = torch.stack(memories).unsqueeze(0)
    (sampled_actions, log_probs, entropies, memories) = policy.sample_actions(
        vec_obs,
        vis_obs,
        masks=act_masks,
        memories=memories,
        seq_len=policy.sequence_length,
        all_log_probs=not policy.use_continuous_act,
    )
    if discrete:
        # Discrete policies report one log-prob per possible branch action.
        assert log_probs.shape == (
            64,
            sum(policy.behavior_spec.discrete_action_branches),
        )
    else:
        assert log_probs.shape == (64, policy.behavior_spec.action_shape)
    assert entropies.shape == (64, policy.behavior_spec.action_size)
    if rnn:
        assert memories.shape == (1, 1, policy.m_size)
| {
"pile_set_name": "Github"
} |
{
"@context": {
"@base": "http://example.com/",
"@vocab": "http://schema.org/",
"xml": "http://www.w3.org/2001/XMLSchema#"
},
"@id": "a",
"@type": "Person",
"name": {
"@value": "A",
"@language": "en"
},
"age": [
{
"@value": "16",
"@type": "xml:int"
},
{
"@type": "QuantitativeValue",
"value": {
"@value": "16",
"@type": "xml:int"
},
"unitText": "years"
}
]
} | {
"pile_set_name": "Github"
} |
# YAGPDB's Event System
Instead of just using discordgo's standard simple event system, I like to experiment a little as I work on stuff to see what happens.
It all boils down to a simple 3D slice of handlers (`handlers [][][]Handler`).
The first index is the event index, that length is generated by events_gen.go
The next index is the order; there are 3 orders:
0 - first ran
1 - state handler is here
2 - last, ran concurrently from here on
Orders 0 and 1 are run synchronously, but order 2 is run concurrently; this is done so that the state is as up to date as possible by the time the concurrent handlers run.
| {
"pile_set_name": "Github"
} |
// Place your settings in this file to overwrite default and user settings.
{
"files.exclude": {
"out": false // set this to true to hide the "out" folder with the compiled JS files
},
"search.exclude": {
"out": true // set this to false to include "out" folder in search results
},
// Turn off tsc task auto detection since we have the necessary tasks as npm scripts
"typescript.tsc.autoDetect": "off"
} | {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<title>JS Bin</title>
</head>
<body>
</body>
</html> | {
"pile_set_name": "Github"
} |
package main
import (
"flag"
"fmt"
"os"
"strings"
"github.com/golang/glog"
"github.com/golang/protobuf/proto"
plugin "github.com/golang/protobuf/protoc-gen-go/plugin"
"github.com/grpc-ecosystem/grpc-gateway/codegenerator"
"github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
"github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger"
)
var (
importPrefix = flag.String("import_prefix", "", "prefix to be added to go package paths for imported proto files")
file = flag.String("file", "-", "where to load data from")
allowDeleteBody = flag.Bool("allow_delete_body", false, "unless set, HTTP DELETE methods may not have a body")
grpcAPIConfiguration = flag.String("grpc_api_configuration", "", "path to gRPC API Configuration in YAML format")
allowMerge = flag.Bool("allow_merge", false, "if set, generation one swagger file out of multiple protos")
mergeFileName = flag.String("merge_file_name", "apidocs", "target swagger file name prefix after merge")
useJSONNamesForFields = flag.Bool("json_names_for_fields", false, "if it sets Field.GetJsonName() will be used for generating swagger definitions, otherwise Field.GetName() will be used")
repeatedPathParamSeparator = flag.String("repeated_path_param_separator", "csv", "configures how repeated fields should be split. Allowed values are `csv`, `pipes`, `ssv` and `tsv`.")
versionFlag = flag.Bool("version", false, "print the current version")
allowRepeatedFieldsInBody = flag.Bool("allow_repeated_fields_in_body", false, "allows to use repeated field in `body` and `response_body` field of `google.api.http` annotation option")
includePackageInTags = flag.Bool("include_package_in_tags", false, "if unset, the gRPC service name is added to the `Tags` field of each operation. if set and the `package` directive is shown in the proto file, the package name will be prepended to the service name")
useFQNForSwaggerName = flag.Bool("fqn_for_swagger_name", false, "if set, the object's swagger names will use the fully qualify name from the proto definition (ie my.package.MyMessage.MyInnerMessage")
useGoTemplate = flag.Bool("use_go_templates", false, "if set, you can use Go templates in protofile comments")
disableDefaultErrors = flag.Bool("disable_default_errors", false, "if set, disables generation of default errors. This is useful if you have defined custom error handling")
enumsAsInts = flag.Bool("enums_as_ints", false, "whether to render enum values as integers, as opposed to string values")
simpleOperationIDs = flag.Bool("simple_operation_ids", false, "whether to remove the service prefix in the operationID generation. Can introduce duplicate operationIDs, use with caution.")
generateUnboundMethods = flag.Bool("generate_unbound_methods", false, "generate swagger metadata even for RPC methods that have no HttpRule annotation")
)
// Variables set by goreleaser at build time
var (
version = "dev"
commit = "unknown"
date = "unknown"
)
// main implements a protoc plugin: it reads a CodeGeneratorRequest from
// stdin (or the path given via --file), generates swagger output for the
// requested proto files, and writes a CodeGeneratorResponse to stdout.
func main() {
	flag.Parse()
	defer glog.Flush()
	if *versionFlag {
		fmt.Printf("Version %v, commit %v, built at %v\n", version, commit, date)
		os.Exit(0)
	}
	reg := descriptor.NewRegistry()
	glog.V(1).Info("Processing code generator request")
	// Input defaults to stdin; --file overrides it (useful for debugging).
	f := os.Stdin
	if *file != "-" {
		var err error
		f, err = os.Open(*file)
		if err != nil {
			glog.Fatal(err)
		}
	}
	glog.V(1).Info("Parsing code generator request")
	req, err := codegenerator.ParseRequest(f)
	if err != nil {
		glog.Fatal(err)
	}
	glog.V(1).Info("Parsed code generator request")
	// Flags may also arrive via the request's parameter string (set by
	// protoc's --swagger_opt); they are applied on top of the CLI flags.
	pkgMap := make(map[string]string)
	if req.Parameter != nil {
		err := parseReqParam(req.GetParameter(), flag.CommandLine, pkgMap)
		if err != nil {
			glog.Fatalf("Error parsing flags: %v", err)
		}
	}
	// Copy every resolved option into the registry that drives generation.
	reg.SetPrefix(*importPrefix)
	reg.SetAllowDeleteBody(*allowDeleteBody)
	reg.SetAllowMerge(*allowMerge)
	reg.SetMergeFileName(*mergeFileName)
	reg.SetUseJSONNamesForFields(*useJSONNamesForFields)
	reg.SetAllowRepeatedFieldsInBody(*allowRepeatedFieldsInBody)
	reg.SetIncludePackageInTags(*includePackageInTags)
	reg.SetUseFQNForSwaggerName(*useFQNForSwaggerName)
	reg.SetUseGoTemplate(*useGoTemplate)
	reg.SetEnumsAsInts(*enumsAsInts)
	reg.SetDisableDefaultErrors(*disableDefaultErrors)
	reg.SetSimpleOperationIDs(*simpleOperationIDs)
	reg.SetGenerateUnboundMethods(*generateUnboundMethods)
	if err := reg.SetRepeatedPathParamSeparator(*repeatedPathParamSeparator); err != nil {
		emitError(err)
		return
	}
	for k, v := range pkgMap {
		reg.AddPkgMap(k, v)
	}
	if *grpcAPIConfiguration != "" {
		if err := reg.LoadGrpcAPIServiceFromYAML(*grpcAPIConfiguration); err != nil {
			emitError(err)
			return
		}
	}
	g := genswagger.New(reg)
	if err := genswagger.AddStreamError(reg); err != nil {
		emitError(err)
		return
	}
	if err := reg.Load(req); err != nil {
		emitError(err)
		return
	}
	// Resolve each file protoc asked us to generate into its descriptor.
	var targets []*descriptor.File
	for _, target := range req.FileToGenerate {
		f, err := reg.LookupFile(target)
		if err != nil {
			glog.Fatal(err)
		}
		targets = append(targets, f)
	}
	out, err := g.Generate(targets)
	glog.V(1).Info("Processed code generator request")
	if err != nil {
		emitError(err)
		return
	}
	emitFiles(out)
}
// emitFiles wraps the generated files in a successful CodeGeneratorResponse
// and writes it to stdout for protoc to consume.
func emitFiles(out []*plugin.CodeGeneratorResponse_File) {
	emitResp(&plugin.CodeGeneratorResponse{File: out})
}
// emitError reports a generation failure to protoc by writing a
// CodeGeneratorResponse carrying only the error message.
func emitError(err error) {
	emitResp(&plugin.CodeGeneratorResponse{Error: proto.String(err.Error())})
}
// emitResp marshals the response and writes the raw bytes to stdout,
// which is the channel protoc reads plugin output from. Any failure here
// is unrecoverable, so it terminates the process via glog.Fatal.
func emitResp(resp *plugin.CodeGeneratorResponse) {
	buf, err := proto.Marshal(resp)
	if err != nil {
		glog.Fatal(err)
	}
	if _, err := os.Stdout.Write(buf); err != nil {
		glog.Fatal(err)
	}
}
// parseReqParam parses a CodeGeneratorRequest parameter and adds the
// extracted values to the given FlagSet and pkgMap. Returns a non-nil
// error if setting a flag failed.
func parseReqParam(param string, f *flag.FlagSet, pkgMap map[string]string) error {
if param == "" {
return nil
}
for _, p := range strings.Split(param, ",") {
spec := strings.SplitN(p, "=", 2)
if len(spec) == 1 {
if spec[0] == "allow_delete_body" {
err := f.Set(spec[0], "true")
if err != nil {
return fmt.Errorf("Cannot set flag %s: %v", p, err)
}
continue
}
if spec[0] == "allow_merge" {
err := f.Set(spec[0], "true")
if err != nil {
return fmt.Errorf("Cannot set flag %s: %v", p, err)
}
continue
}
if spec[0] == "allow_repeated_fields_in_body" {
err := f.Set(spec[0], "true")
if err != nil {
return fmt.Errorf("Cannot set flag %s: %v", p, err)
}
continue
}
if spec[0] == "include_package_in_tags" {
err := f.Set(spec[0], "true")
if err != nil {
return fmt.Errorf("Cannot set flag %s: %v", p, err)
}
continue
}
err := f.Set(spec[0], "")
if err != nil {
return fmt.Errorf("Cannot set flag %s: %v", p, err)
}
continue
}
name, value := spec[0], spec[1]
if strings.HasPrefix(name, "M") {
pkgMap[name[1:]] = value
continue
}
if err := f.Set(name, value); err != nil {
return fmt.Errorf("Cannot set flag %s: %v", p, err)
}
}
return nil
}
| {
"pile_set_name": "Github"
} |
import React from 'react'
import {Form, ReformContext, WrapInput} from 'react-reform'
import defaultValidations from 'react-reform/opt/validations'
import defaultTheme from '../default-theme'
import RawDatePicker from 'react-datepicker'
import 'react-datepicker/dist/react-datepicker.css'
// Reform-compatible date input: wraps react-datepicker so it can be used
// as a field inside a react-reform <Form>. `focusFn` opens the picker
// when react-reform asks to focus this input -- presumably on validation
// failure (confirm against the react-reform docs).
const DatePicker = props => (
  <WrapInput type="DatePicker" directProps={props} focusFn={node => node.setOpen.call(node, true)}>{
    ({value, themeProps}) => (
      <RawDatePicker selected={value} {...themeProps}/>
    )
  }</WrapInput>
)
// Demo form: renders a single required DatePicker field inside a
// react-reform <Form>, wired up with the default theme and validations.
export default class ExampleForm extends React.Component {
  // Invoked by react-reform with the collected field values on submit.
  handleSubmit = (data) => {
    console.log('data', data)
  }
  render() {
    return (
      <ReformContext themes={{default: defaultTheme}} validations={defaultValidations}>
        <div>
          <h4>Form</h4>
          <Form onSubmit={this.handleSubmit}>
            <DatePicker name="startDate" isRequired/>
          </Form>
        </div>
      </ReformContext>
    )
  }
}
| {
"pile_set_name": "Github"
} |
"""
categories: Core,Functions
description: Unpacking function arguments in non-last position isn't detected as an error
cause: Unknown
workaround: The syntax below is invalid, never use it in applications.
"""
print(*(1, 2), 3)
| {
"pile_set_name": "Github"
} |
require 'ceedling/plugin'
require 'ceedling/constants'
DEPENDENCIES_ROOT_NAME = 'dependencies'
DEPENDENCIES_TASK_ROOT = DEPENDENCIES_ROOT_NAME + ':'
DEPENDENCIES_SYM = DEPENDENCIES_ROOT_NAME.to_sym
# Ceedling plugin that fetches, builds, cleans and deploys third-party
# dependencies declared in the DEPENDENCIES_LIBRARIES project setting.
class Dependencies < Plugin

  def setup
    @plugin_root = File.expand_path(File.join(File.dirname(__FILE__), '..'))

    # Build fast lookup tables: a dependency can be found by its name or by
    # any of the artifacts (libraries, includes, sources) it produces.
    @dependencies = {}
    @dynamic_libraries = []
    DEPENDENCIES_LIBRARIES.each do |deplib|
      @dependencies[ deplib[:name] ] = deplib.clone
      all_deps = get_static_libraries_for_dependency(deplib) +
                 get_dynamic_libraries_for_dependency(deplib) +
                 get_include_directories_for_dependency(deplib) +
                 get_source_files_for_dependency(deplib)
      all_deps.each do |key|
        @dependencies[key] = @dependencies[ deplib[:name] ]
      end
      @dynamic_libraries += get_dynamic_libraries_for_dependency(deplib)
    end
  end

  # Contribute the dependencies' include paths and headers to Ceedling's
  # global collections.
  def config
    updates = {
      :collection_paths_include => COLLECTION_PATHS_INCLUDE,
      :collection_all_headers => COLLECTION_ALL_HEADERS,
    }

    # BUGFIX: `deplib` was referenced here without ever being defined,
    # raising a NameError whenever this hook ran. Iterate over every
    # configured dependency, as the other methods in this plugin do.
    DEPENDENCIES_LIBRARIES.each do |deplib|
      get_include_directories_for_dependency(deplib).each do |incpath|
        updates[:collection_paths_include] << incpath
        Dir[ File.join(incpath, "*#{EXTENSION_HEADER}") ].each do |f|
          updates[:collection_all_headers] << f
        end
      end
    end

    return updates
  end

  # Sanitized dependency name (non-word characters stripped).
  def get_name(deplib)
    raise "Each dependency must have a name!" if deplib[:name].nil?
    return deplib[:name].gsub(/\W*/,'')
  end

  def get_source_path(deplib)
    return deplib[:source_path] || File.join('dependencies', get_name(deplib))
  end

  def get_build_path(deplib)
    return deplib[:build_path] || deplib[:source_path] || File.join('dependencies', get_name(deplib))
  end

  def get_artifact_path(deplib)
    return deplib[:artifact_path] || deplib[:source_path] || File.join('dependencies', get_name(deplib))
  end

  # All directories this dependency may write to (used by clean).
  def get_working_paths(deplib)
    # BUGFIX: the key is :artifact_path (singular), as set and read
    # everywhere else in this plugin; :artifact_paths was never populated.
    paths = [deplib[:source_path], deplib[:build_path], deplib[:artifact_path]].compact.uniq
    paths = [ File.join('dependencies', get_name(deplib)) ] if (paths.empty?)
    return paths
  end

  def get_static_libraries_for_dependency(deplib)
    (deplib[:artifacts][:static_libraries] || []).map {|path| File.join(get_artifact_path(deplib), path)}
  end

  def get_dynamic_libraries_for_dependency(deplib)
    (deplib[:artifacts][:dynamic_libraries] || []).map {|path| File.join(get_artifact_path(deplib), path)}
  end

  def get_source_files_for_dependency(deplib)
    (deplib[:artifacts][:source] || []).map {|path| File.join(get_artifact_path(deplib), path)}
  end

  def get_include_directories_for_dependency(deplib)
    paths = (deplib[:artifacts][:includes] || []).map {|path| File.join(get_artifact_path(deplib), path)}
    @ceedling[:file_system_utils].collect_paths(paths)
  end

  # Apply the dependency's :environment entries to ENV. Entries look like
  # "VAR=value", "VAR+=suffix" or "VAR-=substring".
  def set_env_if_required(lib_path)
    blob = @dependencies[lib_path]
    raise "Could not find dependency '#{lib_path}'" if blob.nil?
    return if (blob[:environment].nil?)
    return if (blob[:environment].empty?)

    blob[:environment].each do |e|
      m = e.match(/^(\w+)\s*(\+?\-?=)\s*(.*)$/)
      unless m.nil?
        case m[2]
        when "+="
          ENV[m[1]] = (ENV[m[1]] || "") + m[3]
        when "-="
          ENV[m[1]] = (ENV[m[1]] || "").gsub(m[3],'')
        else
          ENV[m[1]] = m[3]
        end
      end
    end
  end

  # Fetch the dependency's source via its configured method (:zip, :git,
  # :svn, :custom), unless it has already been fetched.
  def fetch_if_required(lib_path)
    blob = @dependencies[lib_path]
    raise "Could not find dependency '#{lib_path}'" if blob.nil?
    return if (blob[:fetch].nil?)
    return if (blob[:fetch][:method].nil?)

    # Skip the fetch when the source directory already exists with content.
    # BUGFIX: `directory()` is Rake's task-defining helper, not an existence
    # check; use File.directory? on the resolved source path instead.
    src = get_source_path(blob)
    return if (File.directory?(src) && !Dir.empty?(src))

    steps = case blob[:fetch][:method]
            when :none
              return
            when :zip
              # NOTE(review): gzip does not unpack .zip archives -- confirm
              # whether :zip sources are actually gzipped files.
              [ "gzip -d #{blob[:fetch][:source]}" ]
            when :git
              branch = blob[:fetch][:tag] || blob[:fetch][:branch] || ''
              branch = ("-b " + branch) unless branch.empty?
              retval = [ "git clone #{branch} --depth 1 #{blob[:fetch][:source]} ." ]
              retval << "git checkout #{blob[:fetch][:hash]}" unless blob[:fetch][:hash].nil?
              retval
            when :svn
              revision = blob[:fetch][:revision] || ''
              # BUGFIX: this previously read the undefined `branch` variable,
              # raising a NameError for every svn fetch with a revision.
              revision = ("--revision " + revision) unless revision.empty?
              retval = [ "svn checkout #{revision} #{blob[:fetch][:source]} ." ]
              retval
            when :custom
              blob[:fetch][:executable]
            else
              raise "Unknown fetch method '#{blob[:fetch][:method].to_s}' for dependency '#{blob[:name]}'"
            end

    # Perform the actual fetching
    @ceedling[:streaminator].stdout_puts("Fetching dependency #{blob[:name]}...", Verbosity::NORMAL)
    Dir.chdir(src) do
      steps.each do |step|
        @ceedling[:tool_executor].exec( step )
      end
    end
  end

  # Run the dependency's build steps in its build directory.
  def build_if_required(lib_path)
    blob = @dependencies[lib_path]
    raise "Could not find dependency '#{lib_path}'" if blob.nil?

    # We don't build anything unless build steps were configured
    if (blob[:build].nil? || blob[:build].empty?)
      @ceedling[:streaminator].stdout_puts("Nothing to build for dependency #{blob[:name]}", Verbosity::NORMAL)
      return
    end

    # Perform the build
    @ceedling[:streaminator].stdout_puts("Building dependency #{blob[:name]}...", Verbosity::NORMAL)
    Dir.chdir(get_build_path(blob)) do
      blob[:build].each do |step|
        @ceedling[:tool_executor].exec( step )
      end
    end
  end

  # Remove the dependency's working directories -- but only when we know
  # how to fetch a fresh copy afterwards.
  def clean_if_required(lib_path)
    blob = @dependencies[lib_path]
    raise "Could not find dependency '#{lib_path}'" if blob.nil?

    # We don't clean anything unless we know how to fetch a new copy
    if (blob[:fetch].nil? || blob[:fetch][:method].nil? || (blob[:fetch][:method] == :none))
      @ceedling[:streaminator].stdout_puts("Nothing to clean for dependency #{blob[:name]}", Verbosity::NORMAL)
      return
    end

    # Perform the actual Cleaning
    @ceedling[:streaminator].stdout_puts("Cleaning dependency #{blob[:name]}...", Verbosity::NORMAL)
    get_working_paths(blob).each do |path|
      FileUtils.rm_rf(path) if File.directory?(path)
    end
  end

  # Copy a dynamic library next to the release build target so it can be
  # found at runtime.
  def deploy_if_required(lib_path)
    blob = @dependencies[lib_path]
    raise "Could not find dependency '#{lib_path}'" if blob.nil?

    # We don't need to deploy anything if there isn't anything to deploy
    if (blob[:artifacts].nil? || blob[:artifacts][:dynamic_libraries].nil? || blob[:artifacts][:dynamic_libraries].empty?)
      @ceedling[:streaminator].stdout_puts("Nothing to deploy for dependency #{blob[:name]}", Verbosity::NORMAL)
      return
    end

    # Perform the actual Deploying
    @ceedling[:streaminator].stdout_puts("Deploying dependency #{blob[:name]}...", Verbosity::NORMAL)
    FileUtils.cp( lib_path, File.dirname(PROJECT_RELEASE_BUILD_TARGET) )
  end

  # Push the dependencies' header and source paths/files into the already
  # built Ceedling collections (used after fetching at build time).
  def add_headers_and_sources()
    # Search for header file paths and files to add to our collections
    DEPENDENCIES_LIBRARIES.each do |deplib|
      get_include_directories_for_dependency(deplib).each do |header|
        cfg = @ceedling[:configurator].project_config_hash
        cfg[:collection_paths_include] << header
        cfg[:collection_paths_source_and_include] << header
        cfg[:collection_paths_test_support_source_include] << header
        cfg[:collection_paths_test_support_source_include_vendor] << header
        cfg[:collection_paths_release_toolchain_include] << header
        Dir[ File.join(header, "*#{EXTENSION_HEADER}") ].each do |f|
          cfg[:collection_all_headers] << f
        end
      end

      get_source_files_for_dependency(deplib).each do |source|
        cfg = @ceedling[:configurator].project_config_hash
        cfg[:collection_paths_source_and_include] << source
        cfg[:collection_paths_test_support_source_include] << source
        cfg[:collection_paths_test_support_source_include_vendor] << source
        cfg[:collection_paths_release_toolchain_include] << source
        Dir[ File.join(source, "*#{EXTENSION_SOURCE}") ].each do |f|
          cfg[:collection_all_source] << f
        end
      end
    end

    # Make all these updated files findable by Ceedling
    @ceedling[:file_finder].prepare_search_sources()
  end
end
# end blocks always executed following rake run
END {
}
| {
"pile_set_name": "Github"
} |
/*
This file is part of the OdinMS Maple Story Server
Copyright (C) 2008 Patrick Huy <[email protected]>
Matthias Butz <[email protected]>
Jan Christian Meyer <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation version 3 as published by
the Free Software Foundation. You may not use, modify or distribute
this program under any other version of the GNU Affero General Public
License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package net.server.channel.handlers;
import client.MapleCharacter;
import client.MapleClient;
import client.inventory.MapleInventoryType;
import client.autoban.AutobanManager;
import constants.inventory.ItemConstants;
import net.AbstractMaplePacketHandler;
import net.server.Server;
import client.inventory.manipulator.MapleInventoryManipulator;
import server.MapleItemInformationProvider;
import server.life.MapleMonster;
import tools.MaplePacketCreator;
import tools.data.input.SeekableLittleEndianAccessor;
/**
*
* @author kevintjuh93
*/
/**
 * Handles the packet sent when a player uses a "catch" consumable on a
 * monster. Each supported item targets one specific monster id; when the
 * conditions encoded per case (HP threshold, cooldown, chance, inventory
 * space) are met, the monster is killed, the consumable is removed, and a
 * reward item is granted.
 */
public final class UseCatchItemHandler extends AbstractMaplePacketHandler {
    @Override
    public final void handlePacket(SeekableLittleEndianAccessor slea, MapleClient c) {
        MapleCharacter chr = c.getPlayer();
        AutobanManager abm = chr.getAutobanManager();
        slea.readInt(); // presumably the client tick count -- TODO confirm
        abm.setTimestamp(5, Server.getInstance().getCurrentTimestamp(), 4);
        slea.readShort();
        int itemId = slea.readInt();
        int monsterid = slea.readInt();
        MapleMonster mob = chr.getMap().getMonsterByOid(monsterid);
        // Ignore the request when the player does not actually own the item...
        if (chr.getInventory(ItemConstants.getInventoryType(itemId)).countById(itemId) <= 0) {
            return;
        }
        // ...or when the targeted monster no longer exists on the map.
        if (mob == null) {
            return;
        }
        switch (itemId) {
            case 2270000:
                // Unconditional catch of mob 9300101; rewards item 1902000.
                if (mob.getId() == 9300101) {
                    chr.getMap().broadcastMessage(MaplePacketCreator.catchMonster(monsterid, itemId, (byte) 1));
                    mob.getMap().killMonster(mob, null, false);
                    MapleInventoryManipulator.removeById(c, MapleInventoryType.USE, itemId, 1, true, true);
                    MapleInventoryManipulator.addById(c, 1902000, (short) 1, "", -1);
                }
                c.announce(MaplePacketCreator.enableActions());
                break;
            case 2270001:
                // Requires mob HP below 40% and at least 1s since the last attempt.
                if (mob.getId() == 9500197) {
                    if ((abm.getLastSpam(10) + 1000) < currentServerTime()) {
                        if (mob.getHp() < ((mob.getMaxHp() / 10) * 4)) {
                            chr.getMap().broadcastMessage(MaplePacketCreator.catchMonster(monsterid, itemId, (byte) 1));
                            mob.getMap().killMonster(mob, null, false);
                            MapleInventoryManipulator.removeById(c, MapleInventoryType.USE, itemId, 1, true, true);
                            MapleInventoryManipulator.addById(c, 4031830, (short) 1, "", -1);
                        } else {
                            abm.spam(10);
                            c.announce(MaplePacketCreator.catchMessage(0));
                        }
                    }
                    c.announce(MaplePacketCreator.enableActions());
                }
                break;
            case 2270002:
                // HP below 40%, 800ms cooldown, ETC slot required, 50% success chance.
                if (mob.getId() == 9300157) {
                    if ((abm.getLastSpam(10) + 800) < currentServerTime()) {
                        if (mob.getHp() < ((mob.getMaxHp() / 10) * 4)) {
                            if (chr.canHold(4031868, 1)) {
                                if (Math.random() < 0.5) { // 50% chance
                                    chr.getMap().broadcastMessage(MaplePacketCreator.catchMonster(monsterid, itemId, (byte) 1));
                                    mob.getMap().killMonster(mob, null, false);
                                    MapleInventoryManipulator.removeById(c, MapleInventoryType.USE, itemId, 1, true, true);
                                    MapleInventoryManipulator.addById(c, 4031868, (short) 1, "", -1);
                                } else {
                                    // Failed attempt is still broadcast (flag 0).
                                    chr.getMap().broadcastMessage(MaplePacketCreator.catchMonster(monsterid, itemId, (byte) 0));
                                }
                            } else {
                                chr.dropMessage(5, "Make a ETC slot available before using this item.");
                            }
                            abm.spam(10);
                        } else {
                            c.announce(MaplePacketCreator.catchMessage(0));
                        }
                    }
                    c.announce(MaplePacketCreator.enableActions());
                }
                break;
            case 2270003:
                // HP below 40%; no cooldown.
                if (mob.getId() == 9500320) {
                    if (mob.getHp() < ((mob.getMaxHp() / 10) * 4)) {
                        chr.getMap().broadcastMessage(MaplePacketCreator.catchMonster(monsterid, itemId, (byte) 1));
                        mob.getMap().killMonster(mob, null, false);
                        MapleInventoryManipulator.removeById(c, MapleInventoryType.USE, itemId, 1, true, true);
                        MapleInventoryManipulator.addById(c, 4031887, (short) 1, "", -1);
                    } else {
                        c.announce(MaplePacketCreator.catchMessage(0));
                    }
                }
                c.announce(MaplePacketCreator.enableActions());
                break;
            case 2270005:
                // HP below 30%.
                if (mob.getId() == 9300187) {
                    if (mob.getHp() < ((mob.getMaxHp() / 10) * 3)) {
                        chr.getMap().broadcastMessage(MaplePacketCreator.catchMonster(monsterid, itemId, (byte) 1));
                        mob.getMap().killMonster(mob, null, false);
                        MapleInventoryManipulator.removeById(c, MapleInventoryType.USE, itemId, 1, true, true);
                        MapleInventoryManipulator.addById(c, 2109001, (short) 1, "", -1);
                    } else {
                        c.announce(MaplePacketCreator.catchMessage(0));
                    }
                }
                c.announce(MaplePacketCreator.enableActions());
                break;
            case 2270006:
                // HP below 30%.
                if (mob.getId() == 9300189) {
                    if (mob.getHp() < ((mob.getMaxHp() / 10) * 3)) {
                        chr.getMap().broadcastMessage(MaplePacketCreator.catchMonster(monsterid, itemId, (byte) 1));
                        mob.getMap().killMonster(mob, null, false);
                        MapleInventoryManipulator.removeById(c, MapleInventoryType.USE, itemId, 1, true, true);
                        MapleInventoryManipulator.addById(c, 2109002, (short) 1, "", -1);
                    } else {
                        c.announce(MaplePacketCreator.catchMessage(0));
                    }
                }
                c.announce(MaplePacketCreator.enableActions());
                break;
            case 2270007:
                // HP below 30%.
                if (mob.getId() == 9300191) {
                    if (mob.getHp() < ((mob.getMaxHp() / 10) * 3)) {
                        chr.getMap().broadcastMessage(MaplePacketCreator.catchMonster(monsterid, itemId, (byte) 1));
                        mob.getMap().killMonster(mob, null, false);
                        MapleInventoryManipulator.removeById(c, MapleInventoryType.USE, itemId, 1, true, true);
                        MapleInventoryManipulator.addById(c, 2109003, (short) 1, "", -1);
                    } else {
                        c.announce(MaplePacketCreator.catchMessage(0));
                    }
                }
                c.announce(MaplePacketCreator.enableActions());
                break;
            case 2270004:
                // HP below 40%.
                if (mob.getId() == 9300175) {
                    if (mob.getHp() < ((mob.getMaxHp() / 10) * 4)) {
                        chr.getMap().broadcastMessage(MaplePacketCreator.catchMonster(monsterid, itemId, (byte) 1));
                        mob.getMap().killMonster(mob, null, false);
                        MapleInventoryManipulator.removeById(c, MapleInventoryType.USE, itemId, 1, true, true);
                        MapleInventoryManipulator.addById(c, 4001169, (short) 1, "", -1);
                    } else {
                        c.announce(MaplePacketCreator.catchMessage(0));
                    }
                }
                c.announce(MaplePacketCreator.enableActions());
                break;
            case 2270008:
                // Fishing Net: no HP requirement, 3s cooldown between uses.
                if (mob.getId() == 9500336) {
                    if ((abm.getLastSpam(10) + 3000) < currentServerTime()) {
                        abm.spam(10);
                        chr.getMap().broadcastMessage(MaplePacketCreator.catchMonster(monsterid, itemId, (byte) 1));
                        mob.getMap().killMonster(mob, null, false);
                        MapleInventoryManipulator.removeById(c, MapleInventoryType.USE, itemId, 1, true, true);
                        MapleInventoryManipulator.addById(c, 2022323, (short) 1, "", -1);
                    } else {
                        chr.message("You cannot use the Fishing Net yet.");
                    }
                    c.announce(MaplePacketCreator.enableActions());
                }
                break;
            default:
                // Data-driven catch: thresholds and rewards come from item data.
                // proper Fish catch, thanks to Dragohe4rt
                MapleItemInformationProvider ii = MapleItemInformationProvider.getInstance();
                int itemGanho = ii.getCreateItem(itemId);
                int mobItem = ii.getMobItem(itemId);
                if (itemGanho != 0 && mobItem == mob.getId()) {
                    int timeCatch = ii.getUseDelay(itemId);
                    int mobHp = ii.getMobHP(itemId);
                    if (timeCatch != 0 && (abm.getLastSpam(10) + timeCatch) < currentServerTime()) {
                        // Success requires the mob's HP below mobHp percent of max.
                        if (mobHp != 0 && mob.getHp() < ((mob.getMaxHp() / 100) * mobHp)) {
                            chr.getMap().broadcastMessage(MaplePacketCreator.catchMonster(monsterid, itemId, (byte) 1));
                            mob.getMap().killMonster(mob, null, false);
                            MapleInventoryManipulator.removeById(c, MapleInventoryType.USE, itemId, 1, true, true);
                            MapleInventoryManipulator.addById(c, itemGanho, (short) 1, "", -1);
                        } else if (mob.getId() != 9500336) {
                            if (mobHp != 0) {
                                abm.spam(10);
                                c.announce(MaplePacketCreator.catchMessage(0));
                            }
                        } else {
                            chr.message("You cannot use the Fishing Net yet.");
                        }
                    }
                }
                c.announce(MaplePacketCreator.enableActions());
                // System.out.println("UseCatchItemHandler: \r\n" + slea.toString());
        }
    }
}
| {
"pile_set_name": "Github"
} |
{
"$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"standard": {
"type": "secureObject",
"defaultValue": {
"deploymentId": null,
"searchServiceName": null,
"solrConnectionString": null
}
},
"searchProvider": {
"type": "string",
"defaultValue": "[if(empty(parameters('standard').solrConnectionString),'Azure','Solr')]",
"allowedValues": [
"Azure",
"Solr"
]
},
"extension": {
"type": "secureObject",
"defaultValue": {
"omsWorkspaceAlertRecipients": null,
"omsWorkspaceMetricsRetentionDays": null,
"omsWorkspaceLocation": null,
"applicationInsightsLocation": null,
"templateLinkAccessToken": ""
}
},
"deploymentId": {
"type": "string",
"defaultValue": "[coalesce(parameters('standard').deploymentId, resourceGroup().name)]"
},
"searchServiceName": {
"type": "string",
"defaultValue": "[coalesce(parameters('standard').searchServiceName, concat(parameters('deploymentId'), '-as'))]"
},
"omsWorkspaceMetricsRetentionDays": {
"type": "int",
"defaultValue": "[parameters('extension').omsWorkspaceMetricsRetentionDays]",
"metadata": {
"description": "Number of days of retention from 30 to 730."
}
},
"omsWorkspaceAlertRecipients": {
"type": "string",
"defaultValue": "[parameters('extension').omsWorkspaceAlertRecipients]",
"metadata": {
"Description": "List of recipients for the email alert separated by semicolon"
}
},
"omsWorkspaceLocation": {
"type": "string",
"defaultValue": "[parameters('extension').omsWorkspaceLocation]",
"metadata": {
"description": "the Location in which your OMS will be provisioned"
}
},
"applicationInsightsLocation": {
"type": "string",
"defaultValue": "[parameters('extension').applicationInsightsLocation]",
"metadata": {
"description": "the Location in which your Application Insight exists"
}
},
"templateLinkBase": {
"type": "string",
"defaultValue": "[uri(replace(json(string(deployment().properties.templateLink)).uri,' ','%20'), '.')]"
},
"templateLinkAccessToken": {
"type": "securestring",
"defaultValue": "[if(contains(parameters('extension'), 'templateLinkAccessToken'), parameters('extension').templateLinkAccessToken, '')]"
}
},
"variables": {
"resourcesApiVersion": "2016-09-01"
},
"resources": [
{
"apiVersion": "[variables('resourcesApiVersion')]",
"name": "[concat(deployment().name,'-infrastructure')]",
"type": "Microsoft.Resources/deployments",
"properties": {
"mode": "Incremental",
"templateLink": {
"uri": "[concat(uri(parameters('templateLinkBase'), 'nested/infrastructure.json'), parameters('templateLinkAccessToken'))]",
"contentVersion": "1.0.0.0"
},
"parameters": {
"deploymentId": {
"value": "[parameters('deploymentId')]"
},
"omsWorkspaceMetricsRetentionDays": {
"value": "[parameters('omsWorkspaceMetricsRetentionDays')]"
},
"omsWorkspaceLocation": {
"value": "[parameters('omsWorkspaceLocation')]"
},
"omsWorkspaceAlertRecipients": {
"value": "[parameters('omsWorkspaceAlertRecipients')]"
},
"templateLinkAccessToken": {
"value": "[parameters('templateLinkAccessToken')]"
}
}
}
},
{
"apiVersion": "[variables('resourcesApiVersion')]",
"name": "[concat(deployment().name,'-application')]",
"type": "Microsoft.Resources/deployments",
"dependson": [
"[concat(deployment().name,'-infrastructure')]"
],
"properties": {
"mode": "Incremental",
"templateLink": {
"uri": "[concat(uri(parameters('templateLinkBase'), 'nested/application.json'), parameters('templateLinkAccessToken'))]",
"contentVersion": "1.0.0.0"
},
"parameters": {
"deploymentId": {
"value": "[parameters('deploymentId')]"
},
"infrastructure": {
"value": "[reference(concat(deployment().name,'-infrastructure')).outputs.infrastructure.value]"
},
"omsWorkspaceAlertRecipients": {
"value": "[parameters('omsWorkspaceAlertRecipients')]"
},
"omsWorkspaceLocation": {
"value": "[parameters('omsWorkspaceLocation')]"
}
}
}
},
{
"apiVersion": "[variables('resourcesApiVersion')]",
"name": "[concat(deployment().name, '-integration')]",
"type": "Microsoft.Resources/deployments",
"dependson": [
"[concat(deployment().name,'-infrastructure')]"
],
"properties": {
"mode": "Incremental",
"templateLink": {
"uri": "[concat(uri(parameters('templateLinkBase'), 'nested/integration.json'), parameters('templateLinkAccessToken'))]",
"contentVersion": "1.0.0.0"
},
"parameters": {
"deploymentId": {
"value": "[parameters('deploymentId')]"
},
"infrastructure": {
"value": "[reference(concat(deployment().name,'-infrastructure')).outputs.infrastructure.value]"
},
"searchProvider": {
"value": "[parameters('searchProvider')]"
}
}
}
},
{
"apiVersion": "[variables('resourcesApiVersion')]",
"name": "[concat(deployment().name, '-availability')]",
"type": "Microsoft.Resources/deployments",
"dependson": [
"[concat(deployment().name,'-infrastructure')]"
],
"properties": {
"mode": "Incremental",
"templateLink": {
"uri": "[concat(uri(parameters('templateLinkBase'), 'nested/availability.json'), parameters('templateLinkAccessToken'))]",
"contentVersion": "1.0.0.0"
},
"parameters": {
"deploymentId": {
"value": "[parameters('deploymentId')]"
},
"infrastructure": {
"value": "[reference(concat(deployment().name,'-infrastructure')).outputs.infrastructure.value]"
},
"omsWorkspaceAlertRecipients": {
"value": "[parameters('omsWorkspaceAlertRecipients')]"
},
"applicationInsightsLocation": {
"value": "[parameters('applicationInsightsLocation')]"
}
}
}
},
{
"apiVersion": "[variables('resourcesApiVersion')]",
"condition": "[equals(parameters('searchProvider'), 'Azure')]",
"name": "[concat(parameters('deploymentId'), '-alertsearchsearchservices')]",
"type": "Microsoft.Resources/deployments",
"dependson": [
"[concat(deployment().name,'-infrastructure')]"
],
"properties": {
"mode": "Incremental",
"templateLink": {
"uri": "[concat(uri(parameters('templateLinkBase'), 'nested/alertsearchsearchservices.json'), parameters('templateLinkAccessToken'))]",
"contentVersion": "1.0.0.0"
},
"parameters": {
"deploymentId": {
"value": "[parameters('deploymentId')]"
},
"infrastructure": {
"value": "[reference(concat(deployment().name,'-infrastructure')).outputs.infrastructure.value]"
},
"omsWorkspaceLocation": {
"value": "[parameters('omsWorkspaceLocation')]"
},
"omsWorkspaceAlertRecipients": {
"value": "[parameters('omsWorkspaceAlertRecipients')]"
}
}
}
},
{
"apiVersion": "[variables('resourcesApiVersion')]",
"name": "[concat(parameters('deploymentId'), '-alertwebserverfarms')]",
"type": "Microsoft.Resources/deployments",
"dependson": [
"[concat(deployment().name,'-infrastructure')]"
],
"properties": {
"mode": "Incremental",
"templateLink": {
"uri": "[concat(uri(parameters('templateLinkBase'), 'nested/alertwebserverfarms.json'), parameters('templateLinkAccessToken'))]",
"contentVersion": "1.0.0.0"
},
"parameters": {
"deploymentId": {
"value": "[parameters('deploymentId')]"
},
"infrastructure": {
"value": "[reference(concat(deployment().name,'-infrastructure')).outputs.infrastructure.value]"
},
"omsWorkspaceLocation": {
"value": "[parameters('omsWorkspaceLocation')]"
},
"omsWorkspaceAlertRecipients": {
"value": "[parameters('omsWorkspaceAlertRecipients')]"
}
}
}
},
{
"apiVersion": "[variables('resourcesApiVersion')]",
"name": "[concat(parameters('deploymentId'), '-alertwebsites')]",
"type": "Microsoft.Resources/deployments",
"dependson": [
"[concat(deployment().name,'-infrastructure')]"
],
"properties": {
"mode": "Incremental",
"templateLink": {
"uri": "[concat(uri(parameters('templateLinkBase'), 'nested/alertwebsites.json'), parameters('templateLinkAccessToken'))]",
"contentVersion": "1.0.0.0"
},
"parameters": {
"deploymentId": {
"value": "[parameters('deploymentId')]"
},
"infrastructure": {
"value": "[reference(concat(deployment().name,'-infrastructure')).outputs.infrastructure.value]"
},
"omsWorkspaceLocation": {
"value": "[parameters('omsWorkspaceLocation')]"
},
"omsWorkspaceAlertRecipients": {
"value": "[parameters('omsWorkspaceAlertRecipients')]"
}
}
}
},
{
"apiVersion": "[variables('resourcesApiVersion')]",
"name": "[concat(parameters('deploymentId'), '-alertsqlserversdatabases')]",
"type": "Microsoft.Resources/deployments",
"dependson": [
"[concat(deployment().name,'-infrastructure')]"
],
"properties": {
"mode": "Incremental",
"templateLink": {
"uri": "[concat(uri(parameters('templateLinkBase'), 'nested/alertsqlserversdatabases.json'), parameters('templateLinkAccessToken'))]",
"contentVersion": "1.0.0.0"
},
"parameters": {
"deploymentId": {
"value": "[parameters('deploymentId')]"
},
"infrastructure": {
"value": "[reference(concat(deployment().name,'-infrastructure')).outputs.infrastructure.value]"
},
"omsWorkspaceLocation": {
"value": "[parameters('omsWorkspaceLocation')]"
},
"omsWorkspaceAlertRecipients": {
"value": "[parameters('omsWorkspaceAlertRecipients')]"
}
}
}
},
{
"apiVersion": "[variables('resourcesApiVersion')]",
"name": "[concat(parameters('deploymentId'), '-alertcacheredis')]",
"type": "Microsoft.Resources/deployments",
"dependson": [
"[concat(deployment().name,'-infrastructure')]"
],
"properties": {
"mode": "Incremental",
"templateLink": {
"uri": "[concat(uri(parameters('templateLinkBase'), 'nested/alertcacheredis.json'), parameters('templateLinkAccessToken'))]",
"contentVersion": "1.0.0.0"
},
"parameters": {
"deploymentId": {
"value": "[parameters('deploymentId')]"
},
"infrastructure": {
"value": "[reference(concat(deployment().name,'-infrastructure')).outputs.infrastructure.value]"
},
"omsWorkspaceLocation": {
"value": "[parameters('omsWorkspaceLocation')]"
},
"omsWorkspaceAlertRecipients": {
"value": "[parameters('omsWorkspaceAlertRecipients')]"
}
}
}
}
]
} | {
"pile_set_name": "Github"
} |
<?php
/*
* This file is part of the Predis package.
*
* (c) Daniele Alessandri <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Predis\Cluster\Distributor;
/**
* Exception class that identifies empty rings.
*
* @author Daniele Alessandri <[email protected]>
*/
class EmptyRingException extends \Exception
{
    // Intentionally empty: the exception type alone identifies the error
    // condition (a distribution ring with no nodes); no extra state needed.
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml"><head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>H5微信平台</title>
<style type="text/css">
.wxAuthSucBox{
width: 460px;
height: 380px;
position: fixed;
top: 0;
bottom: 0;
left: 0;
right: 0;
margin: auto;
background: url($!{basePath}/content/commonweixin/img/wx_auth_suc.jpg) no-repeat;
}
</style>
<script type="text/javascript">
</script>
</head>
<body>
<!-- <div>授权成功,请自行关闭此页。</div> -->
<div class="wxAuthSucBoxContent">
<h2 style='text-align:center;color:#FEA128;'>$!{message}</h2>
<h3 style='text-align:center;color:#BDBFC3;'>请自行关闭当前页面</h3>
</div>
<div class="wxAuthSucBox">
</div>
</body>
</html> | {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: c1ce0bf4df9fbb54ca8d0ed7bacb45c3
timeCreated: 1502385975
licenseType: Pro
ShaderImporter:
defaultTextures: []
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
<!--
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<template>
<div class="field" v-if="display">
<label class="label">{{ label }}</label>
<input class="input" style="max-width: 400px;"
type="text"
:name="name"
:value="value"
:placeholder="placeholder"
@input="$emit('input', $event.target.value)">
</div>
</template>
<script>
export default {
  // `label` is rendered above the field and `display` toggles the whole
  // field (v-if in the template); `placeholder`, `name` and `value` are
  // forwarded to the <input>. `value` pairs with the emitted `input`
  // event, so the component supports v-model.
  props: ['placeholder', 'label', 'name', 'value', 'display']
}
</script>
| {
"pile_set_name": "Github"
} |
/*
* Distributed under the Boost Software License, Version 1.0.(See accompanying
* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.)
*
* See http://www.boost.org/libs/iostreams for documentation.
*
* File: boost/iostreams/detail/restrict.hpp
* Date: Sun Jan 06 12:57:30 MST 2008
* Copyright: 2008 CodeRage, LLC
2004-2007 Jonathan Turkanis
* Author: Jonathan Turkanis
* Contact: turkanis at coderage dot com
*
* Defines the class template boost::iostreams::restriction and the
* overloaded function template boost::iostreams::restrict
*/
#ifndef BOOST_IOSTREAMS_RESTRICT_HPP_INCLUDED
#define BOOST_IOSTREAMS_RESTRICT_HPP_INCLUDED
#include <boost/iostreams/detail/restrict_impl.hpp>
#define BOOST_IOSTREAMS_RESTRICT restrict
#include <boost/iostreams/detail/restrict_impl.hpp>
#undef BOOST_IOSTREAMS_RESTRICT
#endif // #ifndef BOOST_IOSTREAMS_RESTRICT_HPP_INCLUDED
| {
"pile_set_name": "Github"
} |
# Contents
Home for the Oblivious Decentralized Identifier Service (ODIS), formerly PGPNP (Pretty Good Phone Number Privacy).
| {
"pile_set_name": "Github"
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using GL_EditorFramework.Interfaces;
using Toolbox.Library.Forms;
namespace Toolbox.Library
{
//Represents a container that stores multiple drawables
//These can be switched between the viewport
public class DrawableContainer
{
private string _name;
public string Name
{
set
{
List<string> Names = ObjectEditor.GetDrawableContainers().Select(o => o.Name).ToList();
foreach (string str in Names)
Console.WriteLine("STR NAME " + str);
_name = Utils.RenameDuplicateString(Names, value);
}
get
{
return _name;
}
}
public ContainerState ContainerState { get; set; }
public List<AbstractGlDrawable> Drawables = new List<AbstractGlDrawable>();
}
    /// <summary>Lifecycle state of a <see cref="DrawableContainer"/> in the viewport.</summary>
    public enum ContainerState
    {
        Active,    // currently selected/shown in the viewport
        Inactive,  // loaded but not currently shown
        Disposed,  // released; no longer usable
    }
}
| {
"pile_set_name": "Github"
} |
// Boost config.hpp configuration header file ------------------------------//
// Copyright (c) 2001-2003 John Maddock
// Copyright (c) 2001 Darin Adler
// Copyright (c) 2001 Peter Dimov
// Copyright (c) 2002 Bill Kempf
// Copyright (c) 2002 Jens Maurer
// Copyright (c) 2002-2003 David Abrahams
// Copyright (c) 2003 Gennaro Prota
// Copyright (c) 2003 Eric Friedman
// Copyright (c) 2010 Eric Jourdanneau, Joel Falcou
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// See http://www.boost.org/ for most recent version.
// Boost config.hpp policy and rationale documentation has been moved to
// http://www.boost.org/libs/config/
//
// This file is intended to be stable, and relatively unchanging.
// It should contain boilerplate code only - no compiler specific
// code unless it is unavoidable - no changes unless unavoidable.
#ifndef BOOST_CONFIG_SUFFIX_HPP
#define BOOST_CONFIG_SUFFIX_HPP
#if defined(__GNUC__) && (__GNUC__ >= 4)
//
// Some GCC-4.x versions issue warnings even when __extension__ is used,
// so use this as a workaround:
//
#pragma GCC system_header
#endif
//
// ensure that visibility macros are always defined, thus symplifying use
//
#ifndef BOOST_SYMBOL_EXPORT
# define BOOST_SYMBOL_EXPORT
#endif
#ifndef BOOST_SYMBOL_IMPORT
# define BOOST_SYMBOL_IMPORT
#endif
#ifndef BOOST_SYMBOL_VISIBLE
# define BOOST_SYMBOL_VISIBLE
#endif
//
// look for long long by looking for the appropriate macros in <limits.h>.
// Note that we use limits.h rather than climits for maximal portability,
// remember that since these just declare a bunch of macros, there should be
// no namespace issues from this.
//
#if !defined(BOOST_HAS_LONG_LONG) && !defined(BOOST_NO_LONG_LONG) \
&& !defined(BOOST_MSVC) && !defined(__BORLANDC__)
# include <limits.h>
# if (defined(ULLONG_MAX) || defined(ULONG_LONG_MAX) || defined(ULONGLONG_MAX))
# define BOOST_HAS_LONG_LONG
# else
# define BOOST_NO_LONG_LONG
# endif
#endif
// GCC 3.x will clean up all of those nasty macro definitions that
// BOOST_NO_CTYPE_FUNCTIONS is intended to help work around, so undefine
// it under GCC 3.x.
#if defined(__GNUC__) && (__GNUC__ >= 3) && defined(BOOST_NO_CTYPE_FUNCTIONS)
# undef BOOST_NO_CTYPE_FUNCTIONS
#endif
//
// Assume any extensions are in namespace std:: unless stated otherwise:
//
# ifndef BOOST_STD_EXTENSION_NAMESPACE
# define BOOST_STD_EXTENSION_NAMESPACE std
# endif
//
// If cv-qualified specializations are not allowed, then neither are cv-void ones:
//
# if defined(BOOST_NO_CV_SPECIALIZATIONS) \
&& !defined(BOOST_NO_CV_VOID_SPECIALIZATIONS)
# define BOOST_NO_CV_VOID_SPECIALIZATIONS
# endif
//
// If there is no numeric_limits template, then it can't have any compile time
// constants either!
//
# if defined(BOOST_NO_LIMITS) \
&& !defined(BOOST_NO_LIMITS_COMPILE_TIME_CONSTANTS)
# define BOOST_NO_LIMITS_COMPILE_TIME_CONSTANTS
# define BOOST_NO_MS_INT64_NUMERIC_LIMITS
# define BOOST_NO_LONG_LONG_NUMERIC_LIMITS
# endif
//
// if there is no long long then there is no specialisation
// for numeric_limits<long long> either:
//
#if !defined(BOOST_HAS_LONG_LONG) && !defined(BOOST_NO_LONG_LONG_NUMERIC_LIMITS)
# define BOOST_NO_LONG_LONG_NUMERIC_LIMITS
#endif
//
// if there is no __int64 then there is no specialisation
// for numeric_limits<__int64> either:
//
#if !defined(BOOST_HAS_MS_INT64) && !defined(BOOST_NO_MS_INT64_NUMERIC_LIMITS)
# define BOOST_NO_MS_INT64_NUMERIC_LIMITS
#endif
//
// if member templates are supported then so is the
// VC6 subset of member templates:
//
# if !defined(BOOST_NO_MEMBER_TEMPLATES) \
&& !defined(BOOST_MSVC6_MEMBER_TEMPLATES)
# define BOOST_MSVC6_MEMBER_TEMPLATES
# endif
//
// Without partial specialization, can't test for partial specialisation bugs:
//
# if defined(BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION) \
&& !defined(BOOST_BCB_PARTIAL_SPECIALIZATION_BUG)
# define BOOST_BCB_PARTIAL_SPECIALIZATION_BUG
# endif
//
// Without partial specialization, we can't have array-type partial specialisations:
//
# if defined(BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION) \
&& !defined(BOOST_NO_ARRAY_TYPE_SPECIALIZATIONS)
# define BOOST_NO_ARRAY_TYPE_SPECIALIZATIONS
# endif
//
// Without partial specialization, std::iterator_traits can't work:
//
# if defined(BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION) \
&& !defined(BOOST_NO_STD_ITERATOR_TRAITS)
# define BOOST_NO_STD_ITERATOR_TRAITS
# endif
//
// Without partial specialization, partial
// specialization with default args won't work either:
//
# if defined(BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION) \
&& !defined(BOOST_NO_PARTIAL_SPECIALIZATION_IMPLICIT_DEFAULT_ARGS)
# define BOOST_NO_PARTIAL_SPECIALIZATION_IMPLICIT_DEFAULT_ARGS
# endif
//
// Without member template support, we can't have template constructors
// in the standard library either:
//
# if defined(BOOST_NO_MEMBER_TEMPLATES) \
&& !defined(BOOST_MSVC6_MEMBER_TEMPLATES) \
&& !defined(BOOST_NO_TEMPLATED_ITERATOR_CONSTRUCTORS)
# define BOOST_NO_TEMPLATED_ITERATOR_CONSTRUCTORS
# endif
//
// Without member template support, we can't have a conforming
// std::allocator template either:
//
# if defined(BOOST_NO_MEMBER_TEMPLATES) \
&& !defined(BOOST_MSVC6_MEMBER_TEMPLATES) \
&& !defined(BOOST_NO_STD_ALLOCATOR)
# define BOOST_NO_STD_ALLOCATOR
# endif
//
// without ADL support then using declarations will break ADL as well:
//
#if defined(BOOST_NO_ARGUMENT_DEPENDENT_LOOKUP) && !defined(BOOST_FUNCTION_SCOPE_USING_DECLARATION_BREAKS_ADL)
# define BOOST_FUNCTION_SCOPE_USING_DECLARATION_BREAKS_ADL
#endif
//
// Without typeid support we have no dynamic RTTI either:
//
#if defined(BOOST_NO_TYPEID) && !defined(BOOST_NO_RTTI)
# define BOOST_NO_RTTI
#endif
//
// If we have a standard allocator, then we have a partial one as well:
//
#if !defined(BOOST_NO_STD_ALLOCATOR)
# define BOOST_HAS_PARTIAL_STD_ALLOCATOR
#endif
//
// We can't have a working std::use_facet if there is no std::locale:
//
# if defined(BOOST_NO_STD_LOCALE) && !defined(BOOST_NO_STD_USE_FACET)
# define BOOST_NO_STD_USE_FACET
# endif
//
// We can't have a std::messages facet if there is no std::locale:
//
# if defined(BOOST_NO_STD_LOCALE) && !defined(BOOST_NO_STD_MESSAGES)
# define BOOST_NO_STD_MESSAGES
# endif
//
// We can't have a working std::wstreambuf if there is no std::locale:
//
# if defined(BOOST_NO_STD_LOCALE) && !defined(BOOST_NO_STD_WSTREAMBUF)
# define BOOST_NO_STD_WSTREAMBUF
# endif
//
// We can't have a <cwctype> if there is no <cwchar>:
//
# if defined(BOOST_NO_CWCHAR) && !defined(BOOST_NO_CWCTYPE)
# define BOOST_NO_CWCTYPE
# endif
//
// We can't have a swprintf if there is no <cwchar>:
//
# if defined(BOOST_NO_CWCHAR) && !defined(BOOST_NO_SWPRINTF)
# define BOOST_NO_SWPRINTF
# endif
//
// If Win32 support is turned off, then we must turn off
// threading support also, unless there is some other
// thread API enabled:
//
#if defined(BOOST_DISABLE_WIN32) && defined(_WIN32) \
&& !defined(BOOST_DISABLE_THREADS) && !defined(BOOST_HAS_PTHREADS)
# define BOOST_DISABLE_THREADS
#endif
//
// Turn on threading support if the compiler thinks that it's in
// multithreaded mode. We put this here because there are only a
// limited number of macros that identify this (if there's any missing
// from here then add to the appropriate compiler section):
//
#if (defined(__MT__) || defined(_MT) || defined(_REENTRANT) \
|| defined(_PTHREADS) || defined(__APPLE__) || defined(__DragonFly__)) \
&& !defined(BOOST_HAS_THREADS)
# define BOOST_HAS_THREADS
#endif
//
// Turn threading support off if BOOST_DISABLE_THREADS is defined:
//
#if defined(BOOST_DISABLE_THREADS) && defined(BOOST_HAS_THREADS)
# undef BOOST_HAS_THREADS
#endif
//
// Turn threading support off if we don't recognise the threading API:
//
#if defined(BOOST_HAS_THREADS) && !defined(BOOST_HAS_PTHREADS)\
&& !defined(BOOST_HAS_WINTHREADS) && !defined(BOOST_HAS_BETHREADS)\
&& !defined(BOOST_HAS_MPTASKS)
# undef BOOST_HAS_THREADS
#endif
//
// Turn threading detail macros off if we don't (want to) use threading
//
#ifndef BOOST_HAS_THREADS
# undef BOOST_HAS_PTHREADS
# undef BOOST_HAS_PTHREAD_MUTEXATTR_SETTYPE
# undef BOOST_HAS_PTHREAD_YIELD
# undef BOOST_HAS_PTHREAD_DELAY_NP
# undef BOOST_HAS_WINTHREADS
# undef BOOST_HAS_BETHREADS
# undef BOOST_HAS_MPTASKS
#endif
//
// If the compiler claims to be C99 conformant, then it had better
// have a <stdint.h>:
//
# if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901)
# define BOOST_HAS_STDINT_H
# ifndef BOOST_HAS_LOG1P
# define BOOST_HAS_LOG1P
# endif
# ifndef BOOST_HAS_EXPM1
# define BOOST_HAS_EXPM1
# endif
# endif
//
// Define BOOST_NO_SLIST and BOOST_NO_HASH if required.
// Note that this is for backwards compatibility only.
//
# if !defined(BOOST_HAS_SLIST) && !defined(BOOST_NO_SLIST)
# define BOOST_NO_SLIST
# endif
# if !defined(BOOST_HAS_HASH) && !defined(BOOST_NO_HASH)
# define BOOST_NO_HASH
# endif
//
// Set BOOST_SLIST_HEADER if not set already:
//
#if defined(BOOST_HAS_SLIST) && !defined(BOOST_SLIST_HEADER)
# define BOOST_SLIST_HEADER <slist>
#endif
//
// Set BOOST_HASH_SET_HEADER if not set already:
//
#if defined(BOOST_HAS_HASH) && !defined(BOOST_HASH_SET_HEADER)
# define BOOST_HASH_SET_HEADER <hash_set>
#endif
//
// Set BOOST_HASH_MAP_HEADER if not set already:
//
#if defined(BOOST_HAS_HASH) && !defined(BOOST_HASH_MAP_HEADER)
# define BOOST_HASH_MAP_HEADER <hash_map>
#endif
// BOOST_HAS_ABI_HEADERS
// This macro gets set if we have headers that fix the ABI,
// and prevent ODR violations when linking to external libraries:
#if defined(BOOST_ABI_PREFIX) && defined(BOOST_ABI_SUFFIX) && !defined(BOOST_HAS_ABI_HEADERS)
# define BOOST_HAS_ABI_HEADERS
#endif
#if defined(BOOST_HAS_ABI_HEADERS) && defined(BOOST_DISABLE_ABI_HEADERS)
# undef BOOST_HAS_ABI_HEADERS
#endif
// BOOST_NO_STDC_NAMESPACE workaround --------------------------------------//
// Because std::size_t usage is so common, even in boost headers which do not
// otherwise use the C library, the <cstddef> workaround is included here so
// that ugly workaround code need not appear in many other boost headers.
// NOTE WELL: This is a workaround for non-conforming compilers; <cstddef>
// must still be #included in the usual places so that <cstddef> inclusion
// works as expected with standard conforming compilers. The resulting
// double inclusion of <cstddef> is harmless.
# if defined(BOOST_NO_STDC_NAMESPACE) && defined(__cplusplus)
# include <cstddef>
namespace std { using ::ptrdiff_t; using ::size_t; }
# endif
// Workaround for the unfortunate min/max macros defined by some platform headers
#define BOOST_PREVENT_MACRO_SUBSTITUTION
#ifndef BOOST_USING_STD_MIN
# define BOOST_USING_STD_MIN() using std::min
#endif
#ifndef BOOST_USING_STD_MAX
# define BOOST_USING_STD_MAX() using std::max
#endif
// BOOST_NO_STD_MIN_MAX workaround -----------------------------------------//
# if defined(BOOST_NO_STD_MIN_MAX) && defined(__cplusplus)

// Minimal replacements for standard libraries that ship without
// std::min/std::max. BOOST_PREVENT_MACRO_SUBSTITUTION keeps platform
// headers that #define min/max from breaking these declarations.
namespace std {
  template <class _Tp>
  inline const _Tp& min BOOST_PREVENT_MACRO_SUBSTITUTION (const _Tp& __a, const _Tp& __b) {
    return __b < __a ? __b : __a;  // equal arguments yield __a, per the standard
  }
  template <class _Tp>
  inline const _Tp& max BOOST_PREVENT_MACRO_SUBSTITUTION (const _Tp& __a, const _Tp& __b) {
    return __a < __b ? __b : __a;  // equal arguments yield __a, per the standard
  }
}
# endif
// BOOST_STATIC_CONSTANT workaround --------------------------------------- //
// On compilers which don't allow in-class initialization of static integral
// constant members, we must use enums as a workaround if we want the constants
// to be available at compile-time. This macro gives us a convenient way to
// declare such constants.
# ifdef BOOST_NO_INCLASS_MEMBER_INITIALIZATION
# define BOOST_STATIC_CONSTANT(type, assignment) enum { assignment }
# else
# define BOOST_STATIC_CONSTANT(type, assignment) static const type assignment
# endif
// BOOST_USE_FACET / HAS_FACET workaround ----------------------------------//
// When the standard library does not have a conforming std::use_facet there
// are various workarounds available, but they differ from library to library.
// The same problem occurs with has_facet.
// These macros provide a consistent way to access a locale's facets.
// Usage:
// replace
// std::use_facet<Type>(loc);
// with
// BOOST_USE_FACET(Type, loc);
// Note do not add a std:: prefix to the front of BOOST_USE_FACET!
// Use for BOOST_HAS_FACET is analogous.
#if defined(BOOST_NO_STD_USE_FACET)
# ifdef BOOST_HAS_TWO_ARG_USE_FACET
# define BOOST_USE_FACET(Type, loc) std::use_facet(loc, static_cast<Type*>(0))
# define BOOST_HAS_FACET(Type, loc) std::has_facet(loc, static_cast<Type*>(0))
# elif defined(BOOST_HAS_MACRO_USE_FACET)
# define BOOST_USE_FACET(Type, loc) std::_USE(loc, Type)
# define BOOST_HAS_FACET(Type, loc) std::_HAS(loc, Type)
# elif defined(BOOST_HAS_STLP_USE_FACET)
# define BOOST_USE_FACET(Type, loc) (*std::_Use_facet<Type >(loc))
# define BOOST_HAS_FACET(Type, loc) std::has_facet< Type >(loc)
# endif
#else
# define BOOST_USE_FACET(Type, loc) std::use_facet< Type >(loc)
# define BOOST_HAS_FACET(Type, loc) std::has_facet< Type >(loc)
#endif
// BOOST_NESTED_TEMPLATE workaround ------------------------------------------//
// Member templates are supported by some compilers even though they can't use
// the A::template member<U> syntax, as a workaround replace:
//
// typedef typename A::template rebind<U> binder;
//
// with:
//
// typedef typename A::BOOST_NESTED_TEMPLATE rebind<U> binder;
#ifndef BOOST_NO_MEMBER_TEMPLATE_KEYWORD
# define BOOST_NESTED_TEMPLATE template
#else
# define BOOST_NESTED_TEMPLATE
#endif
// BOOST_UNREACHABLE_RETURN(x) workaround -------------------------------------//
// Normally evaluates to nothing, unless BOOST_NO_UNREACHABLE_RETURN_DETECTION
// is defined, in which case it evaluates to return x; Use when you have a return
// statement that can never be reached.
#ifdef BOOST_NO_UNREACHABLE_RETURN_DETECTION
# define BOOST_UNREACHABLE_RETURN(x) return x;
#else
# define BOOST_UNREACHABLE_RETURN(x)
#endif
// BOOST_DEDUCED_TYPENAME workaround ------------------------------------------//
//
// Some compilers don't support the use of `typename' for dependent
// types in deduced contexts, e.g.
//
// template <class T> void f(T, typename T::type);
// ^^^^^^^^
// Replace these declarations with:
//
// template <class T> void f(T, BOOST_DEDUCED_TYPENAME T::type);
#ifndef BOOST_NO_DEDUCED_TYPENAME
# define BOOST_DEDUCED_TYPENAME typename
#else
# define BOOST_DEDUCED_TYPENAME
#endif
#ifndef BOOST_NO_TYPENAME_WITH_CTOR
# define BOOST_CTOR_TYPENAME typename
#else
# define BOOST_CTOR_TYPENAME
#endif
// long long workaround ------------------------------------------//
// On gcc (and maybe other compilers?) long long is alway supported
// but it's use may generate either warnings (with -ansi), or errors
// (with -pedantic -ansi) unless it's use is prefixed by __extension__
//
#if defined(BOOST_HAS_LONG_LONG) && defined(__cplusplus)
namespace boost{
#  ifdef __GNUC__
   // __extension__ suppresses -pedantic/-ansi diagnostics about the
   // long long extension on GCC (see the comment block above).
   __extension__ typedef long long long_long_type;
   __extension__ typedef unsigned long long ulong_long_type;
#  else
   typedef long long long_long_type;
   typedef unsigned long long ulong_long_type;
#  endif
}
#endif
// BOOST_[APPEND_]EXPLICIT_TEMPLATE_[NON_]TYPE macros --------------------------//
//
// Some compilers have problems with function templates whose template
// parameters don't appear in the function parameter list (basically
// they just link one instantiation of the template in the final
// executable). These macros provide a uniform way to cope with the
// problem with no effects on the calling syntax.
// Example:
//
// #include <iostream>
// #include <ostream>
// #include <typeinfo>
//
// template <int n>
// void f() { std::cout << n << ' '; }
//
// template <typename T>
// void g() { std::cout << typeid(T).name() << ' '; }
//
// int main() {
// f<1>();
// f<2>();
//
// g<int>();
// g<double>();
// }
//
// With VC++ 6.0 the output is:
//
// 2 2 double double
//
// To fix it, write
//
// template <int n>
// void f(BOOST_EXPLICIT_TEMPLATE_NON_TYPE(int, n)) { ... }
//
// template <typename T>
// void g(BOOST_EXPLICIT_TEMPLATE_TYPE(T)) { ... }
//
#if defined(BOOST_NO_EXPLICIT_FUNCTION_TEMPLATE_ARGUMENTS) && defined(__cplusplus)
# include "boost/type.hpp"
# include "boost/non_type.hpp"
# define BOOST_EXPLICIT_TEMPLATE_TYPE(t) boost::type<t>* = 0
# define BOOST_EXPLICIT_TEMPLATE_TYPE_SPEC(t) boost::type<t>*
# define BOOST_EXPLICIT_TEMPLATE_NON_TYPE(t, v) boost::non_type<t, v>* = 0
# define BOOST_EXPLICIT_TEMPLATE_NON_TYPE_SPEC(t, v) boost::non_type<t, v>*
# define BOOST_APPEND_EXPLICIT_TEMPLATE_TYPE(t) \
, BOOST_EXPLICIT_TEMPLATE_TYPE(t)
# define BOOST_APPEND_EXPLICIT_TEMPLATE_TYPE_SPEC(t) \
, BOOST_EXPLICIT_TEMPLATE_TYPE_SPEC(t)
# define BOOST_APPEND_EXPLICIT_TEMPLATE_NON_TYPE(t, v) \
, BOOST_EXPLICIT_TEMPLATE_NON_TYPE(t, v)
# define BOOST_APPEND_EXPLICIT_TEMPLATE_NON_TYPE_SPEC(t, v) \
, BOOST_EXPLICIT_TEMPLATE_NON_TYPE_SPEC(t, v)
#else
// no workaround needed: expand to nothing
# define BOOST_EXPLICIT_TEMPLATE_TYPE(t)
# define BOOST_EXPLICIT_TEMPLATE_TYPE_SPEC(t)
# define BOOST_EXPLICIT_TEMPLATE_NON_TYPE(t, v)
# define BOOST_EXPLICIT_TEMPLATE_NON_TYPE_SPEC(t, v)
# define BOOST_APPEND_EXPLICIT_TEMPLATE_TYPE(t)
# define BOOST_APPEND_EXPLICIT_TEMPLATE_TYPE_SPEC(t)
# define BOOST_APPEND_EXPLICIT_TEMPLATE_NON_TYPE(t, v)
# define BOOST_APPEND_EXPLICIT_TEMPLATE_NON_TYPE_SPEC(t, v)
#endif // defined BOOST_NO_EXPLICIT_FUNCTION_TEMPLATE_ARGUMENTS
// When BOOST_NO_STD_TYPEINFO is defined, we can just import
// the global definition into std namespace:
#if defined(BOOST_NO_STD_TYPEINFO) && defined(__cplusplus)
#include <typeinfo>
namespace std{ using ::type_info; }
#endif
// ---------------------------------------------------------------------------//
//
// Helper macro BOOST_STRINGIZE:
// Converts the parameter X to a string after macro replacement
// on X has been performed.
//
#define BOOST_STRINGIZE(X) BOOST_DO_STRINGIZE(X)
#define BOOST_DO_STRINGIZE(X) #X
//
// Helper macro BOOST_JOIN:
// The following piece of macro magic joins the two
// arguments together, even when one of the arguments is
// itself a macro (see 16.3.1 in C++ standard). The key
// is that macro expansion of macro arguments does not
// occur in BOOST_DO_JOIN2 but does in BOOST_DO_JOIN.
//
#define BOOST_JOIN( X, Y ) BOOST_DO_JOIN( X, Y )
#define BOOST_DO_JOIN( X, Y ) BOOST_DO_JOIN2(X,Y)
#define BOOST_DO_JOIN2( X, Y ) X##Y
//
// Set some default values for compiler/library/platform names.
// These are for debugging config setup only:
//
# ifndef BOOST_COMPILER
# define BOOST_COMPILER "Unknown ISO C++ Compiler"
# endif
# ifndef BOOST_STDLIB
# define BOOST_STDLIB "Unknown ISO standard library"
# endif
# ifndef BOOST_PLATFORM
# if defined(unix) || defined(__unix) || defined(_XOPEN_SOURCE) \
|| defined(_POSIX_SOURCE)
# define BOOST_PLATFORM "Generic Unix"
# else
# define BOOST_PLATFORM "Unknown"
# endif
# endif
//
// Set some default values GPU support
//
# ifndef BOOST_GPU_ENABLED
# define BOOST_GPU_ENABLED
# endif
// BOOST_FORCEINLINE ---------------------------------------------//
// Macro to use in place of 'inline' to force a function to be inline
#if !defined(BOOST_FORCEINLINE)
# if defined(_MSC_VER)
# define BOOST_FORCEINLINE __forceinline
# elif defined(__GNUC__) && __GNUC__ > 3
# define BOOST_FORCEINLINE inline __attribute__ ((always_inline))
# else
# define BOOST_FORCEINLINE inline
# endif
#endif
//
// Set BOOST_NO_DECLTYPE_N3276 when BOOST_NO_DECLTYPE is defined
//
#if defined(BOOST_NO_CXX11_DECLTYPE) && !defined(BOOST_NO_CXX11_DECLTYPE_N3276)
#define BOOST_NO_CXX11_DECLTYPE_N3276 BOOST_NO_CXX11_DECLTYPE
#endif
// -------------------- Deprecated macros for 1.50 ---------------------------
// These will go away in a future release
// Use BOOST_NO_CXX11_HDR_UNORDERED_SET or BOOST_NO_CXX11_HDR_UNORDERED_MAP
// instead of BOOST_NO_STD_UNORDERED
#if defined(BOOST_NO_CXX11_HDR_UNORDERED_MAP) || defined (BOOST_NO_CXX11_HDR_UNORDERED_SET)
# ifndef BOOST_NO_STD_UNORDERED
# define BOOST_NO_STD_UNORDERED
# endif
#endif
// Use BOOST_NO_CXX11_HDR_INITIALIZER_LIST instead of BOOST_NO_INITIALIZER_LISTS
#if defined(BOOST_NO_CXX11_HDR_INITIALIZER_LIST) && !defined(BOOST_NO_INITIALIZER_LISTS)
# define BOOST_NO_INITIALIZER_LISTS
#endif
// Use BOOST_NO_CXX11_HDR_ARRAY instead of BOOST_NO_0X_HDR_ARRAY
// NOTE: this guard previously tested the misspelled macro
// BOOST_NO_BOOST_NO_0X_HDR_ARRAY, which is never defined anywhere, so the
// deprecated macro could be redefined when the user had already set it.
// Test the real macro name, matching every other mapping in this section.
#if defined(BOOST_NO_CXX11_HDR_ARRAY) && !defined(BOOST_NO_0X_HDR_ARRAY)
#  define BOOST_NO_0X_HDR_ARRAY
#endif
// Use BOOST_NO_CXX11_HDR_CHRONO instead of BOOST_NO_0X_HDR_CHRONO
#if defined(BOOST_NO_CXX11_HDR_CHRONO) && !defined(BOOST_NO_0X_HDR_CHRONO)
# define BOOST_NO_0X_HDR_CHRONO
#endif
// Use BOOST_NO_CXX11_HDR_CODECVT instead of BOOST_NO_0X_HDR_CODECVT
#if defined(BOOST_NO_CXX11_HDR_CODECVT) && !defined(BOOST_NO_0X_HDR_CODECVT)
# define BOOST_NO_0X_HDR_CODECVT
#endif
// Use BOOST_NO_CXX11_HDR_CONDITION_VARIABLE instead of BOOST_NO_0X_HDR_CONDITION_VARIABLE
#if defined(BOOST_NO_CXX11_HDR_CONDITION_VARIABLE) && !defined(BOOST_NO_0X_HDR_CONDITION_VARIABLE)
# define BOOST_NO_0X_HDR_CONDITION_VARIABLE
#endif
// Use BOOST_NO_CXX11_HDR_FORWARD_LIST instead of BOOST_NO_0X_HDR_FORWARD_LIST
#if defined(BOOST_NO_CXX11_HDR_FORWARD_LIST) && !defined(BOOST_NO_0X_HDR_FORWARD_LIST)
# define BOOST_NO_0X_HDR_FORWARD_LIST
#endif
// Use BOOST_NO_CXX11_HDR_FUTURE instead of BOOST_NO_0X_HDR_FUTURE
#if defined(BOOST_NO_CXX11_HDR_FUTURE) && !defined(BOOST_NO_0X_HDR_FUTURE)
# define BOOST_NO_0X_HDR_FUTURE
#endif
// Use BOOST_NO_CXX11_HDR_INITIALIZER_LIST
// instead of BOOST_NO_0X_HDR_INITIALIZER_LIST or BOOST_NO_INITIALIZER_LISTS
#ifdef BOOST_NO_CXX11_HDR_INITIALIZER_LIST
# ifndef BOOST_NO_0X_HDR_INITIALIZER_LIST
# define BOOST_NO_0X_HDR_INITIALIZER_LIST
# endif
# ifndef BOOST_NO_INITIALIZER_LISTS
# define BOOST_NO_INITIALIZER_LISTS
# endif
#endif
// Use BOOST_NO_CXX11_HDR_MUTEX instead of BOOST_NO_0X_HDR_MUTEX
#if defined(BOOST_NO_CXX11_HDR_MUTEX) && !defined(BOOST_NO_0X_HDR_MUTEX)
# define BOOST_NO_0X_HDR_MUTEX
#endif
// Use BOOST_NO_CXX11_HDR_RANDOM instead of BOOST_NO_0X_HDR_RANDOM
#if defined(BOOST_NO_CXX11_HDR_RANDOM) && !defined(BOOST_NO_0X_HDR_RANDOM)
# define BOOST_NO_0X_HDR_RANDOM
#endif
// Use BOOST_NO_CXX11_HDR_RATIO instead of BOOST_NO_0X_HDR_RATIO
#if defined(BOOST_NO_CXX11_HDR_RATIO) && !defined(BOOST_NO_0X_HDR_RATIO)
# define BOOST_NO_0X_HDR_RATIO
#endif
// Use BOOST_NO_CXX11_HDR_REGEX instead of BOOST_NO_0X_HDR_REGEX
#if defined(BOOST_NO_CXX11_HDR_REGEX) && !defined(BOOST_NO_0X_HDR_REGEX)
# define BOOST_NO_0X_HDR_REGEX
#endif
// Use BOOST_NO_CXX11_HDR_SYSTEM_ERROR instead of BOOST_NO_0X_HDR_SYSTEM_ERROR
#if defined(BOOST_NO_CXX11_HDR_SYSTEM_ERROR) && !defined(BOOST_NO_0X_HDR_SYSTEM_ERROR)
# define BOOST_NO_0X_HDR_SYSTEM_ERROR
#endif
// Use BOOST_NO_CXX11_HDR_THREAD instead of BOOST_NO_0X_HDR_THREAD
#if defined(BOOST_NO_CXX11_HDR_THREAD) && !defined(BOOST_NO_0X_HDR_THREAD)
# define BOOST_NO_0X_HDR_THREAD
#endif
// Use BOOST_NO_CXX11_HDR_TUPLE instead of BOOST_NO_0X_HDR_TUPLE
#if defined(BOOST_NO_CXX11_HDR_TUPLE) && !defined(BOOST_NO_0X_HDR_TUPLE)
# define BOOST_NO_0X_HDR_TUPLE
#endif
// Use BOOST_NO_CXX11_HDR_TYPE_TRAITS instead of BOOST_NO_0X_HDR_TYPE_TRAITS
#if defined(BOOST_NO_CXX11_HDR_TYPE_TRAITS) && !defined(BOOST_NO_0X_HDR_TYPE_TRAITS)
# define BOOST_NO_0X_HDR_TYPE_TRAITS
#endif
// Use BOOST_NO_CXX11_HDR_TYPEINDEX instead of BOOST_NO_0X_HDR_TYPEINDEX
#if defined(BOOST_NO_CXX11_HDR_TYPEINDEX) && !defined(BOOST_NO_0X_HDR_TYPEINDEX)
# define BOOST_NO_0X_HDR_TYPEINDEX
#endif
// Use BOOST_NO_CXX11_HDR_UNORDERED_MAP instead of BOOST_NO_0X_HDR_UNORDERED_MAP
#if defined(BOOST_NO_CXX11_HDR_UNORDERED_MAP) && !defined(BOOST_NO_0X_HDR_UNORDERED_MAP)
# define BOOST_NO_0X_HDR_UNORDERED_MAP
#endif
// Use BOOST_NO_CXX11_HDR_UNORDERED_SET instead of BOOST_NO_0X_HDR_UNORDERED_SET
#if defined(BOOST_NO_CXX11_HDR_UNORDERED_SET) && !defined(BOOST_NO_0X_HDR_UNORDERED_SET)
# define BOOST_NO_0X_HDR_UNORDERED_SET
#endif
// ------------------ End of deprecated macros for 1.50 ---------------------------
// -------------------- Deprecated macros for 1.51 ---------------------------
// These will go away in a future release
// Use BOOST_NO_CXX11_AUTO_DECLARATIONS instead of BOOST_NO_AUTO_DECLARATIONS
#if defined(BOOST_NO_CXX11_AUTO_DECLARATIONS) && !defined(BOOST_NO_AUTO_DECLARATIONS)
# define BOOST_NO_AUTO_DECLARATIONS
#endif
// Use BOOST_NO_CXX11_AUTO_MULTIDECLARATIONS instead of BOOST_NO_AUTO_MULTIDECLARATIONS
#if defined(BOOST_NO_CXX11_AUTO_MULTIDECLARATIONS) && !defined(BOOST_NO_AUTO_MULTIDECLARATIONS)
# define BOOST_NO_AUTO_MULTIDECLARATIONS
#endif
// Use BOOST_NO_CXX11_CHAR16_T instead of BOOST_NO_CHAR16_T
#if defined(BOOST_NO_CXX11_CHAR16_T) && !defined(BOOST_NO_CHAR16_T)
# define BOOST_NO_CHAR16_T
#endif
// Use BOOST_NO_CXX11_CHAR32_T instead of BOOST_NO_CHAR32_T
#if defined(BOOST_NO_CXX11_CHAR32_T) && !defined(BOOST_NO_CHAR32_T)
# define BOOST_NO_CHAR32_T
#endif
// Use BOOST_NO_CXX11_TEMPLATE_ALIASES instead of BOOST_NO_TEMPLATE_ALIASES
#if defined(BOOST_NO_CXX11_TEMPLATE_ALIASES) && !defined(BOOST_NO_TEMPLATE_ALIASES)
# define BOOST_NO_TEMPLATE_ALIASES
#endif
// Use BOOST_NO_CXX11_CONSTEXPR instead of BOOST_NO_CONSTEXPR
#if defined(BOOST_NO_CXX11_CONSTEXPR) && !defined(BOOST_NO_CONSTEXPR)
# define BOOST_NO_CONSTEXPR
#endif
// Use BOOST_NO_CXX11_DECLTYPE_N3276 instead of BOOST_NO_DECLTYPE_N3276
#if defined(BOOST_NO_CXX11_DECLTYPE_N3276) && !defined(BOOST_NO_DECLTYPE_N3276)
# define BOOST_NO_DECLTYPE_N3276
#endif
// Use BOOST_NO_CXX11_DECLTYPE instead of BOOST_NO_DECLTYPE
#if defined(BOOST_NO_CXX11_DECLTYPE) && !defined(BOOST_NO_DECLTYPE)
# define BOOST_NO_DECLTYPE
#endif
// Use BOOST_NO_CXX11_DEFAULTED_FUNCTIONS instead of BOOST_NO_DEFAULTED_FUNCTIONS
#if defined(BOOST_NO_CXX11_DEFAULTED_FUNCTIONS) && !defined(BOOST_NO_DEFAULTED_FUNCTIONS)
# define BOOST_NO_DEFAULTED_FUNCTIONS
#endif
// Use BOOST_NO_CXX11_DELETED_FUNCTIONS instead of BOOST_NO_DELETED_FUNCTIONS
#if defined(BOOST_NO_CXX11_DELETED_FUNCTIONS) && !defined(BOOST_NO_DELETED_FUNCTIONS)
# define BOOST_NO_DELETED_FUNCTIONS
#endif
// Use BOOST_NO_CXX11_EXPLICIT_CONVERSION_OPERATORS instead of BOOST_NO_EXPLICIT_CONVERSION_OPERATORS
#if defined(BOOST_NO_CXX11_EXPLICIT_CONVERSION_OPERATORS) && !defined(BOOST_NO_EXPLICIT_CONVERSION_OPERATORS)
# define BOOST_NO_EXPLICIT_CONVERSION_OPERATORS
#endif
// Use BOOST_NO_CXX11_EXTERN_TEMPLATE instead of BOOST_NO_EXTERN_TEMPLATE
#if defined(BOOST_NO_CXX11_EXTERN_TEMPLATE) && !defined(BOOST_NO_EXTERN_TEMPLATE)
# define BOOST_NO_EXTERN_TEMPLATE
#endif
// Use BOOST_NO_CXX11_FUNCTION_TEMPLATE_DEFAULT_ARGS instead of BOOST_NO_FUNCTION_TEMPLATE_DEFAULT_ARGS
#if defined(BOOST_NO_CXX11_FUNCTION_TEMPLATE_DEFAULT_ARGS) && !defined(BOOST_NO_FUNCTION_TEMPLATE_DEFAULT_ARGS)
# define BOOST_NO_FUNCTION_TEMPLATE_DEFAULT_ARGS
#endif
// Use BOOST_NO_CXX11_LAMBDAS instead of BOOST_NO_LAMBDAS
#if defined(BOOST_NO_CXX11_LAMBDAS) && !defined(BOOST_NO_LAMBDAS)
# define BOOST_NO_LAMBDAS
#endif
// Use BOOST_NO_CXX11_LOCAL_CLASS_TEMPLATE_PARAMETERS instead of BOOST_NO_LOCAL_CLASS_TEMPLATE_PARAMETERS
#if defined(BOOST_NO_CXX11_LOCAL_CLASS_TEMPLATE_PARAMETERS) && !defined(BOOST_NO_LOCAL_CLASS_TEMPLATE_PARAMETERS)
# define BOOST_NO_LOCAL_CLASS_TEMPLATE_PARAMETERS
#endif
// Use BOOST_NO_CXX11_NOEXCEPT instead of BOOST_NO_NOEXCEPT
#if defined(BOOST_NO_CXX11_NOEXCEPT) && !defined(BOOST_NO_NOEXCEPT)
# define BOOST_NO_NOEXCEPT
#endif
// Use BOOST_NO_CXX11_NULLPTR instead of BOOST_NO_NULLPTR
#if defined(BOOST_NO_CXX11_NULLPTR) && !defined(BOOST_NO_NULLPTR)
# define BOOST_NO_NULLPTR
#endif
// Use BOOST_NO_CXX11_RAW_LITERALS instead of BOOST_NO_RAW_LITERALS
#if defined(BOOST_NO_CXX11_RAW_LITERALS) && !defined(BOOST_NO_RAW_LITERALS)
# define BOOST_NO_RAW_LITERALS
#endif
// Use BOOST_NO_CXX11_RVALUE_REFERENCES instead of BOOST_NO_RVALUE_REFERENCES
#if defined(BOOST_NO_CXX11_RVALUE_REFERENCES) && !defined(BOOST_NO_RVALUE_REFERENCES)
# define BOOST_NO_RVALUE_REFERENCES
#endif
// Use BOOST_NO_CXX11_SCOPED_ENUMS instead of BOOST_NO_SCOPED_ENUMS
#if defined(BOOST_NO_CXX11_SCOPED_ENUMS) && !defined(BOOST_NO_SCOPED_ENUMS)
# define BOOST_NO_SCOPED_ENUMS
#endif
// Use BOOST_NO_CXX11_STATIC_ASSERT instead of BOOST_NO_STATIC_ASSERT
#if defined(BOOST_NO_CXX11_STATIC_ASSERT) && !defined(BOOST_NO_STATIC_ASSERT)
# define BOOST_NO_STATIC_ASSERT
#endif
// Use BOOST_NO_CXX11_STD_UNORDERD instead of BOOST_NO_STD_UNORDERD
// NOTE(review): both macro names below are misspelled ("UNORDERD" instead of
// "UNORDERED"), so this mapping can never fire; the correctly spelled
// BOOST_NO_STD_UNORDERED is already derived from the CXX11 header macros
// near the top of this deprecated-macros section.  Left unchanged because
// renaming config macros is an interface change -- confirm against the
// upstream Boost.Config history before fixing the spelling.
#if defined(BOOST_NO_CXX11_STD_UNORDERD) && !defined(BOOST_NO_STD_UNORDERD)
#  define BOOST_NO_STD_UNORDERD
#endif
// Use BOOST_NO_CXX11_UNICODE_LITERALS instead of BOOST_NO_UNICODE_LITERALS
#if defined(BOOST_NO_CXX11_UNICODE_LITERALS) && !defined(BOOST_NO_UNICODE_LITERALS)
# define BOOST_NO_UNICODE_LITERALS
#endif
// Use BOOST_NO_CXX11_UNIFIED_INITIALIZATION_SYNTAX instead of BOOST_NO_UNIFIED_INITIALIZATION_SYNTAX
#if defined(BOOST_NO_CXX11_UNIFIED_INITIALIZATION_SYNTAX) && !defined(BOOST_NO_UNIFIED_INITIALIZATION_SYNTAX)
# define BOOST_NO_UNIFIED_INITIALIZATION_SYNTAX
#endif
// Use BOOST_NO_CXX11_VARIADIC_TEMPLATES instead of BOOST_NO_VARIADIC_TEMPLATES
#if defined(BOOST_NO_CXX11_VARIADIC_TEMPLATES) && !defined(BOOST_NO_VARIADIC_TEMPLATES)
# define BOOST_NO_VARIADIC_TEMPLATES
#endif
// Use BOOST_NO_CXX11_VARIADIC_MACROS instead of BOOST_NO_VARIADIC_MACROS
#if defined(BOOST_NO_CXX11_VARIADIC_MACROS) && !defined(BOOST_NO_VARIADIC_MACROS)
# define BOOST_NO_VARIADIC_MACROS
#endif
// Use BOOST_NO_CXX11_NUMERIC_LIMITS instead of BOOST_NO_NUMERIC_LIMITS_LOWEST
#if defined(BOOST_NO_CXX11_NUMERIC_LIMITS) && !defined(BOOST_NO_NUMERIC_LIMITS_LOWEST)
# define BOOST_NO_NUMERIC_LIMITS_LOWEST
#endif
// ------------------ End of deprecated macros for 1.51 ---------------------------
//
// Helper macros BOOST_NOEXCEPT, BOOST_NOEXCEPT_IF, BOOST_NOEXCEPT_EXPR
// These aid the transition to C++11 while still supporting C++03 compilers
//
#ifdef BOOST_NO_NOEXCEPT
# define BOOST_NOEXCEPT
# define BOOST_NOEXCEPT_IF(Predicate)
# define BOOST_NOEXCEPT_EXPR(Expression) false
#else
# define BOOST_NOEXCEPT noexcept
# define BOOST_NOEXCEPT_IF(Predicate) noexcept((Predicate))
# define BOOST_NOEXCEPT_EXPR(Expression) noexcept((Expression))
#endif
//
// Normalize BOOST_NO_STATIC_ASSERT and (deprecated) BOOST_HAS_STATIC_ASSERT:
//
#if !defined(BOOST_NO_STATIC_ASSERT) && !defined(BOOST_HAS_STATIC_ASSERT)
# define BOOST_HAS_STATIC_ASSERT
#endif
//
// constexpr workarounds
//
#if defined(BOOST_NO_CONSTEXPR)
#define BOOST_CONSTEXPR
#define BOOST_CONSTEXPR_OR_CONST const
#else
#define BOOST_CONSTEXPR constexpr
#define BOOST_CONSTEXPR_OR_CONST constexpr
#endif
#define BOOST_STATIC_CONSTEXPR static BOOST_CONSTEXPR_OR_CONST
//
// Set BOOST_HAS_RVALUE_REFS when BOOST_NO_RVALUE_REFERENCES is not defined
//
#if !defined(BOOST_NO_RVALUE_REFERENCES) && !defined(BOOST_HAS_RVALUE_REFS)
#define BOOST_HAS_RVALUE_REFS
#endif
//
// Set BOOST_HAS_VARIADIC_TMPL when BOOST_NO_VARIADIC_TEMPLATES is not defined
//
#if !defined(BOOST_NO_VARIADIC_TEMPLATES) && !defined(BOOST_HAS_VARIADIC_TMPL)
#define BOOST_HAS_VARIADIC_TMPL
#endif
#endif
| {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/transitional.dtd">
<html>
<head>
<meta HTTP-EQUIV=CONTENT-TYPE CONTENT="text/html; charset=utf-8">
<title>Slide 3</title>
</head>
<body text="#1C1C1C" bgcolor="#FFFFFF" link="#3584E4" vlink="#1B6ACB" alink="#3584E4">
<center>
<a href="text0.html">First page</a> <a href="text1.html">Back</a> <a href="text3.html">Continue</a> <a href="text16.html">Last page</a> <a href="elf-obfuscation.html">Overview</a> <a href="img2.html">Image</a></center><br>
<h1><font color="#FFFFFF">Overview</font></h1>
<p><font color="#000000">Img: <a href="https://github.com/corkami/pics/blob/master/binary/elf101/elf101-64.svg">https://github.com/corkami/pics/blob/master/binary/elf101/elf101-64.svg</a></font></p>
</body>
</html> | {
"pile_set_name": "Github"
} |
import { Icon, message, Modal, Upload } from "antd";
import React from "react";
import RequestFiles from 'utils/RequestFiles';
import Viewer from 'viewerjs';
import lodash from 'lodash'
/**
 * Upload pre-check passed to the antd Upload component: only image files
 * are accepted.  Returning false aborts the upload.
 *
 * @param file the candidate file selected by the user (antd upload file).
 * @returns true when the file's MIME type is an image type.
 */
function beforeUpload(file) {
    // Guard against a missing MIME type before inspecting it; require a
    // proper "image/*" type rather than merely containing "image/".
    // (Removed a leftover console.log debug statement and a long-disabled,
    // commented-out 2MB size check that was dead code.)
    const isImage = typeof file.type === 'string' && file.type.startsWith('image/');
    if (!isImage) {
        message.error('You can only upload image file!');
    }
    return isImage;
}
// Form control that uploads a single image through the WTM file endpoint
// (RequestFiles) and reports the stored file id as the field value via
// props.onChange.  A hidden <img> backed by viewerjs provides the
// full-screen preview.
export class WtmUploadImg extends React.Component<any, any> {
    static wtmType = "UploadImg";
    // Hidden image element whose src tracks the uploaded file; never
    // attached to the DOM, it only feeds the viewerjs instance below.
    img = new Image();
    // viewerjs preview bound to the hidden image; destroyed on unmount.
    viewer: Viewer = new Viewer(this.img);
    state = {
        loading: false,
        // previewVisible/previewImage are leftovers from the old
        // modal-based preview (see the commented code in handlePreview);
        // the viewerjs path does not use them.
        previewVisible: false,
        previewImage: '',
        // Seed the antd file list from the incoming form value (a stored
        // file id), so an existing image is shown when editing.
        fileList: this.props.value != null && this.props.value != "" ? [
            {
                uid: '-1',
                name: 'xxx.png',
                status: 'done',
                url: RequestFiles.onFileUrl(this.props.value),
            }
        ] : [],
    };
    // Point the hidden preview image at the given stored file id (no-op
    // when the id is empty).
    createViewer(fileId) {
        if (fileId) {
            this.img.src = RequestFiles.onFileUrl(fileId);
        }
    }
    componentDidMount() {
        // Prime the preview with the initial form value, if any.
        this.createViewer(this.props.value)
    }
    componentWillUnmount() {
        // Release viewerjs resources; guard in case construction failed.
        this.viewer && this.viewer.destroy();
    }
    // Propagate the new value (a file id, or undefined on removal) to the
    // owning form via the standard onChange contract.
    onChange(data) {
        this.props.onChange(data);
    }
    // antd Upload change handler: tracks progress state and, on completion,
    // records the server-assigned file id or reports the server error.
    handleChange = (info) => {
        if (info.file.status === 'uploading') {
            this.setState({ fileList: info.fileList, loading: true });
            // this.setState({ loading: true });
        }
        if (info.file.status === 'done') {
            const response = info.file.response
            // A string Id means the upload succeeded; anything else is
            // treated as an error payload with a message field.
            if (typeof response.Id === "string") {
                this.createViewer(response.Id);
                this.onChange(response.Id);
            } else {
                message.error(`${info.file.name} ${response.message}`)
            }
            this.setState({ fileList: info.fileList, loading: false });
        }
    }
    // Open the viewerjs full-screen preview.  The file argument is unused
    // since the viewer is already bound to the current image.
    handlePreview = (file) => {
        this.viewer.show()
        // this.setState({
        //     previewImage: file.url || file.thumbUrl,
        //     previewVisible: true,
        // });
    }
    // Remove handler: clears the local list and form value, then deletes
    // the stored file server-side (fire-and-forget via setTimeout).
    onRemove = (file) => {
        if (this.props.disabled) {
            return
        }
        const response = file.response
        this.setState({ fileList: [], loading: false }, () => {
            this.onChange(undefined);
        });
        // Only files uploaded in this session carry a response.Id; files
        // seeded from props.value are not deleted here (see disabled
        // `|| this.props.value` fallback) -- TODO confirm that is intended.
        const fileId = response && response.Id //|| this.props.value
        if (typeof fileId === "string") {
            setTimeout(() => {
                RequestFiles.onFileDelete(fileId)
            });
        }
    }
    render() {
        const { previewVisible, previewImage, fileList, loading } = this.state;
        // Placeholder card shown while no image has been uploaded yet.
        const uploadButton = (
            <div>
                <Icon type={loading ? 'loading' : 'plus'} />
                <div className="ant-upload-text">Upload</div>
            </div>
        );
        return (
            <div style={{ minWidth: 105, minHeight: 105 }}>
                <Upload
                    accept='image/*'
                    listType="picture-card"
                    fileList={fileList as any}
                    action={RequestFiles.FileTarget}
                    beforeUpload={beforeUpload}
                    onChange={this.handleChange}
                    onPreview={this.handlePreview}
                    onRemove={this.onRemove}
                >
                    {fileList.length == 0 && uploadButton}
                </Upload>
            </div>
        );
    }
}
export default WtmUploadImg | {
"pile_set_name": "Github"
} |
import { Component, OnInit } from '@angular/core';
import { EndpointService } from '../endpoint.service';
import { CommonEndpoint } from '../common.endpoint';
@Component({
  selector: 'app-docker',
  templateUrl: './docker.component.html'
})
// Specialization of the shared CommonEndpoint component for the 'DOCKER'
// endpoint type; all list/certificate behavior comes from the base class.
export class DockerComponent extends CommonEndpoint implements OnInit {
  constructor(endpointService: EndpointService) {
    // The second argument selects which endpoint type the base class manages.
    super(endpointService, 'DOCKER');
  }
  ngOnInit() {
    // Load the docker endpoints and their certificates on startup.
    this.getEndpoints();
    this.getCertificates();
  }
}
| {
"pile_set_name": "Github"
} |
// Jasmine runner configuration for the Weex test suite.
module.exports = {
  // Spec root, relative to the project directory.
  spec_dir: 'test/weex',
  // Match both *.spec.js and *.Spec.js under the spec root.
  spec_files: [
    '**/*[sS]pec.js'
  ],
  // Load @babel/register so specs can use modern syntax at require time.
  helpers: [
    require.resolve('@babel/register')
  ]
}
| {
"pile_set_name": "Github"
} |
<!doctype html>
<!--
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
-->
<html>
<head>
<meta charset="UTF-8">
<title>paper-button basic tests</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0">
<script src="../../webcomponentsjs/webcomponents-lite.js"></script>
<script src="../../web-component-tester/browser.js"></script>
<script src="../../iron-test-helpers/mock-interactions.js"></script>
<link rel="import" href="../paper-button.html">
</head>
<body>
<test-fixture id="TrivialButton">
<template>
<paper-button>Button</paper-button>
</template>
</test-fixture>
<script>
suite('<paper-button>', function() {
var button;
setup(function() {
button = fixture('TrivialButton');
});
test('can be raised imperatively', function(done) {
button.raised = true;
expect(button.hasAttribute('raised')).to.be.eql(true);
Polymer.Base.async(function() {
try {
expect(button.elevation).to.be.eql(1);
done();
} catch (e) {
done(e);
}
}, 1);
});
test('can be unraised after being raised imperatively', function(done) {
button.raised = true;
expect(button.hasAttribute('raised')).to.be.eql(true);
Polymer.Base.async(function() {
expect(button.elevation).to.be.eql(1);
button.raised = false;
expect(button.hasAttribute('raised')).to.be.eql(false);
Polymer.Base.async(function() {
expect(button.elevation).to.be.eql(0);
done();
}, 1);
}, 1);
});
test('can be disabled imperatively', function() {
button.disabled = true;
expect(button.getAttribute('aria-disabled')).to.be.eql('true');
expect(button.hasAttribute('disabled')).to.be.eql(true);
});
test('can be triggered with space', function(done) {
button.addEventListener('click', function() {
done();
});
MockInteractions.pressSpace(button);
});
test('can be triggered with enter', function(done) {
button.addEventListener('click', function() {
done();
});
MockInteractions.pressEnter(button);
});
});
suite('<paper-button>', function() {
var button;
setup(function() {
button = fixture('TrivialButton');
});
test('has aria role "button"', function() {
expect(button.getAttribute('role')).to.be.eql('button');
});
a11ySuite('TrivialButton');
});
</script>
</body>
</html>
| {
"pile_set_name": "Github"
} |
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package colltab
import "unicode/utf8"
// For a description of ContractTrieSet, see text/collate/build/contract.go.
type ContractTrieSet []struct{ L, H, N, I uint8 }
// ctScanner is used to match a trie to an input sequence.
// A contraction may match a non-contiguous sequence of bytes in an input string.
// For example, if there is a contraction for <a, combining_ring>, it should match
// the sequence <a, combining_cedilla, combining_ring>, as combining_cedilla does
// not block combining_ring.
// ctScanner does not automatically skip over non-blocking non-starters, but rather
// retains the state of the last match and leaves it up to the user to continue
// the match at the appropriate points.
type ctScanner struct {
	states ContractTrieSet // entries of the current trie state
	s      []byte          // input being matched
	n      int             // number of entries in the current state
	index  int             // result offset of the last (partial) match
	pindex int             // number of input bytes consumed by the last match
	done   bool            // set once a final trie entry has been reached
}

// ctScannerString is the string-input variant of ctScanner; the fields have
// the same meaning as in ctScanner.
type ctScannerString struct {
	states ContractTrieSet
	s      string
	n      int
	index  int
	pindex int
	done   bool
}
func (t ContractTrieSet) scanner(index, n int, b []byte) ctScanner {
return ctScanner{s: b, states: t[index:], n: n}
}
func (t ContractTrieSet) scannerString(index, n int, str string) ctScannerString {
return ctScannerString{s: str, states: t[index:], n: n}
}
// result returns the offset i and bytes consumed p so far. If no suffix
// matched, i and p will be 0.
func (s *ctScanner) result() (i, p int) {
return s.index, s.pindex
}
func (s *ctScannerString) result() (i, p int) {
return s.index, s.pindex
}
const (
final = 0
noIndex = 0xFF
)
// scan matches the longest suffix at the current location in the input
// and returns the number of bytes consumed.
func (s *ctScanner) scan(p int) int {
	pr := p // the p at the rune start
	str := s.s
	states, n := s.states, s.n
	for i := 0; i < n && p < len(str); {
		e := states[i]
		c := str[p]
		// TODO: a significant number of contractions are of a form that
		// cannot match discontiguous UTF-8 in a normalized string. We could let
		// a negative value of e.n mean that we can set s.done = true and avoid
		// the need for additional matches.
		if c >= e.L {
			if e.L == c {
				// Exact single-byte entry: consume the byte.
				p++
				if e.I != noIndex {
					// The entry carries a result index: record the match so far.
					s.index = int(e.I)
					s.pindex = p
				}
				if e.N != final {
					// Descend into the next state: e.H is the offset of that
					// state's entries relative to the current one, e.N its size.
					i, states, n = 0, states[int(e.H)+n:], int(e.N)
					if p >= len(str) || utf8.RuneStart(str[p]) {
						// Commit the new state only on rune boundaries so a
						// later call can resume matching from this point.
						s.states, s.n, pr = states, n, p
					}
				} else {
					// Leaf entry: the match is complete.
					s.done = true
					return p
				}
				continue
			} else if e.N == final && c <= e.H {
				// Final range entry [e.L, e.H]: the result index is offset by
				// the byte's position within the range.
				p++
				s.done = true
				s.index = int(c-e.L) + int(e.I)
				s.pindex = p
				return p
			}
		}
		i++
	}
	return pr
}
// scan is a verbatim copy of ctScanner.scan.
// Any change to ctScanner.scan must be mirrored here; the two differ only in
// the receiver and the input type ([]byte vs string). See ctScanner.scan for
// detailed comments on the matching logic.
func (s *ctScannerString) scan(p int) int {
	pr := p // the p at the rune start
	str := s.s
	states, n := s.states, s.n
	for i := 0; i < n && p < len(str); {
		e := states[i]
		c := str[p]
		// TODO: a significant number of contractions are of a form that
		// cannot match discontiguous UTF-8 in a normalized string. We could let
		// a negative value of e.n mean that we can set s.done = true and avoid
		// the need for additional matches.
		if c >= e.L {
			if e.L == c {
				p++
				if e.I != noIndex {
					s.index = int(e.I)
					s.pindex = p
				}
				if e.N != final {
					i, states, n = 0, states[int(e.H)+n:], int(e.N)
					if p >= len(str) || utf8.RuneStart(str[p]) {
						s.states, s.n, pr = states, n, p
					}
				} else {
					s.done = true
					return p
				}
				continue
			} else if e.N == final && c <= e.H {
				p++
				s.done = true
				s.index = int(c-e.L) + int(e.I)
				s.pindex = p
				return p
			}
		}
		i++
	}
	return pr
}
| {
"pile_set_name": "Github"
} |
package org.bouncycastle.jcajce.provider.symmetric;
import org.bouncycastle.jcajce.provider.config.ConfigurableProvider;
import org.bouncycastle.jcajce.provider.util.AlgorithmProvider;
/**
 * Base class for JCE symmetric-algorithm registration providers.
 * <p>
 * In this Android build the upstream GMAC registration helper has been
 * stripped (see the android-removed block below), so the class currently
 * only serves as a common supertype extending {@link AlgorithmProvider}.
 */
abstract class SymmetricAlgorithmProvider
    extends AlgorithmProvider
{
    // BEGIN android-removed
    // protected void addGMacAlgorithm(
    //     ConfigurableProvider provider,
    //     String algorithm,
    //     String algorithmClassName,
    //     String keyGeneratorClassName)
    // {
    //     provider.addAlgorithm("Mac." + algorithm + "-GMAC", algorithmClassName);
    //     provider.addAlgorithm("Alg.Alias.Mac." + algorithm + "GMAC", algorithm + "-GMAC");
    //
    //     provider.addAlgorithm("KeyGenerator." + algorithm + "-GMAC", keyGeneratorClassName);
    //     provider.addAlgorithm("Alg.Alias.KeyGenerator." + algorithm + "GMAC", algorithm + "-GMAC");
    // }
    // END android-removed
}
| {
"pile_set_name": "Github"
} |
+install(kernel-element-button)
.btn,
.btn-check,
.btn-radio,
.btn-file
-webkit-appearance: none
-moz-appearance: none
appearance: none
display: inline-flex
vertical-align: top
flex: 0 0 auto
max-width: 100%
min-width: $button-height-rem
height: $button-height-rem
font-size: typo-size($button-typography-size-level)
line-height: 1
white-space: nowrap
cursor: pointer
-ms-touch-action: manipulation
touch-action: manipulation
-webkit-user-select: none
-moz-user-select: none
-ms-user-select: none
user-select: none
text-decoration: none
background: none
+browser-edge
input.btn
display: inline-block
text-align: center
.btn,
.btn-check label,
.btn-radio label,
.btn-file label
align-items: center
justify-content: center
padding: 0 $button-horizontal-padding-em
border: $button-border-style $button-border-width
overflow: hidden
border-radius: existing($button-border-radius, 0)
.btn-check label,
.btn-radio label,
.btn-file label
display: flex
width: 100%
height: inherit
// pointer-events: none
.btn > *,
.btn-check label > *,
.btn-radio label > *,
.btn-file label > *
flex: 0 0 auto
.btn-check input,
.btn-radio input,
.btn-file input
-webkit-appearance: none
-moz-appearance: none
appearance: none
width: 0.001px
min-width: 100%
// height: 0.001px
height: 100%
margin-right: -100%
overflow: hidden
cursor: pointer
opacity: 0
.btn-check input[disabled],
.btn-radio input[disabled],
.btn-file input[disabled]
opacity: 0
// remove the WebKit file-upload button to prevent the cursor from changing on hover
.btn-file ::-webkit-file-upload-button
display: none
.btn:-moz-focusring
outline: none
.btn.focus,
.btn-check input.focus ~ label,
.btn-radio input.focus ~ label,
.btn-file input.focus ~ label
+_focus
.btn-fluid.btn,
.btn-fluid.btn-check,
.btn-fluid.btn-radio,
.btn-fluid.btn-file
display: flex
flex: 1 1 auto
width: 100%
.btn-small.btn,
.btn-small.btn-check,
.btn-small.btn-radio,
.btn-small.btn-file
min-width: $button-small-height-rem
height: $button-small-height-rem
font-size: typo-size($button-small-typography-size-level)
.btn-large.btn,
.btn-large.btn-check,
.btn-large.btn-radio,
.btn-large.btn-file
min-width: $button-large-height-rem
height: $button-large-height-rem
font-size: typo-size($button-large-typography-size-level)
+button-solid(primary, $button-primary-color, $button-primary-hover-color, $button-primary-active-color, $button-primary-background, $button-primary-hover-background, $button-primary-active-background, $button-primary-border-color, $button-primary-hover-border-color, $button-primary-active-border-color, $button-primary-box-shadow, $button-primary-hover-box-shadow, $button-primary-active-box-shadow)
+button-solid(secondary, $button-secondary-color, $button-secondary-hover-color, $button-secondary-active-color, $button-secondary-background, $button-secondary-hover-background, $button-secondary-active-background, $button-secondary-border-color, $button-secondary-hover-border-color, $button-secondary-active-border-color, $button-secondary-box-shadow, $button-secondary-hover-box-shadow, $button-secondary-active-box-shadow)
+button-solid(danger, $button-danger-color, $button-danger-hover-color, $button-danger-active-color, $button-danger-background, $button-danger-hover-background, $button-danger-active-background, $button-danger-border-color, $button-danger-hover-border-color, $button-danger-active-border-color, $button-danger-box-shadow, $button-danger-hover-box-shadow, $button-danger-active-box-shadow)
+button-solid(dark, $button-dark-color, $button-dark-hover-color, $button-dark-active-color, $button-dark-background, $button-dark-hover-background, $button-dark-active-background, $button-dark-border-color, $button-dark-hover-border-color, $button-dark-active-border-color, $button-dark-box-shadow, $button-dark-hover-box-shadow, $button-dark-active-box-shadow)
+button-solid(light, $button-light-color, $button-light-hover-color, $button-light-active-color, $button-light-background, $button-light-hover-background, $button-light-active-background, $button-light-border-color, $button-light-hover-border-color, $button-light-active-border-color, $button-light-box-shadow, $button-light-hover-box-shadow, $button-light-active-box-shadow)
+button-hollow(primary, $button-hollow-primary-color, $button-hollow-primary-hover-color, $button-hollow-primary-active-color, $button-hollow-primary-hover-background, $button-hollow-primary-active-background, $button-hollow-primary-border-color, $button-hollow-primary-hover-border-color, $button-hollow-primary-active-border-color, $button-hollow-primary-box-shadow, $button-hollow-primary-hover-box-shadow, $button-hollow-primary-active-box-shadow)
+button-hollow(secondary, $button-hollow-secondary-color, $button-hollow-secondary-hover-color, $button-hollow-secondary-active-color, $button-hollow-secondary-hover-background, $button-hollow-secondary-active-background, $button-hollow-secondary-border-color, $button-hollow-secondary-hover-border-color, $button-hollow-secondary-active-border-color, $button-hollow-secondary-box-shadow, $button-hollow-secondary-hover-box-shadow, $button-hollow-secondary-active-box-shadow)
+button-hollow(danger, $button-hollow-danger-color, $button-hollow-danger-hover-color, $button-hollow-danger-active-color, $button-hollow-danger-hover-background, $button-hollow-danger-active-background, $button-hollow-danger-border-color, $button-hollow-danger-hover-border-color, $button-hollow-danger-active-border-color, $button-hollow-danger-box-shadow, $button-hollow-danger-hover-box-shadow, $button-hollow-danger-active-box-shadow)
+button-hollow(dark, $button-hollow-dark-color, $button-hollow-dark-hover-color, $button-hollow-dark-active-color, $button-hollow-dark-hover-background, $button-hollow-dark-active-background, $button-hollow-dark-border-color, $button-hollow-dark-hover-border-color, $button-hollow-dark-active-border-color, $button-hollow-dark-box-shadow, $button-hollow-dark-hover-box-shadow, $button-hollow-dark-active-box-shadow)
+button-hollow(light, $button-hollow-light-color, $button-hollow-light-hover-color, $button-hollow-light-active-color, $button-hollow-light-hover-background, $button-hollow-light-active-background, $button-hollow-light-border-color, $button-hollow-light-hover-border-color, $button-hollow-light-active-border-color, $button-hollow-light-box-shadow, $button-hollow-light-hover-box-shadow, $button-hollow-light-active-box-shadow)
+button-text(primary, $button-text-primary-color, $button-text-primary-hover-color, $button-text-primary-active-color, $button-text-primary-text-shadow, $button-text-primary-hover-text-shadow, $button-text-primary-active-text-shadow)
+button-text(secondary, $button-text-secondary-color, $button-text-secondary-hover-color, $button-text-secondary-active-color, $button-text-secondary-text-shadow, $button-text-secondary-hover-text-shadow, $button-text-secondary-active-text-shadow)
+button-text(danger, $button-text-danger-color, $button-text-danger-hover-color, $button-text-danger-active-color, $button-text-danger-text-shadow, $button-text-danger-hover-text-shadow, $button-text-danger-active-text-shadow)
+button-text(dark, $button-text-dark-color, $button-text-dark-hover-color, $button-text-dark-active-color, $button-text-dark-text-shadow, $button-text-dark-hover-text-shadow, $button-text-dark-active-text-shadow)
+button-text(light, $button-text-light-color, $button-text-light-hover-color, $button-text-light-active-color, $button-text-light-text-shadow, $button-text-light-hover-text-shadow, $button-text-light-active-text-shadow) | {
"pile_set_name": "Github"
} |
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {filePathToUri, uriToFilePath} from '../utils';
describe('filePathToUri', () => {
  it('should return URI with File scheme', () => {
    // An absolute POSIX path becomes a file-scheme URI.
    expect(filePathToUri('/project/main.ts')).toMatch(/^file/);
  });
  it('should handle network path', () => {
    // A double-slash prefix marks a network path and maps to the URI authority.
    expect(filePathToUri('//project/main.ts')).toBe('file://project/main.ts');
  });
  if (process.platform === 'win32') {
    it('should handle windows path', () => {
      // Drive letter is lower-cased and its colon percent-encoded as %3A.
      expect(filePathToUri('C:\\project\\main.ts')).toBe('file:///c%3A/project/main.ts');
    });
  }
});
describe('uriToFilePath', () => {
  if (process.platform === 'win32') {
    it('should return valid fsPath for windows', () => {
      // Percent-encoded drive colon is decoded and slashes become backslashes.
      expect(uriToFilePath('file:///c%3A/project/main.ts')).toBe('c:\\project\\main.ts');
    });
    it('should return valid fsPath for network file uri', () => {
      // URI authority maps to a UNC path prefix.
      expect(uriToFilePath('file://project/main.ts')).toBe('\\\\project\\main.ts');
    });
  } else {
    it('should return valid fsPath for unix', () => {
      expect(uriToFilePath('file:///project/main.ts')).toBe('/project/main.ts');
    });
    it('should return valid fsPath for network file uri', () => {
      // URI authority is kept as a double-slash network prefix.
      expect(uriToFilePath('file://project/main.ts')).toBe('//project/main.ts');
    });
  }
});
| {
"pile_set_name": "Github"
} |
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Test the cgo checker on a file that doesn't use cgo.
package c

import "unsafe"

// Passing a pointer (via the slice), but C isn't cgo.
// NOTE: here "C" is an ordinary package-level variable declared below, not
// the cgo pseudo-package, so the cgo checker must not report this call.
var _ = C.f(unsafe.Pointer(new([]int)))

// C mimics the cgo pseudo-package with a plain struct variable.
var C struct{ f func(interface{}) int }
| {
"pile_set_name": "Github"
} |
{"type":"FeatureCollection","properties":{"kind":"state","state":"OK"},"features":[
{"type":"Feature","properties":{"kind":"county","name":"Blaine","state":"OK"},"geometry":{"type":"MultiPolygon","coordinates":[[[[-98.4884,36.1623],[-98.2091,36.1623],[-98.2091,35.7241],[-98.3132,35.7241],[-98.3132,35.5489],[-98.5870,35.5489],[-98.6254,35.5489],[-98.6308,35.8118],[-98.6363,36.1623]]]]}}
]}
| {
"pile_set_name": "Github"
} |
{
"id": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"modelName": "GMSprite",
"mvc": "1.12",
"name": "spr_scrollbar_v_button_up",
"For3D": false,
"HTile": true,
"VTile": true,
"bbox_bottom": 15,
"bbox_left": 0,
"bbox_right": 15,
"bbox_top": 0,
"bboxmode": 0,
"colkind": 1,
"coltolerance": 0,
"edgeFiltering": false,
"frames": [
{
"id": "2a31d065-33cb-48c4-b042-62020fc4dec9",
"modelName": "GMSpriteFrame",
"mvc": "1.0",
"SpriteId": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"compositeImage": {
"id": "7daf5635-c29b-4c3d-90b9-174ef70fa5fb",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "2a31d065-33cb-48c4-b042-62020fc4dec9",
"LayerId": "00000000-0000-0000-0000-000000000000"
},
"images": [
{
"id": "b813d281-241d-495f-b9ce-a809d1176750",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "2a31d065-33cb-48c4-b042-62020fc4dec9",
"LayerId": "a70bf41d-bd9c-4170-a1d7-3ccf2c6eefa1"
}
]
},
{
"id": "e5b42ff3-e63c-4a3a-8bd4-f696e8c6d68d",
"modelName": "GMSpriteFrame",
"mvc": "1.0",
"SpriteId": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"compositeImage": {
"id": "ef46e27f-659e-4793-9f31-874c8048497f",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "e5b42ff3-e63c-4a3a-8bd4-f696e8c6d68d",
"LayerId": "00000000-0000-0000-0000-000000000000"
},
"images": [
{
"id": "fd52cc01-1bff-493e-b5c2-b6088e9394d1",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "e5b42ff3-e63c-4a3a-8bd4-f696e8c6d68d",
"LayerId": "a70bf41d-bd9c-4170-a1d7-3ccf2c6eefa1"
}
]
},
{
"id": "b8165182-b385-4292-ab39-a0379dcec27c",
"modelName": "GMSpriteFrame",
"mvc": "1.0",
"SpriteId": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"compositeImage": {
"id": "173cab55-06fa-48f3-a889-584783b8b09a",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "b8165182-b385-4292-ab39-a0379dcec27c",
"LayerId": "00000000-0000-0000-0000-000000000000"
},
"images": [
{
"id": "104bc154-079f-4a20-b0a1-6a93ea1e3c25",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "b8165182-b385-4292-ab39-a0379dcec27c",
"LayerId": "a70bf41d-bd9c-4170-a1d7-3ccf2c6eefa1"
}
]
},
{
"id": "8b0b99cf-0921-4343-9faf-882c0c9cba24",
"modelName": "GMSpriteFrame",
"mvc": "1.0",
"SpriteId": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"compositeImage": {
"id": "45ed4e14-b7a2-4221-a46a-227a06e80083",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "8b0b99cf-0921-4343-9faf-882c0c9cba24",
"LayerId": "00000000-0000-0000-0000-000000000000"
},
"images": [
{
"id": "c268b19b-151a-4bc8-a0ef-daf727780be0",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "8b0b99cf-0921-4343-9faf-882c0c9cba24",
"LayerId": "a70bf41d-bd9c-4170-a1d7-3ccf2c6eefa1"
}
]
},
{
"id": "1ad0d78b-4ac3-4057-a6ce-829b1ae50fa3",
"modelName": "GMSpriteFrame",
"mvc": "1.0",
"SpriteId": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"compositeImage": {
"id": "c05c6476-84f3-403c-85d0-4087396753d9",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "1ad0d78b-4ac3-4057-a6ce-829b1ae50fa3",
"LayerId": "00000000-0000-0000-0000-000000000000"
},
"images": [
{
"id": "79a10d56-ff4b-4331-9c1e-3ddb333e5b02",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "1ad0d78b-4ac3-4057-a6ce-829b1ae50fa3",
"LayerId": "a70bf41d-bd9c-4170-a1d7-3ccf2c6eefa1"
}
]
},
{
"id": "81632200-e710-46e1-abef-faa770e325a5",
"modelName": "GMSpriteFrame",
"mvc": "1.0",
"SpriteId": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"compositeImage": {
"id": "725105d0-3ce7-4ccb-a71e-0dbcd772c03b",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "81632200-e710-46e1-abef-faa770e325a5",
"LayerId": "00000000-0000-0000-0000-000000000000"
},
"images": [
{
"id": "df607705-2d3a-4939-9c39-4df36ccd3cee",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "81632200-e710-46e1-abef-faa770e325a5",
"LayerId": "a70bf41d-bd9c-4170-a1d7-3ccf2c6eefa1"
}
]
},
{
"id": "9b7d8b8f-7352-4dbf-b568-e088cc51ce2d",
"modelName": "GMSpriteFrame",
"mvc": "1.0",
"SpriteId": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"compositeImage": {
"id": "1706202d-8722-4d4b-83ea-ac5943071ef4",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "9b7d8b8f-7352-4dbf-b568-e088cc51ce2d",
"LayerId": "00000000-0000-0000-0000-000000000000"
},
"images": [
{
"id": "72dc3a22-0692-4224-8d12-bd99b7da3336",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "9b7d8b8f-7352-4dbf-b568-e088cc51ce2d",
"LayerId": "a70bf41d-bd9c-4170-a1d7-3ccf2c6eefa1"
}
]
},
{
"id": "263049a1-82d9-4ee6-bde4-1fc42a33c35d",
"modelName": "GMSpriteFrame",
"mvc": "1.0",
"SpriteId": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"compositeImage": {
"id": "92805a1d-b26a-4647-80cd-80608d1383d3",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "263049a1-82d9-4ee6-bde4-1fc42a33c35d",
"LayerId": "00000000-0000-0000-0000-000000000000"
},
"images": [
{
"id": "1dd661b2-1e06-4978-bb42-6ac9d96eb3b4",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "263049a1-82d9-4ee6-bde4-1fc42a33c35d",
"LayerId": "a70bf41d-bd9c-4170-a1d7-3ccf2c6eefa1"
}
]
},
{
"id": "abe01d9a-b131-42bf-8b2d-d81bae1c9203",
"modelName": "GMSpriteFrame",
"mvc": "1.0",
"SpriteId": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"compositeImage": {
"id": "b780b0d2-c73f-48d4-bec5-bf5879272ec5",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "abe01d9a-b131-42bf-8b2d-d81bae1c9203",
"LayerId": "00000000-0000-0000-0000-000000000000"
},
"images": [
{
"id": "090228ec-e3a0-48d9-90f7-ede822302f7f",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "abe01d9a-b131-42bf-8b2d-d81bae1c9203",
"LayerId": "a70bf41d-bd9c-4170-a1d7-3ccf2c6eefa1"
}
]
},
{
"id": "176b983f-e6a7-4ed7-adf3-5f6d667b5881",
"modelName": "GMSpriteFrame",
"mvc": "1.0",
"SpriteId": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"compositeImage": {
"id": "a3c87605-8e87-46ab-b722-04ecb0ab36d6",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "176b983f-e6a7-4ed7-adf3-5f6d667b5881",
"LayerId": "00000000-0000-0000-0000-000000000000"
},
"images": [
{
"id": "bb92367b-4313-43b6-973a-039ccd556ec1",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "176b983f-e6a7-4ed7-adf3-5f6d667b5881",
"LayerId": "a70bf41d-bd9c-4170-a1d7-3ccf2c6eefa1"
}
]
},
{
"id": "36069ddf-b10a-45d0-9a90-9c31c572ec8d",
"modelName": "GMSpriteFrame",
"mvc": "1.0",
"SpriteId": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"compositeImage": {
"id": "ce310f30-5094-448b-8507-781c59838913",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "36069ddf-b10a-45d0-9a90-9c31c572ec8d",
"LayerId": "00000000-0000-0000-0000-000000000000"
},
"images": [
{
"id": "b5a54e43-5dbd-48a2-9b5e-7b6a56e16daf",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "36069ddf-b10a-45d0-9a90-9c31c572ec8d",
"LayerId": "a70bf41d-bd9c-4170-a1d7-3ccf2c6eefa1"
}
]
},
{
"id": "d86d2321-a748-4756-8401-4a6b5524ade4",
"modelName": "GMSpriteFrame",
"mvc": "1.0",
"SpriteId": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"compositeImage": {
"id": "fd9599c2-c263-4a34-87d0-cadd50d490ff",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "d86d2321-a748-4756-8401-4a6b5524ade4",
"LayerId": "00000000-0000-0000-0000-000000000000"
},
"images": [
{
"id": "015d2166-844c-4926-be36-0adc42dbe54d",
"modelName": "GMSpriteImage",
"mvc": "1.0",
"FrameId": "d86d2321-a748-4756-8401-4a6b5524ade4",
"LayerId": "a70bf41d-bd9c-4170-a1d7-3ccf2c6eefa1"
}
]
}
],
"gridX": 0,
"gridY": 0,
"height": 16,
"layers": [
{
"id": "a70bf41d-bd9c-4170-a1d7-3ccf2c6eefa1",
"modelName": "GMImageLayer",
"mvc": "1.0",
"SpriteId": "42edcaf9-0847-47cc-8fb2-20c4a4f5d6f2",
"blendMode": 0,
"isLocked": false,
"name": "default",
"opacity": 100,
"visible": true
}
],
"origin": 0,
"originLocked": false,
"playbackSpeed": 15,
"playbackSpeedType": 0,
"premultiplyAlpha": false,
"sepmasks": false,
"swatchColours": null,
"swfPrecision": 2.525,
"textureGroupId": "1225f6b0-ac20-43bd-a82e-be73fa0b6f4f",
"type": 0,
"width": 16,
"xorig": 0,
"yorig": 0
} | {
"pile_set_name": "Github"
} |
# amdefine
A module that can be used to implement AMD's define() in Node. This allows you
to code to the AMD API and have the module work in node programs without
requiring those other programs to use AMD.
## Usage
**1)** Update your package.json to indicate amdefine as a dependency:
```javascript
"dependencies": {
"amdefine": ">=0.1.0"
}
```
Then run `npm install` to get amdefine into your project.
**2)** At the top of each module that uses define(), place this code:
```javascript
if (typeof define !== 'function') { var define = require('amdefine')(module) }
```
**Only use these snippets** when loading amdefine. If you preserve the basic structure,
with the braces, it will be stripped out when using the [RequireJS optimizer](#optimizer).
You can add spaces, line breaks and even require amdefine with a local path, but
keep the rest of the structure to get the stripping behavior.
As you may know, because `if` statements in JavaScript don't have their own scope, the var
declaration in the above snippet is made whether the `if` expression is truthy or not. If
RequireJS is loaded then the declaration is superfluous because `define` is already
declared in the same scope in RequireJS. Fortunately JavaScript handles multiple `var`
declarations of the same variable in the same scope gracefully.
If you want to deliver amdefine.js with your code rather than specifying it as a dependency
with npm, then just download the latest release and refer to it using a relative path:
[Latest Version](https://github.com/jrburke/amdefine/raw/latest/amdefine.js)
### amdefine/intercept
Consider this very experimental.
Instead of pasting the piece of text for the amdefine setup of a `define`
variable in each module you create or consume, you can use `amdefine/intercept`
instead. It will automatically insert the above snippet in each .js file loaded
by Node.
**Warning**: you should only use this if you are creating an application that
is consuming AMD style defined()'d modules that are distributed via npm and want
to run that code in Node.
For library code where you are not sure if it will be used by others in Node or
in the browser, then explicitly depending on amdefine and placing the code
snippet above is suggested path, instead of using `amdefine/intercept`. The
intercept module affects all .js files loaded in the Node app, and it is
inconsiderate to modify global state like that unless you are also controlling
the top level app.
#### Why distribute AMD-style nodes via npm?
npm has a lot of weaknesses for front-end use (installed layout is not great,
should have better support for the `baseUrl + moduleID + '.js'` style of loading,
single file JS installs), but some people want a JS package manager and are
willing to live with those constraints. If that is you, but still want to author
in AMD style modules to get dynamic require([]), better direct source usage and
powerful loader plugin support in the browser, then this tool can help.
#### amdefine/intercept usage
Just require it in your top level app module (for example index.js, server.js):
```javascript
require('amdefine/intercept');
```
The module does not return a value, so no need to assign the result to a local
variable.
Then just require() code as you normally would with Node's require(). Any .js
loaded after the intercept require will have the amdefine check injected in
the .js source as it is loaded. It does not modify the source on disk, just
prepends some content to the text of the module as it is loaded by Node.
#### How amdefine/intercept works
It overrides the `Module._extensions['.js']` in Node to automatically prepend
the amdefine snippet above. So, it will affect any .js file loaded by your
app.
## define() usage
It is best if you use the anonymous forms of define() in your module:
```javascript
define(function (require) {
var dependency = require('dependency');
});
```
or
```javascript
define(['dependency'], function (dependency) {
});
```
## RequireJS optimizer integration. <a name="optimizer"></a>
Version 1.0.3 of the [RequireJS optimizer](http://requirejs.org/docs/optimization.html)
will have support for stripping the `if (typeof define !== 'function')` check
mentioned above, so you can include this snippet for code that runs in the
browser, but avoid taking the cost of the if() statement once the code is
optimized for deployment.
## Node 0.4 Support
If you want to support Node 0.4, then add `require` as the second parameter to amdefine:
```javascript
//Only if you want Node 0.4. If using 0.5 or later, use the above snippet.
if (typeof define !== 'function') { var define = require('amdefine')(module, require) }
```
## Limitations
### Synchronous vs Asynchronous
amdefine creates a define() function that is callable by your code. It will
execute and trace dependencies and call the factory function *synchronously*,
to keep the behavior in line with Node's synchronous dependency tracing.
The exception: calling AMD's callback-style require() from inside a factory
function. The require callback is called on process.nextTick():
```javascript
define(function (require) {
require(['a'], function(a) {
//'a' is loaded synchronously, but
//this callback is called on process.nextTick().
});
});
```
### Loader Plugins
Loader plugins are supported as long as they call their load() callbacks
synchronously. So ones that do network requests will not work. However plugins
like [text](http://requirejs.org/docs/api.html#text) can load text files locally.
The plugin API's `load.fromText()` is **not supported** in amdefine, so this means
transpiler plugins like the [CoffeeScript loader plugin](https://github.com/jrburke/require-cs)
will not work. This may be fixable, but it is a bit complex, and I do not have
enough node-fu to figure it out yet. See the source for amdefine.js if you want
to get an idea of the issues involved.
## Tests
To run the tests, cd to **tests** and run:
```
node all.js
node all-intercept.js
```
## License
New BSD and MIT. Check the LICENSE file for all the details.
| {
"pile_set_name": "Github"
} |
//
// Copyright (c) ZeroC, Inc. All rights reserved.
//
(function(module, require, exports)
{
    const Test = require("Test").Test;
    // Servant for the AMD (asynchronous dispatch) variant of the optional-
    // parameters test suite.  Most operations simply echo their optional
    // argument back as [returnValue, outParam] so the client can verify
    // optional marshaling round-trips.
    class AMDInitialI extends Test.Initial
    {
        shutdown(current)
        {
            current.adapter.getCommunicator().shutdown();
        }
        pingPong(obj, current)
        {
            return obj;
        }
        // Exception-raising operations: populate the exception's optional
        // members only when the corresponding argument was supplied.
        opOptionalException(a, b, o, current)
        {
            const ex = new Test.OptionalException();
            if(a !== undefined)
            {
                ex.a = a;
            }
            else
            {
                ex.a = undefined; // The member "a" has a default value.
            }
            if(b !== undefined)
            {
                ex.b = b;
            }
            if(o !== undefined)
            {
                ex.o = o;
            }
            throw ex;
        }
        opDerivedException(a, b, o, current)
        {
            const ex = new Test.DerivedException();
            if(a !== undefined)
            {
                ex.a = a;
            }
            else
            {
                ex.a = undefined; // The member "a" has a default value.
            }
            if(b !== undefined)
            {
                ex.b = b;
                ex.ss = b;
            }
            else
            {
                ex.ss = undefined; // The member "ss" has a default value.
            }
            if(o !== undefined)
            {
                ex.o = o;
                ex.o2 = o;
            }
            throw ex;
        }
        opRequiredException(a, b, o, current)
        {
            const ex = new Test.RequiredException();
            if(a !== undefined)
            {
                ex.a = a;
            }
            else
            {
                ex.a = undefined; // The member "a" has a default value.
            }
            if(b !== undefined)
            {
                ex.b = b;
                ex.ss = b;
            }
            if(o !== undefined)
            {
                ex.o = o;
                ex.o2 = o;
            }
            throw ex;
        }
        // Echo operations: return the optional argument both as the return
        // value and as the out parameter ([returnValue, outParam]).
        opByte(p1, current)
        {
            return [p1, p1];
        }
        opBool(p1, current)
        {
            return [p1, p1];
        }
        opShort(p1, current)
        {
            return [p1, p1];
        }
        opInt(p1, current)
        {
            return [p1, p1];
        }
        opLong(p1, current)
        {
            return [p1, p1];
        }
        opFloat(p1, current)
        {
            return [p1, p1];
        }
        opDouble(p1, current)
        {
            return [p1, p1];
        }
        opString(p1, current)
        {
            return [p1, p1];
        }
        opMyEnum(p1, current)
        {
            return [p1, p1];
        }
        opSmallStruct(p1, current)
        {
            return [p1, p1];
        }
        opFixedStruct(p1, current)
        {
            return [p1, p1];
        }
        opVarStruct(p1, current)
        {
            return [p1, p1];
        }
        opOneOptional(p1, current)
        {
            return [p1, p1];
        }
        opOneOptionalProxy(p1, current)
        {
            return [p1, p1];
        }
        opByteSeq(p1, current)
        {
            return [p1, p1];
        }
        opBoolSeq(p1, current)
        {
            return [p1, p1];
        }
        opShortSeq(p1, current)
        {
            return [p1, p1];
        }
        opIntSeq(p1, current)
        {
            return [p1, p1];
        }
        opLongSeq(p1, current)
        {
            return [p1, p1];
        }
        opFloatSeq(p1, current)
        {
            return [p1, p1];
        }
        opDoubleSeq(p1, current)
        {
            return [p1, p1];
        }
        opStringSeq(p1, current)
        {
            return [p1, p1];
        }
        opSmallStructSeq(p1, current)
        {
            return [p1, p1];
        }
        opSmallStructList(p1, current)
        {
            return [p1, p1];
        }
        opFixedStructSeq(p1, current)
        {
            return [p1, p1];
        }
        opFixedStructList(p1, current)
        {
            return [p1, p1];
        }
        opVarStructSeq(p1, current)
        {
            return [p1, p1];
        }
        opSerializable(p1, current)
        {
            return [p1, p1];
        }
        opIntIntDict(p1, current)
        {
            return [p1, p1];
        }
        opStringIntDict(p1, current)
        {
            return [p1, p1];
        }
        opIntOneOptionalDict(p1, current)
        {
            return [p1, p1];
        }
        // The following operations intentionally ignore their arguments and
        // return nothing.
        opClassAndUnknownOptional(p, current)
        {
        }
        sendOptionalClass(req, current)
        {
        }
        returnOptionalClass(req, current)
        {
            return new Test.OneOptional(53);
        }
        opG(g, current)
        {
            return g;
        }
        opVoid(current)
        {
        }
        // "M" operations: marshaled-result variants returning fresh or echoed
        // structs, sequences, dictionaries and class instances.
        opMStruct1(current)
        {
            return new Test.SmallStruct();
        }
        opMStruct2(p1, current)
        {
            return [p1, p1];
        }
        opMSeq1(current)
        {
            return [];
        }
        opMSeq2(p1, current)
        {
            return [p1, p1];
        }
        opMDict1(current)
        {
            return new Map();
        }
        opMDict2(p1, current)
        {
            return [p1, p1];
        }
        opMG1(current)
        {
            return new Test.G();
        }
        opMG2(p1, current)
        {
            return [p1, p1];
        }
        // Capability flags queried by the client to decide which checks to
        // run: this implementation only supports null optionals.
        supportsRequiredParams(current)
        {
            return false;
        }
        supportsJavaSerializable(current)
        {
            return false;
        }
        supportsCsharpSerializable(current)
        {
            return false;
        }
        supportsCppStringView(current)
        {
            return false;
        }
        supportsNullOptional(current)
        {
            return true;
        }
    }
    exports.AMDInitialI = AMDInitialI;
}(typeof global !== "undefined" && typeof global.process !== "undefined" ? module : undefined,
  typeof global !== "undefined" && typeof global.process !== "undefined" ? require :
  (typeof WorkerGlobalScope !== "undefined" && self instanceof WorkerGlobalScope) ? self.Ice._require : window.Ice._require,
  typeof global !== "undefined" && typeof global.process !== "undefined" ? exports :
  (typeof WorkerGlobalScope !== "undefined" && self instanceof WorkerGlobalScope) ? self : window));
| {
"pile_set_name": "Github"
} |
package com.palmergames.bukkit.towny.chat.checks;
import com.palmergames.bukkit.towny.TownyUniverse;
import com.palmergames.bukkit.towny.exceptions.NotRegisteredException;
import net.tnemc.tnc.core.common.chat.ChatCheck;
import org.bukkit.entity.Player;
/**
* @author creatorfromhell
*/
public class KingCheck extends ChatCheck {
@Override
public String name() {
return "isking";
}
@Override
public boolean runCheck(Player player, String checkString) {
TownyUniverse townyUniverse = TownyUniverse.getInstance();
try {
if(townyUniverse.getDataSource().getResident(player.getName()).hasNation()) {
return townyUniverse.getDataSource().getResident(player.getName()).getTown().getNation().isKing(townyUniverse.getDataSource().getResident(player.getName()));
}
} catch(NotRegisteredException ignore) {
}
return false;
}
} | {
"pile_set_name": "Github"
} |
[
{
"key": "group_5809dbd3c7d85",
"title": "date_picker-group",
"fields": [
{
"key": "field_5809dbd6472a8",
"label": "Date Picker",
"name": "date_picker",
"type": "date_picker",
"instructions": "Instructions",
"required": 0,
"conditional_logic": 0,
"wrapper": {
"width": "",
"class": "",
"id": ""
},
"display_format": "m\/d\/Y",
"return_format": "F j, Y",
"first_day": 2
}
],
"location": [
[
{
"param": "post_type",
"operator": "==",
"value": "post"
}
]
],
"menu_order": 0,
"position": "normal",
"style": "seamless",
"label_placement": "left",
"instruction_placement": "label",
"hide_on_screen": [
"the_content",
"excerpt",
"custom_fields"
],
"active": 1,
"description": ""
}
]
| {
"pile_set_name": "Github"
} |
{
"kind": "TYPE_CAST_EXPRESSION",
"children": [
{
"kind": "LT_TOKEN"
},
{
"kind": "TYPE_CAST_PARAM",
"children": [
{
"kind": "LIST",
"children": [
{
"kind": "ANNOTATION",
"children": [
{
"kind": "AT_TOKEN"
},
{
"kind": "SIMPLE_NAME_REFERENCE",
"children": [
{
"kind": "IDENTIFIER_TOKEN",
"value": "foo"
}
]
},
{
"kind": "MAPPING_CONSTRUCTOR",
"children": [
{
"kind": "OPEN_BRACE_TOKEN"
},
{
"kind": "LIST",
"children": []
},
{
"kind": "CLOSE_BRACE_TOKEN"
}
]
}
]
}
]
}
]
},
{
"kind": "GT_TOKEN",
"trailingMinutiae": [
{
"kind": "WHITESPACE_MINUTIAE",
"value": " "
}
]
},
{
"kind": "SIMPLE_NAME_REFERENCE",
"children": [
{
"kind": "IDENTIFIER_TOKEN",
"value": "b"
}
]
}
]
}
| {
"pile_set_name": "Github"
} |
---
layout: "kubernetes"
page_title: "Kubernetes: kubernetes_persistent_volume_claim"
description: |-
  This resource allows the user to request and claim a persistent volume.
---
# kubernetes_persistent_volume_claim
This resource allows the user to request and claim a persistent volume.
## Example Usage
```hcl
resource "kubernetes_persistent_volume_claim" "example" {
metadata {
name = "exampleclaimname"
}
spec {
access_modes = ["ReadWriteMany"]
resources {
requests = {
storage = "5Gi"
}
}
volume_name = "${kubernetes_persistent_volume.example.metadata.0.name}"
}
}
resource "kubernetes_persistent_volume" "example" {
metadata {
name = "examplevolumename"
}
spec {
capacity = {
storage = "10Gi"
}
access_modes = ["ReadWriteMany"]
persistent_volume_source {
gce_persistent_disk {
pd_name = "test-123"
}
}
}
}
```
## Argument Reference
The following arguments are supported:
* `metadata` - (Required) Standard persistent volume claim's metadata. For more info see [Kubernetes reference](https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#metadata)
* `spec` - (Required) Spec defines the desired characteristics of a volume requested by a pod author. For more info see [Kubernetes reference](http://kubernetes.io/docs/user-guide/persistent-volumes#persistentvolumeclaims)
* `wait_until_bound` - (Optional) Whether to wait for the claim to reach `Bound` state (to find volume in which to claim the space)
## Nested Blocks
### `metadata`
#### Arguments
* `annotations` - (Optional) An unstructured key value map stored with the persistent volume claim that may be used to store arbitrary metadata.
~> By default, the provider ignores any annotations whose key names end with *kubernetes.io*. This is necessary because such annotations can be mutated by server-side components and consequently cause a perpetual diff in the Terraform plan output. If you explicitly specify any such annotations in the configuration template then Terraform will consider these as normal resource attributes and manage them as expected (while still avoiding the perpetual diff problem). For more info see [Kubernetes reference](http://kubernetes.io/docs/user-guide/annotations)
* `generate_name` - (Optional) Prefix, used by the server, to generate a unique name ONLY IF the `name` field has not been provided. This value will also be combined with a unique suffix. For more info see [Kubernetes reference](https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#idempotency)
* `labels` - (Optional) Map of string keys and values that can be used to organize and categorize (scope and select) the persistent volume claim. May match selectors of replication controllers and services.
~> By default, the provider ignores any labels whose key names end with *kubernetes.io*. This is necessary because such labels can be mutated by server-side components and consequently cause a perpetual diff in the Terraform plan output. If you explicitly specify any such labels in the configuration template then Terraform will consider these as normal resource attributes and manage them as expected (while still avoiding the perpetual diff problem). For more info see [Kubernetes reference](http://kubernetes.io/docs/user-guide/labels)
* `name` - (Optional) Name of the persistent volume claim, must be unique. Cannot be updated. For more info see [Kubernetes reference](http://kubernetes.io/docs/user-guide/identifiers#names)
* `namespace` - (Optional) Namespace defines the space within which name of the persistent volume claim must be unique.
#### Attributes
* `generation` - A sequence number representing a specific generation of the desired state.
* `resource_version` - An opaque value that represents the internal version of this persistent volume claim that can be used by clients to determine when persistent volume claim has changed. For more info see [Kubernetes reference](https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency)
* `self_link` - A URL representing this persistent volume claim.
* `uid` - The unique in time and space value for this persistent volume claim. For more info see [Kubernetes reference](http://kubernetes.io/docs/user-guide/identifiers#uids)
### `spec`
#### Arguments
* `access_modes` - (Required) A set of the desired access modes the volume should have. For more info see [Kubernetes reference](http://kubernetes.io/docs/user-guide/persistent-volumes#access-modes-1)
* `resources` - (Required) A list of the minimum resources the volume should have. For more info see [Kubernetes reference](http://kubernetes.io/docs/user-guide/persistent-volumes#resources)
* `selector` - (Optional) A label query over volumes to consider for binding.
* `volume_name` - (Optional) The binding reference to the PersistentVolume backing this claim.
* `storage_class_name` - (Optional) Name of the storage class requested by the claim
### `match_expressions`
#### Arguments
* `key` - (Optional) The label key that the selector applies to.
* `operator` - (Optional) A key's relationship to a set of values. Valid operators are `In`, `NotIn`, `Exists` and `DoesNotExist`.
* `values` - (Optional) An array of string values. If the operator is `In` or `NotIn`, the values array must be non-empty. If the operator is `Exists` or `DoesNotExist`, the values array must be empty. This array is replaced during a strategic merge patch.
### `resources`
#### Arguments
* `limits` - (Optional) Map describing the maximum amount of compute resources allowed. For more info see [Kubernetes reference](http://kubernetes.io/docs/user-guide/compute-resources)/
* `requests` - (Optional) Map describing the minimum amount of compute resources required. If this is omitted for a container, it defaults to `limits` if that is explicitly specified, otherwise to an implementation-defined value. For more info see [Kubernetes reference](http://kubernetes.io/docs/user-guide/compute-resources)/
### `selector`
#### Arguments
* `match_expressions` - (Optional) A list of label selector requirements. The requirements are ANDed.
* `match_labels` - (Optional) A map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of `match_expressions`, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
## Import
Persistent Volume Claim can be imported using its namespace and name, e.g.
```
$ terraform import kubernetes_persistent_volume_claim.example default/example-name
```
| {
"pile_set_name": "Github"
} |
# 数据库 SDK 设计说明
`@author` haroldhu
[TOC]
## 文件说明
```md
- collection.ts // 集合模块,继承 Query 模块
- constant.ts // 常量模块
- db.ts // 数据库模块
- document.ts // 文档模块
- model.ts // 类型约束模块
- query.ts // 查询模块
- request.ts // 请求模块 - 临时模拟使用
- util.ts // 工具模块
- validate.ts // 校验模块
```
## 常用命令
```shell
# 编辑 typescript
tnpm run tsc
# 实时编译 typescript
tnpm run tsc:w
# 运行测试用例
tnpm run tstest
```
## 类型声明
类型声明写在`.ts`文件里,这样可以及时发现问题。`.d.ts`不能及时暴露问题。
## 设计说明
集合模块继承 Query 模块,为了更好使用查询条件。
主要参考了 firebase - firestore 的设计。
## 字段设计
拉取文档列表后,过滤一遍数据,把特殊类型的字段格式化为相应的js对象。
发送请求新增或更新文档时,过滤一遍数据,把特殊字段编码为后端数据格式。
### 地理位置
每一个地理位置都是一个`GeoPoint`对象。
#### 为什么不在类下面增加一个方法转换成后端数据格式?
这个开发者用不到,所以没有必要暴露出来。
### 日期时间
每一个日期时间都是一个`Date`对象。
## 整体设计
- 使用`document.get()`获取数据时,把`where()`、`orderBy()`、`limit()`、`offser()`、设置的数据拼接到请求里。
- 对后台返回的数据进行格式化,使其成为一个`DocumentSnapshot`对象,对特殊类型的字段,如地理位置、日期时间进行处理。
- 使用`document.set()`和`document.update()`时,把数据进行编码,尤其是特殊字段的处理,编码成后端接口的数据格式。
## 扩展说明
开发者可以在每篇文档里记录创建时间和更新时间。
- 创建时间是一个时间对象
- 更新时间是一个时间对象的数组
| {
"pile_set_name": "Github"
} |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace QuantConnect
{
/// <summary>
/// Basic Template Library Class
/// Library classes are snippets of code you can reuse between projects. They are added to projects on compile. This can be useful for reusing
/// indicators, math components, risk modules etc. If you use a custom namespace make sure you add the correct using statement to the
/// algorithm-user.
/// </summary>
/// <meta name="tag" content="using quantconnect" />
public class BasicTemplateLibrary
{
/*
* To use this library; add its namespace at the top of the page:
* using QuantConnect
*
* Then instantiate the class:
* var btl = new BasicTemplateLibrary();
* btl.Add(1,2)
*/
public int Add(int a, int b)
{
return a + b;
}
public int Subtract(int a, int b)
{
return a - b;
}
}
}
| {
"pile_set_name": "Github"
} |
/*
* Persistent Storage - ramfs parts.
*
* Copyright (C) 2010 Intel Corporation <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <linux/module.h>
#include <linux/fs.h>
#include <linux/fsnotify.h>
#include <linux/pagemap.h>
#include <linux/highmem.h>
#include <linux/time.h>
#include <linux/init.h>
#include <linux/list.h>
#include <linux/string.h>
#include <linux/mount.h>
#include <linux/seq_file.h>
#include <linux/ramfs.h>
#include <linux/parser.h>
#include <linux/sched.h>
#include <linux/magic.h>
#include <linux/pstore.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/uaccess.h>
#include <linux/syslog.h>
#include "internal.h"
#define PSTORE_NAMELEN 64
static DEFINE_SPINLOCK(allpstore_lock);
static LIST_HEAD(allpstore);
struct pstore_private {
struct list_head list;
struct pstore_info *psi;
enum pstore_type_id type;
u64 id;
int count;
ssize_t size;
char data[];
};
struct pstore_ftrace_seq_data {
const void *ptr;
size_t off;
size_t size;
};
#define REC_SIZE sizeof(struct pstore_ftrace_record)
/*
 * seq_file ->start: allocate per-iteration cursor state for walking the
 * fixed-size ftrace records stored inline in ps->data.
 * Returns NULL (end of sequence) when *pos is past the last whole record.
 */
static void *pstore_ftrace_seq_start(struct seq_file *s, loff_t *pos)
{
	struct pstore_private *ps = s->private;
	struct pstore_ftrace_seq_data *data;
	data = kzalloc(sizeof(*data), GFP_KERNEL);
	if (!data)
		return NULL;
	/* ps->size % REC_SIZE skips a leading partial record, if any */
	data->off = ps->size % REC_SIZE;
	data->off += *pos * REC_SIZE;
	if (data->off + REC_SIZE > ps->size) {
		kfree(data);
		return NULL;
	}
	return data;
}
/* seq_file ->stop: free the cursor allocated by pstore_ftrace_seq_start(). */
static void pstore_ftrace_seq_stop(struct seq_file *s, void *v)
{
	kfree(v);
}
/*
 * seq_file ->next: advance the cursor one record.
 * Returns NULL when no further whole record fits within ps->size.
 */
static void *pstore_ftrace_seq_next(struct seq_file *s, void *v, loff_t *pos)
{
	struct pstore_private *ps = s->private;
	struct pstore_ftrace_seq_data *data = v;
	data->off += REC_SIZE;
	if (data->off + REC_SIZE > ps->size)
		return NULL;
	(*pos)++;
	return data;
}
/*
 * seq_file ->show: print one ftrace record as
 * "<cpu> <ip> <parent_ip> <sym> <- <sym+off>", using the printk
 * %pf/%pF kernel-symbol format specifiers for the two addresses.
 */
static int pstore_ftrace_seq_show(struct seq_file *s, void *v)
{
	struct pstore_private *ps = s->private;
	struct pstore_ftrace_seq_data *data = v;
	struct pstore_ftrace_record *rec = (void *)(ps->data + data->off);
	seq_printf(s, "%d %08lx %08lx %pf <- %pF\n",
		pstore_ftrace_decode_cpu(rec), rec->ip, rec->parent_ip,
		(void *)rec->ip, (void *)rec->parent_ip);
	return 0;
}
static const struct seq_operations pstore_ftrace_seq_ops = {
.start = pstore_ftrace_seq_start,
.next = pstore_ftrace_seq_next,
.stop = pstore_ftrace_seq_stop,
.show = pstore_ftrace_seq_show,
};
/*
 * Kernel log dumps (dmesg and console records) may contain sensitive
 * data, so gate access on the same check as reading the full syslog.
 * All other record types are unrestricted here.  Returns 0 if allowed,
 * negative errno otherwise.
 */
static int pstore_check_syslog_permissions(struct pstore_private *ps)
{
	switch (ps->type) {
	case PSTORE_TYPE_DMESG:
	case PSTORE_TYPE_CONSOLE:
		return check_syslog_permissions(SYSLOG_ACTION_READ_ALL,
						SYSLOG_FROM_READER);
	default:
		return 0;
	}
}
/*
 * ->read: ftrace records are rendered through the seq_file iterator;
 * every other record type is exposed as one flat in-memory buffer.
 */
static ssize_t pstore_file_read(struct file *file, char __user *userbuf,
						size_t count, loff_t *ppos)
{
	struct seq_file *sf = file->private_data;
	struct pstore_private *ps = sf->private;
	if (ps->type == PSTORE_TYPE_FTRACE)
		return seq_read(file, userbuf, count, ppos);
	return simple_read_from_buffer(userbuf, count, ppos, ps->data, ps->size);
}
/*
 * ->open: check read permission for this record, then set up seq_file
 * state.  Only ftrace records get real seq_operations; for all other
 * types seq_open() is called with NULL ops and the seq machinery is
 * bypassed by pstore_file_read()/pstore_file_llseek().
 */
static int pstore_file_open(struct inode *inode, struct file *file)
{
	struct pstore_private *ps = inode->i_private;
	struct seq_file *sf;
	int err;
	const struct seq_operations *sops = NULL;
	err = pstore_check_syslog_permissions(ps);
	if (err)
		return err;
	if (ps->type == PSTORE_TYPE_FTRACE)
		sops = &pstore_ftrace_seq_ops;
	err = seq_open(file, sops);
	if (err < 0)
		return err;
	/* stash the record so read/llseek can find it */
	sf = file->private_data;
	sf->private = ps;
	return 0;
}
/*
 * ->llseek: route through seq_lseek() when seq_operations are in use
 * (ftrace records, sf->op != NULL); plain buffer seeking otherwise.
 */
static loff_t pstore_file_llseek(struct file *file, loff_t off, int whence)
{
	struct seq_file *sf = file->private_data;
	if (sf->op)
		return seq_lseek(file, off, whence);
	return default_llseek(file, off, whence);
}
static const struct file_operations pstore_file_operations = {
.open = pstore_file_open,
.read = pstore_file_read,
.llseek = pstore_file_llseek,
.release = seq_release,
};
/*
* When a file is unlinked from our file system we call the
* platform driver to erase the record from persistent store.
*/
static int pstore_unlink(struct inode *dir, struct dentry *dentry)
{
	struct pstore_private *p = dentry->d_inode->i_private;
	int err;
	/* deleting a record requires the same permission as reading it */
	err = pstore_check_syslog_permissions(p);
	if (err)
		return err;
	/* ask the platform backend to erase it from persistent storage */
	if (p->psi->erase)
		p->psi->erase(p->type, p->id, p->count,
			      dentry->d_inode->i_ctime, p->psi);
	else
		return -EPERM;	/* backend cannot erase -> refuse the unlink */
	return simple_unlink(dir, dentry);
}
/*
 * ->evict_inode: release the in-memory record attached to this inode,
 * removing it from the global allpstore list before freeing it.
 */
static void pstore_evict_inode(struct inode *inode)
{
	struct pstore_private *p = inode->i_private;
	unsigned long flags;
	clear_inode(inode);
	if (p) {
		spin_lock_irqsave(&allpstore_lock, flags);
		list_del(&p->list);
		spin_unlock_irqrestore(&allpstore_lock, flags);
		kfree(p);
	}
}
static const struct inode_operations pstore_dir_inode_operations = {
.lookup = simple_lookup,
.unlink = pstore_unlink,
};
/*
 * Allocate a new inode on the pstore superblock with a fresh inode
 * number and current timestamps.  Mode and ops are filled in by the
 * caller (regular file in pstore_mkfile(), root dir in fill_super).
 */
static struct inode *pstore_get_inode(struct super_block *sb)
{
	struct inode *inode = new_inode(sb);
	if (inode) {
		inode->i_ino = get_next_ino();
		inode->i_atime = inode->i_mtime = inode->i_ctime = CURRENT_TIME;
	}
	return inode;
}
enum {
Opt_kmsg_bytes, Opt_err
};
static const match_table_t tokens = {
{Opt_kmsg_bytes, "kmsg_bytes=%u"},
{Opt_err, NULL}
};
/*
 * Parse comma-separated mount options.  Only "kmsg_bytes=<n>" is
 * recognized; its value is forwarded to pstore_set_kmsg_bytes().
 * Unknown options are silently ignored.
 */
static void parse_options(char *options)
{
	char *p;
	substring_t args[MAX_OPT_ARGS];
	int option;
	if (!options)
		return;
	while ((p = strsep(&options, ",")) != NULL) {
		int token;
		if (!*p)
			continue;	/* skip empty entries, e.g. ",," */
		token = match_token(p, tokens, args);
		switch (token) {
		case Opt_kmsg_bytes:
			if (!match_int(&args[0], &option))
				pstore_set_kmsg_bytes(option);
			break;
		}
	}
}
/* ->remount_fs: flush pending writes, then re-parse the mount options. */
static int pstore_remount(struct super_block *sb, int *flags, char *data)
{
	sync_filesystem(sb);
	parse_options(data);
	return 0;
}
static const struct super_operations pstore_ops = {
.statfs = simple_statfs,
.drop_inode = generic_delete_inode,
.evict_inode = pstore_evict_inode,
.remount_fs = pstore_remount,
.show_options = generic_show_options,
};
static struct super_block *pstore_sb;
/* Nonzero once pstore_fill_super() has recorded a superblock. */
int pstore_is_mounted(void)
{
	return pstore_sb != NULL;
}
/*
* Make a regular file in the root directory of our file system.
* Load it up with "size" bytes of data from "buf".
* Set the mtime & ctime to the date that this record was originally stored.
*/
int pstore_mkfile(enum pstore_type_id type, char *psname, u64 id, int count,
		  char *data, bool compressed, size_t size,
		  struct timespec time, struct pstore_info *psi)
{
	struct dentry *root = pstore_sb->s_root;
	struct dentry *dentry;
	struct inode *inode;
	int rc = 0;
	char name[PSTORE_NAMELEN];
	struct pstore_private *private, *pos;
	unsigned long flags;
	/* reject duplicates: same backend, type and record id already listed */
	spin_lock_irqsave(&allpstore_lock, flags);
	list_for_each_entry(pos, &allpstore, list) {
		if (pos->type == type &&
		    pos->id == id &&
		    pos->psi == psi) {
			rc = -EEXIST;
			break;
		}
	}
	spin_unlock_irqrestore(&allpstore_lock, flags);
	if (rc)
		return rc;
	rc = -ENOMEM;
	inode = pstore_get_inode(pstore_sb);
	if (!inode)
		goto fail;
	inode->i_mode = S_IFREG | 0444;	/* records appear as read-only files */
	inode->i_fop = &pstore_file_operations;
	/* record payload is stored inline after the private struct (data[]) */
	private = kmalloc(sizeof *private + size, GFP_KERNEL);
	if (!private)
		goto fail_alloc;
	private->type = type;
	private->id = id;
	private->count = count;
	private->psi = psi;
	/* file name encodes record type, backend name and record id */
	switch (type) {
	case PSTORE_TYPE_DMESG:
		/* ".enc.z" suffix marks a still-compressed dump */
		scnprintf(name, sizeof(name), "dmesg-%s-%lld%s",
			  psname, id, compressed ? ".enc.z" : "");
		break;
	case PSTORE_TYPE_CONSOLE:
		scnprintf(name, sizeof(name), "console-%s-%lld", psname, id);
		break;
	case PSTORE_TYPE_FTRACE:
		scnprintf(name, sizeof(name), "ftrace-%s-%lld", psname, id);
		break;
	case PSTORE_TYPE_MCE:
		scnprintf(name, sizeof(name), "mce-%s-%lld", psname, id);
		break;
	case PSTORE_TYPE_PPC_RTAS:
		scnprintf(name, sizeof(name), "rtas-%s-%lld", psname, id);
		break;
	case PSTORE_TYPE_PPC_OF:
		scnprintf(name, sizeof(name), "powerpc-ofw-%s-%lld",
			  psname, id);
		break;
	case PSTORE_TYPE_PPC_COMMON:
		scnprintf(name, sizeof(name), "powerpc-common-%s-%lld",
			  psname, id);
		break;
	case PSTORE_TYPE_PMSG:
		scnprintf(name, sizeof(name), "pmsg-%s-%lld", psname, id);
		break;
	case PSTORE_TYPE_UNKNOWN:
		scnprintf(name, sizeof(name), "unknown-%s-%lld", psname, id);
		break;
	default:
		scnprintf(name, sizeof(name), "type%d-%s-%lld",
			  type, psname, id);
		break;
	}
	/* hold the root dir's i_mutex while inserting the new dentry */
	mutex_lock(&root->d_inode->i_mutex);
	dentry = d_alloc_name(root, name);
	if (!dentry)
		goto fail_lockedalloc;
	memcpy(private->data, data, size);
	inode->i_size = private->size = size;
	inode->i_private = private;
	/* stamp the file with the time the record was originally stored */
	if (time.tv_sec)
		inode->i_mtime = inode->i_ctime = time;
	d_add(dentry, inode);
	spin_lock_irqsave(&allpstore_lock, flags);
	list_add(&private->list, &allpstore);
	spin_unlock_irqrestore(&allpstore_lock, flags);
	mutex_unlock(&root->d_inode->i_mutex);
	return 0;
fail_lockedalloc:
	mutex_unlock(&root->d_inode->i_mutex);
	kfree(private);
fail_alloc:
	iput(inode);
fail:
	return rc;
}
/*
 * Fill the pstore superblock: record it in pstore_sb, set limits and
 * super_operations, create the root directory inode, then pull the
 * existing records from the platform backend via pstore_get_records().
 */
static int pstore_fill_super(struct super_block *sb, void *data, int silent)
{
	struct inode *inode;
	save_mount_options(sb, data);
	pstore_sb = sb;
	sb->s_maxbytes = MAX_LFS_FILESIZE;
	sb->s_blocksize = PAGE_CACHE_SIZE;
	sb->s_blocksize_bits = PAGE_CACHE_SHIFT;
	sb->s_magic = PSTOREFS_MAGIC;
	sb->s_op = &pstore_ops;
	sb->s_time_gran = 1;
	parse_options(data);
	inode = pstore_get_inode(sb);
	if (inode) {
		inode->i_mode = S_IFDIR | 0755;
		inode->i_op = &pstore_dir_inode_operations;
		inode->i_fop = &simple_dir_operations;
		inc_nlink(inode);	/* directories start with nlink == 2 */
	}
	sb->s_root = d_make_root(inode);
	if (!sb->s_root)
		return -ENOMEM;	/* d_make_root consumed the inode ref */
	pstore_get_records(0);
	return 0;
}
/* Single-instance mount: every mount shares the one pstore superblock. */
static struct dentry *pstore_mount(struct file_system_type *fs_type,
	int flags, const char *dev_name, void *data)
{
	return mount_single(fs_type, flags, data, pstore_fill_super);
}
/* Tear down the superblock and forget it, so pstore_is_mounted() is false. */
static void pstore_kill_sb(struct super_block *sb)
{
	kill_litter_super(sb);
	pstore_sb = NULL;
}
static struct file_system_type pstore_fs_type = {
.name = "pstore",
.mount = pstore_mount,
.kill_sb = pstore_kill_sb,
};
static struct kobject *pstore_kobj;
/*
 * Module init: create a "pstore" sysfs directory under fs_kobj (the
 * conventional mount point) and register the filesystem type.
 */
static int __init init_pstore_fs(void)
{
	int err = 0;
	/* Create a convenient mount point for people to access pstore */
	pstore_kobj = kobject_create_and_add("pstore", fs_kobj);
	if (!pstore_kobj) {
		err = -ENOMEM;
		goto out;
	}
	err = register_filesystem(&pstore_fs_type);
	if (err < 0)
		kobject_put(pstore_kobj);	/* undo the sysfs dir on failure */
out:
	return err;
}
module_init(init_pstore_fs)
MODULE_AUTHOR("Tony Luck <[email protected]>");
MODULE_LICENSE("GPL");
| {
"pile_set_name": "Github"
} |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef REMOTING_PROTOCOL_THIRD_PARTY_CLIENT_AUTHENTICATOR_H_
#define REMOTING_PROTOCOL_THIRD_PARTY_CLIENT_AUTHENTICATOR_H_
#include <memory>
#include <string>
#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "remoting/protocol/client_authentication_config.h"
#include "remoting/protocol/third_party_authenticator_base.h"
namespace remoting {
namespace protocol {
// Implements the client side of the third party authentication mechanism.
// The client authenticator expects a |token_url| and |scope| in the first
// message from the host, then calls the |TokenFetcher| asynchronously to
// request a |token| and |shared_secret| from that url. If the server requires
// interactive authentication, the |TokenFetcher| implementation will show the
// appropriate UI. Once the |TokenFetcher| returns, the client sends the |token|
// to the host, and uses the |shared_secret| to create an underlying
// |V2Authenticator|, which is used to establish the encrypted connection.
class ThirdPartyClientAuthenticator : public ThirdPartyAuthenticatorBase {
 public:
  // Creates a third-party client authenticator.
  // |create_base_authenticator_callback| is used to create the base
  // authenticator. |fetch_token_callback| is used to get the authentication
  // token.
  ThirdPartyClientAuthenticator(
      const CreateBaseAuthenticatorCallback& create_base_authenticator_callback,
      const FetchThirdPartyTokenCallback& fetch_token_callback);
  ~ThirdPartyClientAuthenticator() override;
 protected:
  // ThirdPartyAuthenticatorBase implementation.
  void ProcessTokenMessage(const buzz::XmlElement* message,
                           const base::Closure& resume_callback) override;
  void AddTokenElements(buzz::XmlElement* message) override;
 private:
  // Completion callback for the token fetch.  Stores |third_party_token|
  // (presumably consumed by AddTokenElements() -- confirm in the .cc) and
  // resumes the authentication exchange via |resume_callback|.
  void OnThirdPartyTokenFetched(const base::Closure& resume_callback,
                                const std::string& third_party_token,
                                const std::string& shared_secret);
  CreateBaseAuthenticatorCallback create_base_authenticator_callback_;
  FetchThirdPartyTokenCallback fetch_token_callback_;
  // Token received from the third-party authentication server.
  std::string token_;
  base::WeakPtrFactory<ThirdPartyClientAuthenticator> weak_factory_;
  DISALLOW_COPY_AND_ASSIGN(ThirdPartyClientAuthenticator);
};
} // namespace protocol
} // namespace remoting
#endif // REMOTING_PROTOCOL_THIRD_PARTY_CLIENT_AUTHENTICATOR_H_
| {
"pile_set_name": "Github"
} |
<v-select
v-show="!shouldHide"
:label="label"
:items="sourceList"
item-text="name"
item-value="proxyId"
:disabled="disabled"
:value="selection"
@input="setInternalValue"
></v-select>
| {
"pile_set_name": "Github"
} |
/**
* Wechaty Chatbot SDK - https://github.com/wechaty/wechaty
*
* @copyright 2016 Huan LI (李卓桓) <https://github.com/huan>, and
* Wechaty Contributors <https://github.com/wechaty>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import og from 'open-graph'
/**
 * Fetch the Open Graph metadata for a web page.
 *
 * Wraps the callback-style `open-graph` API in a Promise so callers can
 * simply `await openGraph(url)`.  Rejects with the underlying error when
 * the lookup fails.
 */
export async function openGraph (url: string): Promise<og.Data> {
  return new Promise((resolve, reject) => {
    og(url, (error, metadata) => error ? reject(error) : resolve(metadata))
  })
}
| {
"pile_set_name": "Github"
} |
-- HUMAN RESOURCE MACHINE PROGRAM --
-- 35-Duplicate-Removal - SIZE 13/17 - SPEED 219/167 --
JUMP b
a:
COPYFROM [14]
OUTBOX
b:
c:
BUMPUP 14
COPYTO 13
INBOX
COPYTO [14]
d:
BUMPDN 13
JUMPZ a
COPYFROM [14]
SUB [13]
JUMPZ c
JUMP d
DEFINE LABEL 13
eJxjZWBgEOtsyhfuirgNZDJEtesZM2AB8+14PBfZXvPXs0wIbzZUi76gsyQSJN4T+93ydeC9UFM/vcQ9
3va5tt7nyn/6yDY/C/4x9Voky3zWhK7FX1NPLlLPtp8dnb9hwuSi2Y38JSyFgsVLIsULbwUtyL3ko5XF
41mY7u3xNfWsJ8hM+bkszjkzTrq1980O5u59FOHVqxXn2l+YvGfSzoyYeTzlgYtkmwMXbZgwc7797MQ5
bQt0Z5xcZDX55CK2/qPzBbqtZ2Bz/ygYBaMAOwAA3NBOcw;
DEFINE LABEL 14
eJzjZ2BgmNTV5nKxKyIwtFNy3Z223D0KrUYHtBt/HC6uKzvUWv58+9mS+ZtEigRXiBQ5z7tUVDUdqIUh
qMtdnIEMYLn6ZMmrDVo9CVt2Tpu7tXTKrW2pHZLb+yskt5emzd3a5EiMGUmnZwfPORuxFsTWOz5diRx3
jIJRMAogAACFRzEs;
| {
"pile_set_name": "Github"
} |
:103E000001C0B7C0112484B790E89093610010926C
:103E10006100882361F0982F9A70923041F081FF01
:103E200002C097EF94BF282E80E0C6D0E9C085E09D
:103E30008093810082E08093C00088E18093C1007C
:103E400087E68093C40086E08093C2008EE0B4D001
:103E5000209A84E020E33CEF91E0309385002093AA
:103E6000840096BBB09BFECF189AA8954091C000E5
:103E700047FD02C0815089F793D0813479F490D006
:103E8000182FA0D0123811F480E004C088E0113857
:103E900009F083E07ED080E17CD0EECF823419F44B
:103EA00084E198D0F8CF853411F485E0FACF8535D8
:103EB00041F476D0C82F74D0D82FCC0FDD1F82D01C
:103EC000EACF863519F484E085D0DECF843691F5CB
:103ED00067D066D0F82E64D0D82E00E011E05801EB
:103EE0008FEFA81AB80A5CD0F80180838501FA1018
:103EF000F6CF68D0F5E4DF1201C0FFCF50E040E01C
:103F000063E0CE0136D08E01E0E0F1E06F0182E0A7
:103F1000C80ED11C4081518161E0C8012AD00E5FDA
:103F20001F4FF601FC10F2CF50E040E065E0CE01FB
:103F300020D0B1CF843771F433D032D0F82E30D0C6
:103F400041D08E01F80185918F0123D0FA94F110B0
:103F5000F9CFA1CF853739F435D08EE11AD084E975
:103F600018D08AE097CF813509F0A9CF88E024D016
:103F7000A6CFFC010A0167BFE895112407B600FC33
:103F8000FDCF667029F0452B19F481E187BFE895D4
:103F900008959091C00095FFFCCF8093C6000895CE
:103FA0008091C00087FFFCCF8091C00084FD01C0DC
:103FB000A8958091C6000895E0E6F0E098E190832E
:103FC00080830895EDDF803219F088E0F5DFFFCFC0
:103FD00084E1DFCFCF93C82FE3DFC150E9F7CF9162
:023FE000F1CF1F
:023FFE000008B9
:0400000300003E00BB
:00000001FF
| {
"pile_set_name": "Github"
} |
#region License
/*
* Copyright (C) 1999-2020 John Källén.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; see the file COPYING. If not, write to
* the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Reko.Core.Serialization
{
    /// <summary>
    /// Utility base class principally used for dispatching.
    /// </summary>
    public abstract class SerializedProject
    {
        /// <summary>
        /// Double-dispatch hook: each concrete serialized-project version
        /// routes <paramref name="visitor"/> to its matching Visit method.
        /// </summary>
        public abstract T Accept<T>(ISerializedProjectVisitor<T> visitor);
    }
    /// <summary>
    /// Visitor over the versioned serialized project formats (v2 through v5);
    /// one Visit method per supported on-disk project version.
    /// </summary>
    public interface ISerializedProjectVisitor<T>
    {
        T VisitProject_v2(Project_v2 sProject);
        T VisitProject_v3(Project_v3 sProject);
        T VisitProject_v4(Project_v4 sProject);
        T VisitProject_v5(Project_v5 sProject);
    }
}
| {
"pile_set_name": "Github"
} |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package DynamicClass {
dynamic class DynamicClassInner {
var array:Array;
var boolean:Boolean;
var date:Date;
var myFunction:Function;
var math:Math;
var number:Number;
var object:Object;
var string:String;
public var pubArray:Array;
public var pubBoolean:Boolean;
public var pubDate:Date;
public var pubFunction:Function;
public var pubMath:Math;
public var pubNumber:Number;
public var pubObject:Object;
public var pubString:String;
private var privArray:Array;
private var privBoolean:Boolean;
private var privDate:Date;
private var privFunction:Function;
private var privMath:Math;
private var privNumber:Number;
private var privObject:Object;
private var privString:String;
static var statArray:Array;
static var statBoolean:Boolean;
static var statDate:Date;
static var statFunction:Function;
static var statMath:Math;
static var statNumber:Number;
static var statObject:Object;
static var statString:String;
internal var finArray:Array;
internal var finBoolean:Boolean;
internal var finDate:Date;
internal var finFunction:Function;
internal var finMath:Math;
internal var finNumber:Number;
internal var finObject:Object;
internal var finString:String;
public static var pubStatArray:Array;
public static var pubStatBoolean:Boolean;
public static var pubStatDate:Date;
public static var pubStatFunction:Function;
public static var pubStatMath:Math;
public static var pubStatNumber:Number;
public static var pubStatObject:Object;
public static var pubStatString:String;
private static var privStatArray:Array;
private static var privStatBoolean:Boolean;
private static var privStatDate:Date;
private static var privStatFunction:Function;
private static var privStatMath:Math;
private static var privStatNumber:Number;
private static var privStatObject:Object;
private static var privStatString:String;
// ****************
// constructor
// ****************
function DynamicClass() {
}
// *****************
// default methods
// *****************
function getArray() : Array { return array; }
function getBoolean() : Boolean { return boolean; }
function getDate() : Date { return date; }
function getFunction() : Function { return myFunction; }
function getMath() : Math { return math; }
function getNumber() : Number { return number; }
function getObject() : Object { return object; }
function getString() : String { return string; }
//function getSimple() : Simple { return simple; }
function setArray( a:Array ) { array = a; }
function setBoolean( b:Boolean ) { boolean = b; }
function setDate( d:Date ) { date = d; }
function setFunction( f:Function ) { myFunction = f; }
function setMath( m:Math ) { math = m; }
function setNumber( n:Number ) { number = n; }
function setObject( o:Object ) { object = o; }
function setString( s:String ) { string = s; }
function setAll( a:Array,
b:Boolean,
d:Date,
f:Function,
m:Math,
n:Number,
o:Object,
s:String) {
array = a;
boolean = b;
date = d;
myFunction = f;
math = m;
number = n;
object = o;
string = s;
simple = si;
}
// *******************
// public methods
// *******************
public function setPubArray( a:Array ) { pubArray = a; }
public function setPubBoolean( b:Boolean ) { pubBoolean = b; }
public function setPubDate( d:Date ) { pubDate = d; }
public function setPubFunction( f:Function ) { pubFunction = f; }
public function setPubMath( m:Math ) { pubMath = m; }
public function setPubNumber( n:Number ) { pubNumber = n; }
public function setPubObject( o:Object ) { pubObject = o; }
public function setPubString( s:String ) { pubString = s; }
public function getPubArray() : Array { return this.pubArray; }
public function getPubBoolean() : Boolean { return this.pubBoolean; }
public function getPubDate() : Date { return this.pubDate; }
public function getPubFunction() : Function { return this.pubFunction; }
public function getPubMath() : Math { return this.pubMath; }
public function getPubNumber() : Number { return this.pubNumber; }
public function getPubObject() : Object { return this.pubObject; }
public function getPubString() : String { return this.pubString; }
// *******************
// private methods
// *******************
private function getPrivArray() : Array { return privArray; }
private function getPrivBoolean() : Boolean { return privBoolean; }
private function getPrivDate() : Date { return privDate; }
private function getPrivFunction() : Function { return privFunction; }
private function getPrivMath() : Math { return privMath; }
private function getPrivNumber() : Number { return privNumber; }
private function getPrivObject() : Object { return privObject; }
private function getPrivString() : String { return privString; }
private function setPrivArray( a:Array ) { privArray = a; }
private function setPrivBoolean( b:Boolean ) { privBoolean = b; }
private function setPrivDate( d:Date ) { privDate = d; }
private function setPrivFunction( f:Function ) { privFunction = f; }
private function setPrivMath( m:Math ) { privMath = m; }
private function setPrivNumber( n:Number ) { privNumber = n; }
private function setPrivObject( o:Object ) { privObject = o; }
private function setPrivString( s:String ) { privString = s; }
// *******************
// static methods
// *******************
static function setStatArray(a:Array) { statArray=a; }
static function setStatBoolean( b:Boolean ) { statBoolean = b; }
static function getStatArray() { return statArray; }
// *******************
// final methods
// *******************
final function setFinArray(a:Array) { finArray=a; }
final function getFinArray() { return finArray; }
// **************************
// public static methods
// **************************
public static function setPubStatArray(a:Array) { pubStatArray=a; }
public static function setPubStatBoolean( b:Boolean ) { pubStatBoolean = b; }
public static function getPubStatArray() { return pubStatArray; }
// **************************
// private static methods
// **************************
private static function setPrivStatArray(a:Array) { privStatArray=a; }
private static function setPrivStatBoolean( b:Boolean ) { privStatBoolean = b; }
private static function getPrivStatArray() { return privStatArray; }
}
public class DynamicClass extends DynamicClassInner {}
}
| {
"pile_set_name": "Github"
} |
-- CMVFS: presumably the engine's virtual file system store (one row per
-- file, keyed by full file name) -- confirm against the persistence layer.
CREATE TABLE CMVFS (
CMFNAM char (255),   -- file name/path; primary key below
CMDTYP int ,         -- type code; semantics defined by the application
CMMODD bigint,       -- modification stamp (units not evident from schema)
CMWHOM char (50) NULL,   -- author/owner, if any
CMDATA text NULL     -- file payload
);
ALTER TABLE CMVFS
ADD
(
PRIMARY KEY (CMFNAM)
);
CREATE TABLE CMCHAB (
CMUSERID char (50) NULL ,
CMABID char (50) NULL ,
CMABPF int NULL ,
CMABTX text NULL
);
ALTER TABLE CMCHAB
ADD
(
PRIMARY KEY (CMUSERID,CMABID)
);
CREATE TABLE CMSTAT (
CMSTRT bigint NULL ,
CMENDT bigint NULL ,
CMDATA text NULL
);
ALTER TABLE CMSTAT
ADD
(
PRIMARY KEY (CMSTRT)
);
CREATE TABLE CMPOLL (
CMNAME char (100) ,
CMBYNM char (100) NULL ,
CMSUBJ char (255) NULL ,
CMDESC text NULL ,
CMOPTN text NULL ,
CMFLAG bigint NULL ,
CMQUAL char (255) NULL ,
CMRESL text NULL,
CMEXPI bigint NULL
);
ALTER TABLE CMPOLL
ADD
(
PRIMARY KEY (CMNAME)
);
CREATE TABLE CMCHAR (
CMCHID char (50),
CMUSERID char (50) ,
CMPASS char (50) NULL ,
CMCLAS char (250) NULL ,
CMSTRE int NULL ,
CMRACE char (50) NULL ,
CMDEXT int NULL ,
CMCONS int NULL ,
CMGEND char (50) NULL ,
CMWISD int NULL ,
CMINTE int NULL ,
CMCHAR int NULL ,
CMHITP int NULL ,
CMLEVL char (50) NULL ,
CMMANA int NULL ,
CMMOVE int NULL ,
CMDESC text NULL ,
CMALIG int NULL ,
CMEXPE int NULL ,
CMEXLV int NULL ,
CMWORS char (50) NULL ,
CMPRAC int NULL ,
CMTRAI int NULL ,
CMAGEH int NULL ,
CMGOLD int NULL ,
CMWIMP int NULL ,
CMQUES int NULL ,
CMROID char (100) NULL ,
CMDATE char (50) NULL ,
CMCHAN int NULL ,
CMATTA int NULL ,
CMAMOR int NULL ,
CMDAMG int NULL ,
CMBTMP int NULL ,
CMLEIG char (50) NULL ,
CMHEIT int NULL ,
CMWEIT int NULL ,
CMPRPT char (250) NULL,
CMCOLR char (100) NULL,
CMLSIP char (100) NULL,
CMEMAL char (255) NULL,
CMPFIL text NULL,
CMSAVE char (150) NULL,
CMMXML text NULL
);
ALTER TABLE CMCHAR
ADD
(
PRIMARY KEY (CMUSERID)
);
CREATE TABLE CMCHFO (
CMUSERID char (50) NULL ,
CMFONM int NULL ,
CMFOID char (50) NULL ,
CMFOTX text NULL ,
CMFOLV int NULL ,
CMFOAB int NULL
);
ALTER TABLE CMCHFO
ADD
(
PRIMARY KEY (CMUSERID,CMFONM)
);
CREATE TABLE CMCHCL (
CMUSERID char (50) NULL ,
CMCLAN char (100) NULL ,
CMCLRO int NULL,
CMCLSTS char (100) NULL
);
ALTER TABLE CMCHCL
ADD
(
PRIMARY KEY (CMUSERID,CMCLAN)
);
CREATE TABLE CMCHIT (
CMUSERID char (50) NULL ,
CMITNM char (100) NULL ,
CMITID char (50) NULL ,
CMITTX text NULL ,
CMITLO char (100) NULL ,
CMITWO bigint NULL ,
CMITUR int NULL ,
CMITLV int NULL ,
CMITAB int NULL ,
CMHEIT int NULL
);
ALTER TABLE CMCHIT
ADD
(
PRIMARY KEY (CMUSERID,CMITNM)
);
CREATE TABLE CMROCH (
CMROID char (50) NULL ,
CMCHNM char (100) NULL ,
CMCHID char (50) NULL ,
CMCHTX text NULL ,
CMCHLV int NULL ,
CMCHAB int NULL ,
CMCHRE int NULL ,
CMCHRI char (100) NULL
);
ALTER TABLE CMROCH
ADD
(
PRIMARY KEY (CMROID,CMCHNM)
);
CREATE TABLE CMROEX (
CMROID char (50) NULL ,
CMDIRE int NULL ,
CMEXID char (50) NULL ,
CMEXTX text NULL ,
CMNRID char (50) NULL
);
ALTER TABLE CMROEX
ADD
(
PRIMARY KEY (CMROID,CMDIRE)
);
CREATE TABLE CMROIT (
CMROID char (50) NULL ,
CMITNM char (100) NULL ,
CMITID char (50) NULL ,
CMITLO char (100) NULL ,
CMITTX text NULL ,
CMITRE int NULL ,
CMITUR int NULL ,
CMITLV int NULL ,
CMITAB int NULL ,
CMHEIT int NULL
);
ALTER TABLE CMROIT
ADD
(
PRIMARY KEY (CMROID,CMITNM)
);
CREATE TABLE CMROOM (
CMROID char (50) NULL ,
CMLOID char (50) NULL ,
CMAREA char (50) NULL ,
CMDESC1 char (255) NULL ,
CMDESC2 text NULL ,
CMROTX text NULL
);
ALTER TABLE CMROOM
ADD
(
PRIMARY KEY (CMROID)
);
CREATE TABLE CMQUESTS (
CMQUESID char (250) NULL ,
CMQUTYPE char (50) NULL ,
CMQFLAGS int NULL ,
CMQSCRPT text NULL ,
CMQWINNS text NULL
);
ALTER TABLE CMQUESTS
ADD
(
PRIMARY KEY (CMQUESID)
);
CREATE TABLE CMAREA (
CMAREA char (50) ,
CMTYPE char (50) ,
CMCLIM int NULL ,
CMSUBS char (100) NULL ,
CMDESC text NULL ,
CMROTX text NULL ,
CMTECH int NULL
);
ALTER TABLE CMAREA
ADD
(
PRIMARY KEY (CMAREA)
);
CREATE TABLE CMJRNL (
CMJKEY char (75) ,
CMJRNL char (50) NULL ,
CMFROM char (50) NULL ,
CMDATE char (50) NULL ,
CMTONM char (50) NULL ,
CMSUBJ char (255) NULL ,
CMPART char (75) NULL ,
CMATTR integer NULL,
CMDATA char (255) NULL ,
CMUPTM bigint NULL,
CMIMGP char (50) NULL,
CMVIEW integer NULL,
CMREPL integer NULL,
CMMSGT text NULL
);
ALTER TABLE CMJRNL
ADD
(
PRIMARY KEY (CMJKEY)
);
CREATE INDEX CMJRNLNAME on CMJRNL (CMJRNL ASC);
CREATE INDEX CMJRNLCMPART on CMJRNL (CMPART ASC);
CREATE INDEX CMJRNLCMTONM on CMJRNL (CMTONM ASC);
CREATE INDEX CMJRNLCMUPTM on CMJRNL (CMUPTM ASC);
CREATE TABLE CMCLAN (
CMCLID char (100) ,
CMTYPE int ,
CMDESC text NULL ,
CMACPT char (255) NULL ,
CMPOLI text NULL ,
CMRCLL char (50) NULL ,
CMDNAT char (50) NULL ,
CMSTAT int NULL ,
CMMORG char (50) NULL ,
CMTROP int NULL
);
ALTER TABLE CMCLAN
ADD
(
PRIMARY KEY (CMCLID)
);
CREATE TABLE CMPDAT (
CMPLID char (100) ,
CMSECT char (100) ,
CMPKEY char (255) ,
CMPDAT text NULL
);
ALTER TABLE CMPDAT
ADD
(
PRIMARY KEY (CMPLID,CMSECT,CMPKEY)
);
CREATE TABLE CMGRAC (
CMRCID char (250) ,
CMRDAT text NULL ,
CMRCDT bigint NULL
);
ALTER TABLE CMGRAC
ADD
(
PRIMARY KEY (CMRCID)
);
CREATE TABLE CMCCAC (
CMCCID char (50) ,
CMCDAT text NULL
);
ALTER TABLE CMCCAC
ADD
(
PRIMARY KEY (CMCCID)
);
CREATE TABLE CMGAAC (
CMGAID char (50) ,
CMGAAT text NULL ,
CMGACL char (50) NULL
);
ALTER TABLE CMGAAC
ADD
(
PRIMARY KEY (CMGAID)
);
CREATE TABLE CMACCT (
CMANAM char (50) ,
CMPASS char (50) ,
CMCHRS text NULL ,
CMAXML text NULL
);
ALTER TABLE CMACCT
ADD
(
PRIMARY KEY (CMANAM)
);
CREATE TABLE CMBKLG (
CMNAME char (50),
CMINDX int,
CMDATE bigint NULL,
CMDATA text NULL
);
ALTER TABLE CMBKLG
ADD
(
PRIMARY KEY (CMNAME,CMINDX)
);
CREATE TABLE CMCLIT (
CMCLID char (100) ,
CMITNM char (100) ,
CMITID char (50) NULL ,
CMITTX text NULL ,
CMITLO char (100) NULL ,
CMITWO bigint NULL ,
CMITUR int NULL ,
CMITLV int NULL ,
CMITAB int NULL ,
CMHEIT int NULL
);
ALTER TABLE CMCLIT
ADD
(
PRIMARY KEY (CMCLID,CMITNM)
);
| {
"pile_set_name": "Github"
} |
<TABLE BORDER="0" CELLSPACING="0" CELLPADDING="4" WIDTH="100%" BGCOLOR="#ffffff"><TR><TD>
<B>Generated Source Files</B>
</TD></TR><TR></TR>
<tr><td><a href="ekfAHRS_c.html" onclick="if (top) if (top.tocHiliteMe) top.tocHiliteMe(window, this, false);" id="ekfAHRS_c.html" target="rtwreport_document_frame" name="rtwIdGenFileLinks">ekfAHRS.c</a><span> </span></td></tr><tr></tr>
<tr><td><a href="ekfAHRS_h.html" onclick="if (top) if (top.tocHiliteMe) top.tocHiliteMe(window, this, false);" id="ekfAHRS_h.html" target="rtwreport_document_frame" name="rtwIdGenFileLinks">ekfAHRS.h</a><span> </span></td></tr><tr></tr>
<tr><td><a href="ekfAHRS_types_h.html" onclick="if (top) if (top.tocHiliteMe) top.tocHiliteMe(window, this, false);" id="ekfAHRS_types_h.html" target="rtwreport_document_frame" name="rtwIdGenFileLinks">ekfAHRS_types.h</a><span> </span></td></tr><tr></tr>
<tr><td><a href="rtwtypes_h.html" onclick="if (top) if (top.tocHiliteMe) top.tocHiliteMe(window, this, false);" id="rtwtypes_h.html" target="rtwreport_document_frame" name="rtwIdGenFileLinks">rtwtypes.h</a><span> </span></td></tr><tr></tr>
</table>
| {
"pile_set_name": "Github"
} |
1|3|O|173665.47|1996-01-02|5-LOW|Clerk#000000951|0|nstructions sleep furiously among |
2|4|O|46929.18|1996-12-01|1-URGENT|Clerk#000000880|0| foxes. pending accounts at the pending, silent asymptot|
3|2|F|193846.25|1993-10-14|5-LOW|Clerk#000000955|0|sly final accounts boost. carefully regular ideas cajole carefully. depos| | {
"pile_set_name": "Github"
} |
/**
* OWASP Benchmark Project v1.2
*
* This file is part of the Open Web Application Security Project (OWASP)
* Benchmark Project. For details, please see
* <a href="https://owasp.org/www-project-benchmark/">https://owasp.org/www-project-benchmark/</a>.
*
* The OWASP Benchmark is free software: you can redistribute it and/or modify it under the terms
* of the GNU General Public License as published by the Free Software Foundation, version 2.
*
* The OWASP Benchmark is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* @author Nick Sanidas
* @created 2015
*/
package org.owasp.benchmark.testcode;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@WebServlet(value="/weakrand-05/BenchmarkTest02506")
public class BenchmarkTest02506 extends HttpServlet {
private static final long serialVersionUID = 1L;
	@Override
	public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		// GET requests are handled identically to POST for this test case.
		doPost(request, response);
	}
	/**
	 * Benchmark test body: reads the "BenchmarkTest02506" request parameter,
	 * then implements a per-test-case "remember me" cookie whose value is
	 * generated with java.util.Random.nextLong() -- the intentionally weak
	 * randomness this OWASP Benchmark case exists to exercise.  Do not
	 * "fix" the randomness here: scanners are scored on detecting it.
	 */
	@Override
	public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		response.setContentType("text/html;charset=UTF-8");
		String[] values = request.getParameterValues("BenchmarkTest02506");
		String param;
		if (values != null && values.length > 0)
			param = values[0];
		else param = "";
		String bar = doSomething(request, param);
		// Weak token: java.util.Random is predictable (deliberate for this test).
		long l = new java.util.Random().nextLong();
		String rememberMeKey = Long.toString(l);
		String user = "Logan";
		// Derive the test-case number ("02506") from this class's own name so
		// the cookie/session key is unique per generated test case.
		String fullClassName = this.getClass().getName();
		String testCaseNumber = fullClassName.substring(fullClassName.lastIndexOf('.')+1+"BenchmarkTest".length());
		user+= testCaseNumber;
		String cookieName = "rememberMe" + testCaseNumber;
		// A user is "remembered" only if the cookie value matches the copy
		// stored in the session under the same name.
		boolean foundUser = false;
		javax.servlet.http.Cookie[] cookies = request.getCookies();
		if (cookies != null) {
			for (int i = 0; !foundUser && i < cookies.length; i++) {
				javax.servlet.http.Cookie cookie = cookies[i];
				if (cookieName.equals(cookie.getName())) {
					if (cookie.getValue().equals(request.getSession().getAttribute(cookieName))) {
						foundUser = true;
					}
				}
			}
		}
		if (foundUser) {
			response.getWriter().println(
				"Welcome back: " + user + "<br/>"
			);
		} else {
			// First visit: issue the cookie and mirror its value in the session.
			javax.servlet.http.Cookie rememberMe = new javax.servlet.http.Cookie(cookieName, rememberMeKey);
			rememberMe.setSecure(true);
			rememberMe.setHttpOnly(true);
			rememberMe.setPath(request.getRequestURI()); // i.e., set path to JUST this servlet
			// e.g., /benchmark/sql-01/BenchmarkTest01001
			request.getSession().setAttribute(cookieName, rememberMeKey);
			response.addCookie(rememberMe);
			response.getWriter().println(
				user + " has been remembered with cookie: " + rememberMe.getName()
				+ " whose value is: " + rememberMe.getValue() + "<br/>"
			);
		}
		response.getWriter().println(
			"Weak Randomness Test java.util.Random.nextLong() executed"
		);
	} // end doPost
private static String doSomething(HttpServletRequest request, String param) throws ServletException, IOException {
String bar = "";
if (param != null) {
bar = new String( org.apache.commons.codec.binary.Base64.decodeBase64(
org.apache.commons.codec.binary.Base64.encodeBase64( param.getBytes() ) ));
}
return bar;
}
}
| {
"pile_set_name": "Github"
} |
unit ModeMacPas;

// Exercises MacPas-style conditional compilation ($ifc/$elifc/$elsec/$endc)
// under {$mode macpas}.  Only "test3" and "bogus4" are defined below, so the
// outer conditional selects the test3 branch; inside it none of bogus1..3
// are defined, so the inner $elsec branch is taken (type bogusELSE plus an
// $error directive).
// NOTE(review): the $error directives look intentional -- this reads like
// parser/IDE test data, so they should not be "fixed".

{$mode macpas}{$H+}

interface

uses
  Classes, SysUtils;

{$DEFINE test3}
{$DEFINE bogus4}

{$ifc defined test1}
type mmp1 = integer;
{$elifc defined test2}
type mmp2 = integer;
{$elifc defined test3}
// Active branch: test3 is defined above.
type mmp3 = integer;
{$ifc defined bogus1}
type bogus1 = integer;
{$elifc defined bogus2}
type bogus2 = integer;
{$elifc defined bogus3}
type bogus3 = integer;
{$elsec}
// Active inner branch: bogus4 (not bogus1..3) is defined.
type bogusELSE = integer;
{$error Neither bogus1 nor bogus2 nor bogus3 are defined.}
{$endc}
{$elsec}
type mmpELSE = integer;
{$error Neither test1 nor test2 nor test3 are defined.}
{$endc}

implementation

end.
| {
"pile_set_name": "Github"
} |
<wxs src="../wxs/utils.wxs" module="utils" />
<!-- Card component: thumbnail + content (title/desc/price/num) + footer slot.
     "custom-class", "thumb-class", etc. are external class hooks for callers. -->
<view class="custom-class van-card">
  <!-- Header row; utils.bem adds the --center modifier when `centered` is set. -->
  <view class="{{ utils.bem('card__header', { center: centered }) }}">
    <!-- Thumbnail: image when `thumb` is given, otherwise the named slot;
         an optional corner tag is overlaid on top. -->
    <view class="van-card__thumb" bind:tap="onClickThumb">
      <image
        wx:if="{{ thumb }}"
        src="{{ thumb }}"
        mode="{{ thumbMode }}"
        lazy-load="{{ lazyLoad }}"
        class="van-card__img thumb-class"
      />
      <slot name="thumb" />
      <van-tag
        wx:if="{{ tag }}"
        mark
        type="danger"
        custom-class="van-card__tag"
      >
        {{ tag }}
      </van-tag>
    </view>
    <!-- Text content; each of title/desc falls back to a slot when the
         corresponding property is empty. -->
    <view class="van-card__content">
      <view wx:if="{{ title }}" class="van-card__title title-class">{{ title }}</view>
      <slot wx:else name="title" />
      <view wx:if="{{ desc }}" class="van-card__desc desc-class">{{ desc }}</view>
      <slot wx:else name="desc" />
      <slot name="tags" />
      <!-- Bottom line: price, strike-through original price, quantity.
           `|| price === 0` keeps a literal zero price visible. -->
      <view class="van-card__bottom">
        <view wx:if="{{ price || price === 0 }}" class="van-card__price price-class">{{ currency }} {{ price }}</view>
        <view wx:if="{{ originPrice || originPrice === 0 }}" class="van-card__origin-price origin-price-class">{{ currency }} {{ originPrice }}</view>
        <view wx:if="{{ num }}" class="van-card__num num-class">x {{ num }}</view>
        <slot name="bottom" />
      </view>
    </view>
  </view>
  <view class="van-card__footer">
    <slot name="footer" />
  </view>
</view>
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.entries;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import org.apache.geode.cache.EntryEvent;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.cache.DiskId;
import org.apache.geode.internal.cache.DiskStoreImpl;
import org.apache.geode.internal.cache.InternalRegion;
import org.apache.geode.internal.cache.RegionEntry;
import org.apache.geode.internal.cache.RegionEntryContext;
import org.apache.geode.internal.cache.Token;
import org.apache.geode.internal.cache.eviction.EvictionController;
import org.apache.geode.internal.cache.persistence.DiskRecoveryStore;
import org.apache.geode.internal.cache.versions.VersionSource;
import org.apache.geode.internal.cache.versions.VersionStamp;
import org.apache.geode.internal.cache.versions.VersionTag;
import org.apache.geode.internal.offheap.OffHeapRegionEntryHelper;
import org.apache.geode.internal.offheap.annotations.Released;
import org.apache.geode.internal.offheap.annotations.Retained;
import org.apache.geode.internal.offheap.annotations.Unretained;
import org.apache.geode.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;
/*
* macros whose definition changes this class:
*
* disk: DISK lru: LRU stats: STATS versioned: VERSIONED offheap: OFFHEAP
*
* One of the following key macros must be defined:
*
* key object: KEY_OBJECT key int: KEY_INT key long: KEY_LONG key uuid: KEY_UUID key string1:
* KEY_STRING1 key string2: KEY_STRING2
*/
/**
* Do not modify this class. It was generated. Instead modify LeafRegionEntry.cpp and then run
* ./dev-tools/generateRegionEntryClasses.sh (it must be run from the top level directory).
*/
public class VersionedThinDiskRegionEntryOffHeapIntKey extends VersionedThinDiskRegionEntryOffHeap {
  // NOTE(review): generated file -- comments below are descriptive only; any
  // real change belongs in LeafRegionEntry.cpp, then regenerate.
  // --------------------------------------- common fields ----------------------------------------
  private static final AtomicLongFieldUpdater<VersionedThinDiskRegionEntryOffHeapIntKey> LAST_MODIFIED_UPDATER =
      AtomicLongFieldUpdater.newUpdater(VersionedThinDiskRegionEntryOffHeapIntKey.class,
          "lastModified");
  protected int hash;
  private HashEntry<Object, Object> nextEntry;
  @SuppressWarnings("unused")
  private volatile long lastModified;
  // --------------------------------------- offheap fields ---------------------------------------
  /**
   * All access done using OFF_HEAP_ADDRESS_UPDATER so it is used even though the compiler can not
   * tell it is.
   */
  @SuppressWarnings("unused")
  @Retained
  @Released
  private volatile long offHeapAddress;
  /**
   * I needed to add this because I wanted clear to call setValue which normally can only be called
   * while the re is synced. But if I sync in that code it causes a lock ordering deadlock with the
   * disk regions because they also get a rw lock in clear. Some hardware platforms do not support
   * CAS on a long. If gemfire is run on one of those the AtomicLongFieldUpdater does a sync on the
   * RegionEntry and we will once again be deadlocked. I don't know if we support any of the
   * hardware platforms that do not have a 64bit CAS. If we do then we can expect deadlocks on disk
   * regions.
   */
  private static final AtomicLongFieldUpdater<VersionedThinDiskRegionEntryOffHeapIntKey> OFF_HEAP_ADDRESS_UPDATER =
      AtomicLongFieldUpdater.newUpdater(VersionedThinDiskRegionEntryOffHeapIntKey.class,
          "offHeapAddress");
  // ---------------------------------------- disk fields -----------------------------------------
  /**
   * @since GemFire 5.1
   */
  protected DiskId id;
  // ------------------------------------- versioned fields ---------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Version info is packed into primitive fields; see getEntryVersion /
  // getRegionVersion for how the pieces are recombined.
  private VersionSource memberId;
  private short entryVersionLowBytes;
  private short regionVersionHighBytes;
  private int regionVersionLowBytes;
  private byte entryVersionHighByte;
  private byte distributedSystemId;
  // --------------------------------------- key fields -------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  private final int key;

  public VersionedThinDiskRegionEntryOffHeapIntKey(final RegionEntryContext context, final int key,
      @Retained final Object value) {
    super(context, (value instanceof RecoveredEntry ? null : value));
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    initialize(context, value);
    this.key = key;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public Token getValueAsToken() {
    return OffHeapRegionEntryHelper.getValueAsToken(this);
  }

  @Override
  protected Object getValueField() {
    return OffHeapRegionEntryHelper._getValue(this);
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  @Unretained
  protected void setValueField(@Unretained final Object value) {
    OffHeapRegionEntryHelper.setValue(this, value);
  }

  @Override
  @Retained
  public Object getValueRetain(final RegionEntryContext context, final boolean decompress) {
    return OffHeapRegionEntryHelper._getValueRetain(this, decompress, context);
  }

  @Override
  public long getAddress() {
    return OFF_HEAP_ADDRESS_UPDATER.get(this);
  }

  @Override
  public boolean setAddress(final long expectedAddress, long newAddress) {
    return OFF_HEAP_ADDRESS_UPDATER.compareAndSet(this, expectedAddress, newAddress);
  }

  @Override
  @Released
  public void release() {
    OffHeapRegionEntryHelper.releaseEntry(this);
  }

  @Override
  public void returnToPool() {
    // never implemented
  }

  @Override
  protected long getLastModifiedField() {
    return LAST_MODIFIED_UPDATER.get(this);
  }

  @Override
  protected boolean compareAndSetLastModifiedField(final long expectedValue, final long newValue) {
    return LAST_MODIFIED_UPDATER.compareAndSet(this, expectedValue, newValue);
  }

  @Override
  public int getEntryHash() {
    return this.hash;
  }

  @Override
  protected void setEntryHash(final int hash) {
    this.hash = hash;
  }

  @Override
  public HashEntry<Object, Object> getNextEntry() {
    return this.nextEntry;
  }

  @Override
  public void setNextEntry(final HashEntry<Object, Object> nextEntry) {
    this.nextEntry = nextEntry;
  }

  // ----------------------------------------- disk code ------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  protected void initialize(final RegionEntryContext context, final Object value) {
    diskInitialize(context, value);
  }

  @Override
  public int updateAsyncEntrySize(final EvictionController evictionController) {
    throw new IllegalStateException("should never be called");
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public DiskId getDiskId() {
    return this.id;
  }

  @Override
  public void setDiskId(final RegionEntry oldEntry) {
    this.id = ((DiskEntry) oldEntry).getDiskId();
  }

  private void diskInitialize(final RegionEntryContext context, final Object value) {
    DiskRecoveryStore diskRecoveryStore = (DiskRecoveryStore) context;
    DiskStoreImpl diskStore = diskRecoveryStore.getDiskStore();
    long maxOplogSize = diskStore.getMaxOplogSize();
    // get appropriate instance of DiskId implementation based on maxOplogSize
    this.id = DiskId.createDiskId(maxOplogSize, true, diskStore.needsLinkedList());
    Helper.initialize(this, diskRecoveryStore, value);
  }

  // -------------------------------------- versioned code ----------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public int getEntryVersion() {
    // Recombine the 24-bit entry version from its high byte and low short.
    return ((entryVersionHighByte << 16) & 0xFF0000) | (entryVersionLowBytes & 0xFFFF);
  }

  @Override
  public long getRegionVersion() {
    // Recombine the 48-bit region version from its high short and low int.
    return (((long) regionVersionHighBytes) << 32) | (regionVersionLowBytes & 0x00000000FFFFFFFFL);
  }

  @Override
  public long getVersionTimeStamp() {
    return getLastModified();
  }

  @Override
  public void setVersionTimeStamp(final long timeStamp) {
    setLastModified(timeStamp);
  }

  @Override
  public VersionSource getMemberID() {
    return this.memberId;
  }

  @Override
  public int getDistributedSystemId() {
    return this.distributedSystemId;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public void setVersions(final VersionTag versionTag) {
    this.memberId = versionTag.getMemberID();
    int eVersion = versionTag.getEntryVersion();
    this.entryVersionLowBytes = (short) (eVersion & 0xffff);
    this.entryVersionHighByte = (byte) ((eVersion & 0xff0000) >> 16);
    this.regionVersionHighBytes = versionTag.getRegionVersionHighBytes();
    this.regionVersionLowBytes = versionTag.getRegionVersionLowBytes();
    // For a non-gateway tag from the same distributed system, keep the newer
    // of the two timestamps (and push ours back into the tag if it is newer).
    if (!versionTag.isGatewayTag()
        && this.distributedSystemId == versionTag.getDistributedSystemId()) {
      if (getVersionTimeStamp() <= versionTag.getVersionTimeStamp()) {
        setVersionTimeStamp(versionTag.getVersionTimeStamp());
      } else {
        versionTag.setVersionTimeStamp(getVersionTimeStamp());
      }
    } else {
      setVersionTimeStamp(versionTag.getVersionTimeStamp());
    }
    this.distributedSystemId = (byte) (versionTag.getDistributedSystemId() & 0xff);
  }

  @Override
  public void setMemberID(final VersionSource memberId) {
    this.memberId = memberId;
  }

  @Override
  public VersionStamp getVersionStamp() {
    return this;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public VersionTag asVersionTag() {
    VersionTag tag = VersionTag.create(memberId);
    tag.setEntryVersion(getEntryVersion());
    tag.setRegionVersion(this.regionVersionHighBytes, this.regionVersionLowBytes);
    tag.setVersionTimeStamp(getVersionTimeStamp());
    tag.setDistributedSystemId(this.distributedSystemId);
    return tag;
  }

  @Override
  public void processVersionTag(final InternalRegion region, final VersionTag versionTag,
      final boolean isTombstoneFromGII, final boolean hasDelta, final VersionSource versionSource,
      final InternalDistributedMember sender, final boolean checkForConflicts) {
    basicProcessVersionTag(region, versionTag, isTombstoneFromGII, hasDelta, versionSource, sender,
        checkForConflicts);
  }

  @Override
  public void processVersionTag(final EntryEvent cacheEvent) {
    // this keeps IDE happy. without it the sender chain becomes confused while browsing this code
    super.processVersionTag(cacheEvent);
  }

  /** get rvv internal high byte. Used by region entries for transferring to storage */
  @Override
  public short getRegionVersionHighBytes() {
    return this.regionVersionHighBytes;
  }

  /** get rvv internal low bytes. Used by region entries for transferring to storage */
  @Override
  public int getRegionVersionLowBytes() {
    return this.regionVersionLowBytes;
  }

  // ----------------------------------------- key code -------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public Object getKey() {
    return this.key;
  }

  @Override
  public boolean isKeyEqual(final Object key) {
    if (key instanceof Integer) {
      return ((Integer) key).intValue() == this.key;
    }
    return false;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
| {
"pile_set_name": "Github"
} |
/* Allow text selection inside code samples (overrides any global disable). */
section.code, section.code * {
  -webkit-user-select: auto !important;
}
html,body{
  margin:0;
  padding:0;
  height: 100%;
}
body {
  font-family: Helvetica, Arial, sans-serif;
  position: relative;
  -webkit-font-smoothing: antialiased;
}
/* Theme variants: body.light appears twice on purpose-neutral grounds --
   first rule sets background, second sets text color. */
body.light {
  background: #fff;
}
body.dark {
  color: #F0F1F1;
  background: #333;
}
body.light {
  color: #181919;
}
h1 {
  font-weight: 200;
}
#wrapper {
  width: 100%;
  background: inherit;
  position: relative;
}
/* #site slides right (translate3d) when the off-canvas menu opens. */
#site {
  width: 100%;
  position: relative;
  z-index: 10;
  background: inherit;
  left: 0;
  transition: all 0.2s ease-out;
  -webkit-transition: all 0.2s ease-out;
  transform: translate3d(0, 0, 0);
  -webkit-transform: translate3d(0, 0, 0);
}
/* Thin dark edge just left of the sliding panel. */
#site:before{
  position: absolute;
  content: '';
  left: -4px;
  height: 100%;
  width: 4px;
  background: #3B3E3E;
}
#site.open {
  transform: translate3d(250px, 0, 0);
  -webkit-transform: translate3d(250px, 0, 0);
}
pre {
  font-family: menlo, sans-serif;
  font-size: 12px;
}
#main-header {
color: #373435;
background: #fff;
height: 80px;
-moz-box-sizing: border-box;
box-sizing: border-box;
padding: 20px 0 0 0;
position: relative;
}
#main-header hgroup {
text-align: center;
}
#main-header hgroup h1 {
font-size: 30px;
margin: 0 0 0 14px;
}
#main-header hgroup a, a {
color: #464646;
text-decoration: none;
}
#main-header hgroup a:hover {
color: #000;
}
#main-header hgroup p {
font-size: 13px;
color: #999;
margin: 0;
}
#main-header nav {
display: none;
}
.topcoat-icon--menu-stack {
background: url("../../img/hamburger_dark.svg") no-repeat;
background-size: cover;
}
#slide-menu-button {
position: absolute;
top: 20px;
left: 20px;
display: inline-block;
vertical-align: top;
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box;
-webkit-background-clip: padding;
-moz-background-clip: padding;
background-clip: padding-box;
padding: 0;
margin: 0;
font: inherit;
color: inherit;
background: transparent;
cursor: default;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
-o-text-overflow: ellipsis;
text-overflow: ellipsis;
white-space: nowrap;
overflow: hidden;
padding: 0 0.5rem;
line-height: 2rem;
letter-spacing: 1px;
color: #454545;
text-shadow: 0 1px #fff;
vertical-align: baseline;
-webkit-box-shadow: inset 0 1px #fff;
box-shadow: inset 0 1px #fff;
-webkit-border-radius: 3px;
border-radius: 3px;
width: 2.6rem;
height: 2.6rem;
line-height: 2.6rem;
border: 1px solid transparent;
-webkit-box-shadow: none;
box-shadow: none;
}
#slide-menu:disabled,
#slide-menu.is-disabled {
opacity: 0.3;
cursor: default;
pointer-events: none;
}
#slide-menu-button:active,
#slide-menu-button.is-active {
color: #454545;
text-shadow: 0 1px #fff;
background-color: #d3d7d7;
border: 1px solid #a5a8a8;
-webkit-box-shadow: inset 0 1px rgba(0,0,0,0.12);
box-shadow: inset 0 1px rgba(0,0,0,0.12);
}
#slide-menu-button span {
background-repeat: no-repeat;
-webkit-background-size: cover;
-moz-background-size: cover;
background-size: cover;
position: relative;
display: inline-block;
vertical-align: top;
overflow: hidden;
vertical-align: middle;
width: 1.3rem;
height: 1.3rem;
}
#download-btn {
display: none;
}
/* Content */
#content {
  width: 100%;
  -moz-box-sizing: border-box;
  box-sizing: border-box;
  padding: 10px 20px 20px 20px;
}
#content section.component > div.markup > p {
  font-size: 12px;
  color: #999;
}
#content section.component > div.markup > p:after {
  content: ':';
}
/* Code */
#content section.code {
  margin-top: 16px;
  background: #FFF;
  border: 1px solid #E0E0E0;
  -moz-box-sizing: border-box;
  box-sizing: border-box;
  padding: 4px 12px;
  font-size: 13px;
  -moz-border-radius: 1px;
  -webkit-border-radius: 1px;
  border-radius: 1px;
  font-weight: 400;
  display: none;
}
article.component {
  padding: 0 0 10px;
}
#content section.code h3 {
  margin: 0;
  font-size: 12px;
  color: #000;
  font-weight: 400;
}
#content header h2 {
  font-weight: 300;
  margin: 10px 0 25px;
  font-size: 20px;
  /* FIX: was "display: block-block;", which is not a valid CSS value and was
     ignored by browsers; "inline-block" matches the padding-right intent. */
  display: inline-block;
  padding-right: 10px;
}
#content header{
  position: relative;
}
#content pre {
  padding: 0;
  margin: 2px 0 10px;
}
.showcode {
  margin: 10px 0;
}
.showcode a, section.examples a, a {
  color: #288edf;
  text-decoration: none;
}
.showcode a:hover, section.examples a:hover, a:hover {
  text-decoration: underline;
}
section.examples ul {
  margin: 0 0 20px;
  padding: 0 0 0 20px;
}
section.examples h4 {
  margin-bottom: 5px;
}
section.examples li {
  color: #58595A;
}
/* Side Nav -- mobile default: full-screen dark panel behind #site. */
#sideNav {
  background: #4A4D4E;
  position: absolute;
  width: 100%;
  z-index: 1;
  height: 100%;
  left: 0;
}
#sideNav ul {
  list-style: none;
  margin: 0;
  padding: 0;
}
#sideNav li a {
  color: #F0F1F1;
  display: block;
  height: 46px;
  font-size: 16px;
  -moz-box-sizing: border-box;
  box-sizing: border-box;
  padding: 12px 0 0 20px;
  text-decoration: none;
}
#sideNav nav.site, #sideNav .combo {
  border-bottom: 1px solid #58595A;
  -moz-box-sizing: border-box;
  box-sizing: border-box;
  padding: 10px;
  display: block;
}
#pageNav li {
  border-bottom: 1px solid #58595A;
}
/* >= 650px: disable the off-canvas slide, show the header nav inline, and
   turn the side nav into a transparent in-page column. */
@media screen and (min-width: 650px) {
  /* Neutralize the slide-open transform on wider screens. */
  #site.open {
    transform: translate3d(0, 0, 0);
    -webkit-transform: translate3d(0, 0, 0);
  }
  #main-header nav {
    display: inline-block;
    position: absolute;
    right: 0;
    top: 40px;
  }
  #main-header ul {
    list-style: none;
  }
  #main-header nav li {
    display: inline-block;
    margin: 0 18px;
  }
  #main-header nav li#download-btn {
    display: none;
  }
  #main-header nav li a {
    text-decoration: none;
    font-size: 20px;
    color: #7F7F7F;
  }
  #main-header nav li.selected a {
    color: #373435;
  }
  /* Hamburger is redundant once the nav is visible. */
  #slide-menu-button{
    display: none;
  }
  #main-header hgroup {
    text-align: left;
    position: absolute;
    display: inline-block;
    top: 12px;
  }
  #main-header {
    color: #373435;
    background: #fff;
    height: 80px;
  }
  /* Make room for the fixed-position side nav. */
  #content {
    padding-left: 240px;
  }
  /* Side Nav */
  #sideNav {
    background: transparent;
    width: 220px;
    z-index: 20;
    left: 10px;
    top: 150px;
    height: auto;
  }
  #sideNav nav.site {
    display: none;
  }
  #sideNav .combo {
    border-bottom: none;
    padding: 0px 0;
  }
  #sideNav li {
    margin: 0;
    padding: 0;
    border: none;
  }
  #sideNav li a {
    padding: 8px 0 0 0px;
    height: auto;
    margin: 0;
    display: block;
  }
  body.light #sideNav li a {
    color: #797B7B;
  }
  /* NOTE(review): empty rule kept as-is -- presumably a placeholder. */
  body.light #pageNav li {
  }
}
/* >= 880px: wider side-nav gutter, larger headings, max-width container. */
@media screen and (min-width: 880px) {
  #content {
    padding-left: 300px;
  }
  #sideNav li a {
    display: block;
    text-decoration: none;
  }
  #sideNav li a:hover {
    text-decoration: underline;
  }
  #content header h2 {
    font-size: 28px;
  }
  #content header:before {
    top: 30px;
  }
  section.code div {
    display: inline-block;
    width: 100%;
    vertical-align: top;
    -moz-box-sizing: border-box;
    box-sizing: border-box;
  }
  /* Centered fixed-width page container. */
  .max-width {
    max-width: 1180px;
    position: relative;
    margin: 0 auto;
  }
  header#main-header .max-width {
    top: -10px;
  }
  #main-header nav li a {
    font-size: 22px;
  }
  #main-header nav {
    display: inline-block;
  }
  #main-header nav li {
    margin: 0 25px;
  }
  #main-header nav li:last-child {
    margin-right: 0;
  }
}
/* >= 940px: reveal and style the "download" call-to-action button. */
@media screen and (min-width: 940px) {
  #main-header nav li#download-btn {
    display: inline-block;
  }
  #main-header nav li a#download-btn{
    position:relative;
    top: -15px;
    display:inline-block;
    box-sizing:border-box;
    -moz-box-sizing:border-box;
    background-clip:padding-box;
    font:inherit;
    background:transparent;
    -webkit-user-select:none;
    -moz-user-select:none;
    user-select:none;
    text-overflow:ellipsis;
    white-space:nowrap;
    overflow:hidden;
    font-size:16px;
    line-height:3rem;
    letter-spacing:1px;
    color:#454545;
    text-shadow:0 1px #fff;
    vertical-align:top;
    background-color:#e5e9e8;
    box-shadow:inset 0 1px #fff;
    border:1px solid #a5a8a8;
    border-radius:6px;
    margin:0;
    padding:0 1.25rem;
  }
  /* Blue primary-button treatment (hover slightly lighter, active darker). */
  #main-header nav li a#download-btn, #main-header nav li a#download-btn:hover {
    border:1px solid #143250;
    background-color:#288edf;
    box-shadow:inset 0 1px rgba(255,255,255,0.36);
    color:#fff;
    font-weight:500;
    text-shadow:0 -1px rgba(0,0,0,0.36);
  }
  #main-header nav li a#download-btn:hover {
    background-color:#2f9cf3;
  }
  #main-header nav li a#download-btn:active, #main-header nav li a#download-btn.is-active {
    background-color:#0380e8;
    box-shadow:inset 0 1px rgba(0,0,0,0.12);
  }
  #main-header nav li a#download-btn:disabled, #main-header nav li a#download-btn.is-disabled {
    opacity:.3;
    cursor:default;
    pointer-events:none;
  }
}
| {
"pile_set_name": "Github"
} |
package reflect2
import (
"reflect"
"unsafe"
)
// safeType is the reflection-only implementation of Type: it delegates to the
// standard reflect package and panics on every operation that would require
// unsafe pointer access.
type safeType struct {
	reflect.Type
	cfg *frozenConfig
}

// New returns a pointer to a freshly allocated zero value of this type,
// wrapped in an interface{}.
func (type2 *safeType) New() interface{} {
	return reflect.New(type2.Type).Interface()
}

// UnsafeNew is unsupported in safe mode.
func (type2 *safeType) UnsafeNew() unsafe.Pointer {
	panic("does not support unsafe operation")
}

// Elem returns the element type, wrapped by the same config.
func (type2 *safeType) Elem() Type {
	return type2.cfg.Type2(type2.Type.Elem())
}

// Type1 exposes the underlying reflect.Type.
func (type2 *safeType) Type1() reflect.Type {
	return type2.Type
}

// PackEFace is unsupported in safe mode.
func (type2 *safeType) PackEFace(ptr unsafe.Pointer) interface{} {
	panic("does not support unsafe operation")
}

// Implements reports whether this type implements thatType's interface.
func (type2 *safeType) Implements(thatType Type) bool {
	return type2.Type.Implements(thatType.Type1())
}

// RType is unsupported in safe mode.
func (type2 *safeType) RType() uintptr {
	panic("does not support unsafe operation")
}

// Indirect dereferences obj if it is a pointer, via reflect.Indirect.
func (type2 *safeType) Indirect(obj interface{}) interface{} {
	return reflect.Indirect(reflect.ValueOf(obj)).Interface()
}

// UnsafeIndirect is unsupported in safe mode.
func (type2 *safeType) UnsafeIndirect(ptr unsafe.Pointer) interface{} {
	panic("does not support unsafe operation")
}

// LikePtr is unsupported in safe mode.
func (type2 *safeType) LikePtr() bool {
	panic("does not support unsafe operation")
}

// IsNullable reports whether values of this kind can be nil.
func (type2 *safeType) IsNullable() bool {
	return IsNullable(type2.Kind())
}

// IsNil reports whether obj is nil or points at a nil value.
// NOTE(review): assumes obj is a pointer-like value -- Elem() panics for
// non-dereferenceable kinds.
func (type2 *safeType) IsNil(obj interface{}) bool {
	if obj == nil {
		return true
	}
	return reflect.ValueOf(obj).Elem().IsNil()
}

// UnsafeIsNil is unsupported in safe mode.
func (type2 *safeType) UnsafeIsNil(ptr unsafe.Pointer) bool {
	panic("does not support unsafe operation")
}

// Set copies *val into *obj (both must be pointers to this type).
func (type2 *safeType) Set(obj interface{}, val interface{}) {
	reflect.ValueOf(obj).Elem().Set(reflect.ValueOf(val).Elem())
}

// UnsafeSet is unsupported in safe mode.
func (type2 *safeType) UnsafeSet(ptr unsafe.Pointer, val unsafe.Pointer) {
	panic("does not support unsafe operation")
}

// AssignableTo reports whether this type is assignable to anotherType.
func (type2 *safeType) AssignableTo(anotherType Type) bool {
	return type2.Type1().AssignableTo(anotherType.Type1())
}
| {
"pile_set_name": "Github"
} |
#Problem APEX 3.3.23
# WeBWorK (PG) problem: first-derivative test for f(x) = x^a - a*x.
DOCUMENT();
# Load whatever macros you need for the problem
loadMacros(
"PGstandard.pl",
"PGchoicemacros.pl",
"extraAnswerEvaluators.pl",
"MathObjects.pl",
"PGcourse.pl"
);
## DBsubject(Calculus - single variable)
## DBchapter(Applications of differentiation)
## DBsection(Increasing/decreasing functions and local extrema)
## Institution(Valdosta State University)
## Author(S. V. Ault)
## Level(4)
## MO(1)
## TitleText1('APEX Calculus')
## AuthorText1('Hartman')
## EditionText1('3.0')
## Section1('3.3')
## Problem1('23')
TEXT(beginproblem());
$showPartialCorrectAnswers = 1;
# Random odd exponent a in {3,5,7,9} (step 2 starting at 3).
$a = random(3,9,2);
$f = "x^$a - $a x";
# f'(x) = a x^(a-1) - a; since a-1 is even, x^(a-1) = 1 forces x = +/- 1.
$a1 = $a - 1;
$df = "$a x^{$a1} - $a";
$c1 = -1;
$c2 = 1;
$ans_crit = List($c1, $c2);
# NOTE(review): @inc/@dec each hold a single interval *string* consumed by
# interval_cmp below -- not lists of endpoints.
@inc = "(-INF, $c1) U ($c2, INF)";
@dec = "($c1, $c2)";
$ans_max = List($c1);
$ans_min = List($c2);
# Problem statement (heredoc text is rendered to the student verbatim).
BEGIN_TEXT
$PAR
$BBOLD NOTE: $EBOLD
When using interval notation in WeBWorK, remember
that:
$BR $SPACE $SPACE $SPACE $SPACE You use 'INF' for \(\infty\)
and '-INF' for \(-\infty\).
$BR $SPACE $SPACE $SPACE $SPACE And use 'U' for the union symbol.
$BR Enter $BBOLD DNE $EBOLD if an answer does not exist.
$PAR
$HR
\[
f(x) = $f
\]
$PAR
a) Find the critical numbers of \(f\). \{ ans_rule(15) \} (Separate multiple
answers by commas.)
$PAR
b) Determine the intervals on which \(f\) is increasing and decreasing.
$BR
\(f\) is increasing on: \{ ans_rule(15)\}
$BR
\(f\) is decreasing on: \{ ans_rule(15)\}
$PAR
c) Use the First Derivative Test to determine whether each
critical point is a relative maximum, minimum, or neither.
$BR
Relative maxima occur at \(x = \) \{ ans_rule(15) \} (Separate multiple
answers by commas.)
$BR
Relative minima occur at \(x = \) \{ ans_rule(15) \} (Separate multiple
answers by commas.)
END_TEXT
# Answer checkers, in the same order as the ans_rule blanks above.
ANS( $ans_crit->cmp() );
ANS(interval_cmp(@inc));
ANS(interval_cmp(@dec));
ANS( $ans_max->cmp() );
ANS( $ans_min->cmp() );
# Worked solution shown after the due date.
SOLUTION(EV3(<<'END_SOLUTION'));
$BR$BBOLD Solution:$EBOLD
$PAR
\( f'(x) = $df \).
Set equal to zero and solve.
\[
\begin{array}{rcl}
$a(x^{$a1} - 1) &=& 0 \\
x^{$a1} &=& 1 \\
x &=& \pm 1
\end{array}
\]
There are two critical numbers, \(x = -1, 1\).
$PAR
Use the first derivative test, choosing sample points in each interval.
\[
\begin{array}{|l|l|l|}
\hline
\textrm{Interval} &
\textrm{Sign of}\; f'\; \textrm{at sample} &
\textrm{Conclusion} \\
\hline
\hline
(-\infty, -1) & \textrm{positive} & \textrm{increasing} \\
\hline
(-1, 1) & \textrm{negative} & \textrm{decreasing} \\
\hline
(1, \infty) & \textrm{positive} & \textrm{increasing} \\
\hline
\end{array}
\]
There is a
relative maximum at \(x = -1\)
and a
relative minimum at \(x = 1\)
END_SOLUTION
ENDDOCUMENT();
| {
"pile_set_name": "Github"
} |
#!/bin/bash
#kill all running tasks
/var/www/sync/stopall
#start video 76
# NOTE(review): the next line globs /media/internal/video/76*, silences all
# output, backgrounds the player, then echoes a PID; the trailing "&" also
# backgrounds the echo itself -- presumably unintentional, confirm before
# relying on the echoed value.
/usr/bin/omxplayer-sync -mu -o both /media/internal/video/76* > /dev/null 2>&1 & echo $! & | {
"pile_set_name": "Github"
} |
import logging
import requests
import json
import azure.functions as func
# Base URL of the local Dapr sidecar HTTP API.
dapr_url = "http://localhost:3500/v1.0"


def main(msg: func.QueueMessage):
    """Queue trigger entry point: decode the message body and publish it as a
    Dapr event on topic ``myTopic``."""
    logging.info("Python queue-triggered function received a message!")
    body_text = msg.get_body().decode('utf-8')
    logging.info(f"Message: {body_text}")
    _publish_event(body_text)


def _publish_event(message):
    """POST ``{"message": ...}`` to the sidecar's publish endpoint, logging the
    request payload and the response status code."""
    url = f'{dapr_url}/publish/myTopic'
    content = {"message": message}
    logging.info(f'POST to {url} with content {json.dumps(content)}')
    response = requests.post(url, json=content)
    logging.info(f'Got response code {response.status_code}')
| {
"pile_set_name": "Github"
} |
using GhostRunner.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace GhostRunner.ViewModels.Scripts.Partials
{
    /// <summary>
    /// View model for the partial view that asks the user to confirm
    /// deletion of a script.
    /// </summary>
    public class ConfirmDeleteScriptModel
    {
        /// <summary>The script the user is being asked to delete.</summary>
        public Script Script { get; set; }
    }
} | {
"pile_set_name": "Github"
} |
# Copyright (c) Microsoft Corporation
#
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# compat imports
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
from builtins import ( # noqa
bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import datetime
import io
import json
import logging
try:
import pathlib2 as pathlib
except ImportError:
import pathlib
import os
# non-stdlib imports
import adal
import azure.common.credentials
import dateutil.parser
import msrest.authentication
import msrestazure.azure_exceptions
# local imports
from . import settings
from . import util
# create logger
logger = logging.getLogger(__name__)
util.setup_logger(logger)
# global defines
# AAD authority endpoint used for the device-code login flow.
_LOGIN_AUTH_URI = 'https://login.microsoftonline.com'
# Client id of the Azure cross-platform CLI application (the device-code
# prompt below identifies the requester as that app).
_CLIENT_ID = '04b07795-8ddb-461a-bbee-02f9e1bf7b46' # xplat-cli
class DeviceCodeAuthentication(msrest.authentication.Authentication):
    """Device Code Authentication session handler.

    Acquires an AAD token interactively via the device code flow and,
    when a token cache file is configured, persists the token as JSON so
    subsequent runs can reuse or refresh it without user interaction.
    """
    def __init__(self, context, resource, client_id, token_cache_file):
        """Ctor for DeviceCodeAuthentication
        :param DeviceCodeAuthentication self: this
        :param object context: adal authentication context
        :param str resource: resource to acquire tokens for
        :param str client_id: client id of the requesting application
        :param str token_cache_file: path to local token cache file
            (may be None/empty to disable on-disk caching)
        """
        self._context = context
        self._resource = resource
        self._client_id = client_id
        self._token_cache_file = token_cache_file
        # token is lazily populated/refreshed in signed_session()
        self._token = None
    @property
    def token(self):
        """Retrieve signed token
        :param DeviceCodeAuthentication self: this
        """
        return self._token
    @token.setter
    def token(self, value):
        """Set signed token
        :param DeviceCodeAuthentication self: this
        :param str value: token value
        """
        self._token = value
    def signed_session(self):
        """Get a signed session for requests.
        Usually called by the Azure SDKs for you to authenticate queries.
        :param DeviceCodeAuthentication self: this
        :rtype: requests.Session
        :return: request session with signed Authorization header
        """
        session = super(DeviceCodeAuthentication, self).signed_session()
        # try to get cached token from disk if not already held in memory
        if self._token is None and util.is_not_empty(self._token_cache_file):
            try:
                with open(self._token_cache_file, 'r') as fd:
                    self._token = json.load(fd)
            except OSError:
                # cache file missing/unreadable: fall through to
                # interactive acquisition below
                pass
            except Exception:
                logger.error(
                    'Error attempting read of token cache: {}'.format(
                        self._token_cache_file))
        # get token
        try:
            # assume the token must be (re-)written to the cache unless
            # it is found to still be valid below
            cache_token = True
            if self._token is None:
                # no token available: acquire one interactively through
                # the device code flow
                code = self._context.acquire_user_code(
                    resource=self._resource,
                    client_id=self._client_id,
                )
                logger.info(
                    'Please follow the instructions below. The requesting '
                    'application will be: Microsoft Azure Cross-platform '
                    'Command Line Interface')
                logger.info(code['message'])
                self._token = self._context.acquire_token_with_device_code(
                    resource=self._resource,
                    user_code_info=code,
                    client_id=self._client_id,
                )
            else:
                # check for expiry time with a 5 minute safety margin
                expiry = dateutil.parser.parse(self._token['expiresOn'])
                if (datetime.datetime.now() +
                        datetime.timedelta(minutes=5) >= expiry):
                    # attempt token refresh
                    logger.debug('Refreshing token expiring on: {}'.format(
                        expiry))
                    self._token = self._context.\
                        acquire_token_with_refresh_token(
                            refresh_token=self._token['refreshToken'],
                            client_id=self._client_id,
                            resource=self._resource,
                        )
                else:
                    # token still valid: no need to rewrite the cache
                    cache_token = False
            # set session authorization header
            session.headers['Authorization'] = '{} {}'.format(
                self._token['tokenType'], self._token['accessToken'])
            # cache token to disk
            if cache_token and util.is_not_empty(self._token_cache_file):
                logger.debug('storing token to local cache: {}'.format(
                    self._token_cache_file))
                if util.on_python2():
                    with io.open(
                            self._token_cache_file,
                            'w', encoding='utf8') as fd:
                        fd.write(json.dumps(
                            self._token, indent=4, sort_keys=True,
                            ensure_ascii=False))
                else:
                    with open(
                            self._token_cache_file,
                            'w', encoding='utf8') as fd:
                        json.dump(
                            self._token, fd, indent=4, sort_keys=True,
                            ensure_ascii=False)
                if not util.on_windows():
                    # restrict token cache to owner read/write only
                    os.chmod(self._token_cache_file, 0o600)
        except adal.AdalError as err:
            # AADSTS70008: the device code / refresh token has expired
            if (hasattr(err, 'error_response') and
                    'error_description' in err.error_response and
                    'AADSTS70008:' in err.error_response['error_description']):
                logger.error(
                    'Credentials have expired due to inactivity. Please '
                    'retry your command.')
                # clear token cache file due to expiration
                if util.is_not_empty(self._token_cache_file):
                    try:
                        pathlib.Path(self._token_cache_file).unlink()
                        logger.debug('invalidated local token cache: {}'.format(
                            self._token_cache_file))
                    except OSError:
                        pass
            raise
        return session
def create_aad_credentials(ctx, aad_settings):
    # type: (CliContext, settings.AADSettings) ->
    #        azure.common.credentials.ServicePrincipalCredentials or
    #        azure.common.credentials.UserPassCredentials
    """Create Azure Active Directory credentials.

    The credential type is selected from the provided parameters in
    this order:
    1. Service principal with X.509 certificate (application id +
       RSA private key PEM file)
    2. Service principal with auth key (application id + auth key)
    3. User and password
    4. Interactive device code login (fallback)

    CLI options take precedence over the corresponding config settings.
    :param CliContext ctx: Cli Context
    :param settings.AADSettings aad_settings: AAD settings
    :rtype: azure.common.credentials.ServicePrincipalCredentials or
        azure.common.credentials.UserPassCredentials
    :return: aad credentials object, or None if no AAD parameters or no
        endpoint were specified
    """
    # from aad parameters (cli options override config settings)
    aad_directory_id = ctx.aad_directory_id or aad_settings.directory_id
    aad_application_id = ctx.aad_application_id or aad_settings.application_id
    aad_auth_key = ctx.aad_auth_key or aad_settings.auth_key
    aad_user = ctx.aad_user or aad_settings.user
    aad_password = ctx.aad_password or aad_settings.password
    aad_cert_private_key = (
        ctx.aad_cert_private_key or aad_settings.rsa_private_key_pem
    )
    aad_cert_thumbprint = (
        ctx.aad_cert_thumbprint or aad_settings.x509_cert_sha1_thumbprint
    )
    aad_authority_url = ctx.aad_authority_url or aad_settings.authority_url
    if util.is_not_empty(aad_authority_url):
        aad_authority_url = aad_authority_url.rstrip('/')
    else:
        # fall back to the public Azure login authority
        aad_authority_url = _LOGIN_AUTH_URI
    endpoint = ctx.aad_endpoint or aad_settings.endpoint
    token_cache_file = aad_settings.token_cache_file
    # check for aad parameter validity: without any aad parameter or an
    # endpoint there is nothing to authenticate against
    if ((aad_directory_id is None and aad_application_id is None and
            aad_auth_key is None and aad_user is None and
            aad_password is None and aad_cert_private_key is None and
            aad_cert_thumbprint is None) or endpoint is None):
        return None
    # create credential object
    if (util.is_not_empty(aad_application_id) and
            util.is_not_empty(aad_cert_private_key)):
        # 1. service principal with certificate
        if util.is_not_empty(aad_auth_key):
            raise ValueError('cannot specify both cert auth and auth key')
        if util.is_not_empty(aad_password):
            raise ValueError('cannot specify both cert auth and password')
        if settings.verbose(ctx.config):
            logger.debug(
                ('using aad auth with certificate, auth={} endpoint={} '
                 'directoryid={} appid={} cert_thumbprint={}').format(
                     aad_authority_url, endpoint, aad_directory_id,
                     aad_application_id, aad_cert_thumbprint))
        context = adal.AuthenticationContext(
            '{}/{}'.format(aad_authority_url, aad_directory_id))

        def _acquire_token_with_cert():
            # read the private key at acquisition time (preserving the
            # prior re-read-per-call behavior), but close the file
            # handle deterministically instead of leaking it
            with open(aad_cert_private_key, 'rb') as fd:
                pem = util.decode_string(fd.read())
            return context.acquire_token_with_client_certificate(
                endpoint,
                aad_application_id,
                pem,
                aad_cert_thumbprint,
            )

        return msrestazure.azure_active_directory.AdalAuthentication(
            _acquire_token_with_cert)
    elif util.is_not_empty(aad_auth_key):
        # 2. service principal with auth key
        if util.is_not_empty(aad_password):
            raise ValueError(
                'Cannot specify both an AAD Service Principal and User')
        if settings.verbose(ctx.config):
            logger.debug(
                ('using aad auth with key, auth={} endpoint={} '
                 'directoryid={} appid={}').format(
                     aad_authority_url, endpoint, aad_directory_id,
                     aad_application_id))
        context = adal.AuthenticationContext(
            '{}/{}'.format(aad_authority_url, aad_directory_id))
        return msrestazure.azure_active_directory.AdalAuthentication(
            context.acquire_token_with_client_credentials,
            endpoint,
            aad_application_id,
            aad_auth_key,
        )
    elif util.is_not_empty(aad_password):
        # 3. user and password
        if settings.verbose(ctx.config):
            logger.debug(
                ('using aad auth with username and password, auth={} '
                 'endpoint={} directoryid={} username={}').format(
                     aad_authority_url, endpoint, aad_directory_id, aad_user))
        try:
            return azure.common.credentials.UserPassCredentials(
                username=aad_user,
                password=aad_password,
                tenant=aad_directory_id,
                auth_uri=aad_authority_url,
                resource=endpoint,
            )
        except msrest.exceptions.AuthenticationError as e:
            # AADSTS50079: account requires multi-factor auth, which
            # username/password auth cannot satisfy
            if 'AADSTS50079' in e.args[0]:
                raise RuntimeError('{} {}'.format(
                    e.args[0][2:],
                    'Do not pass an AAD password and try again.'))
            else:
                raise
    else:
        # 4. interactive device code fallback
        if settings.verbose(ctx.config):
            logger.debug(
                ('using aad auth with device code, auth={} endpoint={} '
                 'directoryid={}').format(
                     aad_authority_url, endpoint, aad_directory_id))
        return DeviceCodeAuthentication(
            context=adal.AuthenticationContext(
                '{}/{}'.format(aad_authority_url, aad_directory_id)),
            resource=endpoint,
            client_id=_CLIENT_ID,
            token_cache_file=token_cache_file,
        )
| {
"pile_set_name": "Github"
} |
<!-- Getting-started wizard slide for inviting agent colleagues.
     The "Continue" link skips ahead to #getting_started/finish;
     the "Invite" button submits the js-agent form rendered into
     the js-agent-form container. -->
<div class="main flex vertical centered darkBackground">
  <%- @Icon('full-logo', 'wizard-logo') %>
  <form class="setup wizard js-agent">
    <div class="wizard-slide">
      <h2><%- @T('Invite Colleagues') %></h2>
      <div class="wizard-body vertical justified js-agent-form"></div>
      <div class="wizard-controls center">
        <a class="btn btn--primary align-left" href="#getting_started/finish"><%- @T('Continue') %></a>
        <button class="btn btn--success align-right"><%- @T('Invite') %></button>
      </div>
    </div>
  </form>
</div>
</div> | {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <meta http-equiv="X-UA-Compatible" content="ie=edge">
    <title>Document</title>
    <style>
        .left{
            background:red;
        }
        .right{
            background:blue;
        }
        table{
            width:800px;
            height:200px;
            border-collapse: collapse;
        }
        .table{
            margin-top:20px;
            display: table;
            width:800px;
            height:200px;
        }
        .table-row{
            display: table-row;
        }
        .table-cell{
            /* 'center' is not a valid vertical-align value; 'middle'
               vertically centers table-cell content */
            vertical-align: middle;
            display: table-cell;
        }
    </style>
</head>
<body>
    <!-- native table: two equal cells side by side -->
    <table>
        <tr>
            <td class="left">左</td>
            <td class="right">右</td>
        </tr>
    </table>
    <!-- same layout reproduced with CSS display:table on divs -->
    <div class="table">
        <div class="table-row">
            <div class="left table-cell">
                左
            </div>
            <div class="right table-cell">
                右
            </div>
        </div>
    </div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
form=词
tags=
春入番江雨。
满湖山、莺啼燕雨,
前歌後舞。
闻道行骢行且止,
却听谯楼更鼓。
正未卜,
阴晴同否。
老子胸中高小范,
这精神、堪更开封府。
新治足,
旧民苦。
扁舟浩荡乘风去。
看莱衣、思贤堂上,
寿觞朝举。
六十二三前度者,
敢望香山老傅。
又过了、午年端午。
采采菖蒲三三节,
寄我公、矫矫扶天路。
重归衮,
到相圃。
灯共墙檠语。
记昨朝、芒鞋蓑笠,
冷风斜雨。
月入宫槐槐影澹,
化作槐花无数。
恍不记、鳌头压处。
不恨扬州吾不梦,
恨梦中、不醉琼花露。
空耿耿,
吊终古。
千蜂万蝶春为主。
怅何人、老忆江南,
北朝开府。
看取当年风景在,
不待花奴催鼓。
且未说、春丁分俎。
一曲沧浪邀吾和,
笑先生、尚是邯郸步。
如秉苘,
续残炬。
风雨东篱晚。
渺人间、南北东西,
平芜烟远。
旧日携壶吹帽处,
一色沈冥何限。
天不遣、魂销肠断。
不是苦无看山分,
料青山、也自羞人面。
秋後瘦,
老来倦。
惊回昨梦青山转。
恨一林、金粟都空,
静无人见。
默默黄花明朝有,
只待插花寻伴。
又谁笑、今朝蝶怨。
潦倒玉山休重醉,
到簪萸、忍待人频劝。
今又惜,
几人健。
何处从头说。
但倾尊、淋漓醉墨,
疏疏密密。
看取两轮东西者,
也是樊笼中物。
这光景、年来都别。
白发道人隆中像,
笑相逢、对拥炉边雪。
又过了,
上元节。
纸窗旋补寒穿穴。
柳黏窗、青青过雨,
劝君休折。
睡不成酣酒先醒,
花底东风又别。
夜复夜、吟魂飞越。
典却西湖东湖住,
十三年不出今朝出。
容易得,
二三月。
襟泪涔涔雨。
料骚魂、水解千年,
依然轻举。
还看吴儿胥涛上,
高出浪花几许。
绝倒是、东南旗鼓。
风雨蛟龙争何事,
问彩丝、香粽犹存否。
溪女伴,
采莲语。
古人不似今人苦。
漫追谈、少日风流,
三三五五。
谁似鄱阳鸱夷者,
相望怀沙终古。
待唤醒、重听金缕。
尚有远游当年恨,
恨南公、不见秦为楚。
天又暮,
黯凝伫。
锦岸吴船鼓。
问沙鸥、当日沈湘,
是何端午。
长恨青青朱门艾,
结束腰身似虎。
空泪落、婵媛嬃女。
我醉招累清醒否,
算平生、清又醒还误。
累笑我,
醉中语。
黄头舞棹临江处。
向人间、独竞南风,
叫云激楚。
笑倒两崖人如蚁,
不管颓波千屡。
忽惊抱、汨罗无柱。
欸乃渔歌斜阳外,
几书生、能办投湘赋。
歌此恨,
泪如缕。
绝北寒声动。
渺黄昏、叶满长安,
云迷章贡。
最苦周公千年後,
正与莽新同梦。
五十国、纷纷入中。
摇颺都人歌郿坞,
问何如、昨日菘高颂。
胪九锡,
竟谁风。
当初共道擎天重。
奈天教、垓下风寒,
滹沱兵冻。
寂寞放翁南园记,
带得园柑进奉。
怅回首、何人修凤。
寄语权门趋炎者,
这朝廷、不是邦昌宋。
真与赝,
可能共。
拍瓮春醅动。
洞庭霜、压绿堆黄,
林苞堪贡。
况有老人潭边菊,
摇落赏心入梦。
数百岁、半来许中。
儿女牵衣团栾处,
绕公公、愿献生申颂。
公性歰,
待重风。
人生一笑何时重。
奈今朝、有客无鱼,
有鱼留冻。
何似尊前斑斓起,
低唱浅斟齐奉。
也不待、烹龙炰凤。
此会明年知谁健,
说边愁、望断先生宋。
醒最苦,
醉聊共。
破帽吹愁去。
绕郊墟、残灰败壁,
冷烟斜雨。
舞马梦惊城乌起,
散作童妖灶语。
漫说与、谢仙一句。
犹记醉归西州路,
问行人、望望骊烽误。
几未失,
丧公屦。
高高况是兴亡处。
望平沙、落日湖光,
暗淮沈楚。
寂寞西陵歌又舞,
疑冢嵯峨新土。
黯牛笛、参差归路。
试问文君容赊否,
待东篱、更就黄花浦。
拚酩酊,
涴蓝缕。
七十三年矣。
记小人、四百四十,
五番甲子。
看到蓬莱水清浅,
休说树犹日如此。
但梦梦、昨非今是。
一曲尊前离鸾操,
抚铜仙、清泪如铅水。
歌未断,
我先醉。
新来画得耆英似。
似灞桥、风雪吟肩,
水仙梅弟。
里巷依稀灵光在,
飞过劫灰如洗。
笑少伴、乌衣馀几。
老子平生何曾默,
暮年诗、句句皆成史。
个亥字,
甲申起。
秋老寒香圃。
自春来、桔槔闲了,
去天尺五。
陌上踏歌来何暮,
收得黄云如土。
但稽首、福星初度。
不是使君人间佛,
甚今朝、欲雨今朝雨。
持寿酒,
为公舞。
虎头画手谁甚许。
写天人、方瞳红颊,
共宾笑语。
卫戟连营三千士,
簇簇满城箫鼓。
早恐是、留公不住。
飞去翩翩嫌白鹭,
算来年、稀姓多登府。
祝千岁,
奉明主。
少日都门路。
听长亭、青山落日,
不如归去。
十八年间来往断,
白首人间今古。
又惊绝、五更一句。
道是流离蜀天子,
甚当初、一似吴儿语。
臣再拜,
泪如雨。
画堂客馆真无数。
记画桥、黄竹歌声,
桃花前度。
风雨断魂苏季子,
春梦家山何处。
谁不愿、封侯万户。
寂暮江南轮四角,
问长安、道上无人住。
啼尽血,
向谁诉。
世事如何说。
似举鞍、回头笑问,
并州儿葛。
手障尘埃黄花路,
千里龙沙如雪。
著破帽、萧萧馀发。
行过故人柴桑里,
抚长松、老倒山间月。
聊共舞,
命相瑟。
春风五老多年别。
看使君、神交意气,
依然晚合。
袖有玉龙提携去,
满眼黄金台骨。
说不尽、古人痴绝。
我醉看天天看我,
听秋风、吹动檐间铁。
长啸起,
两山裂。
岁事峥嵘甚。
是当年、爆竹驱傩,
插金幡胜。
忽晓阑街儿童语,
不为上元
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: fb7e2c1cde476854fb7067dcca74511a
TextureImporter:
internalIDToNameTable: []
externalObjects: {}
serializedVersion: 9
mipmaps:
mipMapMode: 0
enableMipMap: 0
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -100
wrapU: 1
wrapV: 1
wrapW: 1
nPOTScale: 0
lightmap: 0
compressionQuality: 50
spriteMode: 1
spriteExtrude: 1
spriteMeshType: 0
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 1
spriteTessellationDetail: -1
textureType: 0
textureShape: 1
singleChannelComponent: 0
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- serializedVersion: 3
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
forceMaximumCompressionQuality_BC6H_BC7: 0
- serializedVersion: 3
buildTarget: Standalone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
forceMaximumCompressionQuality_BC6H_BC7: 0
- serializedVersion: 3
buildTarget: iPhone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
forceMaximumCompressionQuality_BC6H_BC7: 1
- serializedVersion: 3
buildTarget: Windows Store Apps
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
forceMaximumCompressionQuality_BC6H_BC7: 1
- serializedVersion: 3
buildTarget: Nintendo Switch
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
forceMaximumCompressionQuality_BC6H_BC7: 1
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
bones: []
spriteID: 935e58b5051eebb458387c2c3bd1e082
internalID: 0
vertices: []
indices:
edges: []
weights: []
secondaryTextures: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
require 'fileutils'
module Webruby
  class << self
    # Write content to +filename+ only when it differs from the file's
    # current content (or the file does not exist).  This avoids mtime
    # bumps that would needlessly retrigger dependent build steps.
    #
    # Yields an open File handle for a temporary sibling file; once the
    # block returns, the temp file is compared against the target and
    # copied over only on difference, then removed.
    def create_file_if_different(filename)
      tmp_filename = "#{filename}.tmp"
      # TODO: add support for case where block is not given,
      # maybe using monkey patching on File#close?
      # block form closes the handle even if the caller's block raises
      File.open(tmp_filename, 'w') do |f|
        yield f
      end
      if (!File.exist?(filename)) ||
         (!FileUtils.compare_file(filename, tmp_filename))
        puts "Creating new file: #{filename}!"
        FileUtils.cp(tmp_filename, filename)
      end
      FileUtils.rm(tmp_filename)
    end

    # Build output directory as configured by the webruby app.
    def build_dir
      Webruby::App.config.build_dir
    end

    # Absolute path of the build output directory.
    def full_build_dir
      File.expand_path(build_dir)
    end

    # Path of the generated mruby build config file.
    def build_config
      "#{build_dir}/mruby_build_config.rb"
    end

    # Absolute path of the generated mruby build config file.
    def full_build_config
      File.expand_path(build_config)
    end

    # Ruby entrypoint file as configured by the webruby app.
    def entrypoint_file
      Webruby::App.config.entrypoint
    end

    # Compiled mruby object files (src, mrblib, mrbgems), excluding
    # gem test objects which are only linked into the test build.
    def object_files
      (Dir.glob("#{full_build_dir}/mruby/emscripten/src/**/*.o") +
       Dir.glob("#{full_build_dir}/mruby/emscripten/mrblib/**/*.o") +
       Dir.glob("#{full_build_dir}/mruby/emscripten/mrbgems/**/*.o"))
        .reject { |f|
        f.end_with? "gem_test.o"
      }
    end

    # Object files used only by the mruby test build.
    def test_object_files
      (Dir.glob("#{full_build_dir}/mruby/emscripten/test/**/*.o") +
       Dir.glob("#{full_build_dir}/mruby/emscripten/mrbgems/**/gem_test.o"))
    end

    # All files in the directory containing the entrypoint file.
    def rb_files
      Dir.glob("#{File.dirname(entrypoint_file)}/**")
    end

    # Generated emscripten JS glue files for gems.
    def gem_js_files
      ["#{build_dir}/gem_library.js", "#{build_dir}/gem_append.js"]
    end

    # emcc flags wiring in the gem JS glue files.
    def gem_js_flags
      "--js-library #{build_dir}/gem_library.js --pre-js #{build_dir}/gem_append.js"
    end

    # Generated emscripten JS glue files for gem tests.
    def gem_test_js_files
      ["#{build_dir}/gem_test_library.js", "#{build_dir}/gem_test_append.js"]
    end

    # emcc flags wiring in the gem test JS glue files.
    def gem_test_js_flags
      "--js-library #{build_dir}/gem_test_library.js --pre-js #{build_dir}/gem_test_append.js"
    end

    # Prepare exported functions for emscripten
    # Webruby now supports 3 kinds of Ruby source code loading methods:
    # * WEBRUBY.run(): this function loads source code compiled from
    # the app folder, which is already contained in the js file.
    # * WEBRUBY.run_bytecode(): this function loads an array of mruby
    # bytecode, we can generate bytecode using mrbc binary in mruby and
    # load the source code at runtime.
    # * WEBRUBY.run_source(): this function parses and loads Ruby source
    # code on the fly.
    # Note that different functions are needed for the 3 different loading methods,
    # for example, WEBRUBY.run_source requires all the parsing code is present,
    # while the first 2 modes only requires code for loading bytecodes.
    # Given these considerations, we allow 3 loading modes in webruby:
    # 0 - only WEBRUBY.run is supported
    # 1 - WEBRUBY.run and WEBRUBY.run_bytecode are supported
    # 2 - all 3 loading methods are supported
    # It may appear that mode 0 and mode 1 requires the same set of functions
    # since they both load bytecodes, but due to the fact that mode 0 only loads
    # pre-defined bytecode array, chances are optimizers may perform some tricks
    # to eliminate parts of the source code for mode 0. Hence we still distinguish
    # mode 0 from mode 1 here
    COMMON_EXPORTED_FUNCTIONS = ['mrb_open', 'mrb_close'];

    # Gets a list of all exported functions including following types:
    # * Functions exported by mrbgems
    # * Functions required by loading modes
    # * Functions that are customly added by users
    #
    # ==== Attributes
    #
    # * +gem_function_file+ - File name of functions exported by mrbgems, this is
    # generated by scripts/gen_gems_config.rb
    # * +loading_mode+ - Loading mode (0, 1 or 2, see above)
    # * +custom_functions+ - Array of custom functions added by user
    def get_exported_functions(gem_function_file, loading_mode, custom_functions)
      loading_mode = loading_mode.to_i
      functions = File.readlines(gem_function_file).map { |f| f.strip }
      functions = functions.concat(custom_functions)
      functions = functions.concat(COMMON_EXPORTED_FUNCTIONS)
      functions << 'webruby_internal_setup'
      # WEBRUBY.run is supported by all loading modes
      functions << 'webruby_internal_run'
      # WEBRUBY.run_bytecode
      functions << 'webruby_internal_run_bytecode' if loading_mode > 0
      # WEBRUBY.run_source
      functions << 'webruby_internal_run_source' if loading_mode > 1
      functions.uniq
    end

    # Generate command line option for exported functions, see
    # get_exported_functions for argument details
    def get_exported_arg(gem_function_file, loading_mode, custom_functions)
      func_str = get_exported_functions(gem_function_file, loading_mode, custom_functions)
                   .map { |f| "'_#{f}'" }.join ', '
      "-s EXPORTED_FUNCTIONS=\"[#{func_str}]\""
    end
  end
end
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.thrift.perf;
import com.facebook.thrift.*;
import com.facebook.thrift.protocol.*;
import com.facebook.thrift.server.*;
import com.facebook.thrift.transport.*;
/**
 * Load tester that serves the LoadTest interface over a half-sync/half-async
 * (HsHa) Thrift server: non-blocking framed I/O with a worker thread pool.
 */
public class HsHaServerLoadTester extends LoadTester {
  public static void main(String[] args) throws Exception {
    new HsHaServerLoadTester().run(args);
  }

  /**
   * Builds a THsHaServer: LoadTest handler behind a framed, binary-protocol,
   * non-blocking socket, with min/max worker threads taken from the parser.
   */
  public TServer createServer() throws Exception {
    LoadTesterArgumentParser parser = getArgumentParser();

    // Wire the load-test handler into a processor factory.
    TProcessorFactory processorFactory =
        new TProcessorFactory(new LoadTest.Processor(new LoadTestHandler()));

    // Non-blocking listening socket; framed transport on both directions.
    TNonblockingServerTransport listenSocket =
        new TNonblockingServerSocket(parser.getListenPort());
    TFramedTransport.Factory framedFactory = new TFramedTransport.Factory();
    TProtocolFactory binaryFactory = new TBinaryProtocol.Factory();

    // Fixed-size worker pool: min == max == configured thread count.
    THsHaServer.Options serverOptions = new THsHaServer.Options();
    int workerThreads = parser.getNumberOfThreads();
    serverOptions.minWorkerThreads = workerThreads;
    serverOptions.maxWorkerThreads = workerThreads;

    return new THsHaServer(
        processorFactory, listenSocket, framedFactory, framedFactory,
        binaryFactory, binaryFactory, serverOptions);
  }
}
| {
"pile_set_name": "Github"
} |
contrib/bitcoind.bash-completion bitcoind
| {
"pile_set_name": "Github"
} |
-- source include/have_multi_ndb.inc
--disable_warnings
connection server1;
DROP TABLE IF EXISTS t1,t2;
connection server2;
DROP TABLE IF EXISTS t1;
set @old_auto_increment_offset = @@session.auto_increment_offset;
set @old_auto_increment_increment = @@session.auto_increment_increment;
set @old_ndb_autoincrement_prefetch_sz = @@session.ndb_autoincrement_prefetch_sz;
connection server1;
--enable_warnings
set @old_auto_increment_offset = @@session.auto_increment_offset;
set @old_auto_increment_increment = @@session.auto_increment_increment;
set @old_ndb_autoincrement_prefetch_sz = @@session.ndb_autoincrement_prefetch_sz;
flush status;
create table t1 (a int not null auto_increment primary key) engine ndb;
# Step 1: Verify simple insert
insert into t1 values (NULL);
select * from t1 order by a;
# Step 2: Verify simple update with higher than highest value causes
# next insert to use updated_value + 1
update t1 set a = 5 where a = 1;
insert into t1 values (NULL);
select * from t1 order by a;
# Step 3: Verify insert that inserts higher than highest value causes
# next insert to use inserted_value + 1
insert into t1 values (7);
insert into t1 values (NULL);
select * from t1 order by a;
# Step 4: Verify that insert into hole, lower than highest value doesn't
# affect next insert
insert into t1 values (2);
insert into t1 values (NULL);
select * from t1 order by a;
# Step 5: Verify that update into hole, lower than highest value doesn't
# affect next insert
update t1 set a = 4 where a = 2;
insert into t1 values (NULL);
select * from t1 order by a;
# Step 6: Verify that delete of highest value doesn't cause the next
# insert to reuse this value
delete from t1 where a = 10;
insert into t1 values (NULL);
select * from t1 order by a;
# Step 7: Verify that REPLACE has the same effect as INSERT
replace t1 values (NULL);
select * from t1 order by a;
replace t1 values (15);
select * from t1 order by a;
replace into t1 values (NULL);
select * from t1 order by a;
# Step 8: Verify that REPLACE has the same effect as UPDATE
replace t1 values (15);
select * from t1 order by a;
# Step 9: Verify that IGNORE doesn't affect auto_increment
insert ignore into t1 values (NULL);
select * from t1 order by a;
insert ignore into t1 values (15), (NULL);
select * from t1 order by a;
# Step 10: Verify that on duplicate key as UPDATE behaves as an
# UPDATE
insert into t1 values (15)
on duplicate key update a = 20;
insert into t1 values (NULL);
select * from t1 order by a;
# Step 11: Verify that on duplicate key as INSERT behaves as INSERT
insert into t1 values (NULL) on duplicate key update a = 30;
select * from t1 order by a;
insert into t1 values (30) on duplicate key update a = 40;
select * from t1 order by a;
#Step 12: Vefify INSERT IGNORE (bug#32055)
insert ignore into t1 values(600),(NULL),(NULL),(610),(NULL);
select * from t1 order by a;
drop table t1;
#Step 13: Verify auto_increment of unique key
create table t1 (a int not null primary key,
b int not null unique auto_increment) engine ndb;
insert into t1 values (1, NULL);
insert into t1 values (3, NULL);
update t1 set b = 3 where a = 3;
insert into t1 values (4, NULL);
select * from t1 order by a;
drop table t1;
#Step 14: Verify that auto_increment_increment and auto_increment_offset
# work as expected
CREATE TABLE t1 (
pk INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
b INT NOT NULL,
c INT NOT NULL UNIQUE
) ENGINE=NDBCLUSTER;
CREATE TABLE t2 (
pk INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
b INT NOT NULL,
c INT NOT NULL UNIQUE
) ENGINE=MYISAM;
SET @@session.auto_increment_increment=10;
INSERT INTO t1 (b,c) VALUES (1,0),(2,1),(3,2);
INSERT INTO t2 (b,c) VALUES (1,0),(2,1),(3,2);
SELECT * FROM t1 ORDER BY pk;
SELECT COUNT(t1.pk) FROM t1, t2 WHERE t1.pk = t2.pk AND t1.b = t2.b AND t1.c = t1.c;
TRUNCATE t1;
TRUNCATE t2;
SET @@session.auto_increment_offset=5;
INSERT INTO t1 (b,c) VALUES (1,0),(2,1),(3,2);
INSERT INTO t1 (pk,b,c) VALUES (27,4,3),(NULL,5,4),(99,6,5),(NULL,7,6);
INSERT INTO t2 (b,c) VALUES (1,0),(2,1),(3,2);
INSERT INTO t2 (pk,b,c) VALUES (27,4,3),(NULL,5,4),(99,6,5),(NULL,7,6);
SELECT * FROM t1 ORDER BY pk;
SELECT COUNT(t1.pk) FROM t1, t2 WHERE t1.pk = t2.pk AND t1.b = t2.b AND t1.c = t1.c;
TRUNCATE t1;
TRUNCATE t2;
SET @@session.auto_increment_increment=2;
INSERT INTO t1 (b,c) VALUES (1,0),(2,1),(3,2);
INSERT INTO t2 (b,c) VALUES (1,0),(2,1),(3,2);
SELECT * FROM t1 ORDER BY pk;
SELECT COUNT(t1.pk) FROM t1, t2 WHERE t1.pk = t2.pk AND t1.b = t2.b AND t1.c = t1.c;
DROP TABLE t1, t2;
CREATE TABLE t1 (
pk INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
b INT NOT NULL,
c INT NOT NULL UNIQUE
) ENGINE=NDBCLUSTER AUTO_INCREMENT = 7;
CREATE TABLE t2 (
pk INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
b INT NOT NULL,
c INT NOT NULL UNIQUE
) ENGINE=MYISAM AUTO_INCREMENT = 7;
SET @@session.auto_increment_offset=1;
SET @@session.auto_increment_increment=1;
INSERT INTO t1 (b,c) VALUES (1,0),(2,1),(3,2);
INSERT INTO t2 (b,c) VALUES (1,0),(2,1),(3,2);
SELECT * FROM t1 ORDER BY pk;
SELECT COUNT(t1.pk) FROM t1, t2 WHERE t1.pk = t2.pk AND t1.b = t2.b AND t1.c = t1.c;
DROP TABLE t1, t2;
CREATE TABLE t1 (
pk INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
b INT NOT NULL,
c INT NOT NULL UNIQUE
) ENGINE=NDBCLUSTER AUTO_INCREMENT = 3;
CREATE TABLE t2 (
pk INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
b INT NOT NULL,
c INT NOT NULL UNIQUE
) ENGINE=MYISAM AUTO_INCREMENT = 3;
SET @@session.auto_increment_offset=5;
SET @@session.auto_increment_increment=10;
INSERT INTO t1 (b,c) VALUES (1,0),(2,1),(3,2);
INSERT INTO t2 (b,c) VALUES (1,0),(2,1),(3,2);
SELECT * FROM t1 ORDER BY pk;
SELECT COUNT(t1.pk) FROM t1, t2 WHERE t1.pk = t2.pk AND t1.b = t2.b AND t1.c = t1.c;
DROP TABLE t1, t2;
CREATE TABLE t1 (
pk INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
b INT NOT NULL,
c INT NOT NULL UNIQUE
) ENGINE=NDBCLUSTER AUTO_INCREMENT = 7;
CREATE TABLE t2 (
pk INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
b INT NOT NULL,
c INT NOT NULL UNIQUE
) ENGINE=MYISAM AUTO_INCREMENT = 7;
SET @@session.auto_increment_offset=5;
SET @@session.auto_increment_increment=10;
INSERT INTO t1 (b,c) VALUES (1,0),(2,1),(3,2);
INSERT INTO t2 (b,c) VALUES (1,0),(2,1),(3,2);
SELECT * FROM t1 ORDER BY pk;
SELECT COUNT(t1.pk) FROM t1, t2 WHERE t1.pk = t2.pk AND t1.b = t2.b AND t1.c = t1.c;
DROP TABLE t1, t2;
CREATE TABLE t1 (
pk INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
b INT NOT NULL,
c INT NOT NULL UNIQUE
) ENGINE=NDBCLUSTER AUTO_INCREMENT = 5;
CREATE TABLE t2 (
pk INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
b INT NOT NULL,
c INT NOT NULL UNIQUE
) ENGINE=MYISAM AUTO_INCREMENT = 5;
SET @@session.auto_increment_offset=5;
SET @@session.auto_increment_increment=10;
INSERT INTO t1 (b,c) VALUES (1,0),(2,1),(3,2);
INSERT INTO t2 (b,c) VALUES (1,0),(2,1),(3,2);
SELECT * FROM t1 ORDER BY pk;
SELECT COUNT(t1.pk) FROM t1, t2 WHERE t1.pk = t2.pk AND t1.b = t2.b AND t1.c = t1.c;
DROP TABLE t1, t2;
CREATE TABLE t1 (
pk INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
b INT NOT NULL,
c INT NOT NULL UNIQUE
) ENGINE=NDBCLUSTER AUTO_INCREMENT = 100;
CREATE TABLE t2 (
pk INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
b INT NOT NULL,
c INT NOT NULL UNIQUE
) ENGINE=MYISAM AUTO_INCREMENT = 100;
SET @@session.auto_increment_offset=5;
SET @@session.auto_increment_increment=10;
INSERT INTO t1 (b,c) VALUES (1,0),(2,1),(3,2);
INSERT INTO t2 (b,c) VALUES (1,0),(2,1),(3,2);
SELECT * FROM t1 ORDER BY pk;
SELECT COUNT(t1.pk) FROM t1, t2 WHERE t1.pk = t2.pk AND t1.b = t2.b AND t1.c = t1.c;
DROP TABLE t1, t2;
#Step 15: Now verify that behaviour on multiple MySQL Servers behave
# properly. Start by dropping table and recreating it to start
# counters and id caches from zero again.
--disable_warnings
connection server2;
SET @@session.auto_increment_offset=1;
SET @@session.auto_increment_increment=1;
set ndb_autoincrement_prefetch_sz = 32;
drop table if exists t1;
connection server1;
SET @@session.auto_increment_offset=1;
SET @@session.auto_increment_increment=1;
set ndb_autoincrement_prefetch_sz = 32;
--enable_warnings
create table t1 (a int not null auto_increment primary key) engine ndb;
# Basic test, ensure that the second server gets a new range.
#Generate record with key = 1
insert into t1 values (NULL);
connection server2;
#Generate record with key = 33
insert into t1 values (NULL);
connection server1;
select * from t1 order by a;
#This insert should not affect the range of the second server
insert into t1 values (20);
connection server2;
insert into t1 values (NULL);
select * from t1 order by a;
connection server1;
#This insert should remove cached values but also skip values already
#taken by server2, given that there is no method of communicating with
#the other server it should also cause a conflict
connection server1;
insert into t1 values (35);
insert into t1 values (NULL);
connection server2;
--error ER_DUP_ENTRY
insert into t1 values (NULL);
select * from t1 order by a;
insert into t1 values (100);
insert into t1 values (NULL);
connection server1;
insert into t1 values (NULL);
select * from t1 order by a;
set auto_increment_offset = @old_auto_increment_offset;
set auto_increment_increment = @old_auto_increment_increment;
set ndb_autoincrement_prefetch_sz = 1;
drop table t1;
connection server2;
set auto_increment_offset = @old_auto_increment_offset;
set auto_increment_increment = @old_auto_increment_increment;
set ndb_autoincrement_prefetch_sz = 1;
# bug#46712 Auto_increment work incorrectly when using triggers and NDB Cluster
#
# Testing that auto_increment values are set correctly when inserting from
# multiple SQL-nodes
connection server1;
CREATE TABLE `t1` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`k` int(10) unsigned NOT NULL DEFAULT '0',
`c` char(120) NOT NULL DEFAULT '',
`pad` char(60) NOT NULL DEFAULT '',
PRIMARY KEY (`id`),
KEY `k` (`k`)
) ENGINE=ndbcluster;
CREATE TABLE `t2` (
`evend_id` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
`timestamp` int(11) NOT NULL,
`server_id` int(11) NOT NULL,
PRIMARY KEY (`evend_id`)
) ENGINE=ndbcluster;
insert into t1 values (null,1,'',''),(null,2,'','');
DELIMITER |;
CREATE TRIGGER tr1
AFTER UPDATE ON t1
FOR EACH ROW
BEGIN
insert into t2(timestamp, server_id) values(UNIX_TIMESTAMP(),@@global.server_id);
end;
|
DELIMITER ;|
connection server2;
DELIMITER |;
CREATE TRIGGER tr1
AFTER UPDATE ON t1
FOR EACH ROW
BEGIN
insert into t2(timestamp, server_id) values(UNIX_TIMESTAMP(),@@global.server_id);
end;
|
DELIMITER ;|
connection server1;
update t1 set c='foobar' where id=1;
connection server2;
update t1 set c='foobar' where id=1;
connection server1;
update t1 set c='foobar' where id=1;
connection server2;
update t1 set c='foobar' where id=1;
connection server1;
update t1 set c='foobar' where id=1;
connection server2;
update t1 set c='foobar' where id=1;
connection server1;
update t1 set c='foobar' where id=1;
connection server2;
update t1 set c='foobar' where id=1;
connection server1;
select evend_id,server_id from t2 order by evend_id;
drop trigger tr1;
drop table t1, t2;
connection server2;
--disable_warnings
drop trigger if exists tr1;
--enable_warnings
connection server1;
#
# Bug #47865 SHOW CREATE TABLE does not show the current auto_increment number for ndb tables
#
create table t1 (a int primary key auto_increment, b int) engine=ndb;
insert into t1 values (null,1),(null,2),(null,3);
--disable_warnings
show create table t1;
--enable_warnings
drop table t1;
#
# Bug #50247 ALTER TABLE cannot change auto_increment
#
create table t1 (a int primary key auto_increment, b int) auto_increment=5
engine=ndb;
alter table t1 auto_increment=32000000;
--disable_warnings
show create table t1;
--enable_warnings
drop table t1;
#
# Bug #46985 Got error 4350 'Transaction already aborted' from NDBCLUSTER
#
# This bug was a configuration issue, but the testcase revealed another
# bug whereauto_increment was not handled correctly for INSERT IGNORE
CREATE TABLE t1 (
id bigint(20) unsigned NOT NULL AUTO_INCREMENT,
data binary(16) NOT NULL,
PRIMARY KEY (id),
UNIQUE KEY uk_t1_data (data)
) ENGINE = NDB;
INSERT IGNORE INTO t1 (data) VALUES (6),(6),(6),(6),(6),(6),(6);
SELECT id FROM t1;
DELETE from t1;
INSERT IGNORE INTO t1 (data) VALUES (6),(6),(6),(6),(6),(6),(6);
SELECT id from t1;
DROP TABLE t1;
#
# Bug #13731134 AUTO-INC COUNTER IS NOT UPDATED WITH EXPLICIT @@INSERT_ID SET
#
create table t1 (a serial) engine ndb;
set @@insert_id=1;
insert into t1 values(null);
connection server2;
insert into t1 values(null);
insert into t1 values(null);
insert into t1 values(null);
set @@insert_id=17;
insert into t1 values(null);
insert into t1 values(null);
insert into t1 values(null);
select * from t1 order by a;
set @@insert_id=1;
--error ER_DUP_ENTRY
insert into t1 values(null);
set ndb_autoincrement_prefetch_sz = @old_ndb_autoincrement_prefetch_sz;
connection server1;
set ndb_autoincrement_prefetch_sz = @old_ndb_autoincrement_prefetch_sz;
drop table t1;
#
# Bug 30316314 INCREASE AUTO INCREMENT PREFETCH SIZE
#
create table t1 (a tinyint not null auto_increment primary key) engine ndb;
drop table t1;
| {
"pile_set_name": "Github"
} |
const CONFIG = require('config')
const __ = CONFIG.universalPath
const _ = __.require('builders', 'utils')
const { createUser } = require('./users')
const { createRandomizedItems } = require('./items')

// Cached promise so the fixture population runs at most once per process.
let populatePromise

const usersCount = 8
const publicItemsPerUser = 10

const API = module.exports = {
  // Create `usersCount` users, each owning `publicItemsPerUser` items.
  // Idempotent: every call after the first returns the first run's promise.
  populate: () => {
    if (populatePromise == null) {
      const pendingUsers = _.times(usersCount, API.createUserWithItems)
      populatePromise = Promise.all(pendingUsers)
    }
    return populatePromise
  },

  createUserWithItems: async username => {
    const user = await createUser(username)
    // One `undefined` placeholder per item: presumably createRandomizedItems
    // fills in randomized defaults for missing data — TODO confirm against
    // its implementation.
    const itemsData = _.times(publicItemsPerUser, () => undefined)
    await createRandomizedItems(user, itemsData)
    return user
  }
}
| {
"pile_set_name": "Github"
} |
--- tightvnc-1.2.9-orig/Xvnc/config/cf/linux.cf 2002-03-20 02:49:23.000000000 -0800
+++ tightvnc/Xvnc/config/cf/linux.cf 2005-06-12 12:52:04.254321000 -0700
@@ -160,7 +160,9 @@
#define MkdirHierCmd mkdir -p
#if LinuxElfDefault
#if UseElfFormat
+#ifndef CcCmd
#define CcCmd gcc
+#endif
#define AsCmd as
#define LdCmd ld
#define AsmDefines -D__ELF__
@@ -232,7 +234,9 @@
#define BuildDynamicLoading YES
#endif
#endif
+#ifndef CppCmd
#define CppCmd /lib/cpp
+#endif
#ifdef USE_BYACC
#define YaccCmd byacc
#else
@@ -249,7 +253,7 @@
#ifdef i386Architecture
#define OptimizedCDebugFlags DefaultGcc2i386Opt
-#define LinuxMachineDefines -D__i386__
+#define LinuxMachineDefines -D__arm__
#define ServerOSDefines XFree86ServerOSDefines -DDDXTIME -DPART_NET
#define ServerExtraDefines -DGCCUSESGAS XFree86ServerDefines
#endif /* i386Architecture */
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="iso-8859-1"?>
<project>
<fileVersion>1</fileVersion>
<configuration>
<name>Debug</name>
<toolchain>
<name>ARM</name>
</toolchain>
<debug>1</debug>
<settings>
<name>General</name>
<archiveVersion>3</archiveVersion>
<data>
<version>14</version>
<wantNonLocal>1</wantNonLocal>
<debug>1</debug>
<option>
<name>ExePath</name>
<state>ewarm\Exe</state>
</option>
<option>
<name>ObjPath</name>
<state>ewarm\Obj</state>
</option>
<option>
<name>ListPath</name>
<state>ewarm\List</state>
</option>
<option>
<name>Variant</name>
<version>19</version>
<state>39</state>
</option>
<option>
<name>GEndianMode</name>
<state>0</state>
</option>
<option>
<name>Input variant</name>
<version>1</version>
<state>3</state>
</option>
<option>
<name>Input description</name>
<state>No specifier n, no float nor long long, no scan set, no assignment suppressing.</state>
</option>
<option>
<name>Output variant</name>
<version>0</version>
<state>3</state>
</option>
<option>
<name>Output description</name>
<state>No specifier a, A, no specifier n, no float nor long long, no flags.</state>
</option>
<option>
<name>GOutputBinary</name>
<state>0</state>
</option>
<option>
<name>FPU</name>
<version>2</version>
<state>5</state>
</option>
<option>
<name>OGCoreOrChip</name>
<state>1</state>
</option>
<option>
<name>GRuntimeLibSelect</name>
<version>0</version>
<state>1</state>
</option>
<option>
<name>GRuntimeLibSelectSlave</name>
<version>0</version>
<state>1</state>
</option>
<option>
<name>RTDescription</name>
<state>Use the normal configuration of the C/C++ runtime library. No locale interface, C locale, no file descriptor support, no multibytes in printf and scanf, and no hex floats in strtod.</state>
</option>
<option>
<name>RTConfigPath</name>
<state>$TOOLKIT_DIR$\INC\DLib_Config_Normal.h</state>
</option>
<option>
<name>OGProductVersion</name>
<state>5.11.0.50579</state>
</option>
<option>
<name>OGLastSavedByProductVersion</name>
<state>5.11.0.50579</state>
</option>
<option>
<name>GeneralMisraRules</name>
<version>0</version>
<state>1000111110110101101110011100111111101110011011000101110111101101100111111111111100110011111001110111001111111111111111111111111</state>
</option>
<option>
<name>GeneralEnableMisra</name>
<state>0</state>
</option>
<option>
<name>GeneralMisraVerbose</name>
<state>0</state>
</option>
<option>
<name>OGChipSelectEditMenu</name>
<state>TM4C123GH6PGE TexasInstruments TM4C123GH6PGE</state>
</option>
<option>
<name>GenLowLevelInterface</name>
<state>1</state>
</option>
<option>
<name>GEndianModeBE</name>
<state>1</state>
</option>
<option>
<name>OGBufferedTerminalOutput</name>
<state>0</state>
</option>
</data>
</settings>
<settings>
<name>ICCARM</name>
<archiveVersion>2</archiveVersion>
<data>
<version>19</version>
<wantNonLocal>1</wantNonLocal>
<debug>1</debug>
<option>
<name>CCDefines</name>
<state>ewarm</state>
<state>PART_TM4C123GH6PGE</state>
<state>TARGET_IS_TM4C123_RB1</state>
</option>
<option>
<name>CCPreprocFile</name>
<state>0</state>
</option>
<option>
<name>CCPreprocComments</name>
<state>0</state>
</option>
<option>
<name>CCPreprocLine</name>
<state>0</state>
</option>
<option>
<name>CCListCFile</name>
<state>0</state>
</option>
<option>
<name>CCListCMnemonics</name>
<state>0</state>
</option>
<option>
<name>CCListCMessages</name>
<state>0</state>
</option>
<option>
<name>CCListAssFile</name>
<state>0</state>
</option>
<option>
<name>CCListAssSource</name>
<state>0</state>
</option>
<option>
<name>CCEnableRemarks</name>
<state>0</state>
</option>
<option>
<name>CCDiagSuppress</name>
<state>Pa050</state>
</option>
<option>
<name>CCDiagRemark</name>
<state></state>
</option>
<option>
<name>CCDiagWarning</name>
<state></state>
</option>
<option>
<name>CCDiagError</name>
<state></state>
</option>
<option>
<name>CCObjPrefix</name>
<state>1</state>
</option>
<option>
<name>CCAllowList</name>
<version>1</version>
<state>1111111</state>
</option>
<option>
<name>CCDebugInfo</name>
<state>1</state>
</option>
<option>
<name>IEndianMode</name>
<state>1</state>
</option>
<option>
<name>IProcessor</name>
<state>1</state>
</option>
<option>
<name>IExtraOptionsCheck</name>
<state>0</state>
</option>
<option>
<name>IExtraOptions</name>
<state></state>
</option>
<option>
<name>CCLangConformance</name>
<state>0</state>
</option>
<option>
<name>CCSignedPlainChar</name>
<state>1</state>
</option>
<option>
<name>CCRequirePrototypes</name>
<state>0</state>
</option>
<option>
<name>CCMultibyteSupport</name>
<state>0</state>
</option>
<option>
<name>CCDiagWarnAreErr</name>
<state>0</state>
</option>
<option>
<name>CCCompilerRuntimeInfo</name>
<state>0</state>
</option>
<option>
<name>IFpuProcessor</name>
<state>1</state>
</option>
<option>
<name>OutputFile</name>
<state>$FILE_BNAME$.o</state>
</option>
<option>
<name>CCLangSelect</name>
<state>0</state>
</option>
<option>
<name>CCLibConfigHeader</name>
<state>1</state>
</option>
<option>
<name>PreInclude</name>
<state></state>
</option>
<option>
<name>CompilerMisraRules</name>
<version>0</version>
<state>1000111110110101101110011100111111101110011011000101110111101101100111111111111100110011111001110111001111111111111111111111111</state>
</option>
<option>
<name>CompilerMisraOverride</name>
<state>0</state>
</option>
<option>
<name>CCIncludePath2</name>
<state>$PROJ_DIR$\..</state>
<state>$PROJ_DIR$\..\..\..\..</state>
</option>
<option>
<name>CCStdIncCheck</name>
<state>0</state>
</option>
<option>
<name>CCStdIncludePath</name>
<state>$TOOLKIT_DIR$\INC\</state>
</option>
<option>
<name>CCCodeSection</name>
<state>.text</state>
</option>
<option>
<name>IInterwork2</name>
<state>0</state>
</option>
<option>
<name>IProcessorMode2</name>
<state>1</state>
</option>
<option>
<name>CCOptLevel</name>
<state>3</state>
</option>
<option>
<name>CCOptStrategy</name>
<version>0</version>
<state>1</state>
</option>
<option>
<name>CCOptLevelSlave</name>
<state>3</state>
</option>
</data>
</settings>
<settings>
<name>AARM</name>
<archiveVersion>2</archiveVersion>
<data>
<version>7</version>
<wantNonLocal>1</wantNonLocal>
<debug>1</debug>
<option>
<name>AObjPrefix</name>
<state>1</state>
</option>
<option>
<name>AEndian</name>
<state>1</state>
</option>
<option>
<name>ACaseSensitivity</name>
<state>1</state>
</option>
<option>
<name>MacroChars</name>
<version>0</version>
<state>0</state>
</option>
<option>
<name>AWarnEnable</name>
<state>0</state>
</option>
<option>
<name>AWarnWhat</name>
<state>0</state>
</option>
<option>
<name>AWarnOne</name>
<state></state>
</option>
<option>
<name>AWarnRange1</name>
<state></state>
</option>
<option>
<name>AWarnRange2</name>
<state></state>
</option>
<option>
<name>ADebug</name>
<state>1</state>
</option>
<option>
<name>AltRegisterNames</name>
<state>0</state>
</option>
<option>
<name>ADefines</name>
<state>ewarm</state>
</option>
<option>
<name>AList</name>
<state>0</state>
</option>
<option>
<name>AListHeader</name>
<state>1</state>
</option>
<option>
<name>AListing</name>
<state>1</state>
</option>
<option>
<name>Includes</name>
<state>0</state>
</option>
<option>
<name>MacDefs</name>
<state>0</state>
</option>
<option>
<name>MacExps</name>
<state>1</state>
</option>
<option>
<name>MacExec</name>
<state>0</state>
</option>
<option>
<name>OnlyAssed</name>
<state>0</state>
</option>
<option>
<name>MultiLine</name>
<state>0</state>
</option>
<option>
<name>PageLengthCheck</name>
<state>0</state>
</option>
<option>
<name>PageLength</name>
<state>80</state>
</option>
<option>
<name>TabSpacing</name>
<state>8</state>
</option>
<option>
<name>AXRef</name>
<state>0</state>
</option>
<option>
<name>AXRefDefines</name>
<state>0</state>
</option>
<option>
<name>AXRefInternal</name>
<state>0</state>
</option>
<option>
<name>AXRefDual</name>
<state>0</state>
</option>
<option>
<name>AProcessor</name>
<state>1</state>
</option>
<option>
<name>AFpuProcessor</name>
<state>1</state>
</option>
<option>
<name>AOutputFile</name>
<state>$FILE_BNAME$.o</state>
</option>
<option>
<name>AMultibyteSupport</name>
<state>0</state>
</option>
<option>
<name>ALimitErrorsCheck</name>
<state>0</state>
</option>
<option>
<name>ALimitErrorsEdit</name>
<state>100</state>
</option>
<option>
<name>AIgnoreStdInclude</name>
<state>0</state>
</option>
<option>
<name>AStdIncludes</name>
<state>$TOOLKIT_DIR$\INC\</state>
</option>
<option>
<name>AUserIncludes</name>
<state>$PROJ_DIR$\..</state>
<state>$PROJ_DIR$\..\..\..\..</state>
</option>
<option>
<name>AExtraOptionsCheckV2</name>
<state>0</state>
</option>
<option>
<name>AExtraOptionsV2</name>
<state></state>
</option>
</data>
</settings>
<settings>
<name>OBJCOPY</name>
<archiveVersion>0</archiveVersion>
<data>
<version>1</version>
<wantNonLocal>1</wantNonLocal>
<debug>1</debug>
<option>
<name>OOCOutputFormat</name>
<version>1</version>
<state>2</state>
</option>
<option>
<name>OCOutputOverride</name>
<state>0</state>
</option>
<option>
<name>OOCOutputFile</name>
<state>mpu_fault.bin</state>
</option>
<option>
<name>OOCCommandLineProducer</name>
<state>1</state>
</option>
<option>
<name>OOCObjCopyEnable</name>
<state>1</state>
</option>
</data>
</settings>
<settings>
<name>CUSTOM</name>
<archiveVersion>3</archiveVersion>
<data>
<extensions></extensions>
<cmdline></cmdline>
</data>
</settings>
<settings>
<name>BICOMP</name>
<archiveVersion>0</archiveVersion>
<data/>
</settings>
<settings>
<name>BUILDACTION</name>
<archiveVersion>1</archiveVersion>
<data>
<prebuild></prebuild>
<postbuild></postbuild>
</data>
</settings>
<settings>
<name>ILINK</name>
<archiveVersion>0</archiveVersion>
<data>
<version>5</version>
<wantNonLocal>1</wantNonLocal>
<debug>1</debug>
<option>
<name>IlinkLibIOConfig</name>
<state>1</state>
</option>
<option>
<name>XLinkMisraHandler</name>
<state>0</state>
</option>
<option>
<name>IlinkInputFileSlave</name>
<state>0</state>
</option>
<option>
<name>IlinkOutputFile</name>
<state>mpu_fault.out</state>
</option>
<option>
<name>IlinkDebugInfoEnable</name>
<state>1</state>
</option>
<option>
<name>IlinkKeepSymbols</name>
<state></state>
</option>
<option>
<name>IlinkRawBinaryFile</name>
<state></state>
</option>
<option>
<name>IlinkRawBinarySymbol</name>
<state></state>
</option>
<option>
<name>IlinkRawBinarySegment</name>
<state></state>
</option>
<option>
<name>IlinkRawBinaryAlign</name>
<state></state>
</option>
<option>
<name>IlinkDefines</name>
<state></state>
</option>
<option>
<name>IlinkConfigDefines</name>
<state></state>
</option>
<option>
<name>IlinkMapFile</name>
<state>1</state>
</option>
<option>
<name>IlinkLogFile</name>
<state>0</state>
</option>
<option>
<name>IlinkLogInitialization</name>
<state>0</state>
</option>
<option>
<name>IlinkLogModule</name>
<state>0</state>
</option>
<option>
<name>IlinkLogSection</name>
<state>0</state>
</option>
<option>
<name>IlinkLogVeneer</name>
<state>0</state>
</option>
<option>
<name>IlinkIcfOverride</name>
<state>1</state>
</option>
<option>
<name>IlinkIcfFile</name>
<state>$PROJ_DIR$\mpu_fault.icf</state>
</option>
<option>
<name>IlinkIcfFileSlave</name>
<state></state>
</option>
<option>
<name>IlinkEnableRemarks</name>
<state>0</state>
</option>
<option>
<name>IlinkSuppressDiags</name>
<state></state>
</option>
<option>
<name>IlinkTreatAsRem</name>
<state></state>
</option>
<option>
<name>IlinkTreatAsWarn</name>
<state></state>
</option>
<option>
<name>IlinkTreatAsErr</name>
<state></state>
</option>
<option>
<name>IlinkWarningsAreErrors</name>
<state>0</state>
</option>
<option>
<name>IlinkUseExtraOptions</name>
<state>0</state>
</option>
<option>
<name>IlinkExtraOptions</name>
<state></state>
</option>
<option>
<name>IlinkLowLevelInterfaceSlave</name>
<state>1</state>
</option>
<option>
<name>IlinkAutoLibEnable</name>
<state>1</state>
</option>
<option>
<name>IlinkAdditionalLibs</name>
<state></state>
</option>
<option>
<name>IlinkOverrideProgramEntryLabel</name>
<state>1</state>
</option>
<option>
<name>IlinkProgramEntryLabelSelect</name>
<state>0</state>
</option>
<option>
<name>IlinkProgramEntryLabel</name>
<state>__iar_program_start</state>
</option>
<option>
<name>IlinkNXPLPCChecksum</name>
<state>0</state>
</option>
<option>
<name>DoFill</name>
<state>0</state>
</option>
<option>
<name>FillerByte</name>
<state>0xFF</state>
</option>
<option>
<name>FillerStart</name>
<state>0x0</state>
</option>
<option>
<name>FillerEnd</name>
<state>0x0</state>
</option>
<option>
<name>CrcSize</name>
<version>0</version>
<state>1</state>
</option>
<option>
<name>CrcAlign</name>
<state>1</state>
</option>
<option>
<name>CrcAlgo</name>
<state>1</state>
</option>
<option>
<name>CrcPoly</name>
<state>0x11021</state>
</option>
<option>
<name>CrcCompl</name>
<version>0</version>
<state>0</state>
</option>
<option>
<name>CrcBitOrder</name>
<version>0</version>
<state>0</state>
</option>
<option>
<name>CrcInitialValue</name>
<state>0x0</state>
</option>
<option>
<name>DoCrc</name>
<state>0</state>
</option>
<option>
<name>IlinkBE8Slave</name>
<state>1</state>
</option>
<option>
<name>IlinkBufferedTerminalOutput</name>
<state>1</state>
</option>
</data>
</settings>
<settings>
<name>IARCHIVE</name>
<archiveVersion>0</archiveVersion>
<data>
<version>0</version>
<wantNonLocal>1</wantNonLocal>
<debug>1</debug>
<option>
<name>IarchiveInputs</name>
<state></state>
</option>
<option>
<name>IarchiveOverride</name>
<state>0</state>
</option>
<option>
<name>IarchiveOutput</name>
<state>###Unitialized###</state>
</option>
</data>
</settings>
<settings>
<name>BILINK</name>
<archiveVersion>0</archiveVersion>
<data/>
</settings>
</configuration>
<group>
<name>Libraries</name>
<file>
<name>$PROJ_DIR$\..\..\..\..\driverlib\ewarm\Exe\driverlib.a</name>
</file>
<file>
<name>$PROJ_DIR$\..\..\..\..\grlib\ewarm\Exe\grlib.a</name>
</file>
</group>
<group>
<name>Source</name>
<file>
<name>$PROJ_DIR$\..\drivers\cfal96x64x16.c</name>
</file>
<file>
<name>$PROJ_DIR$\mpu_fault.c</name>
</file>
<file>
<name>$PROJ_DIR$\startup_ewarm.c</name>
</file>
</group>
</project>
| {
"pile_set_name": "Github"
} |
# Puma web-server configuration (loaded with `puma -C <this file>`).

# Fork 3 worker processes.
workers 3

# Load the app before forking so workers share memory via copy-on-write.
preload_app!

# `environment` both sets the Puma environment and returns it.
rails_env = environment ENV.fetch('RAILS_ENV') { 'development' }

if rails_env == 'development'
  # Effectively disable the worker timeout so long debugger pauses don't
  # get a development worker killed and restarted.
  worker_timeout 100_000_000
end

# Re-establish ActiveRecord connections in each forked worker: connections
# opened in the master before forking must not be shared across processes.
on_worker_boot do
  ActiveSupport.on_load(:active_record) do
    ActiveRecord::Base.establish_connection
  end
end
| {
"pile_set_name": "Github"
} |
/*
minizip.c
Version 1.1, February 14h, 2010
sample part of the MiniZip project - ( http://www.winimage.com/zLibDll/minizip.html )
Copyright (C) 1998-2010 Gilles Vollant (minizip) ( http://www.winimage.com/zLibDll/minizip.html )
Modifications of Unzip for Zip64
Copyright (C) 2007-2008 Even Rouault
Modifications for Zip64 support on both zip and unzip
Copyright (C) 2009-2010 Mathias Svensson ( http://result42.com )
*/
#if (!defined(_WIN32)) && (!defined(WIN32)) && (!defined(__APPLE__))
#ifndef __USE_FILE_OFFSET64
#define __USE_FILE_OFFSET64
#endif
#ifndef __USE_LARGEFILE64
#define __USE_LARGEFILE64
#endif
#ifndef _LARGEFILE64_SOURCE
#define _LARGEFILE64_SOURCE
#endif
#ifndef _FILE_OFFSET_BIT
#define _FILE_OFFSET_BIT 64
#endif
#endif
#ifdef __APPLE__
// In darwin and perhaps other BSD variants off_t is a 64 bit value, hence no need for specific 64 bit functions
#define FOPEN_FUNC(filename, mode) fopen(filename, mode)
#define FTELLO_FUNC(stream) ftello(stream)
#define FSEEKO_FUNC(stream, offset, origin) fseeko(stream, offset, origin)
#else
#define FOPEN_FUNC(filename, mode) fopen64(filename, mode)
#define FTELLO_FUNC(stream) ftello64(stream)
#define FSEEKO_FUNC(stream, offset, origin) fseeko64(stream, offset, origin)
#endif
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <errno.h>
#include <fcntl.h>
#ifdef _WIN32
# include <direct.h>
# include <io.h>
#else
# include <unistd.h>
# include <utime.h>
# include <sys/types.h>
# include <sys/stat.h>
#endif
#include "zip.h"
#ifdef _WIN32
#define USEWIN32IOAPI
#include "iowin32.h"
#endif
#define WRITEBUFFERSIZE (16384)
#define MAXFILENAME (256)
#ifdef _WIN32
/* filetime: look up the last-modification time of file f.
   Windows variant: stores the DOS date/time directly into *dt (tmzip is
   unused here). Returns 1 on success, 0 when the file cannot be found. */
uLong filetime(f, tmzip, dt)
    char *f;                /* name of file to get info on */
    tm_zip *tmzip;          /* return value: access, modific. and creation times */
    uLong *dt;              /* dostime */
{
  int ret = 0;
  {
      FILETIME ftLocal;
      HANDLE hFind;
      WIN32_FIND_DATAA ff32;

      hFind = FindFirstFileA(f,&ff32);
      if (hFind != INVALID_HANDLE_VALUE)
      {
        FileTimeToLocalFileTime(&(ff32.ftLastWriteTime),&ftLocal);
        /* Pack the DOS time into the low word and the DOS date into the
           high word of *dt. */
        FileTimeToDosDateTime(&ftLocal,((LPWORD)dt)+1,((LPWORD)dt)+0);
        FindClose(hFind);
        ret = 1;
      }
  }
  return ret;
}
#else
/* BUGFIX: was `#ifdef unix || __APPLE__` — `#ifdef` only tests a single
   identifier, so the `|| __APPLE__` part was silently ignored (or rejected
   by strict compilers) and Apple builds could fall through to the stub
   below. `#if defined(...)` expresses the intended disjunction. */
#if defined(unix) || defined(__APPLE__)
/* filetime: POSIX variant. Fills tmzip from stat(2)'s st_mtime (dt is
   unused but kept for a uniform interface across platforms). The special
   name "-" (stdin) gets the epoch. Returns 1 when stat succeeded, else 0
   (tmzip is then filled from time 0). */
uLong filetime(f, tmzip, dt)
    char *f;                /* name of file to get info on */
    tm_zip *tmzip;          /* return value: access, modific. and creation times */
    uLong *dt;              /* dostime */
{
  int ret=0;
  struct stat s;        /* results of stat() */
  struct tm* filedate;
  time_t tm_t=0;

  if (strcmp(f,"-")!=0)
  {
    char name[MAXFILENAME+1];
    int len = strlen(f);
    if (len > MAXFILENAME)
      len = MAXFILENAME;

    strncpy(name, f,MAXFILENAME-1);
    /* strncpy does not append the trailing NUL if the string is too long. */
    name[ MAXFILENAME ] = '\0';

    /* Not all systems allow stat'ing a file with '/' appended. */
    if (name[len - 1] == '/')
      name[len - 1] = '\0';

    if (stat(name,&s)==0)
    {
      tm_t = s.st_mtime;
      ret = 1;
    }
  }
  filedate = localtime(&tm_t);

  tmzip->tm_sec  = filedate->tm_sec;
  tmzip->tm_min  = filedate->tm_min;
  tmzip->tm_hour = filedate->tm_hour;
  tmzip->tm_mday = filedate->tm_mday;
  tmzip->tm_mon  = filedate->tm_mon ;
  tmzip->tm_year = filedate->tm_year;

  return ret;
}
#else
/* Fallback stub for platforms without a known time API: always fails. */
uLong filetime(f, tmzip, dt)
    char *f;                /* name of file to get info on */
    tm_zip *tmzip;          /* return value: access, modific. and creation times */
    uLong *dt;              /* dostime */
{
    return 0;
}
#endif
#endif
/* Return 1 when `filename` can be opened for reading (i.e. it exists and
   is accessible), 0 otherwise. */
int check_exist_file(filename)
    const char* filename;
{
    FILE* probe = FOPEN_FUNC(filename, "rb");

    if (probe == NULL)
        return 0;

    fclose(probe);
    return 1;
}
/* Print the program banner (name, version, author, project URL) to stdout. */
void do_banner()
{
    printf("MiniZip 1.1, demo of zLib + MiniZip64 package, written by Gilles Vollant\n");
    printf("more info on MiniZip at http://www.winimage.com/zLibDll/minizip.html\n\n");
}
/* Print the command-line usage summary to stdout. */
void do_help()
{
    printf("Usage : minizip [-o] [-a] [-0 to -9] [-p password] [-j] file.zip [files_to_add]\n\n" \
           " -o  Overwrite existing file.zip\n" \
           " -a  Append to existing file.zip\n" \
           " -0  Store only\n" \
           " -1  Compress faster\n" \
           " -9  Compress better\n\n" \
           " -j  exclude path. store only the file name.\n\n");
}
/* Calculate the CRC32 of a file. Needed because when writing an encrypted
   entry, the file's CRC must be known *before* the data is written (it is
   part of the encryption header).
   Reads the whole file through `buf` (size `size_buf`), stores the CRC in
   *result_crc, and returns ZIP_OK or ZIP_ERRNO on open/read failure. */
int getFileCrc(const char* filenameinzip,void*buf,unsigned long size_buf,unsigned long* result_crc)
{
   unsigned long calculate_crc=0;
   int err=ZIP_OK;
   FILE * fin = FOPEN_FUNC(filenameinzip,"rb");

   unsigned long size_read = 0;
   unsigned long total_read = 0;
   if (fin==NULL)
   {
       err = ZIP_ERRNO;
   }

    if (err == ZIP_OK)
        do
        {
            err = ZIP_OK;
            /* NOTE(review): fread returns size_t; the (int) cast is then
               widened into an unsigned long. Harmless for the 16 KB buffer
               used here, but worth confirming if size_buf ever grows. */
            size_read = (int)fread(buf,1,size_buf,fin);
            /* A short read without EOF means a real read error. */
            if (size_read < size_buf)
                if (feof(fin)==0)
                {
                    printf("error in reading %s\n",filenameinzip);
                    err = ZIP_ERRNO;
                }

            if (size_read>0)
                calculate_crc = crc32(calculate_crc,buf,size_read);
            total_read += size_read;

        } while ((err == ZIP_OK) && (size_read>0));

    if (fin)
        fclose(fin);

    *result_crc=calculate_crc;
    printf("file %s crc %lx\n", filenameinzip, calculate_crc);
    return err;
}
/* Return 1 when `filename` is 4 GiB or larger (i.e. the entry needs Zip64
   extensions), 0 otherwise or when the file cannot be inspected. */
int isLargeFile(const char* filename)
{
   int largeFile = 0;
   ZPOS64_T pos = 0;
   FILE* pFile = FOPEN_FUNC(filename, "rb");

   if (pFile != NULL)
   {
      /* BUGFIX: the seek result used to be stored in an unused variable
         and never checked; after a failed seek FTELLO would have reported
         a meaningless position. Only inspect the size if the seek worked. */
      if (FSEEKO_FUNC(pFile, 0, SEEK_END) == 0)
      {
         pos = FTELLO_FUNC(pFile);

         /* Cast for printf: ZPOS64_T is not guaranteed to match %lld. */
         printf("File : %s is %lld bytes\n", filename, (long long)pos);

         if (pos >= 0xffffffff)
            largeFile = 1;
      }

      fclose(pFile);
   }

   return largeFile;
}
/* Entry point of the minizip demo: parse the command line, decide whether
   to create/overwrite/append the target archive, then add each listed file
   to it. Errors are reported on stdout; the exit code stays 0 except for
   allocation failure (as in the original tool). */
int main(argc,argv)
    int argc;
    char *argv[];
{
    int i;
    int opt_overwrite=0;                 /* 0: ask, 1: overwrite, 2: append */
    int opt_compress_level=Z_DEFAULT_COMPRESSION;
    int opt_exclude_path=0;              /* 1: store base names only (-j) */
    int zipfilenamearg = 0;              /* argv index of the archive name */
    char filename_try[MAXFILENAME+16];
    int zipok;
    int err=0;
    int size_buf=0;
    void* buf=NULL;
    const char* password=NULL;

    do_banner();
    if (argc==1)
    {
        do_help();
        return 0;
    }
    else
    {
        /* First pass over argv: collect option flags and remember the first
           non-option argument as the archive name. */
        for (i=1;i<argc;i++)
        {
            if ((*argv[i])=='-')
            {
                const char *p=argv[i]+1;

                while ((*p)!='\0')
                {
                    char c=*(p++);
                    if ((c=='o') || (c=='O'))
                        opt_overwrite = 1;
                    if ((c=='a') || (c=='A'))
                        opt_overwrite = 2;
                    if ((c>='0') && (c<='9'))
                        opt_compress_level = c-'0';
                    if ((c=='j') || (c=='J'))
                        opt_exclude_path = 1;

                    /* -p consumes the next argument as the password. */
                    if (((c=='p') || (c=='P')) && (i+1<argc))
                    {
                        password=argv[i+1];
                        i++;
                    }
                }
            }
            else
            {
                if (zipfilenamearg == 0)
                {
                    zipfilenamearg = i ;
                }
            }
        }
    }

    size_buf = WRITEBUFFERSIZE;
    buf = (void*)malloc(size_buf);
    if (buf==NULL)
    {
        printf("Error allocating memory\n");
        return ZIP_INTERNALERROR;
    }

    if (zipfilenamearg==0)
    {
        zipok=0;
    }
    else
    {
        int i,len;
        int dot_found=0;

        zipok = 1 ;
        strncpy(filename_try, argv[zipfilenamearg],MAXFILENAME-1);
        /* strncpy does not append the trailing NUL if the string is too long. */
        filename_try[ MAXFILENAME ] = '\0';

        /* Append ".zip" when the given name contains no dot at all. */
        len=(int)strlen(filename_try);
        for (i=0;i<len;i++)
            if (filename_try[i]=='.')
                dot_found=1;

        if (dot_found==0)
            strcat(filename_try,".zip");

        if (opt_overwrite==2)
        {
            /* Appending to a file that does not exist means creating it. */
            if (check_exist_file(filename_try)==0)
                opt_overwrite=1;
        }
        else
        if (opt_overwrite==0)
            if (check_exist_file(filename_try)!=0)
            {
                /* Interactive prompt: overwrite, abort, or append. */
                char rep=0;
                do
                {
                    char answer[128];
                    int ret;
                    printf("The file %s exists. Overwrite ? [y]es, [n]o, [a]ppend : ",filename_try);
                    ret = scanf("%1s",answer);
                    if (ret != 1)
                    {
                        exit(EXIT_FAILURE);
                    }
                    rep = answer[0] ;
                    if ((rep>='a') && (rep<='z'))
                        rep -= 0x20;
                }
                while ((rep!='Y') && (rep!='N') && (rep!='A'));
                if (rep=='N')
                    zipok = 0;
                if (rep=='A')
                    opt_overwrite = 2;
            }
    }

    if (zipok==1)
    {
        zipFile zf;
        int errclose;
#        ifdef USEWIN32IOAPI
        zlib_filefunc64_def ffunc;
        fill_win32_filefunc64A(&ffunc);
        zf = zipOpen2_64(filename_try,(opt_overwrite==2) ? 2 : 0,NULL,&ffunc);
#        else
        zf = zipOpen64(filename_try,(opt_overwrite==2) ? 2 : 0);
#        endif

        if (zf == NULL)
        {
            printf("error opening %s\n",filename_try);
            err= ZIP_ERRNO;
        }
        else
            printf("creating %s\n",filename_try);

        for (i=zipfilenamearg+1;(i<argc) && (err==ZIP_OK);i++)
        {
            /* Skip two-character option arguments (-o, -a, -p, -0..-9) that
               appear after the archive name. BUGFIX: the digit test used
               '||' ( >= '0' OR <= '9' ), which is always true and made the
               whole letter check meaningless; it must be '&&'. */
            if (!((((*(argv[i]))=='-') || ((*(argv[i]))=='/')) &&
                  ((argv[i][1]=='o') || (argv[i][1]=='O') ||
                   (argv[i][1]=='a') || (argv[i][1]=='A') ||
                   (argv[i][1]=='p') || (argv[i][1]=='P') ||
                   ((argv[i][1]>='0') && (argv[i][1]<='9'))) &&
                  (strlen(argv[i]) == 2)))
            {
                FILE * fin;
                int size_read;
                const char* filenameinzip = argv[i];
                const char *savefilenameinzip;
                zip_fileinfo zi;
                unsigned long crcFile=0;
                int zip64 = 0;

                zi.tmz_date.tm_sec = zi.tmz_date.tm_min = zi.tmz_date.tm_hour =
                zi.tmz_date.tm_mday = zi.tmz_date.tm_mon = zi.tmz_date.tm_year = 0;
                zi.dosDate = 0;
                zi.internal_fa = 0;
                zi.external_fa = 0;
                filetime(filenameinzip,&zi.tmz_date,&zi.dosDate);

                /* Encryption needs the CRC of the data before writing it. */
                if ((password != NULL) && (err==ZIP_OK))
                    err = getFileCrc(filenameinzip,buf,size_buf,&crcFile);

                zip64 = isLargeFile(filenameinzip);

                /* The stored path must not start with a slash: windows/xp
                   and dynazip could not read such archives. */
                savefilenameinzip = filenameinzip;
                while( savefilenameinzip[0] == '\\' || savefilenameinzip[0] == '/' )
                {
                    savefilenameinzip++;
                }

                /* With -j, keep only the base name after the last slash. */
                if( opt_exclude_path )
                {
                    const char *tmpptr;
                    const char *lastslash = 0;
                    for( tmpptr = savefilenameinzip; *tmpptr; tmpptr++)
                    {
                        if( *tmpptr == '\\' || *tmpptr == '/')
                        {
                            lastslash = tmpptr;
                        }
                    }
                    if( lastslash != NULL )
                    {
                        savefilenameinzip = lastslash+1; /* base filename follows last slash */
                    }
                }

                err = zipOpenNewFileInZip3_64(zf,savefilenameinzip,&zi,
                                 NULL,0,NULL,0,NULL /* comment*/,
                                 (opt_compress_level != 0) ? Z_DEFLATED : 0,
                                 opt_compress_level,0,
                                 -MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY,
                                 password,crcFile, zip64);

                if (err != ZIP_OK)
                    printf("error in opening %s in zipfile\n",filenameinzip);
                else
                {
                    fin = FOPEN_FUNC(filenameinzip,"rb");
                    if (fin==NULL)
                    {
                        err=ZIP_ERRNO;
                        printf("error in opening %s for reading\n",filenameinzip);
                    }
                }

                /* Stream the file into the archive buffer by buffer. */
                if (err == ZIP_OK)
                    do
                    {
                        err = ZIP_OK;
                        size_read = (int)fread(buf,1,size_buf,fin);
                        /* A short read without EOF is a real read error. */
                        if (size_read < size_buf)
                            if (feof(fin)==0)
                            {
                                printf("error in reading %s\n",filenameinzip);
                                err = ZIP_ERRNO;
                            }

                        if (size_read>0)
                        {
                            err = zipWriteInFileInZip (zf,buf,size_read);
                            if (err<0)
                            {
                                printf("error in writing %s in the zipfile\n",
                                                 filenameinzip);
                            }
                        }
                    } while ((err == ZIP_OK) && (size_read>0));

                if (fin)
                    fclose(fin);

                if (err<0)
                    err=ZIP_ERRNO;
                else
                {
                    err = zipCloseFileInZip(zf);
                    if (err!=ZIP_OK)
                        printf("error in closing %s in the zipfile\n",
                                    filenameinzip);
                }
            }
        }
        errclose = zipClose(zf,NULL);
        if (errclose != ZIP_OK)
            printf("error in closing %s\n",filename_try);
    }
    else
    {
       do_help();
    }

    free(buf);
    return 0;
}
| {
"pile_set_name": "Github"
} |
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package net_test
import (
"io"
"net"
"testing"
"time"
"golang.org/x/net/nettest"
)
// TestPipe runs the generic nettest.TestConn conformance suite against a
// synchronous, in-memory net.Pipe connection pair. The factory returns the
// two ends plus a stop function that closes both.
func TestPipe(t *testing.T) {
	nettest.TestConn(t, func() (c1, c2 net.Conn, stop func(), err error) {
		c1, c2 = net.Pipe()
		stop = func() {
			c1.Close()
			c2.Close()
		}
		return
	})
}
// TestPipeCloseError verifies the error contract of net.Pipe after one end
// is closed: every operation on the closed end (c1) reports
// io.ErrClosedPipe, while the still-open peer (c2) sees io.EOF on reads
// (the stream ended cleanly) but io.ErrClosedPipe on writes and deadline
// changes.
func TestPipeCloseError(t *testing.T) {
	c1, c2 := net.Pipe()
	c1.Close()

	// Operations on the closed end itself.
	if _, err := c1.Read(nil); err != io.ErrClosedPipe {
		t.Errorf("c1.Read() = %v, want io.ErrClosedPipe", err)
	}
	if _, err := c1.Write(nil); err != io.ErrClosedPipe {
		t.Errorf("c1.Write() = %v, want io.ErrClosedPipe", err)
	}
	if err := c1.SetDeadline(time.Time{}); err != io.ErrClosedPipe {
		t.Errorf("c1.SetDeadline() = %v, want io.ErrClosedPipe", err)
	}
	// Operations on the peer of the closed end.
	if _, err := c2.Read(nil); err != io.EOF {
		t.Errorf("c2.Read() = %v, want io.EOF", err)
	}
	if _, err := c2.Write(nil); err != io.ErrClosedPipe {
		t.Errorf("c2.Write() = %v, want io.ErrClosedPipe", err)
	}
	if err := c2.SetDeadline(time.Time{}); err != io.ErrClosedPipe {
		t.Errorf("c2.SetDeadline() = %v, want io.ErrClosedPipe", err)
	}
}
| {
"pile_set_name": "Github"
} |
\version "2.17.23"
\header {
texidoc = "Text is framed properly with @code{\\box},
@code{\\circle}, @code{\\oval} and @code{\\ellipse}"
}
\markup \column {
\line { \box { text in boxes "1" "12" "123" } }
\line { \circle { text in circles "1" "12" "123" } }
\line { \oval { text in ovals "1" "12" "123" } }
\line { \ellipse { text in ellipses "1" "12" "123" } }
}
| {
"pile_set_name": "Github"
} |
# Ram widget
This widget shows the RAM usage. When clicked another widget appears with more detailed information:

## Installation
Please refer to the [installation](https://github.com/streetturtle/awesome-wm-widgets#installation) section of the repo. | {
"pile_set_name": "Github"
} |
#!/usr/bin/clitoris ## -*- shell-script -*-
$ dseq 2014-01-12 2014-01-13 -f '%G-%U'
2014-02
2014-02
$
## dseq.24.clit ends here
| {
"pile_set_name": "Github"
} |
@import url(http://fonts.googleapis.com/css?family=Karla|Quicksand);
@import url('styles/boots-bones.css');
/*
Theme Name: Naked Wordpress
Theme URI: http://bckmn.com/naked-wordpress
Author: J Beckman
Author URI: http://bckmn.com
Description: A super bare theme for designers who don't know Wordpress PHP but want to build with Wordpress.
Version: 1.0
License: GNU General Public License
-------------------------------------------------------------- */
/* Reset
-------------------------------------------------------------- */
html,
body,
div,
span,
applet,
object,
iframe,
h1,
h2,
h3,
h4,
h5,
h6,
p,
blockquote,
pre,
a,
abbr,
acronym,
address,
big,
cite,
code,
del,
dfn,
em,
img,
ins,
kbd,
q,
s,
samp,
small,
strike,
strong,
sub,
sup,
tt,
var,
b,
u,
i,
center,
dl,
dt,
dd,
ol,
ul,
li,
fieldset,
form,
label,
legend,
table,
caption,
tbody,
tfoot,
thead,
tr,
th,
td,
article,
aside,
canvas,
details,
embed,
figure,
figcaption,
footer,
header,
hgroup,
menu,
nav,
output,
ruby,
section,
summary,
time,
mark,
audio,
video {
margin: 0;
padding: 0;
border: 0;
vertical-align: baseline;
font: inherit;
font-size: 100%;
}
/* HTML5 display-role reset for older browsers */
article,
aside,
details,
figcaption,
figure,
footer,
header,
hgroup,
menu,
nav,
section {
display: block;
}
html {
margin: 0;
padding: 0;
font-size: 62.5%;
/* Corrects text resizing oddly in IE6/7 when body font-size is set using em units http://clagnut.com/blog/348/#c790 */
-webkit-text-size-adjust: 100%;
/* Prevents iOS text size adjust after orientation change, without disabling user zoom */
-ms-text-size-adjust: 100%;
/* www.456bereastreet.com/archive/201012/controlling_text_size_in_safari_for_ios_without_disabling_user_zoom/ */
}
ol,
ul {
list-style: none;
}
blockquote,
q {
quotes: none;
}
blockquote:before,
blockquote:after,
q:before,
q:after {
content: '';
content: none;
}
table {
border-spacing: 0;
border-collapse: collapse;
}
code,
kbd,
tt,
var {
font: 15px Monaco, Consolas, "Andale Mono", "DejaVu Sans Mono", monospace;
}
abbr,
acronym {
border-bottom: 1px dotted #666;
cursor: help;
}
mark,
ins {
background: #fff9c0;
text-decoration: none;
}
sup,
sub {
font-size: 75%;
height: 0;
line-height: 0;
position: relative;
vertical-align: baseline;
}
sup {
bottom: 1ex;
}
sub {
top: .5ex;
}
small {
font-size: 75%;
}
big {
font-size: 125%;
}
figure {
margin: 0;
}
table {
margin: 0 0 1.5em;
width: 100%;
}
th {
font-weight: bold;
}
button,
input,
select,
textarea {
font-size: 100%;
/* Corrects font size not being inherited in all browsers */
margin: 0;
/* Addresses margins set differently in IE6/7, F3/4, S5, Chrome */
vertical-align: baseline;
vertical-align: middle;
/* Improves appearance and consistency in all browsers */
}
button,
input {
line-height: normal;
/* Addresses FF3/4 setting line-height using !important in the UA stylesheet */
overflow: visible;
/* Corrects inner spacing displayed oddly in IE6/7 */
}
button,
html input[type="button"],
input[type="reset"],
input[type="submit"] {
border: none;
border-radius: 10px;
background: #ccc;
color: rgba(0, 0, 0, 0.8);
cursor: pointer;
/* Improves usability and consistency of cursor style between image-type 'input' and others */
-webkit-appearance: button;
/* Corrects inability to style clickable 'input' types in iOS */
font-size: 12px;
font-size: 1.4rem;
line-height: 1;
padding: 1.12em 1.5em 1em;
}
button:hover,
html input[type="button"]:hover,
input[type="reset"]:hover,
input[type="submit"]:hover {
-webkit-box-shadow: 0 0 3px rgba(0,0,0,0.5);
-moz-box-shadow: 0 0 3px rgba(0,0,0,0.5);
box-shadow: 0 0 3px rgba(0,0,0,0.5);
}
button:focus,
html input[type="button"]:focus,
input[type="reset"]:focus,
input[type="submit"]:focus,
button:active,
html input[type="button"]:active,
input[type="reset"]:active,
input[type="submit"]:active {
-webkit-box-shadow: 0 0 3px rgba(0,0,0,0.5);
-moz-box-shadow: 0 0 3px rgba(0,0,0,0.5);
box-shadow: 0 0 3px rgba(0,0,0,0.5);
}
input[type="checkbox"],
input[type="radio"] {
box-sizing: border-box;
/* Addresses box sizing set to content-box in IE8/9 */
padding: 0;
/* Addresses excess padding in IE8/9 */
}
input[type="search"] {
-webkit-appearance: textfield;
/* Addresses appearance set to searchfield in S5, Chrome */
-moz-box-sizing: content-box;
-webkit-box-sizing: content-box;
/* Addresses box sizing set to border-box in S5, Chrome (include -moz to future-proof) */
box-sizing: content-box;
}
input[type="search"]::-webkit-search-decoration {
/* Corrects inner padding displayed oddly in S5, Chrome on OSX */
-webkit-appearance: none;
}
button::-moz-focus-inner,
input::-moz-focus-inner {
/* Corrects inner padding and border displayed oddly in FF3/4 www.sitepen.com/blog/2008/05/14/the-devils-in-the-details-fixing-dojos-toolbar-buttons/ */
border: 0;
padding: 0;
}
input[type=text],
input[type=email],
textarea {
color: #888888;
border: 1px solid #dddddd;
border-radius: 5px;
}
input[type=text]:focus,
input[type=email]:focus,
textarea:focus {
color: #101010;
}
input[type=text],
input[type=email] {
padding: 3px;
}
textarea {
overflow: auto;
/* Removes default vertical scrollbar in IE6/7/8/9 */
padding: 10px;
vertical-align: top;
/* Improves readability and alignment in all browsers */
width: 98%;
}
/* styles
-------------------------------------------------------------- */
body,
button,
input,
select {
padding: 0;
margin: 0;
background: #ffffff;
font-family: "Karla", Helvetica, sans-serif;
line-height: 26.64px;
font-size: 18px;
font-size: 1.8rem;
}
/* typography
-------------------------------------------------------------- */
/* Headings */
h1,
h2,
h3,
h4,
h5,
h6 {
clear: both;
font-family: "Quicksand", Helvetica, sans-serif;
}
h1 {
line-height: 71px;
font-size: 48px;
font-size: 4.8rem;
}
h2 {
line-height: 50px;
font-size: 34px;
font-size: 3.4rem;
}
h3 {
line-height: 41px;
font-size: 28px;
font-size: 2.8rem;
}
h4 {
line-height: 26px;
font-size: 18px;
font-size: 1.8rem;
}
hr {
background-color: #ccc;
border: 0;
height: 1px;
margin-bottom: 1.5em;
}
/* Text elements */
p {
margin-bottom: 1.5em;
}
ul,
ol {
margin: 0 0 1.5em 3em;
}
ul {
list-style: disc;
}
ol {
list-style: decimal;
}
ul ul,
ol ol,
ul ol,
ol ul {
margin-bottom: 0;
margin-left: 1.5em;
}
dt {
font-weight: bold;
}
dd {
margin: 0 1.5em 1.5em;
}
b,
strong {
font-weight: bold;
}
dfn,
cite,
em,
i {
font-style: italic;
}
blockquote {
margin: 0 1.5em;
}
address {
margin: 0 0 1.5em;
}
pre {
background: #eee;
font-family: "Courier 10 Pitch", Courier, monospace;
font-size: 0.7em;
line-height: 1.5em;
margin-bottom: 1.6em;
padding: 1.6em;
overflow: auto;
max-width: 100%;
box-shadow: 0 3px 5px #ddd inset;
}
/* Links */
a {
color: #007998;
-webkit-transition: color 0.25s ease-in-out;
-moz-transition: color 0.25s ease-in-out;
-o-transition: color 0.25s ease-in-out;
-ms-transition: color 0.25s ease-in-out;
transition: color 0.25s ease-in-out;
text-decoration: none;
}
a:hover,
a:focus,
a:active {
color: rgba(0, 121, 152, 0.5);
}
/* Alignment */
.alignleft {
display: inline;
float: left;
margin-right: 1.5em;
}
.alignright {
display: inline;
float: right;
margin-left: 1.5em;
}
.aligncenter {
clear: both;
display: block;
margin: 0 auto;
}
.center {
text-align: center;
}
/* Header
-------------------------------------------------------------- */
/* Site title link: uppercase, large, centered. */
.site-title a {
  text-transform: uppercase;
  font-size: 4em;
  text-align: center;
}
/* (An empty ".site-title a:hover" rule was removed — it declared nothing.) */
.site-description {
  font-style: italic;
}
header {
padding: 20px 0;
background: transparent;
margin: 20px auto;
}
header .gravatar {
overflow: hidden;
width: 100px;
height: 100px;
-moz-border-radius: 50%;
-webkit-border-radius: 50%;
border-radius: 50%;
margin: 0;
float: right;
}
header #brand {
margin:1em 0;
}
header #brand h1 {
font-weight: 900;
color: #565656;
font-size: 16px;
font-size: 1.6rem;
margin: 0;
padding: 0;
text-transform: uppercase;
letter-spacing: 0.45em;
}
header #brand h1 a {
color: #565656;
text-decoration: none;
}
header #brand h1 span {
font-weight: 200;
color: #888888;
text-transform: lowercase;
}
header nav {
font-family: "Quicksand", Helvetica, sans-serif;
}
.menu-main-nav-container, .menu-main-container {
float: right;
}
header nav ul {
list-style: none;
margin: 0;
}
header nav ul li {
float: left;
margin: 2px 10px 0 0;
}
header nav ul li a {
color: #888888;
font-size: 14px;
}
/* Posts/Articles
-------------------------------------------------------------- */
article {
background: transparent;
padding: 0;
margin-bottom: 80px;
border-bottom: 1px solid #dddddd;
}
article .title {
color: #404040;
line-height: 56.24px;
font-size: 38px;
font-size: 3.8rem;
font-weight: 400;
text-align: center;
letter-spacing: 0.1em;
}
article .title a {
text-decoration: none;
color: #404040;
}
article .title a:hover {
color: #007998;
}
article .post-meta {
margin-bottom: 40px;
font-size: 14px;
text-align: center;
font-family: "Karla", Helvetica, sans-serif;
text-transform: uppercase;
font-weight: 500;
}
article .post-meta a {
color: #888888;
text-decoration: none;
}
article .post-meta a:hover {
color: rgba(136, 136, 136, 0.5);
}
article .the-content a {
font-weight: 900;
text-decoration: none;
}
article .the-content p {
color: #191919;
text-align: justify;
}
article .the-content img {
width: 100%;
height: auto;
}
article .meta {
line-height: 14.8px;
font-size: 10px;
font-size: 1rem;
text-transform: uppercase;
letter-spacing: .9px;
}
article .meta div {
margin:1px auto;
}
article .meta .post-categories {
list-style: none;
margin: 0 0 10px 0;
}
article .meta .post-categories li {
display: inline-block;
margin-right: 10px;
}
/* Page loop styling */
.page article {
border-bottom: none;
}
.page article .title {
margin-bottom: 40px;
}
/* Pagination */
#pagination {
margin-bottom: 40px;
width: 100%;
}
#pagination .past-page {
float: right;
width: 49%;
text-align:right;
}
#pagination .next-page {
float: left;
width: 50%;
border-right: 1px solid #ccc;
height: 200px;
}
.next-page a, .past-page a {
font-size: 2em;
font-family: "Quicksand", Helvetica, sans-serif;
font-weight: 400;
font-style: italic;
padding: 10%;
}
/* Comments */
h3#comments {
margin-bottom: 80px;
font-size: 16px;
}
.commentlist {
list-style: none;
margin: 0;
}
.commentlist .comment {
margin: 0 0 40px 50px;
padding: 20px;
position: relative;
list-style: none;
-moz-box-shadow: 0 0 3px rgba(0, 0, 0, 0.2);
-webkit-box-shadow: 0 0 3px rgba(0, 0, 0, 0.2);
box-shadow: 0 0 3px rgba(0, 0, 0, 0.2);
}
.commentlist .comment .children {
margin-top: 40px;
}
.commentlist .comment-author {
float: left;
margin-right: 10px;
}
.commentlist .comment-author img {
position: absolute;
left: -50px;
top: 5px;
-moz-box-shadow: 0 0 3px rgba(0, 0, 0, 0.2);
-webkit-box-shadow: 0 0 3px rgba(0, 0, 0, 0.2);
box-shadow: 0 0 3px rgba(0, 0, 0, 0.2);
}
.commentlist .comment-author .says {
display: none;
}
.comment-meta {
font-size: 12px;
margin-bottom: 10px;
}
/* Jetpack comment-subscription form. The selector was listed twice in the
   original group (a copy/paste slip); de-duplicated with identical styles. */
.comment-subscription-form {
  margin-bottom: 0;
  font-size: 14px;
}
#respond {
margin-bottom: 80px;
}
#respond > h3 {
font-size: 16px;
margin-bottom: 20px;
}
#respond form label {
color: #888888;
}
footer {
text-align: center;
padding-bottom: 40px;
font-size: 12px;
}
/* Misc & mixens
-------------------------------------------------------------- */
.site-footer{
}
.hide-text {
overflow: hidden;
text-indent: 100%;
white-space: nowrap;
}
.clear {
clear: both;
}
/* clear floats */
.clearfix:after {
content: ".";
display: block;
clear: both;
visibility: hidden;
line-height: 0;
height: 0;
}
.clearfix {
display: inline-block;
}
html[xmlns] .clearfix {
display: block;
}
* html .clearfix {
height: 1%;
}
/* #Media Queries
================================================== */
/* Those wide screens above 1280px */
@media only screen and (min-width: 1281px) {
body{
font-size: 2.5em;
line-height: 1.5em;
}
article .title{
font-size: 1.8em;
line-height: 1.5em;
}
article .post-meta{
font-size: 0.7em;
line-height: 1em;
}
.menu-main-nav-container a, .side-title, .menu-main-container a{
font-size: 1em;
line-height: 1.5em;
}
#sidebar{
font-size: 0.7em;
line-height: 1.5em;
}
#sidebar h3{
font-size: 1.5em;
line-height: 1.5em;
}
}
/* Smaller than standard 960 (devices and browsers) */
@media only screen and (max-width: 959px) {
.side-title{
font-size: 1em;
line-height: 1.5em;
}
}
/* Tablet Portrait size to standard 960 (devices and browsers) */
@media only screen and (min-width: 768px) and (max-width: 959px) {
.side-title{
font-size: 1em;
line-height: 1.5em;
}
}
/* All Mobile Sizes (devices and browser) */
@media only screen and (max-width: 767px) {
.side-title{
font-size: 1em;
line-height: 1.5em;
}
}
/* Mobile Landscape Size to Tablet Portrait (devices and browsers) */
@media only screen and (min-width: 480px) and (max-width: 767px) {
body, article .post-meta{
font-size: 1.2em;
line-height: 1.5em;
}
article .title{
font-size: 2em;
line-height: 1.5em;
}
.site-title{
font-size: 10px !important;
line-height: 50px;
}
.menu-main-nav-container a, .side-title, .menu-main-container a{
font-size: 1em;
line-height: 1.5em;
}
}
/* Mobile Portrait Size to Mobile Landscape Size (devices and browsers) */
@media only screen and (max-width: 479px) {
body, article .post-meta{
font-size: 1.2em;
line-height: 1.5em;
}
article .title{
font-size: 2em;
line-height: 1.5em;
}
.site-title{
font-size: 6px !important;
line-height: 30px;
}
.menu-main-nav-container a, .side-title, .menu-main-container a{
font-size: 1em;
line-height: 1.5em;
}
}
| {
"pile_set_name": "Github"
} |
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in /Users/ttdevs/android/android-sdk-macosx/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Retrofit resolves service interfaces via reflection; silence warnings about
# its optional/internal dependencies and keep the library classes intact.
-dontwarn retrofit2.**
-keep class retrofit2.** { *; }
# Keep generic-type metadata (Signature) and checked-exception metadata
# (Exceptions): Retrofit reads both reflectively at runtime, per its
# recommended ProGuard configuration.
-keepattributes Signature
-keepattributes Exceptions
| {
"pile_set_name": "Github"
} |
'use strict';
var test = require('tape');
var qs = require('../');
var utils = require('../lib/utils');
var iconv = require('iconv-lite');
var SaferBuffer = require('safer-buffer').Buffer;
test('stringify()', function (t) {
t.test('stringifies a querystring object', function (st) {
st.equal(qs.stringify({ a: 'b' }), 'a=b');
st.equal(qs.stringify({ a: 1 }), 'a=1');
st.equal(qs.stringify({ a: 1, b: 2 }), 'a=1&b=2');
st.equal(qs.stringify({ a: 'A_Z' }), 'a=A_Z');
st.equal(qs.stringify({ a: '€' }), 'a=%E2%82%AC');
st.equal(qs.stringify({ a: '' }), 'a=%EE%80%80');
st.equal(qs.stringify({ a: 'א' }), 'a=%D7%90');
st.equal(qs.stringify({ a: '𐐷' }), 'a=%F0%90%90%B7');
st.end();
});
t.test('adds query prefix', function (st) {
st.equal(qs.stringify({ a: 'b' }, { addQueryPrefix: true }), '?a=b');
st.end();
});
t.test('with query prefix, outputs blank string given an empty object', function (st) {
st.equal(qs.stringify({}, { addQueryPrefix: true }), '');
st.end();
});
t.test('stringifies a nested object', function (st) {
st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c');
st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }), 'a%5Bb%5D%5Bc%5D%5Bd%5D=e');
st.end();
});
t.test('stringifies a nested object with dots notation', function (st) {
st.equal(qs.stringify({ a: { b: 'c' } }, { allowDots: true }), 'a.b=c');
st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }, { allowDots: true }), 'a.b.c.d=e');
st.end();
});
t.test('stringifies an array value', function (st) {
st.equal(
qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'indices' }),
'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d',
'indices => indices'
);
st.equal(
qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'brackets' }),
'a%5B%5D=b&a%5B%5D=c&a%5B%5D=d',
'brackets => brackets'
);
st.equal(
qs.stringify({ a: ['b', 'c', 'd'] }),
'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d',
'default => indices'
);
st.end();
});
t.test('omits nulls when asked', function (st) {
st.equal(qs.stringify({ a: 'b', c: null }, { skipNulls: true }), 'a=b');
st.end();
});
t.test('omits nested nulls when asked', function (st) {
st.equal(qs.stringify({ a: { b: 'c', d: null } }, { skipNulls: true }), 'a%5Bb%5D=c');
st.end();
});
t.test('omits array indices when asked', function (st) {
st.equal(qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }), 'a=b&a=c&a=d');
st.end();
});
t.test('stringifies a nested array value', function (st) {
st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { arrayFormat: 'indices' }), 'a%5Bb%5D%5B0%5D=c&a%5Bb%5D%5B1%5D=d');
st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { arrayFormat: 'brackets' }), 'a%5Bb%5D%5B%5D=c&a%5Bb%5D%5B%5D=d');
st.equal(qs.stringify({ a: { b: ['c', 'd'] } }), 'a%5Bb%5D%5B0%5D=c&a%5Bb%5D%5B1%5D=d');
st.end();
});
t.test('stringifies a nested array value with dots notation', function (st) {
st.equal(
qs.stringify(
{ a: { b: ['c', 'd'] } },
{ allowDots: true, encode: false, arrayFormat: 'indices' }
),
'a.b[0]=c&a.b[1]=d',
'indices: stringifies with dots + indices'
);
st.equal(
qs.stringify(
{ a: { b: ['c', 'd'] } },
{ allowDots: true, encode: false, arrayFormat: 'brackets' }
),
'a.b[]=c&a.b[]=d',
'brackets: stringifies with dots + brackets'
);
st.equal(
qs.stringify(
{ a: { b: ['c', 'd'] } },
{ allowDots: true, encode: false }
),
'a.b[0]=c&a.b[1]=d',
'default: stringifies with dots + indices'
);
st.end();
});
t.test('stringifies an object inside an array', function (st) {
st.equal(
qs.stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'indices' }),
'a%5B0%5D%5Bb%5D=c',
'indices => brackets'
);
st.equal(
qs.stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'brackets' }),
'a%5B%5D%5Bb%5D=c',
'brackets => brackets'
);
st.equal(
qs.stringify({ a: [{ b: 'c' }] }),
'a%5B0%5D%5Bb%5D=c',
'default => indices'
);
st.equal(
qs.stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'indices' }),
'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1',
'indices => indices'
);
st.equal(
qs.stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'brackets' }),
'a%5B%5D%5Bb%5D%5Bc%5D%5B%5D=1',
'brackets => brackets'
);
st.equal(
qs.stringify({ a: [{ b: { c: [1] } }] }),
'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1',
'default => indices'
);
st.end();
});
t.test('stringifies an array with mixed objects and primitives', function (st) {
st.equal(
qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false, arrayFormat: 'indices' }),
'a[0][b]=1&a[1]=2&a[2]=3',
'indices => indices'
);
st.equal(
qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false, arrayFormat: 'brackets' }),
'a[][b]=1&a[]=2&a[]=3',
'brackets => brackets'
);
st.equal(
qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false }),
'a[0][b]=1&a[1]=2&a[2]=3',
'default => indices'
);
st.end();
});
t.test('stringifies an object inside an array with dots notation', function (st) {
st.equal(
qs.stringify(
{ a: [{ b: 'c' }] },
{ allowDots: true, encode: false, arrayFormat: 'indices' }
),
'a[0].b=c',
'indices => indices'
);
st.equal(
qs.stringify(
{ a: [{ b: 'c' }] },
{ allowDots: true, encode: false, arrayFormat: 'brackets' }
),
'a[].b=c',
'brackets => brackets'
);
st.equal(
qs.stringify(
{ a: [{ b: 'c' }] },
{ allowDots: true, encode: false }
),
'a[0].b=c',
'default => indices'
);
st.equal(
qs.stringify(
{ a: [{ b: { c: [1] } }] },
{ allowDots: true, encode: false, arrayFormat: 'indices' }
),
'a[0].b.c[0]=1',
'indices => indices'
);
st.equal(
qs.stringify(
{ a: [{ b: { c: [1] } }] },
{ allowDots: true, encode: false, arrayFormat: 'brackets' }
),
'a[].b.c[]=1',
'brackets => brackets'
);
st.equal(
qs.stringify(
{ a: [{ b: { c: [1] } }] },
{ allowDots: true, encode: false }
),
'a[0].b.c[0]=1',
'default => indices'
);
st.end();
});
t.test('does not omit object keys when indices = false', function (st) {
st.equal(qs.stringify({ a: [{ b: 'c' }] }, { indices: false }), 'a%5Bb%5D=c');
st.end();
});
t.test('uses indices notation for arrays when indices=true', function (st) {
st.equal(qs.stringify({ a: ['b', 'c'] }, { indices: true }), 'a%5B0%5D=b&a%5B1%5D=c');
st.end();
});
t.test('uses indices notation for arrays when no arrayFormat is specified', function (st) {
st.equal(qs.stringify({ a: ['b', 'c'] }), 'a%5B0%5D=b&a%5B1%5D=c');
st.end();
});
t.test('uses indices notation for arrays when no arrayFormat=indices', function (st) {
st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }), 'a%5B0%5D=b&a%5B1%5D=c');
st.end();
});
t.test('uses repeat notation for arrays when no arrayFormat=repeat', function (st) {
st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }), 'a=b&a=c');
st.end();
});
t.test('uses brackets notation for arrays when no arrayFormat=brackets', function (st) {
st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }), 'a%5B%5D=b&a%5B%5D=c');
st.end();
});
t.test('stringifies a complicated object', function (st) {
st.equal(qs.stringify({ a: { b: 'c', d: 'e' } }), 'a%5Bb%5D=c&a%5Bd%5D=e');
st.end();
});
t.test('stringifies an empty value', function (st) {
st.equal(qs.stringify({ a: '' }), 'a=');
st.equal(qs.stringify({ a: null }, { strictNullHandling: true }), 'a');
st.equal(qs.stringify({ a: '', b: '' }), 'a=&b=');
st.equal(qs.stringify({ a: null, b: '' }, { strictNullHandling: true }), 'a&b=');
st.equal(qs.stringify({ a: { b: '' } }), 'a%5Bb%5D=');
st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: true }), 'a%5Bb%5D');
st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: false }), 'a%5Bb%5D=');
st.end();
});
t.test('stringifies a null object', { skip: !Object.create }, function (st) {
var obj = Object.create(null);
obj.a = 'b';
st.equal(qs.stringify(obj), 'a=b');
st.end();
});
t.test('returns an empty string for invalid input', function (st) {
st.equal(qs.stringify(undefined), '');
st.equal(qs.stringify(false), '');
st.equal(qs.stringify(null), '');
st.equal(qs.stringify(''), '');
st.end();
});
t.test('stringifies an object with a null object as a child', { skip: !Object.create }, function (st) {
var obj = { a: Object.create(null) };
obj.a.b = 'c';
st.equal(qs.stringify(obj), 'a%5Bb%5D=c');
st.end();
});
t.test('drops keys with a value of undefined', function (st) {
st.equal(qs.stringify({ a: undefined }), '');
st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: true }), 'a%5Bc%5D');
st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: false }), 'a%5Bc%5D=');
st.equal(qs.stringify({ a: { b: undefined, c: '' } }), 'a%5Bc%5D=');
st.end();
});
t.test('url encodes values', function (st) {
st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c');
st.end();
});
t.test('stringifies a date', function (st) {
var now = new Date();
var str = 'a=' + encodeURIComponent(now.toISOString());
st.equal(qs.stringify({ a: now }), str);
st.end();
});
t.test('stringifies the weird object from qs', function (st) {
st.equal(qs.stringify({ 'my weird field': '~q1!2"\'w$5&7/z8)?' }), 'my%20weird%20field=~q1%212%22%27w%245%267%2Fz8%29%3F');
st.end();
});
t.test('skips properties that are part of the object prototype', function (st) {
Object.prototype.crash = 'test';
st.equal(qs.stringify({ a: 'b' }), 'a=b');
st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c');
delete Object.prototype.crash;
st.end();
});
t.test('stringifies boolean values', function (st) {
st.equal(qs.stringify({ a: true }), 'a=true');
st.equal(qs.stringify({ a: { b: true } }), 'a%5Bb%5D=true');
st.equal(qs.stringify({ b: false }), 'b=false');
st.equal(qs.stringify({ b: { c: false } }), 'b%5Bc%5D=false');
st.end();
});
t.test('stringifies buffer values', function (st) {
st.equal(qs.stringify({ a: SaferBuffer.from('test') }), 'a=test');
st.equal(qs.stringify({ a: { b: SaferBuffer.from('test') } }), 'a%5Bb%5D=test');
st.end();
});
t.test('stringifies an object using an alternative delimiter', function (st) {
st.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d');
st.end();
});
t.test('doesn\'t blow up when Buffer global is missing', function (st) {
var tempBuffer = global.Buffer;
delete global.Buffer;
var result = qs.stringify({ a: 'b', c: 'd' });
global.Buffer = tempBuffer;
st.equal(result, 'a=b&c=d');
st.end();
});
t.test('selects properties when filter=array', function (st) {
st.equal(qs.stringify({ a: 'b' }, { filter: ['a'] }), 'a=b');
st.equal(qs.stringify({ a: 1 }, { filter: [] }), '');
st.equal(
qs.stringify(
{ a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' },
{ filter: ['a', 'b', 0, 2], arrayFormat: 'indices' }
),
'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3',
'indices => indices'
);
st.equal(
qs.stringify(
{ a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' },
{ filter: ['a', 'b', 0, 2], arrayFormat: 'brackets' }
),
'a%5Bb%5D%5B%5D=1&a%5Bb%5D%5B%5D=3',
'brackets => brackets'
);
st.equal(
qs.stringify(
{ a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' },
{ filter: ['a', 'b', 0, 2] }
),
'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3',
'default => indices'
);
st.end();
});
t.test('supports custom representations when filter=function', function (st) {
var calls = 0;
var obj = { a: 'b', c: 'd', e: { f: new Date(1257894000000) } };
var filterFunc = function (prefix, value) {
calls += 1;
if (calls === 1) {
st.equal(prefix, '', 'prefix is empty');
st.equal(value, obj);
} else if (prefix === 'c') {
return void 0;
} else if (value instanceof Date) {
st.equal(prefix, 'e[f]');
return value.getTime();
}
return value;
};
st.equal(qs.stringify(obj, { filter: filterFunc }), 'a=b&e%5Bf%5D=1257894000000');
st.equal(calls, 5);
st.end();
});
t.test('can disable uri encoding', function (st) {
st.equal(qs.stringify({ a: 'b' }, { encode: false }), 'a=b');
st.equal(qs.stringify({ a: { b: 'c' } }, { encode: false }), 'a[b]=c');
st.equal(qs.stringify({ a: 'b', c: null }, { strictNullHandling: true, encode: false }), 'a=b&c');
st.end();
});
t.test('can sort the keys', function (st) {
var sort = function (a, b) {
return a.localeCompare(b);
};
st.equal(qs.stringify({ a: 'c', z: 'y', b: 'f' }, { sort: sort }), 'a=c&b=f&z=y');
st.equal(qs.stringify({ a: 'c', z: { j: 'a', i: 'b' }, b: 'f' }, { sort: sort }), 'a=c&b=f&z%5Bi%5D=b&z%5Bj%5D=a');
st.end();
});
t.test('can sort the keys at depth 3 or more too', function (st) {
var sort = function (a, b) {
return a.localeCompare(b);
};
st.equal(
qs.stringify(
{ a: 'a', z: { zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' },
{ sort: sort, encode: false }
),
'a=a&b=b&z[zi][zia]=zia&z[zi][zib]=zib&z[zj][zja]=zja&z[zj][zjb]=zjb'
);
st.equal(
qs.stringify(
{ a: 'a', z: { zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' },
{ sort: null, encode: false }
),
'a=a&z[zj][zjb]=zjb&z[zj][zja]=zja&z[zi][zib]=zib&z[zi][zia]=zia&b=b'
);
st.end();
});
t.test('can stringify with custom encoding', function (st) {
st.equal(qs.stringify({ 県: '大阪府', '': '' }, {
encoder: function (str) {
if (str.length === 0) {
return '';
}
var buf = iconv.encode(str, 'shiftjis');
var result = [];
for (var i = 0; i < buf.length; ++i) {
result.push(buf.readUInt8(i).toString(16));
}
return '%' + result.join('%');
}
}), '%8c%a7=%91%e5%8d%e3%95%7b&=');
st.end();
});
t.test('receives the default encoder as a second argument', function (st) {
st.plan(2);
qs.stringify({ a: 1 }, {
encoder: function (str, defaultEncoder) {
st.equal(defaultEncoder, utils.encode);
}
});
st.end();
});
t.test('throws error with wrong encoder', function (st) {
st['throws'](function () {
qs.stringify({}, { encoder: 'string' });
}, new TypeError('Encoder has to be a function.'));
st.end();
});
t.test('can use custom encoder for a buffer object', { skip: typeof Buffer === 'undefined' }, function (st) {
st.equal(qs.stringify({ a: SaferBuffer.from([1]) }, {
encoder: function (buffer) {
if (typeof buffer === 'string') {
return buffer;
}
return String.fromCharCode(buffer.readUInt8(0) + 97);
}
}), 'a=b');
st.end();
});
t.test('serializeDate option', function (st) {
var date = new Date();
st.equal(
qs.stringify({ a: date }),
'a=' + date.toISOString().replace(/:/g, '%3A'),
'default is toISOString'
);
var mutatedDate = new Date();
mutatedDate.toISOString = function () {
throw new SyntaxError();
};
st['throws'](function () {
mutatedDate.toISOString();
}, SyntaxError);
st.equal(
qs.stringify({ a: mutatedDate }),
'a=' + Date.prototype.toISOString.call(mutatedDate).replace(/:/g, '%3A'),
'toISOString works even when method is not locally present'
);
var specificDate = new Date(6);
st.equal(
qs.stringify(
{ a: specificDate },
{ serializeDate: function (d) { return d.getTime() * 7; } }
),
'a=42',
'custom serializeDate function called'
);
st.end();
});
t.test('RFC 1738 spaces serialization', function (st) {
st.equal(qs.stringify({ a: 'b c' }, { format: qs.formats.RFC1738 }), 'a=b+c');
st.equal(qs.stringify({ 'a b': 'c d' }, { format: qs.formats.RFC1738 }), 'a+b=c+d');
st.end();
});
t.test('RFC 3986 spaces serialization', function (st) {
st.equal(qs.stringify({ a: 'b c' }, { format: qs.formats.RFC3986 }), 'a=b%20c');
st.equal(qs.stringify({ 'a b': 'c d' }, { format: qs.formats.RFC3986 }), 'a%20b=c%20d');
st.end();
});
t.test('Backward compatibility to RFC 3986', function (st) {
st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c');
st.end();
});
t.test('Edge cases and unknown formats', function (st) {
['UFO1234', false, 1234, null, {}, []].forEach(
function (format) {
st['throws'](
function () {
qs.stringify({ a: 'b c' }, { format: format });
},
new TypeError('Unknown format option provided.')
);
}
);
st.end();
});
t.test('encodeValuesOnly', function (st) {
st.equal(
qs.stringify(
{ a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] },
{ encodeValuesOnly: true }
),
'a=b&c[0]=d&c[1]=e%3Df&f[0][0]=g&f[1][0]=h'
);
st.equal(
qs.stringify(
{ a: 'b', c: ['d', 'e'], f: [['g'], ['h']] }
),
'a=b&c%5B0%5D=d&c%5B1%5D=e&f%5B0%5D%5B0%5D=g&f%5B1%5D%5B0%5D=h'
);
st.end();
});
t.test('encodeValuesOnly - strictNullHandling', function (st) {
st.equal(
qs.stringify(
{ a: { b: null } },
{ encodeValuesOnly: true, strictNullHandling: true }
),
'a[b]'
);
st.end();
});
t.test('does not mutate the options argument', function (st) {
var options = {};
qs.stringify({}, options);
st.deepEqual(options, {});
st.end();
});
t.end();
});
| {
"pile_set_name": "Github"
} |
// Copyright (C) 2017 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.group.db;
import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableSet;
import com.google.gerrit.entities.Account;
import com.google.gerrit.entities.AccountGroup;
import java.sql.Timestamp;
import java.util.Optional;
import java.util.Set;
/**
* Definition of an update to a group.
*
* <p>An {@code InternalGroupUpdate} only specifies the modifications which should be applied to a
* group. Each of the modifications and hence each call on {@link InternalGroupUpdate.Builder} is
* optional.
*/
@AutoValue
public abstract class InternalGroupUpdate {
/** Representation of a member modification as defined by {@link #apply(ImmutableSet)}. */
@FunctionalInterface
public interface MemberModification {
/**
* Applies the modification to the given members.
*
* @param originalMembers current members of the group. If used for a group creation, this set
* is empty.
* @return the desired resulting members (not the diff of the members!)
*/
Set<Account.Id> apply(ImmutableSet<Account.Id> originalMembers);
}
  /** Representation of a subgroup modification as defined by {@link #apply(ImmutableSet)}. */
  @FunctionalInterface
  public interface SubgroupModification {
    /**
     * Applies the modification to the given subgroups.
     *
     * @param originalSubgroups current subgroups of the group. If used for a group creation, this
     *     set is empty.
     * @return the desired resulting subgroups (not the diff of the subgroups!)
     */
    Set<AccountGroup.UUID> apply(ImmutableSet<AccountGroup.UUID> originalSubgroups);
  }
/** Defines the new name of the group. If not specified, the name remains unchanged. */
public abstract Optional<AccountGroup.NameKey> getName();
/**
* Defines the new description of the group. If not specified, the description remains unchanged.
*
* <p><strong>Note: </strong>Passing the empty string unsets the description.
*/
public abstract Optional<String> getDescription();
/** Defines the new owner of the group. If not specified, the owner remains unchanged. */
public abstract Optional<AccountGroup.UUID> getOwnerGroupUUID();
/**
* Defines the new state of the 'visibleToAll' flag of the group. If not specified, the flag
* remains unchanged.
*/
public abstract Optional<Boolean> getVisibleToAll();
/**
* Defines how the members of the group should be modified. By default (that is if nothing is
* specified), the members remain unchanged.
*
* @return a {@link MemberModification} which gets the current members of the group as input and
* outputs the desired resulting members
*/
public abstract MemberModification getMemberModification();
/**
 * Defines how the subgroups of the group should be modified. By default (that is if nothing is
 * specified), the subgroups remain unchanged: {@link #builder()} installs the identity function.
 *
 * @return a {@link SubgroupModification} which gets the current subgroups of the group as input
 *     and outputs the desired resulting subgroups
 */
public abstract SubgroupModification getSubgroupModification();
/**
 * Defines the {@code Timestamp} to be used for the NoteDb commits of the update. If not
 * specified, the current {@code Timestamp} when creating the commit will be used.
 *
 * <p>If this {@code InternalGroupUpdate} is passed next to an {@link InternalGroupCreation}
 * during a group creation, this {@code Timestamp} is used for the NoteDb commits of the new
 * group. Hence, the {@link com.google.gerrit.server.group.InternalGroup#getCreatedOn()
 * InternalGroup#getCreatedOn()} field will match this {@code Timestamp}.
 *
 * <p><strong>Note: </strong>{@code Timestamp}s of NoteDb commits for groups are used for events
 * in the audit log. For this reason, specifying this field will have an effect on the resulting
 * audit log.
 */
public abstract Optional<Timestamp> getUpdatedOn();
public abstract Builder toBuilder();
/**
 * Creates a new {@link Builder}.
 *
 * <p>The member and subgroup modifications default to the identity function, i.e. an update
 * built without setting them leaves members and subgroups unchanged.
 */
public static Builder builder() {
  return new AutoValue_InternalGroupUpdate.Builder()
      .setMemberModification(in -> in)
      .setSubgroupModification(in -> in);
}
/** A builder for an {@link InternalGroupUpdate}. */
@AutoValue.Builder
public abstract static class Builder {
  /** @see #getName() */
  public abstract Builder setName(AccountGroup.NameKey name);

  /** @see #getDescription() */
  public abstract Builder setDescription(String description);

  /** @see #getOwnerGroupUUID() */
  public abstract Builder setOwnerGroupUUID(AccountGroup.UUID ownerGroupUUID);

  /** @see #getVisibleToAll() */
  public abstract Builder setVisibleToAll(boolean visibleToAll);

  /** @see #getMemberModification() */
  public abstract Builder setMemberModification(MemberModification memberModification);

  /**
   * Returns the currently defined {@link MemberModification} for the prospective {@link
   * InternalGroupUpdate}.
   *
   * <p>This modification can be tweaked further and passed to {@link
   * #setMemberModification(InternalGroupUpdate.MemberModification)} in order to combine multiple
   * member additions, deletions, or other modifications into one update.
   */
  public abstract MemberModification getMemberModification();

  /** @see #getSubgroupModification() */
  public abstract Builder setSubgroupModification(SubgroupModification subgroupModification);

  /**
   * Returns the currently defined {@link SubgroupModification} for the prospective {@link
   * InternalGroupUpdate}.
   *
   * <p>This modification can be tweaked further and passed to {@link
   * #setSubgroupModification(InternalGroupUpdate.SubgroupModification)} in order to combine
   * multiple subgroup additions, deletions, or other modifications into one update.
   */
  public abstract SubgroupModification getSubgroupModification();

  /** @see #getUpdatedOn() */
  public abstract Builder setUpdatedOn(Timestamp timestamp);

  /** Builds the immutable {@link InternalGroupUpdate}. */
  public abstract InternalGroupUpdate build();
}
}
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.markup.head;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import org.apache.wicket.Application;
import org.apache.wicket.core.util.string.JavaScriptUtils;
import org.apache.wicket.request.Response;
import org.apache.wicket.request.resource.ResourceReference;
import org.apache.wicket.settings.JavaScriptLibrarySettings;
import org.apache.wicket.util.string.Strings;
import org.apache.wicket.util.value.AttributeMap;
/**
 * {@link HeaderItem} for scripts that need to be executed after the entire page is loaded. The
 * script is wrapped in a {@code Wicket.Event.add(window, "load", ...)} registration and therefore
 * depends on Wicket's ajax JavaScript library (added via {@link #getDependencies()}).
 *
 * @author papegaaij
 */
public class OnLoadHeaderItem extends AbstractCspHeaderItem
{
	private static final long serialVersionUID = 1L;

	/**
	 * Creates a {@link OnLoadHeaderItem} for the script.
	 *
	 * @param javaScript
	 *            The script to execute on the load event.
	 *
	 * @return A newly created {@link OnLoadHeaderItem}.
	 */
	public static OnLoadHeaderItem forScript(CharSequence javaScript)
	{
		return new OnLoadHeaderItem(javaScript);
	}

	private final CharSequence javaScript;

	/**
	 * Constructor.
	 *
	 * The JavaScript is expected to be supplied by an overridden {@link #getJavaScript()}.
	 */
	public OnLoadHeaderItem()
	{
		this(null);
	}

	/**
	 * Construct.
	 *
	 * @param javaScript
	 *            the script to execute on the load event, may be {@code null}
	 */
	public OnLoadHeaderItem(CharSequence javaScript)
	{
		this.javaScript = javaScript;
	}

	/**
	 * @return the script that gets executed after the entire page is loaded.
	 */
	public CharSequence getJavaScript()
	{
		return javaScript;
	}

	@Override
	public void render(Response response)
	{
		CharSequence script = getJavaScript();
		if (Strings.isEmpty(script))
		{
			// Nothing to contribute for an empty script.
			return;
		}

		AttributeMap attributes = new AttributeMap();
		attributes.putAttribute(JavaScriptUtils.ATTR_TYPE, "text/javascript");
		attributes.putAttribute(JavaScriptUtils.ATTR_CSP_NONCE, getNonce());
		JavaScriptUtils.writeInlineScript(response, "Wicket.Event.add(window, \"load\", " +
			"function(event) { " + script + ";});", attributes);
	}

	@Override
	public Iterable<?> getRenderTokens()
	{
		// One token per distinct script, so identical items are contributed only once.
		String token = "javascript-load-" + getJavaScript();
		return Collections.singletonList(token);
	}

	@Override
	public String toString()
	{
		return "OnLoadHeaderItem('" + getJavaScript() + "')";
	}

	@Override
	public boolean equals(Object o)
	{
		if (this == o)
		{
			return true;
		}
		if (o == null || getClass() != o.getClass())
		{
			return false;
		}
		return Objects.equals(javaScript, ((OnLoadHeaderItem)o).javaScript);
	}

	@Override
	public int hashCode()
	{
		return Objects.hash(javaScript);
	}

	@Override
	public List<HeaderItem> getDependencies()
	{
		List<HeaderItem> dependencies = super.getDependencies();
		JavaScriptLibrarySettings settings = Application.get().getJavaScriptLibrarySettings();
		dependencies.add(JavaScriptHeaderItem.forReference(settings.getWicketAjaxReference()));
		return dependencies;
	}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<odoo>
<data noupdate="1">
<record id="op_department_1" model="op.department">
<field name="name">Department of Business Law</field>
<field name="code">D1</field>
</record>
<record id="op_department_2" model="op.department">
<field name="name">Department of Economics</field>
<field name="code">D2</field>
</record>
<record id="op_department_3" model="op.department">
<field name="name">Department of Accounts</field>
<field name="code">D3</field>
</record>
<record id="op_department_4" model="op.department">
<field name="name">Department of Management and Marketing</field>
<field name="code">D4</field>
</record>
</data>
</odoo> | {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Common types, and routines for manually loading types from file
via GCC.
"""
import glob
import os
import subprocess
import sys
import tempfile
import gdb
import pwndbg.events
import pwndbg.gcc
import pwndbg.memoize
module = sys.modules[__name__]
def is_pointer(value):
    """Return whether *value* is (or has) a pointer type.

    Accepts either a ``gdb.Value`` (its ``.type`` is inspected) or a
    ``gdb.Type`` directly.  Typedefs are stripped first so e.g. a
    ``typedef char *str_t`` is still recognised as a pointer.
    """
    # Renamed from `type` to avoid shadowing the `type` builtin.
    tp = value
    if isinstance(value, gdb.Value):
        tp = value.type
    tp = tp.strip_typedefs()
    return tp.code == gdb.TYPE_CODE_PTR
def lookup_types(*types):
    """Return the first of *types* that gdb can resolve via ``gdb.lookup_type``.

    Each candidate name is tried in order; the first successful lookup is
    returned.  If every lookup fails, the exception from the last attempt is
    re-raised.

    Raises:
        ValueError: if called with no type names at all.  (Previously this
            crashed with an UnboundLocalError on ``raise exc``.)
    """
    if not types:
        raise ValueError('lookup_types() requires at least one type name')
    exc = None
    for type_str in types:
        try:
            return gdb.lookup_type(type_str)
        except Exception as e:
            exc = e
    raise exc
# Re-run on every objfile load / start / stop so the cached types track the
# inferior currently being debugged (pointer size can change between targets).
@pwndbg.events.new_objfile
@pwndbg.events.start
@pwndbg.events.stop
def update():
    """Populate this module's attributes with gdb.Type objects for the
    current inferior (char, sized ints, pointer helpers, size_t, ...).

    Attributes are written onto ``module`` (this module itself), so other
    code can use e.g. ``pwndbg.typeinfo.uint64``.  Because these become
    module globals, the bare names ``void``, ``pvoid`` and ``char`` used
    below refer to the values assigned earlier in this same call.
    """
    module.char = gdb.lookup_type('char')
    module.ulong = lookup_types('unsigned long', 'uint', 'u32', 'uint32')
    module.long = lookup_types('long', 'int', 'i32', 'int32')
    module.uchar = lookup_types('unsigned char', 'ubyte', 'u8', 'uint8')
    module.ushort = lookup_types('unsigned short', 'ushort', 'u16', 'uint16')
    module.uint = lookup_types('unsigned int', 'uint', 'u32', 'uint32')
    module.void = lookup_types('void', '()')
    module.uint8 = module.uchar
    module.uint16 = module.ushort
    module.uint32 = module.uint
    module.uint64 = lookup_types('unsigned long long', 'ulong', 'u64', 'uint64')

    # Map byte width -> unsigned type, for size-driven lookups.
    module.unsigned = {
        1: module.uint8,
        2: module.uint16,
        4: module.uint32,
        8: module.uint64
    }

    module.int8 = lookup_types('char', 'i8', 'int8')
    module.int16 = lookup_types('short', 'i16', 'int16')
    module.int32 = lookup_types('int', 'i32', 'int32')
    module.int64 = lookup_types('long long', 'long', 'i64', 'int64')

    # Map byte width -> signed type.
    module.signed = {
        1: module.int8,
        2: module.int16,
        4: module.int32,
        8: module.int64
    }

    module.pvoid = void.pointer()
    module.ppvoid = pvoid.pointer()
    module.pchar = char.pointer()

    # Pointer width of the current target, in bytes.
    module.ptrsize = pvoid.sizeof

    if pvoid.sizeof == 4:
        module.ptrdiff = module.uint32
        module.size_t = module.uint32
        module.ssize_t = module.int32
    elif pvoid.sizeof == 8:
        module.ptrdiff = module.uint64
        module.size_t = module.uint64
        module.ssize_t = module.int64
    else:
        raise Exception('Pointer size not supported')
    module.null = gdb.Value(0).cast(void)
# Call it once so we load all of the types at import time.
update()

# Scratch directory for the generated C++ stubs that load() compiles.
tempdir = tempfile.gettempdir() + '/pwndbg'
if not os.path.exists(tempdir):
    os.mkdir(tempdir)

# Headers that break the mass-#include trick used by load().
# List assembled by trial and error until things work.
blacklist = ['regexp.h', 'xf86drm.h', 'libxl_json.h', 'xf86drmMode.h',
'caca0.h', 'xenguest.h', '_libxl_types_json.h', 'term_entry.h', 'slcurses.h',
'pcreposix.h', 'sudo_plugin.h', 'tic.h', 'sys/elf.h', 'sys/vm86.h',
'xenctrlosdep.h', 'xenctrl.h', 'cursesf.h', 'cursesm.h', 'gdbm.h', 'dbm.h',
'gcrypt-module.h', 'term.h', 'gmpxx.h', 'pcap/namedb.h', 'pcap-namedb.h',
'evr.h', 'mpc.h', 'fdt.h', 'mpfr.h', 'evrpc.h', 'png.h', 'zlib.h', 'pngconf.h',
'libelfsh.h', 'libmjollnir.h', 'hwloc.h', 'ares.h', 'revm.h', 'ares_rules.h',
'libunwind-ptrace.h', 'libui.h', 'librevm-color.h', 'libedfmt.h','revm-objects.h',
'libetrace.h', 'revm-io.h','libasm-mips.h','libstderesi.h','libasm.h','libaspect.h',
'libunwind.h','libmjollnir-objects.h','libunwind-coredump.h','libunwind-dynamic.h']
def load(name):
    """Resolve a type by name, compiling system headers if gdb doesn't know it.

    First tries ``gdb.lookup_type``.  On failure, generates a C++ source file
    that #includes (nearly) every header under the system include directory
    plus a variable declaration of the requested type, compiles it with gcc,
    loads the resulting object file's debug info into gdb, and retries the
    lookup.  May raise ``gdb.error`` if the type still cannot be found.
    """
    try:
        return gdb.lookup_type(name)
    except gdb.error:
        pass

    # s, _ = gdb.lookup_symbol(name)

    # Try to find an architecture-specific include path
    # NOTE(review): relies on pwndbg.arch being importable via the pwndbg
    # package even though it is not imported at the top of this file --
    # presumably the package __init__ pulls it in; confirm.
    arch = pwndbg.arch.current.split(':')[0]

    include_dir = glob.glob('/usr/%s*/include' % arch)

    if include_dir:
        include_dir = include_dir[0]
    else:
        include_dir = '/usr/include'

    source = '#include <fstream>\n'

    for subdir in ['', 'sys', 'netinet']:
        dirname = os.path.join(include_dir, subdir)
        for path in glob.glob(os.path.join(dirname, '*.h')):
            if any(b in path for b in blacklist):
                continue
            # NOTE(review): prints every header considered -- looks like
            # leftover debug output.
            print(path)
            source += '#include "%s"\n' % path

    # Declare a variable of the requested type so its debug info is emitted.
    source += '''
{name} foo;
'''.format(**locals())

    # Cache file name is derived from arch and the (whitespace-joined) type name.
    filename = '%s/%s_%s.cc' % (tempdir, arch, '-'.join(name.split()))

    with open(filename, 'w+') as f:
        f.write(source)
        f.flush()
        os.fsync(f.fileno())

    compile(filename)

    return gdb.lookup_type(name)
# NOTE(review): shadows the `compile` builtin at module scope; renaming would
# change this module's public interface, so it is left as-is.
def compile(filename=None, address=0):
    """Compile *filename* with gcc (debug info, warnings off) and load the
    resulting object file's symbols into gdb at *address*.

    The object file is reused if it already exists.  Compilation failures are
    swallowed silently (best-effort); in that case no symbols are added.
    """
    if filename is None:
        print("Specify a filename to compile.")
        return

    objectname = os.path.splitext(filename)[0] + ".o"

    if not os.path.exists(objectname):
        gcc = pwndbg.gcc.which()
        gcc += ['-w', '-c', '-g', filename, '-o', objectname]
        try:
            subprocess.check_output(gcc)
        except subprocess.CalledProcessError as e:
            # Deliberate best-effort: a failed compile just means no symbols.
            return

    add_symbol_file(objectname, address)
def add_symbol_file(filename=None, address=0):
    """Read additional symbol table information from the object file *filename*.

    Wraps gdb's ``add-symbol-file`` command; event delivery is paused while
    the command runs so pwndbg's own new-objfile handlers don't re-enter.
    """
    if filename is None:
        print("Specify a symbol file to add.")
        return

    with pwndbg.events.Pause():
        gdb.execute('add-symbol-file %s %s' % (filename, address), from_tty=False, to_string=True)
def read_gdbvalue(type_name, addr):
    """Read the memory contents at *addr* and interpret them as a GDB value
    with the given type.

    The type name is resolved via :func:`load` (compiling headers if needed),
    then *addr* is cast to a pointer of that type and dereferenced.
    """
    gdb_type = pwndbg.typeinfo.load(type_name)
    return gdb.Value(addr).cast(gdb_type.pointer()).dereference()
| {
"pile_set_name": "Github"
} |
/*
* bf5xx-ac97.c -- AC97 support for the ADI blackfin chip.
*
* Author: Roy Huang
* Created: 11th. June 2007
* Copyright: Analog Device Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*/
#include <linux/init.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/interrupt.h>
#include <linux/wait.h>
#include <linux/delay.h>
#include <linux/slab.h>
#include <sound/core.h>
#include <sound/pcm.h>
#include <sound/ac97_codec.h>
#include <sound/initval.h>
#include <sound/soc.h>
#include <asm/irq.h>
#include <asm/portmux.h>
#include <linux/mutex.h>
#include <linux/gpio.h>
#include "bf5xx-sport.h"
#include "bf5xx-ac97.h"
/* Anomaly notes:
* 05000250 - AD1980 is running in TDM mode and RFS/TFS are generated by SPORT
* contrtoller. But, RFSDIV and TFSDIV are always set to 16*16-1,
* while the max AC97 data size is 13*16. The DIV is always larger
* than data size. AD73311 and ad2602 are not running in TDM mode.
* AD1836 and AD73322 depend on external RFS/TFS only. So, this
* anomaly does not affect blackfin sound drivers.
*/
static struct sport_device *ac97_sport_handle;
/*
 * bf5xx_pcm_to_ac97 - pack interleaved 16-bit PCM samples into AC97 frames.
 * @dst: destination AC97 frame buffer, one frame per sample period
 * @src: interleaved PCM samples; channel order follows the chan_mask tests below
 * @count: number of frames to produce
 * @chan_mask: bitmask of SP_* flags selecting which slots are populated
 *
 * For every frame the tag word starts as TAG_VALID and one TAG_PCM_* bit is
 * OR'd in per enabled channel.  NOTE(review): the flag/field pairing looks
 * crossed (SP_FL fills ac97_pcm_r, SP_SR fills ac97_sl, etc.) -- presumably
 * intentional for this hardware's slot order, but confirm before changing.
 */
void bf5xx_pcm_to_ac97(struct ac97_frame *dst, const __u16 *src,
		size_t count, unsigned int chan_mask)
{
	while (count--) {
		dst->ac97_tag = TAG_VALID;
		if (chan_mask & SP_FL) {
			dst->ac97_pcm_r = *src++;
			dst->ac97_tag |= TAG_PCM_RIGHT;
		}
		if (chan_mask & SP_FR) {
			dst->ac97_pcm_l = *src++;
			dst->ac97_tag |= TAG_PCM_LEFT;
		}
#if defined(CONFIG_SND_BF5XX_MULTICHAN_SUPPORT)
		if (chan_mask & SP_SR) {
			dst->ac97_sl = *src++;
			dst->ac97_tag |= TAG_PCM_SL;
		}
		if (chan_mask & SP_SL) {
			dst->ac97_sr = *src++;
			dst->ac97_tag |= TAG_PCM_SR;
		}
		if (chan_mask & SP_LFE) {
			dst->ac97_lfe = *src++;
			dst->ac97_tag |= TAG_PCM_LFE;
		}
		if (chan_mask & SP_FC) {
			dst->ac97_center = *src++;
			dst->ac97_tag |= TAG_PCM_CENTER;
		}
#endif
		dst++;
	}
}
EXPORT_SYMBOL(bf5xx_pcm_to_ac97);
/*
 * bf5xx_ac97_to_pcm - unpack AC97 frames into interleaved stereo PCM.
 * @src: source AC97 frame buffer
 * @dst: destination buffer receiving left/right sample pairs
 * @count: number of frames to unpack (dst must hold 2 * count samples)
 */
void bf5xx_ac97_to_pcm(const struct ac97_frame *src, __u16 *dst,
		size_t count)
{
	size_t i;

	for (i = 0; i < count; i++) {
		dst[2 * i] = src[i].ac97_pcm_l;
		dst[2 * i + 1] = src[i].ac97_pcm_r;
	}
}
EXPORT_SYMBOL(bf5xx_ac97_to_pcm);
/*
 * Compute the TX fragment index currently being transmitted, caching it in
 * sport->tx_curr_frag as a side effect of the return expression.
 */
static unsigned int sport_tx_curr_frag(struct sport_device *sport)
{
	return sport->tx_curr_frag = sport_curr_offset_tx(sport) /
			sport->tx_fragsize;
}
/*
 * Queue a codec register command into the *next* TX fragment so it is sent
 * in-band while streaming.  cmd_count (stored in sport->private_data) tracks
 * how many command slots of each fragment are already used.
 */
static void enqueue_cmd(struct snd_ac97 *ac97, __u16 addr, __u16 data)
{
	struct sport_device *sport = ac97_sport_handle;
	int *cmd_count = sport->private_data;
	int nextfrag = sport_tx_curr_frag(sport);
	struct ac97_frame *nextwrite;

	/* Write into the fragment after the one currently being sent. */
	sport_incfrag(sport, &nextfrag, 1);

	nextwrite = (struct ac97_frame *)(sport->tx_buf +
			nextfrag * sport->tx_fragsize);
	pr_debug("sport->tx_buf:%p, nextfrag:0x%x nextwrite:%p, cmd_count:%d\n",
		sport->tx_buf, nextfrag, nextwrite, cmd_count[nextfrag]);
	nextwrite[cmd_count[nextfrag]].ac97_tag |= TAG_CMD;
	nextwrite[cmd_count[nextfrag]].ac97_addr = addr;
	nextwrite[cmd_count[nextfrag]].ac97_data = data;
	++cmd_count[nextfrag];
	pr_debug("ac97_sport: Inserting %02x/%04x into fragment %d\n",
			addr >> 8, data, nextfrag);
}
/*
 * Read an AC97 codec register by sending a read-command frame and returning
 * the data slot of the following frame.  Only valid while DMA is idle.
 *
 * NOTE(review): the return type is unsigned short, so the -EFAULT below is
 * truncated to 0xfff2-style garbage for the caller; pre-existing wart kept
 * for interface compatibility.
 */
static unsigned short bf5xx_ac97_read(struct snd_ac97 *ac97,
		unsigned short reg)
{
	struct sport_device *sport_handle = ac97_sport_handle;
	struct ac97_frame out_frame[2], in_frame[2];

	pr_debug("%s enter 0x%x\n", __func__, reg);

	/* When dma descriptor is enabled, the register should not be read */
	if (sport_handle->tx_run || sport_handle->rx_run) {
		pr_err("Could you send a mail to [email protected] "
				"to report this?\n");
		return -EFAULT;
	}

	memset(&out_frame, 0, 2 * sizeof(struct ac97_frame));
	memset(&in_frame, 0, 2 * sizeof(struct ac97_frame));
	/* Bit 15 of the address slot marks a read request. */
	out_frame[0].ac97_tag = TAG_VALID | TAG_CMD;
	out_frame[0].ac97_addr = ((reg << 8) | 0x8000);
	sport_send_and_recv(sport_handle, (unsigned char *)&out_frame,
			(unsigned char *)&in_frame,
			2 * sizeof(struct ac97_frame));
	return in_frame[1].ac97_data;
}
/*
 * Write an AC97 codec register.  While streaming (tx_run) the command is
 * queued into the in-flight TX fragments via enqueue_cmd(); otherwise a
 * standalone command frame is sent synchronously.
 */
void bf5xx_ac97_write(struct snd_ac97 *ac97, unsigned short reg,
		unsigned short val)
{
	struct sport_device *sport_handle = ac97_sport_handle;

	pr_debug("%s enter 0x%x:0x%04x\n", __func__, reg, val);

	if (sport_handle->tx_run) {
		enqueue_cmd(ac97, (reg << 8), val); /* write */
		enqueue_cmd(ac97, (reg << 8) | 0x8000, 0); /* read back */
	} else {
		struct ac97_frame frame;
		memset(&frame, 0, sizeof(struct ac97_frame));
		frame.ac97_tag = TAG_VALID | TAG_CMD;
		frame.ac97_addr = (reg << 8);
		frame.ac97_data = val;
		sport_send_and_recv(sport_handle, (unsigned char *)&frame, \
				NULL, sizeof(struct ac97_frame));
	}
}
/*
 * Warm-reset the codec by bit-banging the SYNC pin: the SPORT pins are
 * temporarily released to the GPIO controller, SYNC is pulsed high for ~2us,
 * then the pins are handed back to the SPORT peripheral.
 * NOTE(review): gpio_request()'s return value is ignored here.
 */
static void bf5xx_ac97_warm_reset(struct snd_ac97 *ac97)
{
	struct sport_device *sport_handle = ac97_sport_handle;
	u16 gpio = P_IDENT(sport_handle->pin_req[3]);

	pr_debug("%s enter\n", __func__);

	peripheral_free_list(sport_handle->pin_req);
	gpio_request(gpio, "bf5xx-ac97");
	gpio_direction_output(gpio, 1);
	udelay(2);
	gpio_set_value(gpio, 0);
	udelay(1);
	gpio_free(gpio);
	peripheral_request_list(sport_handle->pin_req, "soc-audio");
}
/*
 * Cold-reset the codec via a dedicated reset GPIO (board-specific, only
 * compiled in when CONFIG_SND_BF5XX_HAVE_COLD_RESET is set); otherwise a
 * stub that just logs.
 */
static void bf5xx_ac97_cold_reset(struct snd_ac97 *ac97)
{
#ifdef CONFIG_SND_BF5XX_HAVE_COLD_RESET
	pr_debug("%s enter\n", __func__);

	/* It is specified for bf548-ezkit */
	gpio_set_value(CONFIG_SND_BF5XX_RESET_GPIO_NUM, 0);
	/* Keep reset pin low for 1 ms */
	mdelay(1);
	gpio_set_value(CONFIG_SND_BF5XX_RESET_GPIO_NUM, 1);
	/* Wait for bit clock recover */
	mdelay(1);
#else
	pr_info("%s: Not implemented\n", __func__);
#endif
}
/* AC97 bus operations registered with the ASoC core via snd_soc_set_ac97_ops(). */
static struct snd_ac97_bus_ops bf5xx_ac97_ops = {
	.read = bf5xx_ac97_read,
	.write = bf5xx_ac97_write,
	.warm_reset = bf5xx_ac97_warm_reset,
	.reset = bf5xx_ac97_cold_reset,
};
#ifdef CONFIG_PM
/* DAI suspend hook: stop any running SPORT RX/TX transfers. Always returns 0. */
static int bf5xx_ac97_suspend(struct snd_soc_dai *dai)
{
	struct sport_device *sport = snd_soc_dai_get_drvdata(dai);

	pr_debug("%s : sport %d\n", __func__, dai->id);
	if (!dai->active)
		return 0;
	if (dai->capture_active)
		sport_rx_stop(sport);
	if (dai->playback_active)
		sport_tx_stop(sport);
	return 0;
}
/*
 * DAI resume hook: re-apply the SPORT TDM configuration that probe() set up
 * (multichannel mask and RX/TX framing), since it is lost across suspend.
 * Returns 0 on success or -EBUSY if the SPORT cannot be reconfigured.
 */
static int bf5xx_ac97_resume(struct snd_soc_dai *dai)
{
	int ret;
	struct sport_device *sport = snd_soc_dai_get_drvdata(dai);

	pr_debug("%s : sport %d\n", __func__, dai->id);
	if (!dai->active)
		return 0;

#if defined(CONFIG_SND_BF5XX_MULTICHAN_SUPPORT)
	ret = sport_set_multichannel(sport, 16, 0x3FF, 0x3FF, 1);
#else
	ret = sport_set_multichannel(sport, 16, 0x1F, 0x1F, 1);
#endif
	if (ret) {
		pr_err("SPORT is busy!\n");
		return -EBUSY;
	}

	ret = sport_config_rx(sport, IRFS, 0xF, 0, (16*16-1));
	if (ret) {
		pr_err("SPORT is busy!\n");
		return -EBUSY;
	}

	ret = sport_config_tx(sport, ITFS, 0xF, 0, (16*16-1));
	if (ret) {
		pr_err("SPORT is busy!\n");
		return -EBUSY;
	}

	return 0;
}
#else
#define bf5xx_ac97_suspend NULL
#define bf5xx_ac97_resume NULL
#endif
/*
 * DAI descriptor: 48 kHz S16_LE AC97 link; playback supports up to 6
 * channels when multichannel support is compiled in, otherwise stereo.
 */
static struct snd_soc_dai_driver bfin_ac97_dai = {
	.bus_control = true,
	.suspend = bf5xx_ac97_suspend,
	.resume = bf5xx_ac97_resume,
	.playback = {
		.stream_name = "AC97 Playback",
		.channels_min = 2,
#if defined(CONFIG_SND_BF5XX_MULTICHAN_SUPPORT)
		.channels_max = 6,
#else
		.channels_max = 2,
#endif
		.rates = SNDRV_PCM_RATE_48000,
		.formats = SNDRV_PCM_FMTBIT_S16_LE, },
	.capture = {
		.stream_name = "AC97 Capture",
		.channels_min = 2,
		.channels_max = 2,
		.rates = SNDRV_PCM_RATE_48000,
		.formats = SNDRV_PCM_FMTBIT_S16_LE, },
};
/* Minimal ASoC component descriptor used when registering the DAI. */
static const struct snd_soc_component_driver bfin_ac97_component = {
	.name		= "bfin-ac97",
};
/*
 * Platform probe: optionally claim the codec reset GPIO, initialise the
 * SPORT in 16-slot TDM mode to emulate an AC97 link, install the AC97 bus
 * ops and register the DAI.  On success the sport handle is cached in the
 * file-global ac97_sport_handle used by the read/write/reset callbacks.
 */
static int asoc_bfin_ac97_probe(struct platform_device *pdev)
{
	struct sport_device *sport_handle;
	int ret;

#ifdef CONFIG_SND_BF5XX_HAVE_COLD_RESET
	/* Request PB3 as reset pin */
	ret = devm_gpio_request_one(&pdev->dev,
				    CONFIG_SND_BF5XX_RESET_GPIO_NUM,
				    GPIOF_OUT_INIT_HIGH, "SND_AD198x RESET");
	if (ret) {
		dev_err(&pdev->dev,
			"Failed to request GPIO_%d for reset: %d\n",
			CONFIG_SND_BF5XX_RESET_GPIO_NUM, ret);
		return ret;
	}
#endif

	sport_handle = sport_init(pdev, 2, sizeof(struct ac97_frame),
			PAGE_SIZE);
	if (!sport_handle) {
		ret = -ENODEV;
		goto sport_err;
	}

	/*SPORT works in TDM mode to simulate AC97 transfers*/
#if defined(CONFIG_SND_BF5XX_MULTICHAN_SUPPORT)
	ret = sport_set_multichannel(sport_handle, 16, 0x3FF, 0x3FF, 1);
#else
	ret = sport_set_multichannel(sport_handle, 16, 0x1F, 0x1F, 1);
#endif
	if (ret) {
		pr_err("SPORT is busy!\n");
		ret = -EBUSY;
		goto sport_config_err;
	}

	/* Frame sync every 16*16 bit clocks (see anomaly 05000250 note above). */
	ret = sport_config_rx(sport_handle, IRFS, 0xF, 0, (16*16-1));
	if (ret) {
		pr_err("SPORT is busy!\n");
		ret = -EBUSY;
		goto sport_config_err;
	}

	ret = sport_config_tx(sport_handle, ITFS, 0xF, 0, (16*16-1));
	if (ret) {
		pr_err("SPORT is busy!\n");
		ret = -EBUSY;
		goto sport_config_err;
	}

	ret = snd_soc_set_ac97_ops(&bf5xx_ac97_ops);
	if (ret != 0) {
		dev_err(&pdev->dev, "Failed to set AC'97 ops: %d\n", ret);
		goto sport_config_err;
	}

	ret = snd_soc_register_component(&pdev->dev, &bfin_ac97_component,
					 &bfin_ac97_dai, 1);
	if (ret) {
		pr_err("Failed to register DAI: %d\n", ret);
		goto sport_config_err;
	}

	ac97_sport_handle = sport_handle;

	return 0;

sport_config_err:
	sport_done(sport_handle);
sport_err:
	/* Clearing the ops before they were set is harmless. */
	snd_soc_set_ac97_ops(NULL);

	return ret;
}
/* Platform remove: unwind probe() -- unregister the DAI, tear down the
 * SPORT and clear the global AC97 bus ops. */
static int asoc_bfin_ac97_remove(struct platform_device *pdev)
{
	struct sport_device *sport_handle = platform_get_drvdata(pdev);

	snd_soc_unregister_component(&pdev->dev);
	sport_done(sport_handle);
	snd_soc_set_ac97_ops(NULL);

	return 0;
}
/* Platform driver binding for the "bfin-ac97" platform device. */
static struct platform_driver asoc_bfin_ac97_driver = {
	.driver = {
			.name = "bfin-ac97",
	},

	.probe = asoc_bfin_ac97_probe,
	.remove = asoc_bfin_ac97_remove,
};
module_platform_driver(asoc_bfin_ac97_driver);
MODULE_AUTHOR("Roy Huang");
MODULE_DESCRIPTION("AC97 driver for ADI Blackfin");
MODULE_LICENSE("GPL");
| {
"pile_set_name": "Github"
} |
using System.Collections.Generic;
using Newtonsoft.Json;
using RiotSharp.Endpoints.LeagueEndpoint.Enums;
namespace RiotSharp.Endpoints.LeagueEndpoint
{
/// <summary>
/// Class representing a LeagueList in the API.
/// </summary>
public class League
{
    // Internal constructor: instances are created by the library itself,
    // presumably during JSON deserialization (note the JsonProperty attributes).
    internal League() { }

    /// <summary>
    /// The requested league entries (one item per participant in the league).
    /// </summary>
    [JsonProperty("entries")]
    public List<LeagueItem> Entries { get; set; }

    /// <summary>
    /// This name is an internal place-holder name only.
    /// Display and localization of names in the game client are handled client-side.
    /// </summary>
    [JsonProperty("name")]
    public string Name { get; set; }

    /// <summary>
    /// The league id.
    /// </summary>
    [JsonProperty("leagueId")]
    public string LeagueId { get; set; }

    /// <summary>
    /// League queue (eg: RankedSolo5x5).
    /// </summary>
    [JsonProperty("queue")]
    public string Queue { get; set; }

    /// <summary>
    /// League tier (eg: Challenger), see the <see cref="Tier"/> enum.
    /// </summary>
    [JsonProperty("tier")]
    public Tier Tier { get; set; }
}
}
| {
"pile_set_name": "Github"
} |
# Method decorator that invokes the wrapped method with its positional
# arguments in reverse order.  Any block is passed through unchanged.
# `this` (the receiver supplied by MethodDecorators) is unused here.
class Reverse < MethodDecorators::Decorator
  def call(orig, this, *args, &blk)
    orig.call(*args.reverse, &blk)
  end
end
| {
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.