text
stringlengths
2
100k
meta
dict
<!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <title>login</title> <script src="https://cdn.bootcss.com/jquery/2.2.4/jquery.min.js"></script> <link href="https://cdn.bootcss.com/bootstrap/3.3.7/css/bootstrap.min.css" rel="stylesheet"> <link href="https://cdn.bootcss.com/font-awesome/4.7.0/css/font-awesome.min.css" rel="stylesheet"> <script src="https://cdn.bootcss.com/bootstrap/3.3.7/js/bootstrap.min.js"></script> <style> body { background: url("https://i.loli.net/2018/04/17/5ad5bfebd42e9.jpg"); animation-name: myfirst; animation-duration: 12s; /*变换时间*/ animation-delay: 2s; /*动画开始时间*/ animation-iteration-count: infinite; /*下一周期循环播放*/ animation-play-state: running; /*动画开始运行*/ } @keyframes myfirst { 0% { background: url("https://i.loli.net/2018/04/17/5ad5bfebd42e9.jpg"); } 34% { background: url("https://i.loli.net/2018/04/17/5ad5bff050f1e.jpg"); } 67% { background: url("https://i.loli.net/2018/04/17/5ad5bff733f6d.jpg"); } 100% { background: url("https://i.loli.net/2018/04/17/5ad5bff772dda.jpg"); } } .form { background: rgba(255, 255, 255, 0.2); width: 400px; margin: 120px auto; } /*阴影*/ .fa { display: inline-block; top: 27px; left: 6px; position: relative; color: #ccc; } input[type="text"], input[type="password"] { padding-left: 26px; } .checkbox { padding-left: 21px; } .captcha{ background-color: white; text-align: center; cursor: pointer; } </style> </head> <body> <div class="container"> <div class="form row"> <div class="form-horizontal col-md-offset-3" id="login_form"> <h3 class="form-title">LOGIN</h3> <div class="col-md-9"> <div class="form-group"> <i class="fa fa-user fa-lg"></i> <input class="form-control required" type="text" placeholder="Username" id="username" name="username" autofocus="autofocus" maxlength="20" /> </div> <div class="form-group"> <i class="fa fa-lock fa-lg"></i> <input class="form-control required" type="password" placeholder="Password" id="password" name="password" maxlength="8" /> </div> <div class="form-group captcha "> <img src='/captcha'> </div> <div class="form-group"> <label class="checkbox"> <input type="checkbox" name="remember" value="1" />记住我 </label> </div> <div class="form-group col-md-offset-9"> <button type="submit" class="btn btn-success pull-right" name="submit">登录</button> </div> </div> </div> </div> </div> <script> $('.captcha img').on('click',function(){ $(this).attr('src','/captcha?random='+Math.random()); }) </script> </body> </html>
{ "pile_set_name": "Github" }
Universitat de Barcelona
{ "pile_set_name": "Github" }
### Get set up for Moby development * [README first](who-written-for.md) * [Get the required software](software-required.md) * [Set up for development on Windows](software-req-win.md) * [Configure Git for contributing](set-up-git.md) * [Work with a development container](set-up-dev-env.md) * [Run tests and test documentation](test.md)
{ "pile_set_name": "Github" }
new A < T;
{ "pile_set_name": "Github" }
#!/bin/bash # ***** BEGIN LICENSE BLOCK ***** # This file is part of Natron <http://www.natron.fr/>, # Copyright (C) 2016 INRIA and Alexandre Gauthier # # Natron is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # Natron is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Natron. If not, see <http://www.gnu.org/licenses/gpl-2.0.html> # ***** END LICENSE BLOCK ***** # # Build packages and installer for Linux # # Options: # DISABLE_BREAKPAD=1: Disable automatic crash report set -e # Exit immediately if a command exits with a non-zero status set -u # Treat unset variables as an error when substituting. #set -x # Print commands and their arguments as they are executed. echo "*** Linux installer..." source common.sh source manageBuildOptions.sh source manageLog.sh updateBuildOptions pushd () { command pushd "$@" > /dev/null } popd () { command popd "$@" > /dev/null } LD_LIBRARY_PATH="${SDK_HOME}/lib:${FFMPEG_PATH}/lib:${SDK_HOME}/qt${QT_VERSION_MAJOR}/lib" PATH="${SDK_HOME}/gcc/bin:${SDK_HOME}/bin:$PATH" export C_INCLUDE_PATH="${SDK_HOME}/gcc/include:${SDK_HOME}/include:${SDK_HOME}/qt${QT_VERSION_MAJOR}/include" export CPLUS_INCLUDE_PATH="${C_INCLUDE_PATH}" if [ "${ARCH}" = "x86_64" ]; then LD_LIBRARY_PATH="${SDK_HOME}/gcc/lib64:${FFMPEG_PATH}/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}" else LD_LIBRARY_PATH="${SDK_HOME}/gcc/lib:${FFMPEG_PATH}/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}" fi export LD_LIBRARY_PATH if [ -d "${BUILD_ARCHIVE_DIRECTORY}" ]; then rm -rf "${BUILD_ARCHIVE_DIRECTORY}" fi mkdir -p "${BUILD_ARCHIVE_DIRECTORY}" if [ "${DISABLE_BREAKPAD:-}" != "1" ]; then mkdir "${BUILD_ARCHIVE_DIRECTORY}/symbols" fi INSTALLER_PATH="${TMP_BINARIES_PATH}/Natron-installer" # If we are in DEBUG_SYMBOLS mode there might already be deployed installer, remove them if [ ! -z "$DEBUG_SCRIPTS" ]; then (cd "${TMP_BINARIES_PATH}" ; find . -type d -name 'Natron-*' -exec rm -rf {} \;) || true fi mkdir -p "${TMP_PORTABLE_DIR}" if [ -z "${NATRON_BUILD_CONFIG:-}" ]; then echo "NATRON_BUILD_CONFIG empty" exit 1 fi # The following should be consistent with paths configured in uploadArtifactsMain.sh if [ "$NATRON_BUILD_CONFIG" = "SNAPSHOT" ]; then REMOTE_PATH="${REMOTE_PREFIX}/snapshots" APP_INSTALL_SUFFIX="Natron-snapshot" APP_ADMIN_INSTALL_SUFFIX="$APP_INSTALL_SUFFIX" elif [ "$NATRON_BUILD_CONFIG" = "RELEASE" ] || [ "$NATRON_BUILD_CONFIG" = "STABLE" ]; then REMOTE_PATH="${REMOTE_PREFIX}/releases" APP_INSTALL_SUFFIX="Natron-${NATRON_VERSION_FULL}" APP_ADMIN_INSTALL_SUFFIX="$APP_INSTALL_SUFFIX" else REMOTE_PATH="${REMOTE_PREFIX}/other_builds" APP_INSTALL_SUFFIX="Natron" APP_ADMIN_INSTALL_SUFFIX="$APP_INSTALL_SUFFIX" fi REMOTE_PROJECT_PATH="$REMOTE_PATH/$PKGOS/$BITS/$BUILD_NAME" REMOTE_ONLINE_PACKAGES_PATH="$REMOTE_PROJECT_PATH/packages" # The date passed to the ReleaseDate tag of the xml config file of the installer. This has a different format than CURRENT_DATE. INSTALLER_XML_DATE="$(date -u "+%Y-%m-%d")" # tag symbols we want to keep with 'release' VERSION_TAG="${CURRENT_DATE}" if [ "${NATRON_BUILD_CONFIG}" = "RELEASE" ] || [ "$NATRON_BUILD_CONFIG" = "STABLE" ]; then BPAD_TAG="-release" VERSION_TAG="${NATRON_VERSION_FULL}" fi # SETUP XML="$INC_PATH/xml" QS="$INC_PATH/qs" mkdir -p "${INSTALLER_PATH}/config" "${INSTALLER_PATH}/packages" # Customize the config file $GSED "s/_VERSION_/${NATRON_VERSION_FULL}/;s#_RBVERSION_#${NATRON_GIT_BRANCH}#g;s#_REMOTE_PATH#${REMOTE_ONLINE_PACKAGES_PATH}#g;s#_REMOTE_URL_#${REMOTE_URL}#g;s#_APP_INSTALL_SUFFIX_#${APP_INSTALL_SUFFIX}#g;s#_APP_ADMIN_INSTALL_SUFFIX_#${APP_ADMIN_INSTALL_SUFFIX}#g" "$INC_PATH/config/$PKGOS.xml" > "${INSTALLER_PATH}/config/config.xml" # Copy installer images to the config folder cp "$INC_PATH/config"/*.png "${INSTALLER_PATH}/config/" function installPlugin() { OFX_BINARY="$1" PACKAGE_NAME="$2" PACKAGE_XML="$3" PACKAGE_INSTALL_SCRIPT="$4" NATRON_LIBS="$5" if [ ! -d "${TMP_BINARIES_PATH}/OFX/Plugins/${OFX_BINARY}.ofx.bundle" ]; then return 0 fi # Create package PKG_PATH="${INSTALLER_PATH}/packages/$PACKAGE_NAME" if [ ! -d "$PKG_PATH" ]; then mkdir -p "$PKG_PATH/data" "$PKG_PATH/meta" "$PKG_PATH/data/Plugins/OFX/Natron" fi # Copy to portable archive if [ ! -d "${TMP_PORTABLE_DIR}/Plugins/OFX/Natron" ]; then mkdir -p "${TMP_PORTABLE_DIR}/Plugins/OFX/Natron" fi # Configure package for installer if [ ! -f "$PKG_PATH/meta/package.xml" ]; then $GSED -e "s/_VERSION_/${VERSION_TAG}/;s/_DATE_/${INSTALLER_XML_DATE}/" < "$PACKAGE_XML" > "$PKG_PATH/meta/package.xml" fi if [ ! -f "$PKG_PATH/meta/installscript.qs" ]; then cp "$PACKAGE_INSTALL_SCRIPT" "$PKG_PATH/meta/installscript.qs" fi # Dump symbols if [ "${DISABLE_BREAKPAD:-}" != "1" ]; then "${SDK_HOME}/bin/dump_syms" "${TMP_BINARIES_PATH}"/OFX/Plugins/"${OFX_BINARY}".ofx.bundle/Contents/*/"${OFX_BINARY}".ofx > "${BUILD_ARCHIVE_DIRECTORY}/symbols/${OFX_BINARY}.ofx-${CURRENT_DATE}${BPAD_TAG:-}-${PKGOS_BITS}.sym" fi # Extract dependencies OFX_DEPENDS="$(ldd $(find "${TMP_BINARIES_PATH}/OFX/Plugins/${OFX_BINARY}.ofx.bundle/Contents/Linux-"* -maxdepth 1 -type f) | grep "${SDK_HOME}" | awk '{print $3}'|sort|uniq)" if [ ! -z "$OFX_DEPENDS" ]; then LIBS_DIR="${TMP_BINARIES_PATH}/OFX/Plugins/${OFX_BINARY}.ofx.bundle/Libraries" mkdir -p "$LIBS_DIR" DEPENDS_NOT_PART_OF_NATRON=() for x in $OFX_DEPENDS; do # Add the dep only if it is not part of Natron libs pluginlib="$(basename "$x")" CREATE_SYM_LINK=0 for y in $NATRON_LIBS; do natronlib="$(basename "$y")" if [ "$pluginlib" = "$natronlib" ]; then CREATE_SYM_LINK=1 break fi done if [ "$CREATE_SYM_LINK" = "1" ]; then # Create a sym-link to the already bundled dependency in ${TMP_PORTABLE_DIR}/lib # This is a relative path, assuming the plug-in Libraries directory is: # ${TMP_PORTABLE_DIR}/Plugins/OFX/Natron/${OFX_BINARY}.ofx.bundle/Libraries (cd "$LIBS_DIR"; ln -sf ../../../../../lib/"$pluginlib" .) else DEPENDS_NOT_PART_OF_NATRON+=("$x") fi done # Copy deps to Libraries directory in the plug-in bundle for x in "${DEPENDS_NOT_PART_OF_NATRON[@]-}"; do if [ ! -z "$x" ]; then pluginlib="$(basename "$x")" if [ -f "$x" ] && [ ! -f "$LIBS_DIR/$pluginlib" ] && [ ! -L "$LIBS_DIR/$pluginlib" ]; then cp -f "$x" "$LIBS_DIR/" fi fi done # Extract dependencies of the dependencies OFX_LIB_DEP=$(ldd $(find "$LIBS_DIR" -maxdepth 1 -type f) |grep "$SDK_HOME" | awk '{print $3}'|sort|uniq) for y in $OFX_LIB_DEP; do pluginlib="$(basename "$y")" if [ ! -f "$LIBS_DIR/$pluginlib" ] && [ ! -L "$LIBS_DIR/$pluginlib" ]; then cp -f "$y" "$LIBS_DIR/" fi done # Set the rpath of the shared libraries to origin pushd "$LIBS_DIR" chmod 755 *.so* || true for i in *.so*; do patchelf --force-rpath --set-rpath "\$ORIGIN" "$i" || true done popd fi # Strip binary if [ "$COMPILE_TYPE" != "debug" ]; then find "${TMP_BINARIES_PATH}/OFX/Plugins/${OFX_BINARY}.ofx.bundle" -type f \( -iname '*.so' -o -iname '*.ofx' \) -exec strip -s {} \; &>/dev/null fi for location in "$PKG_PATH/data" "${TMP_PORTABLE_DIR}"; do # Copy plug-in bundle cp -a "${TMP_BINARIES_PATH}/OFX/Plugins/${OFX_BINARY}.ofx.bundle" "$location/Plugins/OFX/Natron/" done } # Natron package NATRON_PKG="fr.inria.natron" NATRON_PACKAGE_PATH="${INSTALLER_PATH}/packages/${NATRON_PKG}" PACKAGES="${NATRON_PKG}" # Create package directories mkdir -p "$NATRON_PACKAGE_PATH/meta" # Configure natron package xml $GSED "s/_VERSION_/${CURRENT_DATE}/;s/_DATE_/${INSTALLER_XML_DATE}/" "$XML/natron.xml" > "$NATRON_PACKAGE_PATH/meta/package.xml" cat "$QS/$PKGOS/natron.qs" > "$NATRON_PACKAGE_PATH/meta/installscript.qs" cat "${TMP_BINARIES_PATH}/docs/natron/LICENSE.txt" > "$NATRON_PACKAGE_PATH/meta/natron-license.txt" #GLIBCXX 3.4.19 is for GCC 4.8.3, see https://gcc.gnu.org/onlinedocs/libstdc++/manual/abi.html compat_version_script=3.4.19 compat_version=3.4.19 case "${TC_GCC:-}" in 4.4.[2-9]) compat_version=3.4.13 ;; 4.5.*) compat_version=3.4.14 ;; 4.6.0) compat_version=3.4.15 ;; 4.6.[1-9]) compat_version=3.4.16 ;; 4.7.*) compat_version=3.4.17 ;; 4.8.[0-2]) compat_version=3.4.18 ;; 4.8.[3-9]) compat_version=3.4.19 ;; 4.9.*|5.0.*) compat_version=3.4.20 ;; 5.[1-9].*|6.0.*) compat_version=3.4.21 ;; 6.[1-9].*|7.0*) compat_version=3.4.22 ;; 7.1.*) compat_version=3.4.23 ;; 7.[2-9].*) compat_version=3.4.24 ;; 8.*) compat_version=3.4.25 ;; 9.*) compat_version=3.4.26 ;; esac BINARIES_TO_INSTALL="" if [ "${DISABLE_BREAKPAD:-}" != "1" ]; then mv "${TMP_BINARIES_PATH}/bin/Natron" "${TMP_BINARIES_PATH}/bin/Natron-bin" mv "${TMP_BINARIES_PATH}/bin/NatronRenderer" "${TMP_BINARIES_PATH}/bin/NatronRenderer-bin" mv "${TMP_BINARIES_PATH}/bin/NatronCrashReporter" "${TMP_BINARIES_PATH}/bin/Natron" mv "${TMP_BINARIES_PATH}/bin/NatronRendererCrashReporter" "${TMP_BINARIES_PATH}/bin/NatronRenderer" BINARIES_TO_INSTALL="${TMP_BINARIES_PATH}/bin/Natron ${TMP_BINARIES_PATH}/bin/NatronRenderer ${TMP_BINARIES_PATH}/bin/Natron-bin ${TMP_BINARIES_PATH}/bin/NatronRenderer-bin" else BINARIES_TO_INSTALL="${TMP_BINARIES_PATH}/bin/Natron ${TMP_BINARIES_PATH}/bin/NatronRenderer" fi if [ -f "${TMP_BINARIES_PATH}/bin/NatronProjectConverter" ]; then BINARIES_TO_INSTALL="$BINARIES_TO_INSTALL ${TMP_BINARIES_PATH}/bin/NatronProjectConverter" fi if [ -f "${TMP_BINARIES_PATH}/bin/natron-python" ]; then BINARIES_TO_INSTALL="$BINARIES_TO_INSTALL ${TMP_BINARIES_PATH}/bin/natron-python" fi BINARIES_TO_INSTALL="$BINARIES_TO_INSTALL ${SDK_HOME}/bin/iconvert ${SDK_HOME}/bin/idiff ${SDK_HOME}/bin/igrep ${SDK_HOME}/bin/iinfo" BINARIES_TO_INSTALL="$BINARIES_TO_INSTALL ${SDK_HOME}/bin/exrheader ${SDK_HOME}/bin/tiffinfo" BINARIES_TO_INSTALL="$BINARIES_TO_INSTALL ${FFMPEG_PATH}/bin/ffmpeg ${FFMPEG_PATH}/bin/ffprobe" # We copy all files to both the portable archive and the package for the installer in a loop COPY_LOCATIONS=("${TMP_PORTABLE_DIR}" "$NATRON_PACKAGE_PATH/data") for location in "${COPY_LOCATIONS[@]}"; do mkdir -p "$location/docs" "$location/bin" "$location/Resources" "$location/Plugins/PyPlugs" "$location/Resources/stylesheets" cp -pPR "${TMP_BINARIES_PATH}/docs/natron"/* "$location/docs/" [ -f "$location/docs/TuttleOFX-README.txt" ] && rm "$location/docs/TuttleOFX-README.txt" cp -R "${TMP_BINARIES_PATH}/Resources/etc" "$location/Resources/" cp "${TMP_BINARIES_PATH}/Resources/stylesheets"/mainstyle.qss "$location/Resources/stylesheets/" cp "$INC_PATH/natron/natron-mime.sh" "$location/bin/" cp "${TMP_BINARIES_PATH}/PyPlugs"/* "$location/Plugins/PyPlugs/" # OCIO -> has its own package, see below #cp -LR "${TMP_BINARIES_PATH}/Natron/OpenColorIO-Configs" "$location/Resources/" # Configure shell launch script with gcc compat version $GSED -e "s#${compat_version_script}#${compat_version}#" "$INC_PATH/scripts/Natron-Linux.sh" > "$location/Natron" $GSED -e "s#${compat_version_script}#${compat_version}#" -e "s#bin/Natron#bin/NatronRenderer#" "$INC_PATH/scripts/Natron-Linux.sh" > "$location/NatronRenderer" chmod a+x "$location/Natron" "$location/NatronRenderer" # Include in the portable version the test program that we will use later on if [ "$location" = "${TMP_PORTABLE_DIR}" ]; then cp "${TMP_BINARIES_PATH}/bin/Tests" "$location/bin/" fi for b in $BINARIES_TO_INSTALL; do cp "$b" "$location/bin/" done # If the binary contains upper case letters, make a symbolic link. # Solves https://github.com/NatronGitHub/Natron/issues/225 for b in Natron NatronRenderer NatronProjectConverter; do if [ -f "$location/bin/$b" ]; then fname="$(basename "$b")" fnamel="$(echo "$fname" | tr '[:upper:]' '[:lower:]')" # https://stackoverflow.com/a/2264537 if [ "$fname" != "$fnamel" ]; then ln -s "$fname" "$location/bin/$fnamel" fi fi done # end for all locations done # Dump symbols for crash reporting if [ "${DISABLE_BREAKPAD:-}" != "1" ]; then "${SDK_HOME}/bin"/dump_syms "${TMP_BINARIES_PATH}/bin/Natron" > "${BUILD_ARCHIVE_DIRECTORY}/symbols/Natron-${CURRENT_DATE}${BPAD_TAG:-}-${PKGOS_BITS}.sym" "${SDK_HOME}/bin"/dump_syms "${TMP_BINARIES_PATH}/bin/NatronRenderer" > "${BUILD_ARCHIVE_DIRECTORY}/symbols/NatronRenderer-${CURRENT_DATE}${BPAD_TAG:-}-${PKGOS_BITS}.sym" fi # Get all dependencies of the binaries CORE_DEPENDS="$(ldd $(find "${TMP_PORTABLE_DIR}/bin" -maxdepth 1 -type f) | grep "$SDK_HOME" | awk '{print $3}'|sort|uniq)" # icu libraries don't seem to be picked up by this ldd call above pushd "${SDK_HOME}/lib" for dep in {libicudata.so.*,libicui18n.so.*,libicuuc.so.*,libbz2.so.*,liblcms2.so.*,libcairo.so.*,libOpenColorIO.so.*}; do if [ -f "$dep" ]; then CORE_DEPENDS="$CORE_DEPENDS ${SDK_HOME}/lib/$dep" fi done popd # OCIO package OCIO_PKG="fr.inria.natron.color" PACKAGES="${PACKAGES},${OCIO_PKG}" OCIO_PACKAGE_PATH="${INSTALLER_PATH}/packages/${OCIO_PKG}" # OCIO package version (linux/windows) # bump number when OpenColorIO-Configs changes OCIO_VERSION="20180327000000" # OCIO for c in blender natron nuke-default; do lib="${TMP_BINARIES_PATH}/Resources/OpenColorIO-Configs/${c}/config.ocio" LAST_MODIFICATION_DATE="$(date -u -r "$lib" "+%Y%m%d%H%M%S")" if [ "$LAST_MODIFICATION_DATE" -gt "$OCIO_VERSION" ]; then OCIO_VERSION="$LAST_MODIFICATION_DATE" fi done # Create package directories mkdir -p "$OCIO_PACKAGE_PATH/meta" $GSED "s/_VERSION_/${OCIO_VERSION}/;s/_DATE_/${INSTALLER_XML_DATE}/" "$XML/ocio.xml" > "$OCIO_PACKAGE_PATH/meta/package.xml" cat "$QS/ocio.qs" > "$OCIO_PACKAGE_PATH/meta/installscript.qs" # We copy all files to both the portable archive and the package for the installer in a loop COPY_LOCATIONS=("${TMP_PORTABLE_DIR}" "$OCIO_PACKAGE_PATH/data") for location in "${COPY_LOCATIONS[@]}"; do mkdir -p "$location/Resources" cp -LR "${TMP_BINARIES_PATH}/Resources/OpenColorIO-Configs" "$location/Resources/" # end for all locations done # Distribute Natron dependencies in a separate package so that the user only # receive updates for DLLs when we actually update them # rather than every time we recompile Natron CORELIBS_PKG="fr.inria.natron.libs" PACKAGES="${PACKAGES},${CORELIBS_PKG}" LIBS_PACKAGE_PATH="${INSTALLER_PATH}/packages/${CORELIBS_PKG}" mkdir -p "$LIBS_PACKAGE_PATH/meta" # Function to fix rpath of libraries in a list of folders function fixrpath() { FOLDERS="$1" RPATH="$2" for folder in $FOLDERS; do (cd "$folder"; for i in *; do if [ -f "$i" ]; then chmod u+w $i patchelf --force-rpath --set-rpath "\$ORIGIN${RPATH}" "$i" || true optlibs="$(ldd "$i" | grep "$SDK_HOME" | awk '{print $3}'|sort|uniq)" if [ ! -z "$optlibs" ]; then for r in $optlibs; do echo "Warning: runtime path remaining to $r for $folder/$i" done fi fi done ) done } # We copy all files to both the portable archive and the package for the installer in a loop COPY_LOCATIONS=("${TMP_PORTABLE_DIR}" "$LIBS_PACKAGE_PATH/data") for location in "${COPY_LOCATIONS[@]}"; do mkdir -p "${location}/bin" "${location}/lib" "${location}/Resources/pixmaps" #cp "${SDK_HOME}/qt${QT_VERSION_MAJOR}/lib/libQtDBus.so.4" "${location}/lib/" cp "${TMP_BINARIES_PATH}/Resources/pixmaps/natronIcon256_linux.png" "${location}/Resources/pixmaps/" cp "${TMP_BINARIES_PATH}/Resources/pixmaps/natronProjectIcon_linux.png" "${location}/Resources/pixmaps/" cp -pPR "${SDK_HOME}/share/poppler" "${location}/Resources/" cp -pPR "${SDK_HOME}/qt${QT_VERSION_MAJOR}/plugins"/* "${location}/bin/" # Copy dependencies for i in $CORE_DEPENDS; do dep=$(basename "$i") if [ ! -f "${location}/lib/$dep" ]; then cp -f "$i" "${location}/lib/" fi done # Copy dependencies of the libraries LIB_DEPENDS=$(ldd $(find "${location}/lib" -maxdepth 1 -type f) |grep "$SDK_HOME" | awk '{print $3}'|sort|uniq) for y in $LIB_DEPENDS; do dep=$(basename "$y") if [ ! -f "${location}/lib/$dep" ]; then cp -f "$y" "${location}/lib/" fi done # Qt plug-in dependencies QT_PLUG_DEPENDS=$(ldd $(find "${location}/bin" -maxdepth 2 -type f -name '*.so') | grep "$SDK_HOME" | awk '{print $3}'|sort|uniq) for z in $QT_PLUG_DEPENDS; do dep=$(basename "$z") if [ ! -f "${location}/lib/$dep" ]; then cp -f "$z" "${location}/lib/" fi done # Copy gcc compat libs #if [ -f "$INC_PATH/misc/compat${BITS}.tgz" ] && [ "$SDK_VERSION" = "CY2015" ]; then # tar xvf "$INC_PATH/misc/compat${BITS}.tgz" -C "${location}/lib/" #fi # done in build-natron.sh #mkdir -p "${location}/Resources/etc/fonts/conf.d" #cp "${SDK_HOME}/etc/fonts/fonts.conf" "${location}/Resources/etc/fonts/" #cp "${SDK_HOME}/share/fontconfig/conf.avail"/* "${location}/Resources/etc/fonts/conf.d/" #$GSED -i "s#${SDK_HOME}/#/#;/conf.d/d" "${location}/Resources/etc/fonts/fonts.conf" # strip binaries if [ "$COMPILE_TYPE" != "debug" ]; then strip -s "${location}/bin"/* &>/dev/null || true strip -s "${location}/lib"/* &>/dev/null || true strip -s "${location}/bin"/*/* &>/dev/null || true fi if [ ! -d "${location}/Plugins" ]; then mkdir -p "${location}/Plugins" fi # end for all locations done # Copy Python distrib # Remove it if it existed already (with DEBUG_SCRIPTS=1) if [ -d "${TMP_PORTABLE_DIR}/lib/python${PYVER}" ]; then rm -rf "${TMP_PORTABLE_DIR}/lib/python${PYVER}" fi # The whitelist of python site-packages: #python_site_packages=(easy_install.py easy_install.pyc pip pkg_resources PySide README setuptools wheel shiboken.so) # Note that pip and dependencies were already installed by get-pip.py python_site_packages=(PySide shiboken.so) mkdir -p "${TMP_PORTABLE_DIR}/lib/python${PYVER}" for pydir in "${SDK_HOME}/lib/python${PYVER}" "${SDK_HOME}/qt${QT_VERSION_MAJOR}/lib/python${PYVER}"; do (cd "$pydir"; tar cf - . --exclude site-packages)|(cd "${TMP_PORTABLE_DIR}/lib/python${PYVER}"; tar xf -) for p in "${python_site_packages[@]}"; do if [ -e "$pydir/site-packages/$p" ]; then (cd "$pydir"; tar cf - "site-packages/$p") | (cd "${TMP_PORTABLE_DIR}/lib/python${PYVER}"; tar xf -) fi done done # Move PySide to plug-ins directory and keep a symbolic link in site-packages mv "${TMP_PORTABLE_DIR}/lib/python${PYVER}/site-packages/PySide" "${TMP_PORTABLE_DIR}/Plugins/" (cd "${TMP_PORTABLE_DIR}/lib/python${PYVER}/site-packages"; ln -sf "../../../Plugins/PySide" . ) # Remove unused stuff rm -rf "${TMP_PORTABLE_DIR}/lib/python${PYVER}"/{test,config,config-"${PYVER}m"} # Copy PySide dependencies PYSIDE_DEPENDS=$(ldd $(find "${SDK_HOME}/qt${QT_VERSION_MAJOR}/lib/python${PYVER}/site-packages/PySide" -maxdepth 1 -type f) | grep "$SDK_HOME" | awk '{print $3}'|sort|uniq) for y in $PYSIDE_DEPENDS; do dep=$(basename "$y") if [ ! -f "${TMP_PORTABLE_DIR}/lib/$dep" ]; then cp -f "$y" "${TMP_PORTABLE_DIR}/lib/" fi done # Remove any pycache (cd "${TMP_PORTABLE_DIR}" ; find . -type d -name __pycache__ -exec rm -rf {} \;) # Strip pyside and python depends if [ "${COMPILE_TYPE}" != "debug" ]; then strip -s "${TMP_PORTABLE_DIR}/Plugins/PySide"/* "${TMP_PORTABLE_DIR}/lib"/python*/* "${TMP_PORTABLE_DIR}/lib"/python*/*/* &>/dev/null || true fi # Remove pyo files and prune unneeded files using python-exclude file PYDIR="${TMP_PORTABLE_DIR}/lib/python${PYVER:-}" find "${PYDIR}" -type f -name '*.pyo' -exec rm {} \; (cd "${PYDIR}"; xargs rm -rf || true) < "$INC_PATH/python-exclude.txt" # python zip if [ "${USE_QT5:-}" != 1 ]; then rm -rf "$PYDIR"/site-packages/shiboken2* "$PYDIR"/site-packages/PySide2 || true fi fixrpath "${TMP_PORTABLE_DIR}/Plugins/PySide" "/../../lib" fixrpath "${TMP_PORTABLE_DIR}/lib/python${PYVER:-}/lib-dynload ${TMP_PORTABLE_DIR}/lib/python${PYVER:-}/site-packages" "/../.." export PY_BIN="${SDK_HOME}/bin/python${PYVER:-}" export PYDIR="$PYDIR" . "$CWD"/zip-python.sh # Install pip if [ -x "${TMP_PORTABLE_DIR}"/bin/natron-python ]; then $CURL --remote-name --insecure http://bootstrap.pypa.io/get-pip.py "${TMP_PORTABLE_DIR}"/bin/natron-python get-pip.py rm get-pip.py fi # Run extra user provided pip install scripts if [ -f "${EXTRA_PYTHON_MODULES_SCRIPT:-}" ]; then "${TMP_PORTABLE_DIR}"/bin/natron-python "$EXTRA_PYTHON_MODULES_SCRIPT" || true fi # Copy Python distrib to installer package cp -pPR "$PYDIR" "$LIBS_PACKAGE_PATH/data/lib/" cp "${TMP_PORTABLE_DIR}"/lib/python*.zip "${LIBS_PACKAGE_PATH}/data/lib/" mkdir -p "$LIBS_PACKAGE_PATH/data/Plugins/" cp -pPR "${TMP_PORTABLE_DIR}/Plugins/PySide" "$LIBS_PACKAGE_PATH/data/Plugins/" # Fix RPATH (we don't want to link against system libraries when deployed) for location in "$LIBS_PACKAGE_PATH/data" "${TMP_PORTABLE_DIR}"; do fixrpath "${location}/bin" "/../lib" fixrpath "${location}/lib" "" BIN_SUBDIRS=$(find "${location}/bin" -type d ! -path "${location}/bin") fixrpath "$BIN_SUBDIRS" "/../../lib" done ALL_NATRON_LIBS=$(ls "$LIBS_PACKAGE_PATH/data/lib"/*.so*) PACKAGES="${PACKAGES},fr.inria.openfx.io" installPlugin "IO" "fr.inria.openfx.io" "$XML/openfx-io.xml" "$QS/openfx-io.qs" "$ALL_NATRON_LIBS" PACKAGES="${PACKAGES},fr.inria.openfx.misc" installPlugin "Misc" "fr.inria.openfx.misc" "$XML/openfx-misc.xml" "$QS/openfx-misc.qs" "$ALL_NATRON_LIBS" installPlugin "CImg" "fr.inria.openfx.misc" "$XML/openfx-misc.xml" "$QS/openfx-misc.qs" "$ALL_NATRON_LIBS" installPlugin "Shadertoy" "fr.inria.openfx.misc" "$XML/openfx-misc.xml" "$QS/openfx-misc.qs" "$ALL_NATRON_LIBS" PACKAGES="${PACKAGES},fr.inria.openfx.extra" installPlugin "Arena" "fr.inria.openfx.extra" "$XML/openfx-arena.xml" "$QS/openfx-arena.qs" "$ALL_NATRON_LIBS" installPlugin "ArenaCL" "fr.inria.openfx.extra" "$XML/openfx-arena.xml" "$QS/openfx-arena.qs" "$ALL_NATRON_LIBS" PACKAGES="${PACKAGES},fr.inria.openfx.gmic" installPlugin "GMIC" "fr.inria.openfx.gmic" "$XML/openfx-gmic.xml" "$QS/openfx-gmic.qs" "$ALL_NATRON_LIBS" # Configure the package date using the most recent library modification date CLIBS_VERSION="00000000000000" for lib in $ALL_NATRON_LIBS; do LAST_MODIFICATION_DATE=$(date -u -r "$lib" "+%Y%m%d%H%M%S") if [ "$LAST_MODIFICATION_DATE" -gt "$CLIBS_VERSION" ]; then CLIBS_VERSION="$LAST_MODIFICATION_DATE" fi done $GSED "s/_VERSION_/${CLIBS_VERSION}/;s/_DATE_/${INSTALLER_XML_DATE}/" "$XML/corelibs.xml" > "$LIBS_PACKAGE_PATH/meta/package.xml" cat "$QS/$PKGOS/corelibs.qs" > "$LIBS_PACKAGE_PATH/meta/installscript.qs" # Generate documentation bash "$CWD"/gen-natron-doc.sh # Copy documentation installed in the portable dir to installer package if [ -d "$NATRON_PACKAGE_PATH/data/Resources/docs" ]; then rm -rf "$NATRON_PACKAGE_PATH/data/Resources/docs" fi if [ -d "${TMP_PORTABLE_DIR}/Resources/docs" ]; then cp -R "${TMP_PORTABLE_DIR}/Resources/docs" "$NATRON_PACKAGE_PATH/data/Resources/" fi # At this point we can run Natron unit tests to check that the deployment is ok. rm -rf "$HOME/.cache/INRIA/Natron"* &> /dev/null || true $TIMEOUT -s KILL 1800 valgrind --tool=memcheck --suppressions="$INC_PATH/natron/valgrind-python.supp" "${TMP_PORTABLE_DIR}/bin/Tests" rm "${TMP_PORTABLE_DIR}/bin/Tests" # Clean and perms (cd "${INSTALLER_PATH}"; find . -type d -name .git -exec rm -rf {} \;) # Build repo and packages ONLINE_INSTALL_DIR="online_installer" BUNDLED_INSTALL_DIR="offline_installer" ZIP_INSTALL_DIR="compressed_no_installer" DEB_INSTALL_DIR="deb_package" RPM_INSTALL_DIR="rpm_package" if [ -d "${BUILD_ARCHIVE_DIRECTORY}" ]; then rm -rf "${BUILD_ARCHIVE_DIRECTORY}" fi mkdir -p "${BUILD_ARCHIVE_DIRECTORY}/$BUNDLED_INSTALL_DIR" if [ "$DISABLE_PORTABLE_ARCHIVE" != "1" ]; then # Portable archive mkdir -p "${BUILD_ARCHIVE_DIRECTORY}/$ZIP_INSTALL_DIR" (cd "${TMP_BINARIES_PATH}" && tar Jcf "${PORTABLE_DIRNAME}.tar.xz" "${PORTABLE_DIRNAME}"; mv "${PORTABLE_DIRNAME}.tar.xz" "${BUILD_ARCHIVE_DIRECTORY}/$ZIP_INSTALL_DIR/${PORTABLE_DIRNAME}.tar.xz") fi if [ "$WITH_ONLINE_INSTALLER" = "1" ]; then mkdir -p "${BUILD_ARCHIVE_DIRECTORY}/$ONLINE_INSTALL_DIR" mkdir -p "${BUILD_ARCHIVE_DIRECTORY}/$ONLINE_INSTALL_DIR/packages" # Gen online repo "${SDK_HOME}/installer/bin"/repogen -v --update-new-components -p "${INSTALLER_PATH}/packages" -c "${INSTALLER_PATH}/config/config.xml" "${BUILD_ARCHIVE_DIRECTORY}/$ONLINE_INSTALL_DIR/packages" # Online installer echo "*** Creating online installer ${BUILD_ARCHIVE_DIRECTORY}/$ONLINE_INSTALL_DIR/$INSTALLER_BASENAME" "${SDK_HOME}/installer/bin"/binarycreator -v -n -p "${INSTALLER_PATH}/packages" -c "${INSTALLER_PATH}/config/config.xml" "${BUILD_ARCHIVE_DIRECTORY}/$ONLINE_INSTALL_DIR/${INSTALLER_BASENAME}-online" (cd "${BUILD_ARCHIVE_DIRECTORY}/$ONLINE_INSTALL_DIR" && tar zcf "${INSTALLER_BASENAME}-online.tgz" "${INSTALLER_BASENAME}-online" && rm "${INSTALLER_BASENAME}-online") fi # Offline installer echo "*** Creating offline installer ${BUILD_ARCHIVE_DIRECTORY}/$BUNDLED_INSTALL_DIR/$INSTALLER_BASENAME" "${SDK_HOME}/installer/bin"/binarycreator -v -f -p "${INSTALLER_PATH}/packages" -c "${INSTALLER_PATH}/config/config.xml" -i "$PACKAGES" "${BUILD_ARCHIVE_DIRECTORY}/$BUNDLED_INSTALL_DIR/$INSTALLER_BASENAME" (cd "${BUILD_ARCHIVE_DIRECTORY}/$BUNDLED_INSTALL_DIR" && tar zcf "${INSTALLER_BASENAME}.tgz" "$INSTALLER_BASENAME" && rm "$INSTALLER_BASENAME") # collect debug versions for gdb #if [ "$NATRON_BUILD_CONFIG" = "STABLE" ]; then # DEBUG_DIR=${INSTALLER_PATH}/Natron-$NATRON_VERSION_STRING-Linux${BITS}-Debug # rm -rf "$DEBUG_DIR" # mkdir "$DEBUG_DIR" # cp -pPR "${SDK_HOME}/bin"/Natron* "$DEBUG_DIR/" # cp -pPR "${SDK_HOME}/Plugins"/*.ofx.bundle/Contents/Linux*/*.ofx "$DEBUG_DIR/" # ( cd "${INSTALLER_PATH}"; tar Jcf "Natron-$NATRON_VERSION_STRING-Linux${BITS}-Debug.tar.xz" "Natron-$NATRON_VERSION-Linux${BITS}-Debug" ) # mv "${DEBUG_DIR}.tar.xz" "$BUILD_ARCHIVE"/ #fi # Build native packages for linux if ( [ "$NATRON_BUILD_CONFIG" = "RELEASE" ] || [ "$NATRON_BUILD_CONFIG" = "STABLE" ] ) && [ "$DISABLE_RPM_DEB_PKGS" != "1" ]; then # rpm echo "*** Creating rpm" mkdir -p "${BUILD_ARCHIVE_DIRECTORY}/$RPM_INSTALL_DIR" if [ ! -f "/usr/bin/rpmbuild" ]; then if [ $EUID -ne 0 ]; then echo "Error: rpmdevtools not installed, please run: sudo yum install -y rpmdevtools" exit 2 else yum install -y rpmdevtools fi fi rm -rf ~/rpmbuild/* if $(gpg --list-keys | fgrep [email protected] > /dev/null); then echo "Info: gpg key for [email protected] found, all is right" else echo "Error: gpg key for [email protected] not found" exit fi $GSED "s/REPLACE_VERSION/$(echo "$NATRON_VERSION_STRING" | $GSED 's/-/./g')/;s#__NATRON_INSTALLER__#${INSTALLER_PATH}#;s#__INC__#${INC_PATH}#;s#__TMP_BINARIES_PATH__#${TMP_BINARIES_PATH}#" "$INC_PATH/natron/Natron.spec" > "$TMP_PATH/Natron.spec" #Only need to build once, so uncomment as default #echo "" | setsid rpmbuild -bb --define="%_gpg_name [email protected]" --sign $INC_PATH/natron/Natron-repo.spec echo "" | setsid rpmbuild -bb --define="%_gpg_name [email protected]" --sign "$TMP_PATH/Natron.spec" mv ~/rpmbuild/RPMS/*/Natron*.rpm "${BUILD_ARCHIVE_DIRECTORY}/$RPM_INSTALL_DIR/" # deb echo "*** Creating deb" mkdir -p "${BUILD_ARCHIVE_DIRECTORY}/$DEB_INSTALL_DIR" if [ ! -f "/usr/bin/dpkg-deb" ]; then if [ $EUID -ne 0 ]; then echo "Error: dpkg-dev not installed, please run: sudo yum install -y dpkg-dev" exit 2 else yum install -y dpkg-dev fi fi rm -rf "${INSTALLER_PATH}/natron" mkdir -p "${INSTALLER_PATH}/natron" cd "${INSTALLER_PATH}/natron" mkdir -p opt/Natron2 DEBIAN usr/share/doc/natron usr/share/{applications,pixmaps} usr/share/mime/packages usr/bin cp -pPR "${INSTALLER_PATH}/packages"/fr.inria.*/data/* opt/Natron2/ cp "$INC_PATH/debian"/post* DEBIAN/ chmod +x DEBIAN/post* if [ "${BITS}" = "64" ]; then DEB_ARCH=amd64 else DEB_ARCH=i386 fi DEB_VERSION=$(echo "$NATRON_VERSION_STRING" | $GSED 's/-/./g') DEB_DATE=$(date -u +"%a, %d %b %Y %T %z") DEB_SIZE=$(du -ks opt|cut -f 1) DEB_PKG="natron_${DEB_VERSION}_${DEB_ARCH}.deb" cat "$INC_PATH/debian/copyright" > usr/share/doc/natron/copyright $GSED "s/__VERSION__/${DEB_VERSION}/;s/__ARCH__/${DEB_ARCH}/;s/__SIZE__/${DEB_SIZE}/" "$INC_PATH/debian/control" > DEBIAN/control $GSED "s/__VERSION__/${DEB_VERSION}/;s/__DATE__/${DEB_DATE}/" "$INC_PATH/debian/changelog.Debian" > changelog.Debian gzip changelog.Debian mv changelog.Debian.gz usr/share/doc/natron/ cat "$INC_PATH/natron/Natron2.desktop" > usr/share/applications/Natron2.desktop cat "$INC_PATH/natron/x-natron.xml" > usr/share/mime/packages/x-natron.xml cp "${TMP_BINARIES_PATH}/Resources/pixmaps/natronIcon256_linux.png" usr/share/pixmaps/ cp "${TMP_BINARIES_PATH}/Resources/pixmaps/natronProjectIcon_linux.png" usr/share/pixmaps/ (cd usr/bin; ln -sf ../../opt/Natron2/Natron .) (cd usr/bin; ln -sf ../../opt/Natron2/NatronRenderer .) # why? #chown root:root -R "${INSTALLER_PATH}/natron" cd "${INSTALLER_PATH}" dpkg-deb -Zxz -z9 --build natron mv natron.deb "${DEB_PKG}" mv "${DEB_PKG}" "${BUILD_ARCHIVE_DIRECTORY}/$DEB_INSTALL_DIR/" fi echo "*** Artifacts:" ls -R "${BUILD_ARCHIVE_DIRECTORY}" echo "*** Linux installer: done!" # Local variables: # mode: shell-script # sh-basic-offset: 4 # sh-indent-comment: t # indent-tabs-mode: nil # End:
{ "pile_set_name": "Github" }
/* Include this file in your html if you are using the CSP mode. */ @charset "UTF-8"; [ng\:cloak], [ng-cloak], [data-ng-cloak], [x-ng-cloak], .ng-cloak, .x-ng-cloak, .ng-hide:not(.ng-hide-animate) { display: none !important; } ng\:form { display: block; }
{ "pile_set_name": "Github" }
// // Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 25 2017 03:49:04). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard. // #import <AppKit/NSView.h> #import "NSTextFinderBarContainer-Protocol.h" @class HVLayerUpdateView, NSLayoutConstraint, NSString, WebView; @interface HVWebViewContainerView : NSView <NSTextFinderBarContainer> { HVLayerUpdateView *_findBarContainerView; NSView *_findBarView; BOOL _findBarVisible; WebView *_contentView; NSLayoutConstraint *_contentViewTopConstraint; NSLayoutConstraint *_findBarHeightConstraint; } @property(nonatomic) __weak NSLayoutConstraint *findBarHeightConstraint; // @synthesize findBarHeightConstraint=_findBarHeightConstraint; @property(nonatomic) __weak NSLayoutConstraint *contentViewTopConstraint; // @synthesize contentViewTopConstraint=_contentViewTopConstraint; @property(nonatomic) __weak WebView *contentView; // @synthesize contentView=_contentView; - (void).cxx_destruct; - (void)_tileSubviews; - (void)findBarViewDidChangeHeight; @property(getter=isFindBarVisible) BOOL findBarVisible; @property(retain) NSView *findBarView; @property(readonly, nonatomic) NSView *findBarContainerView; // Remaining properties @property(readonly, copy) NSString *debugDescription; @property(readonly, copy) NSString *description; @property(readonly) unsigned long long hash; @property(readonly) Class superclass; @end
{ "pile_set_name": "Github" }
// Copyright (c) 2012 Ecma International. All rights reserved. // Ecma International makes this code available under the terms and conditions set // forth on http://hg.ecmascript.org/tests/test262/raw-file/tip/LICENSE (the // "Use Terms"). Any redistribution of this code must retain the above // copyright and this notice and otherwise comply with the Use Terms. /*--- info: > This test is actually testing the [[Delete]] internal method (8.12.8). Since the language provides no way to directly exercise [[Delete]], the tests are placed here. es5id: 11.4.1-4.a-3-s description: > delete operator throws TypeError when deleting a non-configurable data property in strict mode flags: [onlyStrict] includes: [runTestCase.js] ---*/ function testcase() { 'use strict'; var o = {}; var desc = { value : 1 }; // all other attributes default to false Object.defineProperty(o, "foo", desc); // Now, deleting o.foo should throw TypeError because [[Configurable]] on foo is false. try { delete o.foo; return false; } catch (e) { return (e instanceof TypeError); } } runTestCase(testcase);
{ "pile_set_name": "Github" }
yeoman.io
{ "pile_set_name": "Github" }
/* ** DynASM s390x encoding engine. ** Copyright (C) 2005-2017 Mike Pall. All rights reserved. ** Released under the MIT license. See dynasm.lua for full copyright notice. */ #include <stddef.h> #include <stdarg.h> #include <string.h> #include <stdlib.h> #define DASM_ARCH "s390x" #ifndef DASM_EXTERN #define DASM_EXTERN(a,b,c,d) 0 #endif /* Action definitions. */ enum { DASM_STOP, DASM_SECTION, DASM_ESC, DASM_REL_EXT, /* The following actions need a buffer position. */ DASM_ALIGN, DASM_REL_LG, DASM_LABEL_LG, /* The following actions also have an argument. */ DASM_REL_PC, DASM_LABEL_PC, DASM_DISP12, DASM_DISP20, DASM_IMM8, DASM_IMM16, DASM_IMM32, DASM_LEN8R,DASM_LEN4HR,DASM_LEN4LR, DASM__MAX }; /* Maximum number of section buffer positions for a single dasm_put() call. */ #define DASM_MAXSECPOS 25 /* DynASM encoder status codes. Action list offset or number are or'ed in. */ #define DASM_S_OK 0x00000000 #define DASM_S_NOMEM 0x01000000 #define DASM_S_PHASE 0x02000000 #define DASM_S_MATCH_SEC 0x03000000 #define DASM_S_RANGE_I 0x11000000 #define DASM_S_RANGE_SEC 0x12000000 #define DASM_S_RANGE_LG 0x13000000 #define DASM_S_RANGE_PC 0x14000000 #define DASM_S_RANGE_REL 0x15000000 #define DASM_S_UNDEF_LG 0x21000000 #define DASM_S_UNDEF_PC 0x22000000 /* Macros to convert positions (8 bit section + 24 bit index). */ #define DASM_POS2IDX(pos) ((pos)&0x00ffffff) #define DASM_POS2BIAS(pos) ((pos)&0xff000000) #define DASM_SEC2POS(sec) ((sec)<<24) #define DASM_POS2SEC(pos) ((pos)>>24) #define DASM_POS2PTR(D, pos) (D->sections[DASM_POS2SEC(pos)].rbuf + (pos)) /* Action list type. */ typedef const unsigned short *dasm_ActList; /* Per-section structure. */ typedef struct dasm_Section { int *rbuf; /* Biased buffer pointer (negative section bias). */ int *buf; /* True buffer pointer. */ size_t bsize; /* Buffer size in bytes. */ int pos; /* Biased buffer position. */ int epos; /* End of biased buffer position - max single put. */ int ofs; /* Byte offset into section. */ } dasm_Section; /* Core structure holding the DynASM encoding state. */ struct dasm_State { size_t psize; /* Allocated size of this structure. */ dasm_ActList actionlist; /* Current actionlist pointer. */ int *lglabels; /* Local/global chain/pos ptrs. */ size_t lgsize; int *pclabels; /* PC label chains/pos ptrs. */ size_t pcsize; void **globals; /* Array of globals (bias -10). */ dasm_Section *section; /* Pointer to active section. */ size_t codesize; /* Total size of all code sections. */ int maxsection; /* 0 <= sectionidx < maxsection. */ int status; /* Status code. */ dasm_Section sections[1]; /* All sections. Alloc-extended. */ }; /* The size of the core structure depends on the max. number of sections. */ #define DASM_PSZ(ms) (sizeof(dasm_State)+(ms-1)*sizeof(dasm_Section)) /* Initialize DynASM state. */ void dasm_init(Dst_DECL, int maxsection) { dasm_State *D; size_t psz = 0; int i; Dst_REF = NULL; DASM_M_GROW(Dst, struct dasm_State, Dst_REF, psz, DASM_PSZ(maxsection)); D = Dst_REF; D->psize = psz; D->lglabels = NULL; D->lgsize = 0; D->pclabels = NULL; D->pcsize = 0; D->globals = NULL; D->maxsection = maxsection; for (i = 0; i < maxsection; i++) { D->sections[i].buf = NULL; /* Need this for pass3. */ D->sections[i].rbuf = D->sections[i].buf - DASM_SEC2POS(i); D->sections[i].bsize = 0; D->sections[i].epos = 0; /* Wrong, but is recalculated after resize. */ } } /* Free DynASM state. */ void dasm_free(Dst_DECL) { dasm_State *D = Dst_REF; int i; for (i = 0; i < D->maxsection; i++) if (D->sections[i].buf) DASM_M_FREE(Dst, D->sections[i].buf, D->sections[i].bsize); if (D->pclabels) DASM_M_FREE(Dst, D->pclabels, D->pcsize); if (D->lglabels) DASM_M_FREE(Dst, D->lglabels, D->lgsize); DASM_M_FREE(Dst, D, D->psize); } /* Setup global label array. Must be called before dasm_setup(). */ void dasm_setupglobal(Dst_DECL, void **gl, unsigned int maxgl) { dasm_State *D = Dst_REF; D->globals = gl - 10; /* Negative bias to compensate for locals. */ DASM_M_GROW(Dst, int, D->lglabels, D->lgsize, (10 + maxgl) * sizeof(int)); } /* Grow PC label array. Can be called after dasm_setup(), too. */ void dasm_growpc(Dst_DECL, unsigned int maxpc) { dasm_State *D = Dst_REF; size_t osz = D->pcsize; DASM_M_GROW(Dst, int, D->pclabels, D->pcsize, maxpc * sizeof(int)); memset((void *)(((unsigned char *)D->pclabels) + osz), 0, D->pcsize - osz); } /* Setup encoder. */ void dasm_setup(Dst_DECL, const void *actionlist) { dasm_State *D = Dst_REF; int i; D->actionlist = (dasm_ActList) actionlist; D->status = DASM_S_OK; D->section = &D->sections[0]; memset((void *)D->lglabels, 0, D->lgsize); if (D->pclabels) memset((void *)D->pclabels, 0, D->pcsize); for (i = 0; i < D->maxsection; i++) { D->sections[i].pos = DASM_SEC2POS(i); D->sections[i].ofs = 0; } } #ifdef DASM_CHECKS #define CK(x, st) \ do { if (!(x)) { \ D->status = DASM_S_##st|(p-D->actionlist-1); return; } } while (0) #define CKPL(kind, st) \ do { if ((size_t)((char *)pl-(char *)D->kind##labels) >= D->kind##size) { \ D->status = DASM_S_RANGE_##st|(p-D->actionlist-1); return; } } while (0) #else #define CK(x, st) ((void)0) #define CKPL(kind, st) ((void)0) #endif /* Pass 1: Store actions and args, link branches/labels, estimate offsets. */ void dasm_put(Dst_DECL, int start, ...) { va_list ap; dasm_State *D = Dst_REF; dasm_ActList p = D->actionlist + start; dasm_Section *sec = D->section; int pos = sec->pos, ofs = sec->ofs; int *b; if (pos >= sec->epos) { DASM_M_GROW(Dst, int, sec->buf, sec->bsize, sec->bsize + 2 * DASM_MAXSECPOS * sizeof(int)); sec->rbuf = sec->buf - DASM_POS2BIAS(pos); sec->epos = (int)sec->bsize / sizeof(int) - DASM_MAXSECPOS + DASM_POS2BIAS(pos); } b = sec->rbuf; b[pos++] = start; va_start(ap, start); while (1) { unsigned short ins = *p++; unsigned short action = ins; if (action >= DASM__MAX) { ofs += 2; continue; } int *pl, n = action >= DASM_REL_PC ? va_arg(ap, int) : 0; switch (action) { case DASM_STOP: goto stop; case DASM_SECTION: n = *p++ & 255; CK(n < D->maxsection, RANGE_SEC); D->section = &D->sections[n]; goto stop; case DASM_ESC: p++; ofs += 2; break; case DASM_REL_EXT: p++; ofs += 4; break; case DASM_ALIGN: ofs += *p++; b[pos++] = ofs; break; case DASM_REL_LG: if (p[-2] >> 12 == 0xc) { /* RIL instruction needs 32-bit immediate. */ ofs += 2; } n = *p++ - 10; pl = D->lglabels + n; /* Bkwd rel or global. */ if (n >= 0) { CK(n >= 10 || *pl < 0, RANGE_LG); CKPL(lg, LG); goto putrel; } pl += 10; n = *pl; if (n < 0) n = 0; /* Start new chain for fwd rel if label exists. */ goto linkrel; case DASM_REL_PC: if (p[-2] >> 12 == 0xc) { /* RIL instruction needs 32-bit immediate. */ ofs += 2; } pl = D->pclabels + n; CKPL(pc, PC); putrel: n = *pl; if (n < 0) { /* Label exists. Get label pos and store it. */ b[pos] = -n; } else { linkrel: b[pos] = n; /* Else link to rel chain, anchored at label. */ *pl = pos; } ofs += 2; pos++; break; case DASM_LABEL_LG: pl = D->lglabels + *p++ - 10; CKPL(lg, LG); goto putlabel; case DASM_LABEL_PC: pl = D->pclabels + n; CKPL(pc, PC); putlabel: n = *pl; /* n > 0: Collapse rel chain and replace with label pos. */ while (n > 0) { int *pb = DASM_POS2PTR(D, n); n = *pb; *pb = pos; } *pl = -pos; /* Label exists now. */ b[pos++] = ofs; /* Store pass1 offset estimate. */ break; case DASM_IMM8: b[pos++] = n; break; case DASM_IMM16: CK(((short)n) == n || ((unsigned short)n) == n, RANGE_I); /* TODO: is this the right way to handle unsigned immediates? */ ofs += 2; b[pos++] = n; break; case DASM_IMM32: ofs += 4; b[pos++] = n; break; case DASM_DISP20: CK(-(1 << 19) <= n && n < (1 << 19), RANGE_I); b[pos++] = n; break; case DASM_DISP12: CK((n >> 12) == 0, RANGE_I); b[pos++] = n; break; case DASM_LEN8R: CK(n >= 1 && n <= 256, RANGE_I); b[pos++] = n; break; case DASM_LEN4HR: case DASM_LEN4LR: CK(n >= 1 && n <= 128, RANGE_I); b[pos++] = n; break; } } stop: va_end(ap); sec->pos = pos; sec->ofs = ofs; } #undef CK /* Pass 2: Link sections, shrink aligns, fix label offsets. */ int dasm_link(Dst_DECL, size_t * szp) { dasm_State *D = Dst_REF; int secnum; int ofs = 0; #ifdef DASM_CHECKS *szp = 0; if (D->status != DASM_S_OK) return D->status; { int pc; for (pc = 0; pc * sizeof(int) < D->pcsize; pc++) if (D->pclabels[pc] > 0) return DASM_S_UNDEF_PC | pc; } #endif { /* Handle globals not defined in this translation unit. */ int idx; for (idx = 20; idx * sizeof(int) < D->lgsize; idx++) { int n = D->lglabels[idx]; /* Undefined label: Collapse rel chain and replace with marker (< 0). */ while (n > 0) { int *pb = DASM_POS2PTR(D, n); n = *pb; *pb = -idx; } } } /* Combine all code sections. No support for data sections (yet). */ for (secnum = 0; secnum < D->maxsection; secnum++) { dasm_Section *sec = D->sections + secnum; int *b = sec->rbuf; int pos = DASM_SEC2POS(secnum); int lastpos = sec->pos; while (pos != lastpos) { dasm_ActList p = D->actionlist + b[pos++]; while (1) { unsigned short ins = *p++; unsigned short action = ins; switch (action) { case DASM_STOP: case DASM_SECTION: goto stop; case DASM_ESC: p++; break; case DASM_REL_EXT: p++; break; case DASM_ALIGN: ofs -= (b[pos++] + ofs) & *p++; break; case DASM_REL_LG: case DASM_REL_PC: p++; pos++; break; case DASM_LABEL_LG: case DASM_LABEL_PC: p++; b[pos++] += ofs; break; case DASM_IMM8: case DASM_IMM16: case DASM_IMM32: case DASM_DISP20: case DASM_DISP12: case DASM_LEN8R: case DASM_LEN4HR: case DASM_LEN4LR: pos++; break; } } stop:(void)0; } ofs += sec->ofs; /* Next section starts right after current section. */ } D->codesize = ofs; /* Total size of all code sections */ *szp = ofs; return DASM_S_OK; } #ifdef DASM_CHECKS #define CK(x, st) \ do { if (!(x)) return DASM_S_##st|(p-D->actionlist-1); } while (0) #else #define CK(x, st) ((void)0) #endif /* Pass 3: Encode sections. */ int dasm_encode(Dst_DECL, void *buffer) { dasm_State *D = Dst_REF; char *base = (char *)buffer; unsigned short *cp = (unsigned short *)buffer; int secnum; /* Encode all code sections. No support for data sections (yet). */ for (secnum = 0; secnum < D->maxsection; secnum++) { dasm_Section *sec = D->sections + secnum; int *b = sec->buf; int *endb = sec->rbuf + sec->pos; while (b != endb) { dasm_ActList p = D->actionlist + *b++; while (1) { unsigned short ins = *p++; unsigned short action = ins; int n = (action >= DASM_ALIGN && action < DASM__MAX) ? *b++ : 0; switch (action) { case DASM_STOP: case DASM_SECTION: goto stop; case DASM_ESC: *cp++ = *p++; break; case DASM_REL_EXT: n = DASM_EXTERN(Dst, (unsigned char *)cp, *p++, 1) - 4; goto patchrel; case DASM_ALIGN: ins = *p++; /* TODO: emit 4-byte noprs instead of 2-byte nops where possible. */ while ((((char *)cp - base) & ins)) *cp++ = 0x0700; /* nop */ break; case DASM_REL_LG: CK(n >= 0, UNDEF_LG); case DASM_REL_PC: CK(n >= 0, UNDEF_PC); n = *DASM_POS2PTR(D, n) - (int)((char *)cp - base); p++; /* skip argument */ patchrel: /* Offsets are halfword aligned (so need to be halved). */ n += 2; /* Offset is relative to start of instruction. */ if (cp[-1] >> 12 == 0xc) { *cp++ = n >> 17; } else { CK(-(1 << 16) <= n && n < (1 << 16) && (n & 1) == 0, RANGE_LG); } *cp++ = n >> 1; break; case DASM_LABEL_LG: ins = *p++; if (ins >= 20) D->globals[ins - 10] = (void *)(base + n); break; case DASM_LABEL_PC: break; case DASM_IMM8: cp[-1] |= n & 0xff; break; case DASM_IMM16: *cp++ = n; break; case DASM_IMM32: *cp++ = n >> 16; *cp++ = n; break; case DASM_DISP20: cp[-2] |= n & 0xfff; cp[-1] |= (n >> 4) & 0xff00; break; case DASM_DISP12: cp[-1] |= n & 0xfff; break; case DASM_LEN8R: cp[-1] |= (n - 1) & 0xff; break; case DASM_LEN4HR: cp[-1] |= ((n - 1) << 4) & 0xf0; break; case DASM_LEN4LR: cp[-1] |= (n - 1) & 0x0f; break; default: *cp++ = ins; break; } } stop:(void)0; } } if (base + D->codesize != (char *)cp) /* Check for phase errors. */ return DASM_S_PHASE; return DASM_S_OK; } #undef CK /* Get PC label offset. */ int dasm_getpclabel(Dst_DECL, unsigned int pc) { dasm_State *D = Dst_REF; if (pc * sizeof(int) < D->pcsize) { int pos = D->pclabels[pc]; if (pos < 0) return *DASM_POS2PTR(D, -pos); if (pos > 0) return -1; /* Undefined. */ } return -2; /* Unused or out of range. */ } #ifdef DASM_CHECKS /* Optional sanity checker to call between isolated encoding steps. */ int dasm_checkstep(Dst_DECL, int secmatch) { dasm_State *D = Dst_REF; if (D->status == DASM_S_OK) { int i; for (i = 1; i <= 9; i++) { if (D->lglabels[i] > 0) { D->status = DASM_S_UNDEF_LG | i; break; } D->lglabels[i] = 0; } } if (D->status == DASM_S_OK && secmatch >= 0 && D->section != &D->sections[secmatch]) D->status = DASM_S_MATCH_SEC | (D->section - D->sections); return D->status; } #endif
{ "pile_set_name": "Github" }
import { dbCompanies, gradeNameList, gradeProportionMap } from '/db/dbCompanies'; import { debug } from '/server/imports/utils/debug'; // 對全市場進行公司評級統計 export function updateCompanyGrades() { debug.log('updateCompanyGrades'); const companyCount = dbCompanies.find({ isSeal: false }).count(); const capitalBoundaries = gradeNameList.map((grade) => { const splitPosition = Math.round(companyCount * gradeProportionMap[grade]); const companyData = dbCompanies.findOne({ isSeal: false }, { sort: { capital: -1 }, skip: splitPosition }); return companyData ? companyData.capital : -Infinity; }); gradeNameList.forEach((grade, i) => { const capitalUpperBound = capitalBoundaries[i - 1] || Infinity; const capitalLowerBound = capitalBoundaries[i]; dbCompanies.update({ isSeal: false, capital: { $lte: capitalUpperBound, $gt: capitalLowerBound } }, { $set: { grade } }, { multi: true }); }); }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <resources> <string name="arenaTournaments">Arena-turneringar</string> <string name="isItRated">Räknas rating?</string> <string name="willBeNotified">Du får ett meddelande när turneringen startar, så du kan utan problem spela i en annan flik medan du väntar.</string> <string name="isRated">Turneringen är rankad och kommer påverka din rating.</string> <string name="isNotRated">Turneringen är *inte* rankad och kommer *inte* att påverka din rating.</string> <string name="someRated">Vissa turneringar är rankade och kommer att påverka din rating.</string> <string name="howAreScoresCalculated">Hur räknas poängen?</string> <string name="howAreScoresCalculatedAnswer">En vinst har baspoäng 2, remi 1 och förlust 0 poäng. Om du vinner två partier i rad så startar en dubbelpoängserie representerad av en flamma som ikon. Efterföljande partier är då värda dubbla poäng tills du misslyckas med att vinna ett parti. En vinst är då värd 4 poäng, remi 2 och förlust 0 poäng. Ett exempel, två vinster följt av en remi är värt 6 poäng: 2 + 2 + (2 x 1)</string> <string name="berserk">Arena Berserk</string> <string name="berserkAnswer">När en spelare klickar på Berserk-knappen i början av partiet, halveras dennes betänketid, men en vinst blir värd en extra turneringspoäng. Att gå Berserk vid tidskontroller med tidstillägg medför också att tidstillägget förloras. (1+2 är ett undantag, vilket blir 1+0) Berserk är ej möjligt för partier med 0 i initialtid (0+1, 0+2). Berserk ger en extrapoäng endast om du spelar minst 7 drag i partiet.</string> <string name="howIsTheWinnerDecided">Hur avgörs vem som vunnit turneringen?</string> <string name="howIsTheWinnerDecidedAnswer">Den/de spelare som har flest poäng när turneringstiden går ut utropas till turneringsvinnare.</string> <string name="howDoesPairingWork">Hur fungerar lottningen?</string> <string name="howDoesPairingWorkAnswer">I början av turneringen lottas spelarna baserat på deras rating. Så snart du avslutat ett parti och återvänt till turneringslobbyn, kommer du lottas med en spelare som ligger nära dig poängmässigt. Detta garanterar minimal väntetid, men samtidigt är det inte säkert att du får möta alla deltagare i turneringen. Ett tips är att spela fort och återvända till lobbyn snarast möjligt för chans till fler partier och fler poäng.</string> <string name="howDoesItEnd">Hur slutar turneringen?</string> <string name="howDoesItEndAnswer">Turneringen har en tidsnedräkning. När den nått noll, fryses turneringsplaceringarna och vinnaren annonseras. Turneringspartier som då pågår spelas klart men poängen räknas inte in i turneringsresultatet.</string> <string name="otherRules">Andra viktiga regler.</string> <string name="thereIsACountdown">Det finns en nedräkning för ditt första drag. Om du inte gör ett drag inom denna tid kommer du att förlora partiet.</string> <plurals name="drawingWithinNbMoves"> <item quantity="one">Att göra remi inom %s drag kommer inte ge poäng till någon av spelarna.</item> <item quantity="other">Att göra remi inom %s drag kommer inte ge poäng till någon av spelarna.</item> </plurals> <string name="thisIsPrivate">Detta är en privat turnering</string> <string name="shareUrl">Dela denna länk för att låta spelare delta: %s</string> <string name="drawStreak">Remi-streaks: När en spelare har efterföljande remier i en arena, kommer bara den första remin att resultera i en poäng, eller om remin varat fler än %s drag. Remi-streaken kan bara avbrytas av en vinst, inte av förlust eller remi.</string> <string name="history">Arena-historik</string> </resources>
{ "pile_set_name": "Github" }
/* * MiracleCast - Wifi-Display/Miracast Implementation * * Copyright (c) 2013-2014 David Herrmann <[email protected]> * * MiracleCast is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * MiracleCast is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with MiracleCast; If not, see <http://www.gnu.org/licenses/>. */ /* * Small DHCP Client/Server * Wifi-P2P requires us to use DHCP to set up a private P2P network. As all * DHCP daemons available have horrible interfaces for ad-hoc setups, we have * this small replacement for all DHCP operations. Once sd-dhcp is public, we * will switch to it instead of this helper binary. However, that also requires * DHCP-server support in sd-dhcp. * * This program implements a DHCP server and daemon. See --help for usage * information. We build on gdhcp from connman as the underlying DHCP protocol * implementation. To configure network devices, we actually invoke the "ip" * binary. * * Note that this is a gross hack! We don't intend to provide a fully functional * DHCP server or client here. This is only a replacement for the current lack * of Wifi-P2P support in common network managers. Once they gain proper * support, we will drop this helper! * * The "ip" invokation is quite fragile and ugly. However, performing these * steps directly involves netlink operations and more. As no-one came up with * patches, yet, we keep the hack. To anyone trying to fix it: Please, spend * this time hacking on NetworkManager, connman and friends instead! If they * gain Wifi-P2P support, this whole thing will get trashed. */ #define LOG_SUBSYSTEM "dhcp" #include <arpa/inet.h> #include <errno.h> #include <fcntl.h> #include <getopt.h> #include <glib.h> #include <net/if.h> #include <netinet/in.h> #include <netinet/udp.h> #include <signal.h> #include <stdbool.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <sys/epoll.h> #include <sys/ioctl.h> #include <sys/signalfd.h> #include <sys/socket.h> #include <sys/wait.h> #include <unistd.h> #include "gdhcp.h" #include "shl_log.h" #include "config.h" static const char *arg_netdev; static const char *arg_ip_binary = "/bin/ip"; static bool arg_server; static char arg_local[INET_ADDRSTRLEN]; static char arg_gateway[INET_ADDRSTRLEN]; static char arg_dns[INET_ADDRSTRLEN]; static char arg_subnet[INET_ADDRSTRLEN]; static char arg_from[INET_ADDRSTRLEN]; static char arg_to[INET_ADDRSTRLEN]; static int arg_comm = -1; struct manager { int ifindex; GMainLoop *loop; int sfd; GIOChannel *sfd_chan; guint sfd_id; GDHCPClient *client; char *client_addr; GDHCPServer *server; char *server_addr; }; /* * We send prefixed messages via @comm. You should use a packet-based * socket-type so boundaries are preserved. Following packets are sent: * sent on local lease: * L:<addr> # local iface addr * S:<addr> # subnet mask * D:<addr> # primary DNS server * G:<addr> # primary gateway * sent on remote lease: * R:<mac> <addr> # addr given to remote device */ static void write_comm(const void *msg, size_t size) { static bool warned; int r; if (arg_comm < 0) return; r = send(arg_comm, msg, size, MSG_NOSIGNAL); if (r < 0 && !warned) { warned = true; arg_comm = -1; log_error("cannot write to comm-socket, disabling it: %m"); } } static void writef_comm(const void *format, ...) { va_list args; char *msg; int r; va_start(args, format); r = vasprintf(&msg, format, args); va_end(args); if (r < 0) return log_vENOMEM(); write_comm(msg, r); free(msg); } static int flush_if_addr(void) { char *argv[64]; int i, r; pid_t pid, rp; sigset_t mask; pid = fork(); if (pid < 0) { return log_ERRNO(); } else if (!pid) { /* child */ sigemptyset(&mask); sigprocmask(SIG_SETMASK, &mask, NULL); /* redirect stdout to stderr */ dup2(2, 1); i = 0; argv[i++] = (char*)arg_ip_binary; argv[i++] = "addr"; argv[i++] = "flush"; argv[i++] = "dev"; argv[i++] = (char*)arg_netdev; argv[i] = NULL; execve(argv[0], argv, environ); _exit(1); } log_info("flushing local if-addr"); rp = waitpid(pid, &r, 0); if (rp != pid) { log_error("cannot flush local if-addr via '%s'", arg_ip_binary); return -EFAULT; } else if (!WIFEXITED(r)) { log_error("flushing local if-addr via '%s' failed", arg_ip_binary); return -EFAULT; } else if (WEXITSTATUS(r)) { log_error("flushing local if-addr via '%s' failed with: %d", arg_ip_binary, WEXITSTATUS(r)); return -EFAULT; } log_debug("successfully flushed local if-addr via %s", arg_ip_binary); return 0; } static int add_if_addr(const char *addr) { char *argv[64]; int i, r; pid_t pid, rp; sigset_t mask; pid = fork(); if (pid < 0) { return log_ERRNO(); } else if (!pid) { /* child */ sigemptyset(&mask); sigprocmask(SIG_SETMASK, &mask, NULL); /* redirect stdout to stderr */ dup2(2, 1); i = 0; argv[i++] = (char*)arg_ip_binary; argv[i++] = "addr"; argv[i++] = "add"; argv[i++] = (char*)addr; argv[i++] = "dev"; argv[i++] = (char*)arg_netdev; argv[i] = NULL; execve(argv[0], argv, environ); _exit(1); } log_info("adding local if-addr %s", addr); rp = waitpid(pid, &r, 0); if (rp != pid) { log_error("cannot set local if-addr %s via '%s'", addr, arg_ip_binary); return -EFAULT; } else if (!WIFEXITED(r)) { log_error("setting local if-addr %s via '%s' failed", addr, arg_ip_binary); return -EFAULT; } else if (WEXITSTATUS(r)) { log_error("setting local if-addr %s via '%s' failed with: %d", addr, arg_ip_binary, WEXITSTATUS(r)); return -EFAULT; } log_debug("successfully set local if-addr %s via %s", addr, arg_ip_binary); return 0; } int if_name_to_index(const char *name) { struct ifreq ifr; int fd, r; if (strlen(name) > sizeof(ifr.ifr_name)) return -EINVAL; fd = socket(PF_INET, SOCK_DGRAM | SOCK_CLOEXEC, 0); if (fd < 0) return -errno; memset(&ifr, 0, sizeof(ifr)); strncpy(ifr.ifr_name, name, sizeof(ifr.ifr_name)); r = ioctl(fd, SIOCGIFINDEX, &ifr); if (r < 0) r = -errno; else r = ifr.ifr_ifindex; close(fd); return r; } static void sig_dummy(int sig) { } static void client_lease_fn(GDHCPClient *client, gpointer data) { struct manager *m = data; char *addr = NULL, *a, *subnet = NULL, *gateway = NULL, *dns = NULL; GList *l; int r; log_info("lease available"); addr = g_dhcp_client_get_address(client); log_info("lease: address: %s", addr); l = g_dhcp_client_get_option(client, G_DHCP_SUBNET); for ( ; l; l = l->next) { subnet = subnet ? : (char*)l->data; log_info("lease: subnet: %s", (char*)l->data); } l = g_dhcp_client_get_option(client, G_DHCP_DNS_SERVER); for ( ; l; l = l->next) { dns = dns ? : (char*)l->data; log_info("lease: dns-server: %s", (char*)l->data); } l = g_dhcp_client_get_option(client, G_DHCP_ROUTER); for ( ; l; l = l->next) { gateway = gateway ? : (char*)l->data; log_info("lease: router: %s", (char*)l->data); } if (!addr) { log_error("lease without IP address"); goto error; } if (!subnet) { log_warning("lease without subnet mask, using 24"); subnet = "24"; } r = asprintf(&a, "%s/%s", addr, subnet); if (r < 0) { log_vENOMEM(); goto error; } if (m->client_addr && !strcmp(m->client_addr, a)) { log_info("given address already set"); free(a); } else { free(m->client_addr); m->client_addr = a; r = flush_if_addr(); if (r < 0) { log_error("cannot flush addr on local interface %s", arg_netdev); goto error; } r = add_if_addr(m->client_addr); if (r < 0) { log_error("cannot set parameters on local interface %s", arg_netdev); goto error; } writef_comm("L:%s", addr); writef_comm("S:%s", subnet); if (dns) writef_comm("D:%s", dns); if (gateway) writef_comm("G:%s", gateway); } g_free(addr); return; error: g_free(addr); g_main_loop_quit(m->loop); } static void client_no_lease_fn(GDHCPClient *client, gpointer data) { struct manager *m = data; log_error("no lease available"); g_main_loop_quit(m->loop); } static void server_log_fn(const char *str, void *data) { log_format(NULL, 0, NULL, "gdhcp", LOG_DEBUG, "%s", str); } static void server_event_fn(const char *mac, const char *lease, void *data) { log_debug("remote lease: %s %s", mac, lease); writef_comm("R:%s %s", mac, lease); } static gboolean manager_signal_fn(GIOChannel *chan, GIOCondition mask, gpointer data) { struct manager *m = data; ssize_t l; struct signalfd_siginfo info; if (mask & (G_IO_HUP | G_IO_ERR)) { log_vEPIPE(); g_main_loop_quit(m->loop); return FALSE; } l = read(m->sfd, &info, sizeof(info)); if (l < 0) { log_vERRNO(); g_main_loop_quit(m->loop); return FALSE; } else if (l != sizeof(info)) { log_vEFAULT(); return TRUE; } log_notice("received signal %d: %s", info.ssi_signo, strsignal(info.ssi_signo)); g_main_loop_quit(m->loop); return FALSE; } static void manager_free(struct manager *m) { if (!m) return; if (!arg_server) { if (m->client) { g_dhcp_client_stop(m->client); if (m->client_addr) { flush_if_addr(); free(m->client_addr); } g_dhcp_client_unref(m->client); } } else { if (m->server) { g_dhcp_server_stop(m->server); g_dhcp_server_unref(m->server); } if (m->server_addr) { flush_if_addr(); free(m->server_addr); } } if (m->sfd >= 0) { g_source_remove(m->sfd_id); g_io_channel_unref(m->sfd_chan); close(m->sfd); } if (m->loop) g_main_loop_unref(m->loop); free(m); } static int manager_new(struct manager **out) { static const int sigs[] = { SIGINT, SIGTERM, SIGQUIT, SIGHUP, SIGPIPE, 0 }; int r, i; sigset_t mask; struct sigaction sig; GDHCPClientError cerr; GDHCPServerError serr; struct manager *m; m = calloc(1, sizeof(*m)); if (!m) return log_ENOMEM(); m->sfd = -1; if (geteuid()) log_warning("not running as uid=0, dhcp might not work"); m->ifindex = if_name_to_index(arg_netdev); if (m->ifindex < 0) { r = -EINVAL; log_error("cannot find interface %s (%d)", arg_netdev, m->ifindex); goto error; } m->loop = g_main_loop_new(NULL, FALSE); sigemptyset(&mask); memset(&sig, 0, sizeof(sig)); sig.sa_handler = sig_dummy; sig.sa_flags = SA_RESTART; for (i = 0; sigs[i]; ++i) { sigaddset(&mask, sigs[i]); r = sigaction(sigs[i], &sig, NULL); if (r < 0) { r = log_ERRNO(); goto error; } } r = sigprocmask(SIG_BLOCK, &mask, NULL); if (r < 0) { r = log_ERRNO(); goto error; } m->sfd = signalfd(-1, &mask, SFD_CLOEXEC | SFD_NONBLOCK); if (m->sfd < 0) { r = log_ERRNO(); goto error; } m->sfd_chan = g_io_channel_unix_new(m->sfd); m->sfd_id = g_io_add_watch(m->sfd_chan, G_IO_HUP | G_IO_ERR | G_IO_IN, manager_signal_fn, m); if (!arg_server) { m->client = g_dhcp_client_new(G_DHCP_IPV4, m->ifindex, &cerr); if (!m->client) { r = -EINVAL; switch (cerr) { case G_DHCP_CLIENT_ERROR_INTERFACE_UNAVAILABLE: log_error("cannot create GDHCP client: interface %s unavailable", arg_netdev); break; case G_DHCP_CLIENT_ERROR_INTERFACE_IN_USE: log_error("cannot create GDHCP client: interface %s in use", arg_netdev); break; case G_DHCP_CLIENT_ERROR_INTERFACE_DOWN: log_error("cannot create GDHCP client: interface %s down", arg_netdev); break; case G_DHCP_CLIENT_ERROR_NOMEM: r = log_ENOMEM(); break; case G_DHCP_CLIENT_ERROR_INVALID_INDEX: log_error("cannot create GDHCP client: invalid interface %s", arg_netdev); break; case G_DHCP_CLIENT_ERROR_INVALID_OPTION: log_error("cannot create GDHCP client: invalid options"); break; default: log_error("cannot create GDHCP client (%d)", cerr); break; } goto error; } g_dhcp_client_set_send(m->client, G_DHCP_HOST_NAME, "<hostname>"); g_dhcp_client_set_request(m->client, G_DHCP_SUBNET); g_dhcp_client_set_request(m->client, G_DHCP_DNS_SERVER); g_dhcp_client_set_request(m->client, G_DHCP_ROUTER); g_dhcp_client_register_event(m->client, G_DHCP_CLIENT_EVENT_LEASE_AVAILABLE, client_lease_fn, m); g_dhcp_client_register_event(m->client, G_DHCP_CLIENT_EVENT_NO_LEASE, client_no_lease_fn, m); } else { r = asprintf(&m->server_addr, "%s/%s", arg_local, arg_subnet); if (r < 0) { r = log_ENOMEM(); goto error; } r = flush_if_addr(); if (r < 0) { log_error("cannot flush addr on local interface %s", arg_netdev); goto error; } r = add_if_addr(m->server_addr); if (r < 0) { log_error("cannot set parameters on local interface %s", arg_netdev); goto error; } m->server = g_dhcp_server_new(G_DHCP_IPV4, m->ifindex, &serr, server_event_fn, m); if (!m->server) { r = -EINVAL; switch(serr) { case G_DHCP_SERVER_ERROR_INTERFACE_UNAVAILABLE: log_error("cannot create GDHCP server: interface %s unavailable", arg_netdev); break; case G_DHCP_SERVER_ERROR_INTERFACE_IN_USE: log_error("cannot create GDHCP server: interface %s in use", arg_netdev); break; case G_DHCP_SERVER_ERROR_INTERFACE_DOWN: log_error("cannot create GDHCP server: interface %s down", arg_netdev); break; case G_DHCP_SERVER_ERROR_NOMEM: r = log_ENOMEM(); break; case G_DHCP_SERVER_ERROR_INVALID_INDEX: log_error("cannot create GDHCP server: invalid interface %s", arg_netdev); break; case G_DHCP_SERVER_ERROR_INVALID_OPTION: log_error("cannot create GDHCP server: invalid options"); break; case G_DHCP_SERVER_ERROR_IP_ADDRESS_INVALID: log_error("cannot create GDHCP server: invalid ip address"); break; default: log_error("cannot create GDHCP server (%d)", serr); break; } goto error; } g_dhcp_server_set_debug(m->server, server_log_fn, NULL); g_dhcp_server_set_lease_time(m->server, 60 * 60); r = g_dhcp_server_set_option(m->server, G_DHCP_SUBNET, arg_subnet); if (r != 0) { log_vERR(r); goto error; } r = g_dhcp_server_set_option(m->server, G_DHCP_ROUTER, arg_gateway); if (r != 0) { log_vERR(r); goto error; } r = g_dhcp_server_set_option(m->server, G_DHCP_DNS_SERVER, arg_dns); if (r != 0) { log_vERR(r); goto error; } r = g_dhcp_server_set_ip_range(m->server, arg_from, arg_to); if (r != 0) { log_vERR(r); goto error; } } *out = m; return 0; error: manager_free(m); return r; } static int manager_run(struct manager *m) { int r; if (!arg_server) { log_info("running dhcp client on %s via '%s'", arg_netdev, arg_ip_binary); r = g_dhcp_client_start(m->client, NULL); if (r != 0) { log_error("cannot start DHCP client: %d", r); return -EFAULT; } } else { log_info("running dhcp server on %s via '%s'", arg_netdev, arg_ip_binary); r = g_dhcp_server_start(m->server); if (r != 0) { log_error("cannot start DHCP server: %d", r); return -EFAULT; } writef_comm("L:%s", arg_local); } g_main_loop_run(m->loop); return 0; } static int make_address(char *buf, const char *prefix, const char *suffix, const char *name) { int r; struct in_addr addr; if (!prefix) prefix = "192.168.77"; r = snprintf(buf, INET_ADDRSTRLEN, "%s.%s", prefix, suffix); if (r >= INET_ADDRSTRLEN) goto error; r = inet_pton(AF_INET, buf, &addr); if (r != 1) goto error; inet_ntop(AF_INET, &addr, buf, INET_ADDRSTRLEN); buf[INET_ADDRSTRLEN - 1] = 0; return 0; error: log_error("Invalid address --%s=%s.%s (prefix: %s suffix: %s)", name, prefix, suffix, prefix, suffix); return -EINVAL; } static int make_subnet(char *buf, const char *subnet) { int r; struct in_addr addr; r = inet_pton(AF_INET, subnet, &addr); if (r != 1) goto error; inet_ntop(AF_INET, &addr, buf, INET_ADDRSTRLEN); buf[INET_ADDRSTRLEN - 1] = 0; return 0; error: log_error("Invalid address --subnet=%s", subnet); return -EINVAL; } static int help(void) { printf("%s [OPTIONS...] ...\n\n" "Ad-hoc IPv4 DHCP Server/Client.\n\n" " -h --help Show this help\n" " --version Show package version\n" " --log-level <lvl> Maximum level for log messages\n" " --log-time Prefix log-messages with timestamp\n" "\n" " --netdev <dev> Network device to run on\n" " --ip-binary <path> Path to 'ip' binary [default: /bin/ip]\n" " --comm-fd <int> Comm-socket FD passed through execve()\n" "\n" "Server Options:\n" " --server Run as DHCP server instead of client\n" " --prefix <net-prefix> Network prefix [default: 192.168.77]\n" " --local <suffix> Local address suffix [default: 1]\n" " --gateway <suffix> Gateway suffix [default: 1]\n" " --dns <suffix> DNS suffix [default: 1]\n" " --subnet <mask> Subnet mask [default: 255.255.255.0]\n" " --from <suffix> Start address [default: 100]\n" " --to <suffix> End address [default: 199]\n" , program_invocation_short_name); return 0; } static int parse_argv(int argc, char *argv[]) { enum { ARG_VERSION = 0x100, ARG_LOG_LEVEL, ARG_LOG_TIME, ARG_NETDEV, ARG_IP_BINARY, ARG_COMM_FD, ARG_SERVER, ARG_PREFIX, ARG_LOCAL, ARG_GATEWAY, ARG_DNS, ARG_SUBNET, ARG_FROM, ARG_TO, }; static const struct option options[] = { { "help", no_argument, NULL, 'h' }, { "version", no_argument, NULL, ARG_VERSION }, { "log-level", required_argument, NULL, ARG_LOG_LEVEL }, { "log-time", no_argument, NULL, ARG_LOG_TIME }, { "netdev", required_argument, NULL, ARG_NETDEV }, { "ip-binary", required_argument, NULL, ARG_IP_BINARY }, { "comm-fd", required_argument, NULL, ARG_COMM_FD }, { "server", no_argument, NULL, ARG_SERVER }, { "prefix", required_argument, NULL, ARG_PREFIX }, { "local", required_argument, NULL, ARG_LOCAL }, { "gateway", required_argument, NULL, ARG_GATEWAY }, { "dns", required_argument, NULL, ARG_DNS }, { "subnet", required_argument, NULL, ARG_SUBNET }, { "from", required_argument, NULL, ARG_FROM }, { "to", required_argument, NULL, ARG_TO }, {} }; int c, r; const char *prefix = NULL, *local = NULL, *gateway = NULL; const char *dns = NULL, *subnet = NULL, *from = NULL, *to = NULL; while ((c = getopt_long(argc, argv, "hs:", options, NULL)) >= 0) { switch (c) { case 'h': return help(); case ARG_VERSION: puts(PACKAGE_STRING); return 0; case ARG_LOG_LEVEL: log_max_sev = log_parse_arg(optarg); break; case ARG_LOG_TIME: log_init_time(); break; case ARG_NETDEV: arg_netdev = optarg; break; case ARG_IP_BINARY: arg_ip_binary = optarg; break; case ARG_COMM_FD: arg_comm = atoi(optarg); break; case ARG_SERVER: arg_server = true; break; case ARG_PREFIX: prefix = optarg; break; case ARG_LOCAL: local = optarg; break; case ARG_GATEWAY: gateway = optarg; break; case ARG_DNS: dns = optarg; break; case ARG_SUBNET: subnet = optarg; break; case ARG_FROM: from = optarg; break; case ARG_TO: to = optarg; break; case '?': return -EINVAL; } } if (optind < argc) { log_error("unparsed remaining arguments starting with: %s", argv[optind]); return -EINVAL; } if (!arg_netdev) { log_error("no network-device given (see --help for --netdev)"); return -EINVAL; } if (access(arg_ip_binary, X_OK) < 0) { log_error("execution of ip-binary (%s) not allowed: %m", arg_ip_binary); return -EINVAL; } if (!arg_server) { if (prefix || local || gateway || dns || subnet || from || to) { log_error("server option given, but running as client"); return -EINVAL; } } else { r = make_address(arg_local, prefix, local ? : "1", "local"); if (r < 0) return -EINVAL; r = make_address(arg_gateway, prefix, gateway ? : "1", "gateway"); if (r < 0) return -EINVAL; r = make_address(arg_dns, prefix, dns ? : "1", "dns"); if (r < 0) return -EINVAL; r = make_subnet(arg_subnet, subnet ? : "255.255.255.0"); if (r < 0) return -EINVAL; r = make_address(arg_from, prefix, from ? : "100", "from"); if (r < 0) return -EINVAL; r = make_address(arg_to, prefix, to ? : "199", "to"); if (r < 0) return -EINVAL; } log_format(LOG_DEFAULT_BASE, NULL, LOG_INFO, "miracle-dhcp - revision %s %s %s", "1.0", __DATE__, __TIME__); return 1; } int main(int argc, char **argv) { struct manager *m = NULL; int r; r = parse_argv(argc, argv); if (r < 0) return EXIT_FAILURE; if (!r) return EXIT_SUCCESS; r = manager_new(&m); if (r < 0) goto finish; r = manager_run(m); finish: manager_free(m); log_debug("exiting.."); return abs(r); }
{ "pile_set_name": "Github" }
var gulp = require('gulp'); var sourcemaps = require('gulp-sourcemaps'); module.exports = function(newTS, lib, output, reporter) { var tsProject = newTS.createProject('test/tsconfigInclude/tsconfig.json', { typescript: lib, }); var tsResult = tsProject.src() .pipe(tsProject(reporter)) .on('error', () => {}); return tsResult.pipe(gulp.dest(output)); }
{ "pile_set_name": "Github" }
web: bundle exec ruby app.rb -p $PORT redis: redis-server
{ "pile_set_name": "Github" }
<?php use Illuminate\Support\Facades\DB; use Illuminate\Database\Migrations\Migration; class ChangeActivityModelLocation extends Migration { /** * Run the migrations. * * @return void */ public function up() { DB::table('journal_entries') ->where('journalable_type', 'App\Models\Contact\Activity') ->update(['journalable_type' => 'App\Models\Account\Activity']); } }
{ "pile_set_name": "Github" }
%YAML 1.1 --- !!seq [ !!null "", !!str "block node\n", !!seq [ !!str "one", !!str "two", ], !!map { ? !!str "one" : !!str "two", } ]
{ "pile_set_name": "Github" }
{ "parent": "create:block/belt_funnel/block_extended", "textures": { "particle": "block/polished_andesite", "2": "create:block/andesite_funnel_push", "3": "create:block/andesite_funnel_back", "5": "create:block/andesite_funnel_tall_powered", "6": "create:block/andesite_funnel_powered", "7": "create:block/andesite_funnel_plating" } }
{ "pile_set_name": "Github" }
<?php /** * Copyright 2018 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // [START vision_product_search_delete_product_set] namespace Google\Cloud\Samples\Vision; use Google\Cloud\Vision\V1\ProductSearchClient; /** * Deletes product set * * @param string $projectId Your Google Cloud project ID * @param string $location Google Cloud compute region name * @param string $productSetId ID of the product */ function product_set_delete($projectId, $location, $productSetId) { $client = new ProductSearchClient(); # get the name of the product set $productSetPath = $client->productSetName($projectId, $location, $productSetId); # delete the product set $client->deleteProductSet($productSetPath); print('Product set deleted.' . PHP_EOL); $client->close(); } // [END vision_product_search_delete_product_set]
{ "pile_set_name": "Github" }
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!! # This file is machine-generated by lib/unicore/mktables from the Unicode # database, Version 6.0.0. Any changes made here will be lost! # !!!!!!! INTERNAL PERL USE ONLY !!!!!!! # This file is for internal use by the Perl program only. The format and even # the name or existence of this file are subject to change without notice. # Don't use it directly. return <<'END'; 2A00 2AFF END
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*- """ lantz.drivers.tektronix.tds1012 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Implements the drivers to control an oscilloscope. :copyright: 2015 by Lantz Authors, see AUTHORS for more details. :license: BSD, see LICENSE for more details. Source: Tektronix Manual """ import numpy as np from lantz.feat import Feat from lantz.action import Action from lantz.messagebased import MessageBasedDriver from lantz.errors import InvalidCommand class TDS1012(MessageBasedDriver): """Tektronix TDS1012 100MHz 2 Channel Digital Storage Oscilloscope """ MANUFACTURER_ID = '0x699' @Action() def initiate(self): """ Initiates the acquisition in the osciloscope. """ self.send(':ACQ:STATE ON') @Action() def idn(self): """ Identify the Osciloscope """ return self.query('*IDN?') @Action() def autoset(self): """ Adjust the vertical, horizontal and trigger controls to display a stable waveform. """ self.send('AUTOS EXEC') @Action() def autocal(self): """ Autocalibration of osciloscope. It may take several minutes to complete """ return self.send('*CAL') @Feat(limits=(1,2)) def datasource(self): """ Retrieves the data source from which data is going to be taken. TDS1012 has 2 channels """ return self.query('DAT:SOU?') @datasource.setter def datasource(self,value): """ Sets the data source for the acquisition of data. """ self.send('DAT:SOU CH{}'.format(value)) @Action() def acquire_parameters(self): """ Acquire parameters of the osciloscope. It is intended for adjusting the values obtained in acquire_curve """ values = 'XZE?;XIN?;PT_OF?;YZE?;YMU?;YOF?;' answer = self.query('WFMP:{}'.format(values)) parameters = {} for v, j in zip(values.split('?;'),answer.split(';')): parameters[v] = float(j) return parameters @Action() def data_setup(self): """ Sets the way data is going to be encoded for sending. """ self.send('DAT:ENC ASCI;WID 2') #ASCII is the least efficient way, but # couldn't make the binary mode to work @Action() def acquire_curve(self,start=1,stop=2500): """ Gets data from the oscilloscope. It accepts setting the start and stop points of the acquisition (by default the entire range). """ parameters = self.acquire_parameters() self.data_setup() self.send('DAT:STAR {}'.format(start)) self.send('DAT:STOP {}'.format(stop)) data = self.query('CURV?') data = data.split(',') data = np.array(list(map(float,data))) ydata = (data - parameters['YOF']) * parameters['YMU']\ + parameters['YZE'] xdata = np.arange(len(data))*parameters['XIN'] + parameters['XZE'] return list(xdata), list(ydata) @Action() def forcetrigger(self): """ Creates a trigger event. """ self.send('TRIG:FORC') return @Action() def triggerlevel(self): """ Sets the trigger level to 50% of the minimum and maximum values of the signal. """ self.send('TRIG:MAI SETL') @Feat(values={'AUTO', 'NORMAL'}) def trigger(self): """ Retrieves trigger state. """ return self.query('TRIG:MAIN:MODE?') @trigger.setter def trigger(self,state): """ Sets the trigger state. """ self.send('TRIG:MAI:MOD {}'.format(state)) return @Feat() def horizontal_division(self): """ Horizontal time base division. """ return float(self.query('HOR:MAI:SCA?')) @horizontal_division.setter def horizontal_division(self,value): """ Sets the horizontal time base division. """ self.send('HOR:MAI:SCA {}'.format(value)) return @Feat(values={0, 4, 16, 64, 128}) def number_averages(self): """ Number of averages """ answer = self.query('ACQ?') answer = answer.split(';') if answer[0] == 'SAMPLE': return 0 elif answer[0] == 'AVERAGE': return int(self.query('ACQ:NUMAV?')) else: raise InvalidCommand @number_averages.setter def number_averages(self,value): """ Sets the number of averages. If 0, the it is a continous sample. """ if value == 0: self.send('ACQ:MOD SAMPLE') else: self.send('ACQ:MOD AVE;NUMAV {}'.format(value)) @Action(values={'FREQ', 'MINI', 'MAXI', 'MEAN'}) def _measure(self, mode): """ Measures the Frequency, Minimum, Maximum or Mean of a signal. """ self.send('MEASU:IMM:TYP {}'.format(mode)) return float(self.query('MEASU:IMM:VAL?')) def measure_mean(self): """ Gets the mean of the signal. """ answer = self._measure('MEAN') return answer def measure_frequency(self): """ Gets the frequency of the signal. """ answer = self._measure('FREQ') return answer def measure_minimum(self): """ Gets the minimum of the signal. """ answer = self._measure('MINI') return answer def measure_maximum(self): """ Gets the mean of the signal. """ answer = self._measure('MAXI') return answer if __name__ == '__main__': import argparse parser = argparse.ArgumentParser(description='Measure using TDS1012 and dump to screen') parser.add_argument('-p', '--port', default='/dev/ttyS0', help='Serial port') parser.add_argument('-v', '--view', action='store_true', default=True, help='View ') parser.add_argument('-c', '--channel', default=1, type=int, help='Channel to use') args = parser.parse_args() osc = TDS1012(args.port) osc.initiate() print('Osciloscope Identification: {}'.format(osc.idn)) print(osc.trigger) osc.forcetrigger() osc.triggerlevel() osc.trigger = "AUTO" print(osc.trigger) params = osc.acquire_parameters() if args.view: import matplotlib.pyplot as plt import numpy as np if args.view: osc.datasource = args.channel x, y = osc.acquire_curve() x = np.array(x) x = x - x.min() y = np.array(y) plt.plot(x, y) plt.show()
{ "pile_set_name": "Github" }
package proto // go:generate make generate import ( "bytes" "encoding/gob" "encoding/json" "fmt" "runtime/debug" "time" "github.com/reconquest/karma-go" "github.com/MagalixCorp/magalix-agent/v2/watcher" "github.com/MagalixTechnologies/uuid-go" "github.com/golang/snappy" "github.com/kovetskiy/lorg" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/apimachinery/pkg/runtime/schema" satori "github.com/satori/go.uuid" corev1 "k8s.io/api/core/v1" ) var ( gobTypesRegistered bool gobTypes = []interface{}{ uuid.UUID{}, satori.UUID{}, [uuid.Size]byte{}, new(watcher.Status), new(watcher.ContainerStatusSource), make(map[string]interface{}), make([]interface{}, 0), } ) type PacketHello struct { Major uint `json:"major"` Minor uint `json:"minor"` Build string `json:"build"` StartID string `json:"start_id"` AccountID uuid.UUID `json:"account_id"` ClusterID uuid.UUID `json:"cluster_id"` PacketV2Enabled bool `json:"packet_v2_enabled,omitempty"` ServerVersion string `json:"server_version"` AgentPermissions string `json:"agent_permissions"` } type PacketAuthorizationRequest struct { AccountID uuid.UUID `json:"account_id"` ClusterID uuid.UUID `json:"cluster_id"` } type PacketAuthorizationQuestion struct { Token []byte `json:"token"` } type PacketAuthorizationAnswer struct { Token []byte `json:"token"` } type PacketAuthorizationFailure struct{} type PacketAuthorizationSuccess struct{} type PacketBye struct { Reason string `json:"reason,omitempty"` } type PacketPing struct { Number int `json:"number,omitempty"` Started time.Time `json:"started"` } type PacketPong struct { Number int `json:"number,omitempty"` Started time.Time `json:"started"` } type PacketLogItem struct { Level lorg.Level `json:"level"` Date time.Time `json:"date"` Data interface{} `json:"data"` } type PacketRegisterEntityItem struct { ID uuid.UUID `json:"id"` Name string `json:"name"` Kind string `json:"kind,omitempty"` Annotations map[string]string `json:"annotations,omitempty"` } type PacketRegisterApplicationItem struct { PacketRegisterEntityItem LimitRanges []corev1.LimitRange `json:"limit_ranges"` Services []PacketRegisterServiceItem `json:"services"` } type PacketRegisterServiceItem struct { PacketRegisterEntityItem ReplicasStatus ReplicasStatus `json:"replicas_status,omitempty"` Containers []PacketRegisterContainerItem `json:"containers"` } type ReplicasStatus struct { Desired *int32 `json:"desired,omitempty"` Current *int32 `json:"current,omitempty"` Ready *int32 `json:"ready,omitempty"` Available *int32 `json:"available,omitempty"` } type PacketRegisterContainerItem struct { PacketRegisterEntityItem Image string `json:"image"` Resources json.RawMessage `json:"resources"` LivenessProbe json.RawMessage `json:"liveness_probe"` ReadinessProbe json.RawMessage `json:"readiness_probe"` } type ContainerResourceRequirements struct { corev1.ResourceRequirements SpecResourceRequirements corev1.ResourceRequirements `json:"spec_resources_requirements,omitempty"` LimitsKinds ResourcesRequirementsKind `json:"limits_kinds,omitempty"` RequestsKinds ResourcesRequirementsKind `json:"requests_kinds,omitempty"` } type ResourcesRequirementsKind = map[corev1.ResourceName]string const ( ResourceRequirementKindSet = "set" ResourceRequirementKindDefaultsLimitRange = "defaults-limit-range" ResourceRequirementKindDefaultFromLimits = "default-from-limits" ) type PacketApplicationsStoreRequest []PacketRegisterApplicationItem type PacketApplicationsStoreResponse struct{} type PacketMetricsStoreV2Request []MetricStoreV2Request type MetricStoreV2Request struct { Name string `json:"name"` Type string `json:"type"` NodeName string `json:"node_name"` NodeIP string `json:"node_ip"` NamespaceName string `json:"namespace_name"` ControllerName string `json:"controller_name"` ControllerKind string `json:"controller_kind"` ContainerName string `json:"container_name"` Timestamp time.Time `json:"timestamp"` Value int64 `json:"value"` PodName string `json:"pod_name"` AdditionalTags map[string]interface{} `json:"additional_tags"` } type PacketMetricValueItem struct { Node *uuid.UUID Application *uuid.UUID Service *uuid.UUID Container *uuid.UUID Tags map[string]string Value float64 } type PacketMetricFamilyItem struct { Name string Help string Type string Tags []string Values []*PacketMetricValueItem } type PacketMetricsPromStoreRequest struct { Timestamp time.Time Metrics []*PacketMetricFamilyItem } type PacketMetricsPromStoreResponse struct { } type PacketRegisterNodeCapacityItem struct { CPU int `json:"cpu"` Memory int `json:"memory"` StorageEphemeral int `json:"storage_ephemeral"` Pods int `json:"pods"` } type PacketRegisterNodeItem struct { ID uuid.UUID `json:"id,omitempty"` Name string `json:"name"` IP string `json:"ip"` Roles string `json:"roles"` Region string `json:"region,omitempty"` Provider string `json:"provider,omitempty"` InstanceType string `json:"instance_type,omitempty"` InstanceSize string `json:"instance_size,omitempty"` Capacity PacketRegisterNodeCapacityItem `json:"capacity"` Allocatable PacketRegisterNodeCapacityItem `json:"allocatable"` Containers int `json:"containers,omitempty"` ContainerList []*PacketRegisterNodeContainerListItem `json:"container_list,omitempty"` } type PacketRegisterNodeContainerListItem struct { // cluster where host of container located in Cluster string `json:"cluster"` // image of container Image string `json:"image"` // limits of container Limits *PacketRegisterNodeContainerListResourcesItem `json:"limits"` // requests of container Requests *PacketRegisterNodeContainerListResourcesItem `json:"requests"` // name of container (not guaranteed to be unique in cluster scope) Name string `json:"name"` // namespace where pod located in Namespace string `json:"namespace"` // node where container located in Node string `json:"node"` // pod where container located in Pod string `json:"pod"` } // PacketRegisterNodeContainerListResourcesItem type PacketRegisterNodeContainerListResourcesItem struct { CPU int `json:"cpu"` Memory int `json:"memory"` } type PacketNodesStoreRequest []PacketRegisterNodeItem type PacketNodesStoreResponse struct{} type PacketLogs []PacketLogItem type RequestLimit struct { CPU *int64 `json:"cpu,omitempty"` Memory *int64 `json:"memory,omitempty"` } type ContainerResources struct { Requests *RequestLimit `json:"requests,omitempty"` Limits *RequestLimit `json:"limits,omitempty"` } type PacketDecision struct { ID uuid.UUID `json:"id"` ServiceId uuid.UUID `json:"service_id"` ContainerId uuid.UUID `json:"container_id"` ContainerResources ContainerResources `json:"container_resources"` } type DecisionExecutionStatus string const ( DecisionExecutionStatusSucceed DecisionExecutionStatus = "executed" DecisionExecutionStatusFailed DecisionExecutionStatus = "failed" DecisionExecutionStatusSkipped DecisionExecutionStatus = "skipped" ) type PacketDecisionFeedbackRequest struct { ID uuid.UUID `json:"id"` ServiceId uuid.UUID `json:"service_id"` ContainerId uuid.UUID `json:"container_id"` Status DecisionExecutionStatus `json:"status"` Message string `json:"message"` } type PacketDecisionFeedbackResponse struct{} type PacketDecisionResponse struct { Error *string `json:"error"` } type PacketDecisionPullRequest struct{} type PacketDecisionPullResponse struct { Decisions []*PacketDecision `json:"decisions"` } type PacketRestart struct { Status int `json:"status"` } type EntityDeltaKind string const ( EntityEventTypeUpsert EntityDeltaKind = "UPSERT" EntityEventTypeDelete EntityDeltaKind = "DELETE" ) type ParentController struct { Kind string `json:"kind"` Name string `json:"name"` APIVersion string `json:"api_version"` IsWatched bool `json:"is_watched"` Parent *ParentController `json:"parent"` } type GroupVersionResourceKind struct { schema.GroupVersionResource Kind string `json:"kind"` } type PacketEntityDelta struct { Gvrk GroupVersionResourceKind `json:"gvrk"` DeltaKind EntityDeltaKind `json:"delta_kind"` Data unstructured.Unstructured `json:"data"` Parent *ParentController `json:"parents"` Timestamp time.Time `json:"timestamp"` } type PacketEntitiesDeltasRequest struct { Items []PacketEntityDelta `json:"items"` Timestamp time.Time `json:"timestamp"` } type PacketEntitiesDeltasResponse struct{} type PacketEntitiesResyncItem struct { Gvrk GroupVersionResourceKind `json:"gvrk"` Data []*unstructured.Unstructured `json:"data"` } type PacketEntitiesResyncRequest struct { Timestamp time.Time `json:"timestamp"` // map of entities kind and entities definitions // it holds other entities not already specified in attributes above Snapshot map[string]PacketEntitiesResyncItem `json:"snapshot"` } type PacketEntitiesResyncResponse struct{} // Deprecated: Fall back to EncodeGOB. Kept only for backward compatibility. Should be removed. func Encode(in interface{}) (out []byte, err error) { return EncodeGOB(in) } // Deprecated: Falls back to DecodeGOB. Kept only for backward compatibility. Should be removed. func Decode(in []byte, out interface{}) error { return DecodeGOB(in, out) } func EncodeSnappy(in interface{}) (out []byte, err error) { defer func() { if r := recover(); r != nil { stack := string(debug.Stack()) err = karma.Format(stack, fmt.Sprintf("panic: %v", r)) } }() jsonIn, err := json.Marshal(in) if err != nil { return nil, karma.Format(err, "unable to encode to snappy") } out = snappy.Encode(nil, jsonIn) return out, err } func DecodeSnappy(in []byte, out interface{}) error { jsonIn, err := snappy.Decode(nil, in) if err != nil { return karma.Format(err, "unable to decode to snappy") } return json.Unmarshal(jsonIn, out) } func DecodeGOB(in []byte, out interface{}) error { RegisterGOBTypes() inBuf := bytes.NewBuffer(in) dec := gob.NewDecoder(inBuf) return dec.Decode(out) } func EncodeGOB(in interface{}) ([]byte, error) { RegisterGOBTypes() var outBuf bytes.Buffer enc := gob.NewEncoder(&outBuf) if err := enc.Encode(in); err != nil { return nil, err } return outBuf.Bytes(), nil } func DecodeJSON(in []byte, out interface{}) error { return json.Unmarshal(in, out) } func EncodeJSON(in interface{}) ([]byte, error) { return json.Marshal(in) } func RegisterGOBTypes() { if !gobTypesRegistered { for _, t := range gobTypes { gob.Register(t) } gobTypesRegistered = true } }
{ "pile_set_name": "Github" }
############################################################################### # International Holiday Data provided by Holidata.net # http://holidata.net/en-ZA/2015.json # http://holidata.net/en-ZA/2016.json # # Copyright 2006 - 2016, Paul Beckingham, Federico Hernandez. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # # https://www.opensource.org/licenses/mit-license.php # ############################################################################### holiday.en-ZA1.name=New Year's Day holiday.en-ZA1.date=20150101 holiday.en-ZA2.name=Human Rights Day holiday.en-ZA2.date=20150321 holiday.en-ZA3.name=Good Friday holiday.en-ZA3.date=20150403 holiday.en-ZA4.name=Family Day holiday.en-ZA4.date=20150406 holiday.en-ZA5.name=Freedom Day holiday.en-ZA5.date=20150427 holiday.en-ZA6.name=Workers' Day holiday.en-ZA6.date=20150501 holiday.en-ZA7.name=Youth Day holiday.en-ZA7.date=20150616 holiday.en-ZA8.name=National Women's Day holiday.en-ZA8.date=20150809 holiday.en-ZA9.name=Heritage Day holiday.en-ZA9.date=20150924 holiday.en-ZA10.name=Day of Reconciliation holiday.en-ZA10.date=20151217 holiday.en-ZA11.name=Christmas Day holiday.en-ZA11.date=20151225 holiday.en-ZA12.name=Day of Goodwill holiday.en-ZA12.date=20151226 holiday.en-ZA13.name=New Year's Day holiday.en-ZA13.date=20160101 holiday.en-ZA14.name=Human Rights Day holiday.en-ZA14.date=20160321 holiday.en-ZA15.name=Good Friday holiday.en-ZA15.date=20160325 holiday.en-ZA16.name=Family Day holiday.en-ZA16.date=20160328 holiday.en-ZA17.name=Freedom Day holiday.en-ZA17.date=20160427 holiday.en-ZA18.name=Workers' Day holiday.en-ZA18.date=20160501 holiday.en-ZA19.name=Youth Day holiday.en-ZA19.date=20160616 holiday.en-ZA20.name=National Women's Day holiday.en-ZA20.date=20160809 holiday.en-ZA21.name=Heritage Day holiday.en-ZA21.date=20160924 holiday.en-ZA22.name=Day of Reconciliation holiday.en-ZA22.date=20161217 holiday.en-ZA23.name=Christmas Day holiday.en-ZA23.date=20161225 holiday.en-ZA24.name=Day of Goodwill holiday.en-ZA24.date=20161226
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <project version="4"> <component name="MarkdownEnhProjectSettings"> <AnnotatorSettings targetHasSpaces="true" linkCaseMismatch="true" wikiCaseMismatch="true" wikiLinkHasDashes="true" notUnderWikiHome="true" targetNotWikiPageExt="true" notUnderSourceWikiHome="true" targetNameHasAnchor="true" targetPathHasAnchor="true" wikiLinkHasSlash="true" wikiLinkHasSubdir="true" wikiLinkHasOnlyAnchor="true" linkTargetsWikiHasExt="true" linkTargetsWikiHasBadExt="true" notUnderSameRepo="true" targetNotUnderVcs="false" linkNeedsExt="true" linkHasBadExt="true" linkTargetNeedsExt="true" linkTargetHasBadExt="true" wikiLinkNotInWiki="true" imageTargetNotInRaw="true" repoRelativeAcrossVcsRoots="true" multipleWikiTargetsMatch="true" unresolvedLinkReference="true" linkIsIgnored="true" anchorIsIgnored="true" anchorIsUnresolved="true" anchorLineReferenceIsUnresolved="true" anchorLineReferenceFormat="true" anchorHasDuplicates="true" abbreviationDuplicates="true" abbreviationNotUsed="true" attributeIdDuplicateDefinition="true" attributeIdNotUsed="true" footnoteDuplicateDefinition="true" footnoteUnresolved="true" footnoteDuplicates="true" footnoteNotUsed="true" macroDuplicateDefinition="true" macroUnresolved="true" macroDuplicates="true" macroNotUsed="true" referenceDuplicateDefinition="true" referenceUnresolved="true" referenceDuplicates="true" referenceNotUsed="true" referenceUnresolvedNumericId="true" enumRefDuplicateDefinition="true" enumRefUnresolved="true" enumRefDuplicates="true" enumRefNotUsed="true" enumRefLinkUnresolved="true" enumRefLinkDuplicates="true" simTocUpdateNeeded="true" simTocTitleSpaceNeeded="true" /> <HtmlExportSettings updateOnSave="false" parentDir="" targetDir="" cssDir="css" scriptDir="js" plainHtml="false" imageDir="" copyLinkedImages="false" imagePathType="0" targetPathType="2" targetExt="" useTargetExt="false" noCssNoScripts="false" useElementStyleAttribute="false" linkToExportedHtml="true" exportOnSettingsChange="true" regenerateOnProjectOpen="false" linkFormatType="HTTP_ABSOLUTE" /> <LinkMapSettings> <textMaps /> </LinkMapSettings> </component> <component name="MarkdownNavigatorHistory"> <PasteImageHistory checkeredTransparentBackground="false" filename="image" directory="" onPasteImageTargetRef="3" onPasteLinkText="0" onPasteImageElement="1" onPasteLinkElement="1" onPasteReferenceElement="2" cornerRadius="20" borderColor="0" transparentColor="16777215" borderWidth="1" trimTop="0" trimBottom="0" trimLeft="0" trimRight="0" transparent="false" roundCorners="false" showPreview="true" bordered="false" scaled="false" cropped="false" hideInapplicableOperations="false" preserveLinkFormat="false" scale="50" scalingInterpolation="1" transparentTolerance="0" saveAsDefaultOnOK="false" linkFormat="0" addHighlights="false" showHighlightCoordinates="true" showHighlights="false" mouseSelectionAddsHighlight="false" outerFilled="false" outerFillColor="0" outerFillTransparent="true" outerFillAlpha="30"> <highlightList /> <directories /> <filenames /> </PasteImageHistory> <CopyImageHistory checkeredTransparentBackground="false" filename="image" directory="" onPasteImageTargetRef="3" onPasteLinkText="0" onPasteImageElement="1" onPasteLinkElement="1" onPasteReferenceElement="2" cornerRadius="20" borderColor="0" transparentColor="16777215" borderWidth="1" trimTop="0" trimBottom="0" trimLeft="0" trimRight="0" transparent="false" roundCorners="false" showPreview="true" bordered="false" scaled="false" cropped="false" hideInapplicableOperations="false" preserveLinkFormat="false" scale="50" scalingInterpolation="1" transparentTolerance="0" saveAsDefaultOnOK="false" linkFormat="0" addHighlights="false" showHighlightCoordinates="true" showHighlights="false" mouseSelectionAddsHighlight="false" outerFilled="false" outerFillColor="0" outerFillTransparent="true" outerFillAlpha="30"> <highlightList /> <directories /> <filenames /> </CopyImageHistory> <PasteLinkHistory onPasteImageTargetRef="3" onPasteTargetRef="1" onPasteLinkText="0" onPasteImageElement="1" onPasteLinkElement="1" onPasteWikiElement="2" onPasteReferenceElement="2" hideInapplicableOperations="false" preserveLinkFormat="false" useHeadingForLinkText="false" linkFormat="0" saveAsDefaultOnOK="false" /> <TableToJsonHistory> <entries /> </TableToJsonHistory> <TableSortHistory> <entries /> </TableSortHistory> </component> </project>
{ "pile_set_name": "Github" }
# Pandora language file EN=>BN # See full list of phrases in "ru.txt" file _Business=>ব্যবসায় _Node=>নোড _Region=>এলাকা _World=>বিশ্ব People=>সম্প্রদায় Communities=>সম্প্রদায়গুলি Files=>নথি পত্র
{ "pile_set_name": "Github" }
## ## PHPExcel ## ## Copyright (c) 2006 - 2013 PHPExcel ## ## This library is free software; you can redistribute it and/or ## modify it under the terms of the GNU Lesser General Public ## License as published by the Free Software Foundation; either ## version 2.1 of the License, or (at your option) any later version. ## ## This library is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## Lesser General Public License for more details. ## ## You should have received a copy of the GNU Lesser General Public ## License along with this library; if not, write to the Free Software ## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ## ## @category PHPExcel ## @package PHPExcel_Settings ## @copyright Copyright (c) 2006 - 2013 PHPExcel (http://www.codeplex.com/PHPExcel) ## @license http://www.gnu.org/licenses/old-licenses/lgpl-2.1.txt LGPL ## @version ##VERSION##, ##DATE## ## ## ## ## (For future use) ## currencySymbol = £
{ "pile_set_name": "Github" }
# Get Python six functionality: from __future__ import\ absolute_import, print_function, division, unicode_literals ############################################################################### ############################################################################### ############################################################################### import keras.layers import keras.models import numpy as np import pytest import innvestigate.tools.perturbate import innvestigate.utils as iutils ############################################################################### ############################################################################### ############################################################################### @pytest.mark.fast @pytest.mark.precommit def test_fast__PerturbationAnalysis(): # Some test data if keras.backend.image_data_format() == "channels_first": input_shape = (2, 1, 4, 4) else: input_shape = (2, 4, 4, 1) x = np.arange(2 * 4 * 4).reshape(input_shape) generator = iutils.BatchSequence([x, np.zeros(x.shape[0])], batch_size=x.shape[0]) # Simple model model = keras.models.Sequential([ keras.layers.Flatten(input_shape=x.shape[1:]), keras.layers.Dense(1, use_bias=False), ]) weights = np.arange(4 * 4 * 1).reshape((4 * 4, 1)) model.layers[-1].set_weights([weights]) model.compile(loss='mean_squared_error', optimizer='sgd') expected_output = np.array([[1240.], [3160.]]) assert np.all(np.isclose(model.predict(x), expected_output)) # Analyzer analyzer = innvestigate.create_analyzer("gradient", model, postprocess="abs") # Run perturbation analysis perturbation = innvestigate.tools.perturbate.Perturbation("zeros", region_shape=(2, 2), in_place=False) perturbation_analysis = innvestigate.tools.perturbate.PerturbationAnalysis(analyzer, model, generator, perturbation, recompute_analysis=False, steps=3, regions_per_step=1, verbose=False) scores = perturbation_analysis.compute_perturbation_analysis() expected_scores = np.array([5761600.0, 1654564.0, 182672.0, 21284.0]) assert np.all(np.isclose(scores, expected_scores)) @pytest.mark.fast @pytest.mark.precommit def test_fast__Perturbation(): if keras.backend.image_data_format() == "channels_first": input_shape = (1, 1, 4, 4) else: input_shape = (1, 4, 4, 1) x = np.arange(1 * 4 * 4).reshape(input_shape) perturbation = innvestigate.tools.perturbate.Perturbation("zeros", region_shape=(2, 2), in_place=False) analysis = np.zeros((4, 4)) analysis[:2, 2:] = 1 analysis[2:, :2] = 2 analysis[2:, 2:] = 3 analysis = analysis.reshape(input_shape) if keras.backend.image_data_format() == "channels_last": x = np.moveaxis(x, 3, 1) analysis = np.moveaxis(analysis, 3, 1) analysis = perturbation.reduce_function(analysis, axis=1, keepdims=True) aggregated_regions = perturbation.aggregate_regions(analysis) assert np.all(np.isclose(aggregated_regions[0, 0, :, :], np.array([[0, 1], [2, 3]]))) ranks = perturbation.compute_region_ordering(aggregated_regions) assert np.all(np.isclose(ranks[0, 0, :, :], np.array([[3, 2], [1, 0]]))) perturbation_mask_regions = perturbation.compute_perturbation_mask(ranks, 1) assert np.all(perturbation_mask_regions == np.array([[0, 0], [0, 1]])) perturbation_mask_regions = perturbation.compute_perturbation_mask(ranks, 4) assert np.all(perturbation_mask_regions == np.array([[1, 1], [1, 1]])) perturbation_mask_regions = perturbation.compute_perturbation_mask(ranks, 0) assert np.all(perturbation_mask_regions == np.array([[0, 0], [0, 0]]))
{ "pile_set_name": "Github" }
<domain type='xen'> <name>XenGuest1</name> <uuid>45b60f51-88a9-47a8-a3b3-5e66d71b2283</uuid> <memory unit='KiB'>524288</memory> <currentMemory unit='KiB'>524288</currentMemory> <vcpu placement='static'>1</vcpu> <bootloader>/usr/bin/pygrub</bootloader> <os> <type arch='x86_64' machine='xenpv'>linux</type> </os> <clock offset='utc' adjustment='reset'/> <on_poweroff>preserve</on_poweroff> <on_reboot>restart</on_reboot> <on_crash>preserve</on_crash> <devices> <disk type='file' device='disk'> <driver name='qemu' type='qcow2'/> <source file='/var/lib/xen/images/debian/disk.qcow2'/> <target dev='xvda' bus='xen'/> </disk> <controller type='xenbus' index='0'/> <controller type='usb' index='0' model='qusb2' ports='6'/> <interface type='ethernet'> <mac address='5a:36:0e:be:00:09'/> </interface> <console type='pty'> <target type='xen' port='0'/> </console> <input type='mouse' bus='xen'/> <input type='keyboard' bus='xen'/> <memballoon model='xen'/> </devices> </domain>
{ "pile_set_name": "Github" }
// RUN: %clang_cc1 -fsyntax-only -Wno-objc-root-class -verify %s // rdar://11618852 @protocol TestProtocol - (void)newProtocolMethod; - (void)deprecatedProtocolMethod __attribute__((deprecated)); // expected-note 2 {{'deprecatedProtocolMethod' has been explicitly marked deprecated here}} @end @interface NSObject @end @interface TestClass : NSObject <TestProtocol> - (void)newInstanceMethod; - (void)deprecatedInstanceMethod __attribute__((deprecated)); // expected-note {{'deprecatedInstanceMethod' has been explicitly marked deprecated here}} @end int main(int argc, const char * argv[]) { TestClass *testObj = (TestClass*)0; [testObj newInstanceMethod]; [testObj deprecatedInstanceMethod]; // expected-warning {{'deprecatedInstanceMethod' is deprecated}} [testObj newProtocolMethod]; [testObj deprecatedProtocolMethod]; // expected-warning {{'deprecatedProtocolMethod' is deprecated}} id <TestProtocol> testProto = testObj; [testProto newProtocolMethod]; [testProto deprecatedProtocolMethod]; // expected-warning {{'deprecatedProtocolMethod' is deprecated}} return 0; }
{ "pile_set_name": "Github" }
// TiebaManagerCore.cpp : 定义 DLL 的初始化例程。 // #include "stdafx.h" #ifdef _DEBUG #define new DEBUG_NEW #endif
{ "pile_set_name": "Github" }
module.exports = function(WorkflowStageOperationMap) { };
{ "pile_set_name": "Github" }
<?php namespace jorenvanhocht\Blogify\Middleware; use Closure; use Illuminate\Contracts\Auth\Guard; use App\User; class IsOwner { /** * The Guard implementation. * * @var \Illuminate\Contracts\Auth\Guard */ protected $auth; /** * @var \App\User */ protected $user; /** * Create a new filter instance. * * @param \Illuminate\Contracts\Auth\Guard $auth * @param \App\User $user */ public function __construct(Guard $auth, User $user) { $this->auth = $auth; $this->user = $user; } /** * Handle an incoming request. * * @param \Illuminate\Http\Request $request * @param \Closure $next * @return mixed */ public function handle($request, Closure $next) { $user = $this->user->byHash($request->segment(3)); if ($this->auth->user()->getAuthIdentifier() != $user->id) { abort(404); } return $next($request); } }
{ "pile_set_name": "Github" }
namespace ShaderTools.CodeAnalysis.Hlsl.Binding.BoundNodes { internal abstract class BoundVariableQualifier : BoundNode { protected BoundVariableQualifier(BoundNodeKind kind) : base(kind) { } } }
{ "pile_set_name": "Github" }
/* This file is part of the iText (R) project. Copyright (c) 1998-2020 iText Group NV Authors: Bruno Lowagie, Paulo Soares, et al. This program is free software; you can redistribute it and/or modify it under the terms of the GNU Affero General Public License version 3 as published by the Free Software Foundation with the addition of the following permission added to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED BY ITEXT GROUP. ITEXT GROUP DISCLAIMS THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program; if not, see http://www.gnu.org/licenses or write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA, 02110-1301 USA, or download the license from the following URL: http://itextpdf.com/terms-of-use/ The interactive user interfaces in modified source and object code versions of this program must display Appropriate Legal Notices, as required under Section 5 of the GNU Affero General Public License. In accordance with Section 7(b) of the GNU Affero General Public License, a covered work must retain the producer line in every PDF that is created or manipulated using iText. You can be released from the requirements of the license by purchasing a commercial license. Buying such a license is mandatory as soon as you develop commercial activities involving the iText software without disclosing the source code of your own applications. These activities include: offering paid services to customers as an ASP, serving PDFs on the fly in a web application, shipping iText with a closed source product. For more information, please contact iText Software Corp. at this address: [email protected] */ using System; using System.Collections.Generic; using System.IO; using Common.Logging; using Org.BouncyCastle.Asn1.Esf; using Org.BouncyCastle.Crypto; using Org.BouncyCastle.Security; using Org.BouncyCastle.X509; using iText.Forms; using iText.Forms.Fields; using iText.IO.Source; using iText.IO.Util; using iText.Kernel; using iText.Kernel.Geom; using iText.Kernel.Pdf; using iText.Kernel.Pdf.Annot; using iText.Pdfa; namespace iText.Signatures { /// <summary>Takes care of the cryptographic options and appearances that form a signature.</summary> public class PdfSigner { /// <summary>Enum containing the Cryptographic Standards.</summary> /// <remarks>Enum containing the Cryptographic Standards. Possible values are "CMS" and "CADES".</remarks> public enum CryptoStandard { /// <summary>Cryptographic Message Syntax.</summary> CMS, /// <summary>CMS Advanced Electronic Signatures.</summary> CADES } /// <summary>Approval signature.</summary> public const int NOT_CERTIFIED = 0; /// <summary>Author signature, no changes allowed.</summary> public const int CERTIFIED_NO_CHANGES_ALLOWED = 1; /// <summary>Author signature, form filling allowed.</summary> public const int CERTIFIED_FORM_FILLING = 2; /// <summary>Author signature, form filling and annotations allowed.</summary> public const int CERTIFIED_FORM_FILLING_AND_ANNOTATIONS = 3; /// <summary>The certification level.</summary> protected internal int certificationLevel = NOT_CERTIFIED; /// <summary>The name of the field.</summary> protected internal String fieldName; /// <summary>The file right before the signature is added (can be null).</summary> protected internal FileStream raf; /// <summary>The bytes of the file right before the signature is added (if raf is null).</summary> protected internal byte[] bout; /// <summary>Array containing the byte positions of the bytes that need to be hashed.</summary> protected internal long[] range; /// <summary>The PdfDocument.</summary> protected internal PdfDocument document; /// <summary>The crypto dictionary.</summary> protected internal PdfSignature cryptoDictionary; private PdfName digestMethod; /// <summary>Holds value of property signatureEvent.</summary> protected internal PdfSigner.ISignatureEvent signatureEvent; /// <summary>OutputStream for the bytes of the document.</summary> protected internal Stream originalOS; /// <summary>Outputstream that temporarily holds the output in memory.</summary> protected internal MemoryStream temporaryOS; /// <summary>Tempfile to hold the output temporarily.</summary> protected internal FileInfo tempFile; /// <summary>Name and content of keys that can only be added in the close() method.</summary> protected internal IDictionary<PdfName, PdfLiteral> exclusionLocations; /// <summary>Indicates if the pdf document has already been pre-closed.</summary> protected internal bool preClosed = false; /// <summary>Signature field lock dictionary.</summary> protected internal PdfSigFieldLock fieldLock; /// <summary>The signature appearance.</summary> protected internal PdfSignatureAppearance appearance; /// <summary>Holds value of property signDate.</summary> protected internal DateTime signDate; /// <summary>Boolean to check if this PdfSigner instance has been closed already or not.</summary> protected internal bool closed; /// <summary>Creates a PdfSigner instance.</summary> /// <remarks> /// Creates a PdfSigner instance. Uses a /// <see cref="System.IO.MemoryStream"/> /// instead of a temporary file. /// </remarks> /// <param name="reader">PdfReader that reads the PDF file</param> /// <param name="outputStream">OutputStream to write the signed PDF file</param> /// <param name="append">boolean to indicate whether the signing should happen in append mode or not</param> [System.ObsoleteAttribute(@"will be removed in next major release. Use PdfSigner(iText.Kernel.Pdf.PdfReader, System.IO.Stream, iText.Kernel.Pdf.StampingProperties) instead." )] public PdfSigner(PdfReader reader, Stream outputStream, bool append) : this(reader, outputStream, null, append) { } /// <summary>Creates a PdfSigner instance.</summary> /// <remarks> /// Creates a PdfSigner instance. Uses a /// <see cref="System.IO.MemoryStream"/> /// instead of a temporary file. /// </remarks> /// <param name="reader">PdfReader that reads the PDF file</param> /// <param name="outputStream">OutputStream to write the signed PDF file</param> /// <param name="path">File to which the output is temporarily written</param> /// <param name="append">boolean to indicate whether the signing should happen in append mode or not</param> [System.ObsoleteAttribute(@"will be removed in next major release. Use PdfSigner(iText.Kernel.Pdf.PdfReader, System.IO.Stream, System.String, iText.Kernel.Pdf.StampingProperties) instead." )] public PdfSigner(PdfReader reader, Stream outputStream, String path, bool append) : this(reader, outputStream, path, InitStampingProperties(append)) { } /// <summary>Creates a PdfSigner instance.</summary> /// <remarks> /// Creates a PdfSigner instance. Uses a /// <see cref="System.IO.MemoryStream"/> /// instead of a temporary file. /// </remarks> /// <param name="reader">PdfReader that reads the PDF file</param> /// <param name="outputStream">OutputStream to write the signed PDF file</param> /// <param name="properties"> /// /// <see cref="iText.Kernel.Pdf.StampingProperties"/> /// for the signing document. Note that encryption will be /// preserved regardless of what is set in properties. /// </param> public PdfSigner(PdfReader reader, Stream outputStream, StampingProperties properties) : this(reader, outputStream, null, properties) { } /// <summary>Creates a PdfSigner instance.</summary> /// <remarks> /// Creates a PdfSigner instance. Uses a /// <see cref="System.IO.MemoryStream"/> /// instead of a temporary file. /// </remarks> /// <param name="reader">PdfReader that reads the PDF file</param> /// <param name="outputStream">OutputStream to write the signed PDF file</param> /// <param name="path">File to which the output is temporarily written</param> /// <param name="properties"> /// /// <see cref="iText.Kernel.Pdf.StampingProperties"/> /// for the signing document. Note that encryption will be /// preserved regardless of what is set in properties. /// </param> public PdfSigner(PdfReader reader, Stream outputStream, String path, StampingProperties properties) { StampingProperties localProps = new StampingProperties(properties).PreserveEncryption(); if (path == null) { temporaryOS = new MemoryStream(); document = InitDocument(reader, new PdfWriter(temporaryOS), localProps); } else { this.tempFile = FileUtil.CreateTempFile(path); document = InitDocument(reader, new PdfWriter(FileUtil.GetFileOutputStream(tempFile)), localProps); } originalOS = outputStream; signDate = DateTimeUtil.GetCurrentTime(); fieldName = GetNewSigFieldName(); appearance = new PdfSignatureAppearance(document, new Rectangle(0, 0), 1); appearance.SetSignDate(signDate); closed = false; } protected internal virtual PdfDocument InitDocument(PdfReader reader, PdfWriter writer, StampingProperties properties) { PdfAConformanceLevel conformanceLevel = reader.GetPdfAConformanceLevel(); if (null == conformanceLevel) { return new PdfDocument(reader, writer, properties); } else { return new PdfADocument(reader, writer, properties); } } /// <summary>Gets the signature date.</summary> /// <returns>Calendar set to the signature date</returns> public virtual DateTime GetSignDate() { return signDate; } /// <summary>Sets the signature date.</summary> /// <param name="signDate">the signature date</param> public virtual void SetSignDate(DateTime signDate) { this.signDate = signDate; this.appearance.SetSignDate(signDate); } /// <summary>Provides access to a signature appearance object.</summary> /// <remarks> /// Provides access to a signature appearance object. Use it to /// customize the appearance of the signature. /// <para /> /// Be aware: /// <list type="bullet"> /// <item><description>If you create new signature field (either use /// <see cref="SetFieldName(System.String)"/> /// with /// the name that doesn't exist in the document or don't specify it at all) then /// the signature is invisible by default. /// </description></item> /// <item><description>If you sign already existing field, then the signature appearance object /// is modified to have all the properties (page num., rect etc.) consistent with /// the state of the field (<strong>if you customized the appearance object /// before the /// <see cref="SetFieldName(System.String)"/> /// call you'll have to do it again</strong>) /// </description></item> /// </list> /// <para /> /// </remarks> /// <returns> /// /// <see cref="PdfSignatureAppearance"/> /// object. /// </returns> public virtual PdfSignatureAppearance GetSignatureAppearance() { return appearance; } /// <summary>Returns the document's certification level.</summary> /// <remarks> /// Returns the document's certification level. /// For possible values see /// <see cref="SetCertificationLevel(int)"/>. /// </remarks> /// <returns>The certified status.</returns> public virtual int GetCertificationLevel() { return this.certificationLevel; } /// <summary>Sets the document's certification level.</summary> /// <param name="certificationLevel"> /// a new certification level for a document. /// Possible values are: <list type="bullet"> /// <item><description> /// <see cref="NOT_CERTIFIED"/> /// </description></item> /// <item><description> /// <see cref="CERTIFIED_NO_CHANGES_ALLOWED"/> /// </description></item> /// <item><description> /// <see cref="CERTIFIED_FORM_FILLING"/> /// </description></item> /// <item><description> /// <see cref="CERTIFIED_FORM_FILLING_AND_ANNOTATIONS"/> /// </description></item> /// </list> /// </param> public virtual void SetCertificationLevel(int certificationLevel) { this.certificationLevel = certificationLevel; } /// <summary>Gets the field name.</summary> /// <returns>the field name</returns> public virtual String GetFieldName() { return fieldName; } /// <summary>Returns the user made signature dictionary.</summary> /// <remarks> /// Returns the user made signature dictionary. This is the dictionary at the /V key /// of the signature field. /// </remarks> /// <returns>The user made signature dictionary.</returns> public virtual PdfSignature GetSignatureDictionary() { return cryptoDictionary; } /// <summary>Getter for property signatureEvent.</summary> /// <returns>Value of property signatureEvent.</returns> public virtual PdfSigner.ISignatureEvent GetSignatureEvent() { return this.signatureEvent; } /// <summary>Sets the signature event to allow modification of the signature dictionary.</summary> /// <param name="signatureEvent">the signature event</param> public virtual void SetSignatureEvent(PdfSigner.ISignatureEvent signatureEvent) { this.signatureEvent = signatureEvent; } /// <summary>Gets a new signature field name that doesn't clash with any existing name.</summary> /// <returns>A new signature field name.</returns> public virtual String GetNewSigFieldName() { PdfAcroForm acroForm = PdfAcroForm.GetAcroForm(document, true); String name = "Signature"; int step = 1; while (acroForm.GetField(name + step) != null) { ++step; } return name + step; } /// <summary>Sets the name indicating the field to be signed.</summary> /// <remarks> /// Sets the name indicating the field to be signed. The field can already be presented in the /// document but shall not be signed. If the field is not presented in the document, it will be created. /// </remarks> /// <param name="fieldName">The name indicating the field to be signed.</param> public virtual void SetFieldName(String fieldName) { if (fieldName != null) { if (fieldName.IndexOf('.') >= 0) { throw new ArgumentException(PdfException.FieldNamesCannotContainADot); } PdfAcroForm acroForm = PdfAcroForm.GetAcroForm(document, true); if (acroForm.GetField(fieldName) != null) { PdfFormField field = acroForm.GetField(fieldName); if (!PdfName.Sig.Equals(field.GetFormType())) { throw new ArgumentException(PdfException.FieldTypeIsNotASignatureFieldType); } if (field.GetValue() != null) { throw new ArgumentException(PdfException.FieldAlreadySigned); } appearance.SetFieldName(fieldName); IList<PdfWidgetAnnotation> widgets = field.GetWidgets(); if (widgets.Count > 0) { PdfWidgetAnnotation widget = widgets[0]; appearance.SetPageRect(GetWidgetRectangle(widget)); appearance.SetPageNumber(GetWidgetPageNumber(widget)); } } this.fieldName = fieldName; } } /// <summary>Gets the PdfDocument associated with this instance.</summary> /// <returns>the PdfDocument associated with this instance</returns> public virtual PdfDocument GetDocument() { return document; } /// <summary>Sets the PdfDocument.</summary> /// <param name="document">The PdfDocument</param> protected internal virtual void SetDocument(PdfDocument document) { this.document = document; } /// <summary>Setter for the OutputStream.</summary> /// <param name="originalOS">OutputStream for the bytes of the document</param> public virtual void SetOriginalOutputStream(Stream originalOS) { this.originalOS = originalOS; } /// <summary>Getter for the field lock dictionary.</summary> /// <returns>Field lock dictionary.</returns> public virtual PdfSigFieldLock GetFieldLockDict() { return fieldLock; } /// <summary>Setter for the field lock dictionary.</summary> /// <remarks> /// Setter for the field lock dictionary. /// <para /> /// <strong>Be aware:</strong> if a signature is created on an existing signature field, /// then its /Lock dictionary takes the precedence (if it exists). /// </remarks> /// <param name="fieldLock">Field lock dictionary</param> public virtual void SetFieldLockDict(PdfSigFieldLock fieldLock) { this.fieldLock = fieldLock; } /// <summary>Signs the document using the detached mode, CMS or CAdES equivalent.</summary> /// <remarks> /// Signs the document using the detached mode, CMS or CAdES equivalent. /// <br /><br /> /// NOTE: This method closes the underlying pdf document. This means, that current instance /// of PdfSigner cannot be used after this method call. /// </remarks> /// <param name="externalSignature">the interface providing the actual signing</param> /// <param name="chain">the certificate chain</param> /// <param name="crlList">the CRL list</param> /// <param name="ocspClient">the OCSP client</param> /// <param name="tsaClient">the Timestamp client</param> /// <param name="estimatedSize">the reserved size for the signature. It will be estimated if 0</param> /// <param name="sigtype">Either Signature.CMS or Signature.CADES</param> public virtual void SignDetached(IExternalSignature externalSignature, X509Certificate[] chain, ICollection <ICrlClient> crlList, IOcspClient ocspClient, ITSAClient tsaClient, int estimatedSize, PdfSigner.CryptoStandard sigtype) { SignDetached(externalSignature, chain, crlList, ocspClient, tsaClient, estimatedSize, sigtype, (SignaturePolicyIdentifier )null); } /// <summary>Signs the document using the detached mode, CMS or CAdES equivalent.</summary> /// <remarks> /// Signs the document using the detached mode, CMS or CAdES equivalent. /// <br /><br /> /// NOTE: This method closes the underlying pdf document. This means, that current instance /// of PdfSigner cannot be used after this method call. /// </remarks> /// <param name="externalSignature">the interface providing the actual signing</param> /// <param name="chain">the certificate chain</param> /// <param name="crlList">the CRL list</param> /// <param name="ocspClient">the OCSP client</param> /// <param name="tsaClient">the Timestamp client</param> /// <param name="estimatedSize">the reserved size for the signature. It will be estimated if 0</param> /// <param name="sigtype">Either Signature.CMS or Signature.CADES</param> /// <param name="signaturePolicy">the signature policy (for EPES signatures)</param> public virtual void SignDetached(IExternalSignature externalSignature, X509Certificate[] chain, ICollection <ICrlClient> crlList, IOcspClient ocspClient, ITSAClient tsaClient, int estimatedSize, PdfSigner.CryptoStandard sigtype, SignaturePolicyInfo signaturePolicy) { SignDetached(externalSignature, chain, crlList, ocspClient, tsaClient, estimatedSize, sigtype, signaturePolicy .ToSignaturePolicyIdentifier()); } /// <summary>Signs the document using the detached mode, CMS or CAdES equivalent.</summary> /// <remarks> /// Signs the document using the detached mode, CMS or CAdES equivalent. /// <br /><br /> /// NOTE: This method closes the underlying pdf document. This means, that current instance /// of PdfSigner cannot be used after this method call. /// </remarks> /// <param name="externalSignature">the interface providing the actual signing</param> /// <param name="chain">the certificate chain</param> /// <param name="crlList">the CRL list</param> /// <param name="ocspClient">the OCSP client</param> /// <param name="tsaClient">the Timestamp client</param> /// <param name="estimatedSize">the reserved size for the signature. It will be estimated if 0</param> /// <param name="sigtype">Either Signature.CMS or Signature.CADES</param> /// <param name="signaturePolicy">the signature policy (for EPES signatures)</param> public virtual void SignDetached(IExternalSignature externalSignature, X509Certificate[] chain, ICollection <ICrlClient> crlList, IOcspClient ocspClient, ITSAClient tsaClient, int estimatedSize, PdfSigner.CryptoStandard sigtype, SignaturePolicyIdentifier signaturePolicy) { if (closed) { throw new PdfException(PdfException.ThisInstanceOfPdfSignerAlreadyClosed); } if (certificationLevel > 0 && IsDocumentPdf2()) { if (DocumentContainsCertificationOrApprovalSignatures()) { throw new PdfException(PdfException.CertificationSignatureCreationFailedDocShallNotContainSigs); } } ICollection<byte[]> crlBytes = null; int i = 0; while (crlBytes == null && i < chain.Length) { crlBytes = ProcessCrl(chain[i++], crlList); } if (estimatedSize == 0) { estimatedSize = 8192; if (crlBytes != null) { foreach (byte[] element in crlBytes) { estimatedSize += element.Length + 10; } } if (ocspClient != null) { estimatedSize += 4192; } if (tsaClient != null) { estimatedSize += 4192; } } PdfSignatureAppearance appearance = GetSignatureAppearance(); appearance.SetCertificate(chain[0]); if (sigtype == PdfSigner.CryptoStandard.CADES && !IsDocumentPdf2()) { AddDeveloperExtension(PdfDeveloperExtension.ESIC_1_7_EXTENSIONLEVEL2); } String hashAlgorithm = externalSignature.GetHashAlgorithm(); PdfSignature dic = new PdfSignature(PdfName.Adobe_PPKLite, sigtype == PdfSigner.CryptoStandard.CADES ? PdfName .ETSI_CAdES_DETACHED : PdfName.Adbe_pkcs7_detached); dic.SetReason(appearance.GetReason()); dic.SetLocation(appearance.GetLocation()); dic.SetSignatureCreator(appearance.GetSignatureCreator()); dic.SetContact(appearance.GetContact()); dic.SetDate(new PdfDate(GetSignDate())); // time-stamp will over-rule this cryptoDictionary = dic; digestMethod = GetHashAlgorithmNameInCompatibleForPdfForm(hashAlgorithm); IDictionary<PdfName, int?> exc = new Dictionary<PdfName, int?>(); exc.Put(PdfName.Contents, estimatedSize * 2 + 2); PreClose(exc); PdfPKCS7 sgn = new PdfPKCS7((ICipherParameters)null, chain, hashAlgorithm, false); if (signaturePolicy != null) { sgn.SetSignaturePolicy(signaturePolicy); } Stream data = GetRangeStream(); byte[] hash = DigestAlgorithms.Digest(data, SignUtils.GetMessageDigest(hashAlgorithm)); IList<byte[]> ocspList = new List<byte[]>(); if (chain.Length > 1 && ocspClient != null) { for (int j = 0; j < chain.Length - 1; ++j) { byte[] ocsp = ocspClient.GetEncoded((X509Certificate)chain[j], (X509Certificate)chain[j + 1], null); if (ocsp != null) { ocspList.Add(ocsp); } } } byte[] sh = sgn.GetAuthenticatedAttributeBytes(hash, sigtype, ocspList, crlBytes); byte[] extSignature = externalSignature.Sign(sh); sgn.SetExternalDigest(extSignature, null, externalSignature.GetEncryptionAlgorithm()); byte[] encodedSig = sgn.GetEncodedPKCS7(hash, sigtype, tsaClient, ocspList, crlBytes); if (estimatedSize < encodedSig.Length) { throw new System.IO.IOException("Not enough space"); } byte[] paddedSig = new byte[estimatedSize]; Array.Copy(encodedSig, 0, paddedSig, 0, encodedSig.Length); PdfDictionary dic2 = new PdfDictionary(); dic2.Put(PdfName.Contents, new PdfString(paddedSig).SetHexWriting(true)); Close(dic2); closed = true; } /// <summary>Sign the document using an external container, usually a PKCS7.</summary> /// <remarks> /// Sign the document using an external container, usually a PKCS7. The signature is fully composed /// externally, iText will just put the container inside the document. /// <br /><br /> /// NOTE: This method closes the underlying pdf document. This means, that current instance /// of PdfSigner cannot be used after this method call. /// </remarks> /// <param name="externalSignatureContainer">the interface providing the actual signing</param> /// <param name="estimatedSize">the reserved size for the signature</param> public virtual void SignExternalContainer(IExternalSignatureContainer externalSignatureContainer, int estimatedSize ) { if (closed) { throw new PdfException(PdfException.ThisInstanceOfPdfSignerAlreadyClosed); } PdfSignature dic = new PdfSignature(); PdfSignatureAppearance appearance = GetSignatureAppearance(); dic.SetReason(appearance.GetReason()); dic.SetLocation(appearance.GetLocation()); dic.SetSignatureCreator(appearance.GetSignatureCreator()); dic.SetContact(appearance.GetContact()); dic.SetDate(new PdfDate(GetSignDate())); // time-stamp will over-rule this externalSignatureContainer.ModifySigningDictionary(dic.GetPdfObject()); cryptoDictionary = dic; IDictionary<PdfName, int?> exc = new Dictionary<PdfName, int?>(); exc.Put(PdfName.Contents, estimatedSize * 2 + 2); PreClose(exc); Stream data = GetRangeStream(); byte[] encodedSig = externalSignatureContainer.Sign(data); if (estimatedSize < encodedSig.Length) { throw new System.IO.IOException("Not enough space"); } byte[] paddedSig = new byte[estimatedSize]; Array.Copy(encodedSig, 0, paddedSig, 0, encodedSig.Length); PdfDictionary dic2 = new PdfDictionary(); dic2.Put(PdfName.Contents, new PdfString(paddedSig).SetHexWriting(true)); Close(dic2); closed = true; } /// <summary>Signs a document with a PAdES-LTV Timestamp.</summary> /// <remarks> /// Signs a document with a PAdES-LTV Timestamp. The document is closed at the end. /// <br /><br /> /// NOTE: This method closes the underlying pdf document. This means, that current instance /// of PdfSigner cannot be used after this method call. /// </remarks> /// <param name="tsa">the timestamp generator</param> /// <param name="signatureName"> /// the signature name or null to have a name generated /// automatically /// </param> public virtual void Timestamp(ITSAClient tsa, String signatureName) { if (closed) { throw new PdfException(PdfException.ThisInstanceOfPdfSignerAlreadyClosed); } int contentEstimated = tsa.GetTokenSizeEstimate(); if (!IsDocumentPdf2()) { AddDeveloperExtension(PdfDeveloperExtension.ESIC_1_7_EXTENSIONLEVEL5); } SetFieldName(signatureName); PdfSignature dic = new PdfSignature(PdfName.Adobe_PPKLite, PdfName.ETSI_RFC3161); dic.Put(PdfName.Type, PdfName.DocTimeStamp); cryptoDictionary = dic; IDictionary<PdfName, int?> exc = new Dictionary<PdfName, int?>(); exc.Put(PdfName.Contents, contentEstimated * 2 + 2); PreClose(exc); Stream data = GetRangeStream(); IDigest messageDigest = tsa.GetMessageDigest(); byte[] buf = new byte[4096]; int n; while ((n = data.Read(buf)) > 0) { messageDigest.Update(buf, 0, n); } byte[] tsImprint = messageDigest.Digest(); byte[] tsToken; try { tsToken = tsa.GetTimeStampToken(tsImprint); } catch (Exception e) { throw new GeneralSecurityException(e.Message, e); } if (contentEstimated + 2 < tsToken.Length) { throw new System.IO.IOException("Not enough space"); } byte[] paddedSig = new byte[contentEstimated]; Array.Copy(tsToken, 0, paddedSig, 0, tsToken.Length); PdfDictionary dic2 = new PdfDictionary(); dic2.Put(PdfName.Contents, new PdfString(paddedSig).SetHexWriting(true)); Close(dic2); closed = true; } /// <summary>Signs a PDF where space was already reserved.</summary> /// <param name="document">the original PDF</param> /// <param name="fieldName">the field to sign. It must be the last field</param> /// <param name="outs">the output PDF</param> /// <param name="externalSignatureContainer"> /// the signature container doing the actual signing. Only the /// method ExternalSignatureContainer.sign is used /// </param> public static void SignDeferred(PdfDocument document, String fieldName, Stream outs, IExternalSignatureContainer externalSignatureContainer) { SignatureUtil signatureUtil = new SignatureUtil(document); PdfSignature signature = signatureUtil.GetSignature(fieldName); if (signature == null) { throw new PdfException(PdfException.ThereIsNoFieldInTheDocumentWithSuchName1).SetMessageParams(fieldName); } if (!signatureUtil.SignatureCoversWholeDocument(fieldName)) { throw new PdfException(PdfException.SignatureWithName1IsNotTheLastItDoesntCoverWholeDocument).SetMessageParams (fieldName); } PdfArray b = signature.GetByteRange(); long[] gaps = b.ToLongArray(); if (b.Size() != 4 || gaps[0] != 0) { throw new ArgumentException("Single exclusion space supported"); } IRandomAccessSource readerSource = document.GetReader().GetSafeFile().CreateSourceView(); Stream rg = new RASInputStream(new RandomAccessSourceFactory().CreateRanged(readerSource, gaps)); byte[] signedContent = externalSignatureContainer.Sign(rg); int spaceAvailable = (int)(gaps[2] - gaps[1]) - 2; if ((spaceAvailable & 1) != 0) { throw new ArgumentException("Gap is not a multiple of 2"); } spaceAvailable /= 2; if (spaceAvailable < signedContent.Length) { throw new PdfException(PdfException.AvailableSpaceIsNotEnoughForSignature); } StreamUtil.CopyBytes(readerSource, 0, gaps[1] + 1, outs); ByteBuffer bb = new ByteBuffer(spaceAvailable * 2); foreach (byte bi in signedContent) { bb.AppendHex(bi); } int remain = (spaceAvailable - signedContent.Length) * 2; for (int k = 0; k < remain; ++k) { bb.Append((byte)48); } byte[] bbArr = bb.ToByteArray(); outs.Write(bbArr); StreamUtil.CopyBytes(readerSource, gaps[2] - 1, gaps[3] + 1, outs); } /// <summary>Processes a CRL list.</summary> /// <param name="cert">a Certificate if one of the CrlList implementations needs to retrieve the CRL URL from it. /// </param> /// <param name="crlList">a list of CrlClient implementations</param> /// <returns>a collection of CRL bytes that can be embedded in a PDF</returns> protected internal virtual ICollection<byte[]> ProcessCrl(X509Certificate cert, ICollection<ICrlClient> crlList ) { if (crlList == null) { return null; } IList<byte[]> crlBytes = new List<byte[]>(); foreach (ICrlClient cc in crlList) { if (cc == null) { continue; } ICollection<byte[]> b = cc.GetEncoded((X509Certificate)cert, null); if (b == null) { continue; } crlBytes.AddAll(b); } if (crlBytes.Count == 0) { return null; } else { return crlBytes; } } protected internal virtual void AddDeveloperExtension(PdfDeveloperExtension extension) { document.GetCatalog().AddDeveloperExtension(extension); } /// <summary>Checks if the document is in the process of closing.</summary> /// <returns>true if the document is in the process of closing, false otherwise</returns> protected internal virtual bool IsPreClosed() { return preClosed; } /// <summary>This is the first method to be called when using external signatures.</summary> /// <remarks> /// This is the first method to be called when using external signatures. The general sequence is: /// preClose(), getDocumentBytes() and close(). /// <para /> /// <c>exclusionSizes</c> must contain at least /// the <c>PdfName.CONTENTS</c> key with the size that it will take in the /// document. Note that due to the hex string coding this size should be byte_size*2+2. /// </remarks> /// <param name="exclusionSizes"> /// Map with names and sizes to be excluded in the signature /// calculation. The key is a PdfName and the value an Integer. At least the /Contents must be present /// </param> protected internal virtual void PreClose(IDictionary<PdfName, int?> exclusionSizes) { if (preClosed) { throw new PdfException(PdfException.DocumentAlreadyPreClosed); } // TODO: add mergeVerification functionality preClosed = true; PdfAcroForm acroForm = PdfAcroForm.GetAcroForm(document, true); SignatureUtil sgnUtil = new SignatureUtil(document); String name = GetFieldName(); bool fieldExist = sgnUtil.DoesSignatureFieldExist(name); acroForm.SetSignatureFlags(PdfAcroForm.SIGNATURE_EXIST | PdfAcroForm.APPEND_ONLY); PdfSigFieldLock fieldLock = null; if (cryptoDictionary == null) { throw new PdfException(PdfException.NoCryptoDictionaryDefined); } cryptoDictionary.GetPdfObject().MakeIndirect(document); if (fieldExist) { PdfSignatureFormField sigField = (PdfSignatureFormField)acroForm.GetField(fieldName); sigField.Put(PdfName.V, cryptoDictionary.GetPdfObject()); fieldLock = sigField.GetSigFieldLockDictionary(); if (fieldLock == null && this.fieldLock != null) { this.fieldLock.GetPdfObject().MakeIndirect(document); sigField.Put(PdfName.Lock, this.fieldLock.GetPdfObject()); fieldLock = this.fieldLock; } sigField.Put(PdfName.P, document.GetPage(appearance.GetPageNumber()).GetPdfObject()); sigField.Put(PdfName.V, cryptoDictionary.GetPdfObject()); PdfObject obj = sigField.GetPdfObject().Get(PdfName.F); int flags = 0; if (obj != null && obj.IsNumber()) { flags = ((PdfNumber)obj).IntValue(); } flags |= PdfAnnotation.LOCKED; sigField.Put(PdfName.F, new PdfNumber(flags)); PdfDictionary ap = new PdfDictionary(); ap.Put(PdfName.N, appearance.GetAppearance().GetPdfObject()); sigField.Put(PdfName.AP, ap); sigField.SetModified(); } else { PdfWidgetAnnotation widget = new PdfWidgetAnnotation(appearance.GetPageRect()); widget.SetFlags(PdfAnnotation.PRINT | PdfAnnotation.LOCKED); PdfSignatureFormField sigField = PdfFormField.CreateSignature(document); sigField.SetFieldName(name); sigField.Put(PdfName.V, cryptoDictionary.GetPdfObject()); sigField.AddKid(widget); if (this.fieldLock != null) { this.fieldLock.GetPdfObject().MakeIndirect(document); sigField.Put(PdfName.Lock, this.fieldLock.GetPdfObject()); fieldLock = this.fieldLock; } int pagen = appearance.GetPageNumber(); widget.SetPage(document.GetPage(pagen)); PdfDictionary ap = widget.GetAppearanceDictionary(); if (ap == null) { ap = new PdfDictionary(); widget.Put(PdfName.AP, ap); } ap.Put(PdfName.N, appearance.GetAppearance().GetPdfObject()); acroForm.AddField(sigField, document.GetPage(pagen)); if (acroForm.GetPdfObject().IsIndirect()) { acroForm.SetModified(); } else { //Acroform dictionary is a Direct dictionary, //for proper flushing, catalog needs to be marked as modified document.GetCatalog().SetModified(); } } exclusionLocations = new Dictionary<PdfName, PdfLiteral>(); PdfLiteral lit = new PdfLiteral(80); exclusionLocations.Put(PdfName.ByteRange, lit); cryptoDictionary.Put(PdfName.ByteRange, lit); foreach (KeyValuePair<PdfName, int?> entry in exclusionSizes) { PdfName key = entry.Key; lit = new PdfLiteral((int)entry.Value); exclusionLocations.Put(key, lit); cryptoDictionary.Put(key, lit); } if (certificationLevel > 0) { AddDocMDP(cryptoDictionary); } if (fieldLock != null) { AddFieldMDP(cryptoDictionary, fieldLock); } if (signatureEvent != null) { signatureEvent.GetSignatureDictionary(cryptoDictionary); } if (certificationLevel > 0) { // add DocMDP entry to root PdfDictionary docmdp = new PdfDictionary(); docmdp.Put(PdfName.DocMDP, cryptoDictionary.GetPdfObject()); document.GetCatalog().Put(PdfName.Perms, docmdp); document.GetCatalog().SetModified(); } cryptoDictionary.GetPdfObject().Flush(false); document.Close(); range = new long[exclusionLocations.Count * 2]; long byteRangePosition = exclusionLocations.Get(PdfName.ByteRange).GetPosition(); exclusionLocations.JRemove(PdfName.ByteRange); int idx = 1; foreach (PdfLiteral lit1 in exclusionLocations.Values) { long n = lit1.GetPosition(); range[idx++] = n; range[idx++] = lit1.GetBytesCount() + n; } JavaUtil.Sort(range, 1, range.Length - 1); for (int k = 3; k < range.Length - 2; k += 2) { range[k] -= range[k - 1]; } if (tempFile == null) { bout = temporaryOS.ToArray(); range[range.Length - 1] = bout.Length - range[range.Length - 2]; MemoryStream bos = new MemoryStream(); PdfOutputStream os = new PdfOutputStream(bos); os.Write('['); for (int k = 0; k < range.Length; ++k) { os.WriteLong(range[k]).Write(' '); } os.Write(']'); Array.Copy(bos.ToArray(), 0, bout, (int)byteRangePosition, (int)bos.Length); } else { try { raf = FileUtil.GetRandomAccessFile(tempFile); long len = raf.Length; range[range.Length - 1] = len - range[range.Length - 2]; MemoryStream bos = new MemoryStream(); PdfOutputStream os = new PdfOutputStream(bos); os.Write('['); for (int k = 0; k < range.Length; ++k) { os.WriteLong(range[k]).Write(' '); } os.Write(']'); raf.Seek(byteRangePosition); raf.Write(bos.ToArray(), 0, (int)bos.Length); } catch (System.IO.IOException e) { try { raf.Dispose(); } catch (Exception) { } try { tempFile.Delete(); } catch (Exception) { } throw; } } } /// <summary>Gets the document bytes that are hashable when using external signatures.</summary> /// <remarks> /// Gets the document bytes that are hashable when using external signatures. /// The general sequence is: /// <see cref="PreClose(System.Collections.Generic.IDictionary{K, V})"/> /// , /// <see cref="GetRangeStream()"/> /// and /// <see cref="Close(iText.Kernel.Pdf.PdfDictionary)"/>. /// </remarks> /// <returns> /// The /// <see cref="System.IO.Stream"/> /// of bytes to be signed. /// </returns> protected internal virtual Stream GetRangeStream() { RandomAccessSourceFactory fac = new RandomAccessSourceFactory(); return new RASInputStream(fac.CreateRanged(GetUnderlyingSource(), range)); } /// <summary>This is the last method to be called when using external signatures.</summary> /// <remarks> /// This is the last method to be called when using external signatures. The general sequence is: /// preClose(), getDocumentBytes() and close(). /// <para /> /// update is a PdfDictionary that must have exactly the /// same keys as the ones provided in /// <see cref="PreClose(System.Collections.Generic.IDictionary{K, V})"/>. /// </remarks> /// <param name="update"> /// a PdfDictionary with the key/value that will fill the holes defined /// in /// <see cref="PreClose(System.Collections.Generic.IDictionary{K, V})"/> /// </param> protected internal virtual void Close(PdfDictionary update) { try { if (!preClosed) { throw new PdfException(PdfException.DocumentMustBePreClosed); } MemoryStream bous = new MemoryStream(); PdfOutputStream os = new PdfOutputStream(bous); foreach (PdfName key in update.KeySet()) { PdfObject obj = update.Get(key); PdfLiteral lit = exclusionLocations.Get(key); if (lit == null) { throw new ArgumentException("The key didn't reserve space in preclose"); } bous.JReset(); os.Write(obj); if (bous.Length > lit.GetBytesCount()) { throw new ArgumentException("The key is too big"); } if (tempFile == null) { Array.Copy(bous.ToArray(), 0, bout, (int)lit.GetPosition(), (int)bous.Length); } else { raf.Seek(lit.GetPosition()); raf.Write(bous.ToArray(), 0, (int)bous.Length); } } if (update.Size() != exclusionLocations.Count) { throw new ArgumentException("The update dictionary has less keys than required"); } if (tempFile == null) { originalOS.Write(bout, 0, bout.Length); } else { if (originalOS != null) { raf.Seek(0); long length = raf.Length; byte[] buf = new byte[8192]; while (length > 0) { int r = raf.JRead(buf, 0, (int)Math.Min((long)buf.Length, length)); if (r < 0) { throw new EndOfStreamException("unexpected eof"); } originalOS.Write(buf, 0, r); length -= r; } } } } finally { if (tempFile != null) { raf.Dispose(); if (originalOS != null) { tempFile.Delete(); } } if (originalOS != null) { try { originalOS.Dispose(); } catch (Exception) { } } } } /// <summary>Returns the underlying source.</summary> /// <returns>the underlying source</returns> protected internal virtual IRandomAccessSource GetUnderlyingSource() { RandomAccessSourceFactory fac = new RandomAccessSourceFactory(); return raf == null ? fac.CreateSource(bout) : fac.CreateSource(raf); } /// <summary>Adds keys to the signature dictionary that define the certification level and the permissions.</summary> /// <remarks> /// Adds keys to the signature dictionary that define the certification level and the permissions. /// This method is only used for Certifying signatures. /// </remarks> /// <param name="crypto">the signature dictionary</param> protected internal virtual void AddDocMDP(PdfSignature crypto) { PdfDictionary reference = new PdfDictionary(); PdfDictionary transformParams = new PdfDictionary(); transformParams.Put(PdfName.P, new PdfNumber(certificationLevel)); transformParams.Put(PdfName.V, new PdfName("1.2")); transformParams.Put(PdfName.Type, PdfName.TransformParams); reference.Put(PdfName.TransformMethod, PdfName.DocMDP); reference.Put(PdfName.Type, PdfName.SigRef); reference.Put(PdfName.TransformParams, transformParams); SetDigestParamToSigRefIfNeeded(reference); reference.Put(PdfName.Data, document.GetTrailer().Get(PdfName.Root)); PdfArray types = new PdfArray(); types.Add(reference); crypto.Put(PdfName.Reference, types); } /// <summary>Adds keys to the signature dictionary that define the field permissions.</summary> /// <remarks> /// Adds keys to the signature dictionary that define the field permissions. /// This method is only used for signatures that lock fields. /// </remarks> /// <param name="crypto">the signature dictionary</param> /// <param name="fieldLock"> /// the /// <see cref="iText.Forms.PdfSigFieldLock"/> /// instance specified the field lock to be set /// </param> protected internal virtual void AddFieldMDP(PdfSignature crypto, PdfSigFieldLock fieldLock) { PdfDictionary reference = new PdfDictionary(); PdfDictionary transformParams = new PdfDictionary(); transformParams.PutAll(fieldLock.GetPdfObject()); transformParams.Put(PdfName.Type, PdfName.TransformParams); transformParams.Put(PdfName.V, new PdfName("1.2")); reference.Put(PdfName.TransformMethod, PdfName.FieldMDP); reference.Put(PdfName.Type, PdfName.SigRef); reference.Put(PdfName.TransformParams, transformParams); SetDigestParamToSigRefIfNeeded(reference); reference.Put(PdfName.Data, document.GetTrailer().Get(PdfName.Root)); PdfArray types = crypto.GetPdfObject().GetAsArray(PdfName.Reference); if (types == null) { types = new PdfArray(); crypto.Put(PdfName.Reference, types); } types.Add(reference); } protected internal virtual bool DocumentContainsCertificationOrApprovalSignatures() { bool containsCertificationOrApprovalSignature = false; PdfDictionary urSignature = null; PdfDictionary catalogPerms = document.GetCatalog().GetPdfObject().GetAsDictionary(PdfName.Perms); if (catalogPerms != null) { urSignature = catalogPerms.GetAsDictionary(PdfName.UR3); } PdfAcroForm acroForm = PdfAcroForm.GetAcroForm(document, false); if (acroForm != null) { foreach (KeyValuePair<String, PdfFormField> entry in acroForm.GetFormFields()) { PdfDictionary fieldDict = entry.Value.GetPdfObject(); if (!PdfName.Sig.Equals(fieldDict.Get(PdfName.FT))) { continue; } PdfDictionary sigDict = fieldDict.GetAsDictionary(PdfName.V); if (sigDict == null) { continue; } PdfSignature pdfSignature = new PdfSignature(sigDict); if (pdfSignature.GetContents() == null || pdfSignature.GetByteRange() == null) { continue; } if (!pdfSignature.GetType().Equals(PdfName.DocTimeStamp) && sigDict != urSignature) { containsCertificationOrApprovalSignature = true; break; } } } return containsCertificationOrApprovalSignature; } /// <summary>Get the rectangle associated to the provided widget.</summary> /// <param name="widget">PdfWidgetAnnotation to extract the rectangle from</param> /// <returns>Rectangle</returns> protected internal virtual Rectangle GetWidgetRectangle(PdfWidgetAnnotation widget) { return widget.GetRectangle().ToRectangle(); } /// <summary>Get the page number associated to the provided widget.</summary> /// <param name="widget">PdfWidgetAnnotation from which to extract the page number</param> /// <returns>page number</returns> protected internal virtual int GetWidgetPageNumber(PdfWidgetAnnotation widget) { int pageNumber = 0; PdfDictionary pageDict = widget.GetPdfObject().GetAsDictionary(PdfName.P); if (pageDict != null) { pageNumber = document.GetPageNumber(pageDict); } else { for (int i = 1; i <= document.GetNumberOfPages(); i++) { PdfPage page = document.GetPage(i); if (!page.IsFlushed()) { if (page.ContainsAnnotation(widget)) { pageNumber = i; break; } } } } return pageNumber; } private void SetDigestParamToSigRefIfNeeded(PdfDictionary reference) { if (document.GetPdfVersion().CompareTo(PdfVersion.PDF_1_6) < 0) { // Don't really know what to say about this if-clause code. // Let's leave it, assuming that it is reasoned in some very specific way, until opposite is not proven. reference.Put(PdfName.DigestValue, new PdfString("aa")); PdfArray loc = new PdfArray(); loc.Add(new PdfNumber(0)); loc.Add(new PdfNumber(0)); reference.Put(PdfName.DigestLocation, loc); reference.Put(PdfName.DigestMethod, PdfName.MD5); } else { if (IsDocumentPdf2()) { if (digestMethod != null) { reference.Put(PdfName.DigestMethod, digestMethod); } else { ILog logger = LogManager.GetLogger(typeof(PdfSigner)); logger.Error(iText.IO.LogMessageConstant.UNKNOWN_DIGEST_METHOD); } } } } private PdfName GetHashAlgorithmNameInCompatibleForPdfForm(String hashAlgorithm) { PdfName pdfCompatibleName = null; String hashAlgOid = DigestAlgorithms.GetAllowedDigest(hashAlgorithm); if (hashAlgOid != null) { String hashAlgorithmNameInCompatibleForPdfForm = DigestAlgorithms.GetDigest(hashAlgOid); if (hashAlgorithmNameInCompatibleForPdfForm != null) { pdfCompatibleName = new PdfName(hashAlgorithmNameInCompatibleForPdfForm); } } return pdfCompatibleName; } private bool IsDocumentPdf2() { return document.GetPdfVersion().CompareTo(PdfVersion.PDF_2_0) >= 0; } private static StampingProperties InitStampingProperties(bool append) { StampingProperties properties = new StampingProperties(); if (append) { properties.UseAppendMode(); } return properties; } /// <summary>An interface to retrieve the signature dictionary for modification.</summary> public interface ISignatureEvent { /// <summary>Allows modification of the signature dictionary.</summary> /// <param name="sig">The signature dictionary</param> void GetSignatureDictionary(PdfSignature sig); } } }
{ "pile_set_name": "Github" }
/************************************************************* Download latest Blynk library here: https://github.com/blynkkk/blynk-library/releases/latest Blynk is a platform with iOS and Android apps to control Arduino, Raspberry Pi and the likes over the Internet. You can easily build graphic interfaces for all your projects by simply dragging and dropping widgets. Downloads, docs, tutorials: http://www.blynk.cc Sketch generator: http://examples.blynk.cc Blynk community: http://community.blynk.cc Follow us: http://www.fb.com/blynkapp http://twitter.com/blynk_app Blynk library is licensed under MIT license This example code is in public domain. ************************************************************* This example shows how to keep WiFi connection on ESP8266. *************************************************************/ /* Comment this out to disable prints and save space */ #define BLYNK_PRINT Serial #include <ESP8266WiFi.h> #include <BlynkSimpleEsp8266.h> // You should get Auth Token in the Blynk App. // Go to the Project Settings (nut icon). char auth[] = "YourAuthToken"; // Your WiFi credentials. // Set password to "" for open networks. char ssid[] = "YourNetworkName"; char pass[] = "YourPassword"; int lastConnectionAttempt = millis(); int connectionDelay = 5000; // try to reconnect every 5 seconds void setup() { // Debug console Serial.begin(9600); Blynk.begin(auth, ssid, pass); } void loop() { // check WiFi connection: if (WiFi.status() != WL_CONNECTED) { // (optional) "offline" part of code // check delay: if (millis() - lastConnectionAttempt >= connectionDelay) { lastConnectionAttempt = millis(); // attempt to connect to Wifi network: if (pass && strlen(pass)) { WiFi.begin((char*)ssid, (char*)pass); } else { WiFi.begin((char*)ssid); } } } else { Blynk.run(); } }
{ "pile_set_name": "Github" }
[#]: collector: (lujun9972) [#]: translator: ( ) [#]: reviewer: ( ) [#]: publisher: ( ) [#]: url: ( ) [#]: subject: (How to teach software engineering students about the enterprise) [#]: via: (https://opensource.com/article/19/7/enterprise-technology) [#]: author: (Tomas Cerny https://opensource.com/users/tomcze) How to teach software engineering students about the enterprise ====== Start with a solid foundation of polymorphism, object-oriented programming, collections, lambda, and design patterns. ![Tall building with windows][1] In this opinion article, you will find a set of suggestions for the inclusion of enterprise technology into software engineering courses. This piece goes through the difficulties that students face and proposes simplifications successfully used in the past. The continual advancement of enterprise technologies leads to a simplifying of the inclusion process in education. In the coming years, one can expect that industry demand for experts who know the technology used in enterprise development processes and production systems will increase. Academic institutions are here to prepare experts and leaders for industry, and thus they should know the technologies being used. It has been ten years since I taught my first software engineering course. Since then, I have taught this course every year. Many software engineering courses put emphasis on analysis and design from the abstract perspective, involving UML models and notations, and letting students develop software projects on their own. However, in my course, I chose a harder path rooted in theory and practice. This path includes lectures and labs on enterprise Java technology. When we pinpoint where actual software engineering skills are needed, we can point to large and complex systems. How could you become a software engineer without being involved in the development of such a system? For large systems, standard development technologies are no longer sufficient, since their constructs are too low-level to address the typical problems and situations in enterprise development. Moreover, why would someone build large systems from objects when we could involve components that are built for specific purposes? With objects, we are trying to reinvent the wheel of existing enterprise practice. While there are many pros highlighted, frankly speaking, including enterprise technology in your coursework can turn a rather simple software engineering course into a quite difficult one, especially for the first few iterations. ### Unfamiliar territory As long as we understand enterprise technology and standards, and have developed a larger system on our own, we are ready to include enterprise technologies into lectures. If we have not developed a large system before, we still can try to include the technology, but we must be ready to run through multiple examples and demos beforehand, and especially put it all together. However, where do we start? I remember the times when early versions of enterprise Java were released with initial demos on hotel booking that seemed ideal for learning the technology, at least from an initial examination. The difficulties quickly became clear when students got stuck. The available tutorials with the documentation are not written for novice students and beginners; they are made for users who have already used similar technologies before. ### More than just Java The first issue novices run into are related to running enterprise Java itself. Enterprise Java no longer needs only a Java virtual machine to run. Now, it needs a container and a web server compliant with the technology in order to use its many components. Students of software engineering must suddenly assume the role of a system administrator to install a complex environment on their machines that requires further configuration. For many students who have never opened a terminal before, it becomes a tedious task just to prepare the needed environment. Operating systems don’t always make it easy for novices, as not all terminals are as friendly as others. In the ideal case, students would need to reinstall their operating system to Linux, but that takes a software engineering course into a completely different level. Those who manage to install and configure the server are suddenly told that the server runs on a certain port. Perhaps the most breath-taking question to come from a student is, what is a port? In such a case, the intent to deploy our first example takes another detour to explain networking, since we must connect to the enterprise system over the network. When we finally roll over operating systems and networking, student motivation is almost gone, and suddenly we face another challenge: where to store data? Enterprise systems are all about big data, and one could barely imagine them without a database. However, it assumes that students not only know databases but also know to configure them to accept new connections. Our initial intention for a quick demo to students almost failed, as our software engineering efforts took multiple detours to get into our first demo. When we finally got to a running demo, students wanted to update the demo and rerun it. They soon realized that the changes do not propagate to the running demo, and they need to blindly develop the code, and then redeploy it. This process takes up to a minute in some cases at school workstations. At that point, students have lost most of their motivation and initial drive. And of all these efforts were just to run the initial demo, not to learn the technology itself. In most cases, one has to further advise students about version control and [Maven][2], but we still have not gotten to the point of learning the various components needed to develop such systems. The initial great idea to expose students to enterprise technology thus changes into teaching them all the needed supportive materials. This is simply too much work for a software engineering course to demonstrate component-based development. The actual fruit of the intention comes when students spent hours on configuration and setup, leaving not much space in the semester. ### Fixes for the problem With recent technology example projects such as [kitchen-sink][3], we can find every piece of important technology applied in a single demo, which is great. From there, it is pretty straight-forward to cover topics on object-relational mapping and persistence, and session beans to handle business logic, as well as context and dependency injection, which very nicely correspond to the components and UML component diagrams commonly taught in software engineering courses. While Java still promotes server-side user interface development, in many cases students reject the choice and prefer to go with [React][4] or [Angular][5] frameworks, which promote the need to cover XML bindings and JSON transformation. Over the years of promoting enterprise technologies in my courses, I have experimented with many supportive instruments to reduce the initial efforts needed to learn and cover the introduction materials and deploy the first demo. Here’s what I found. #### Back to the basics Primarily, I must highlight that it is not possible to teach enterprise technology to immature students. It is much more important to teach such students polymorphism, object-oriented programming, collections, lambda, and design patterns so that they understand the primary design. Why? Because otherwise, our students cannot become who we want them to be. In enterprise Java, great solutions are component-based, but in the background, they are full of polymorphism, patterns, and collections, and lacking a full understanding of these will yield significant issues in later design on real systems. Thus, it is better to exclude enterprise Java when students lack basics, and instead focus on the core skills, and possibly postpone the topic for a later course if allowed by the curriculum. #### Primer courses At my previous university, one of the new undergraduate programs perfected the curriculum with well-formed preceding courses. Students taking software engineering had covered all of the prerequisites, such as networks, operating systems, databases, and object-oriented programming, before starting the course. However, the mentioned curriculum was to prepare bachelors for industry needs, and thus theoretical coursework was not as emphasized. #### Multimedia One significant time reduction for the initial demo setup could be achieved through step-by-step video tutorials detailing each stage on how to run, debug, and redeploy it. This seems great, but often students want to install the demo on their personal machine, and it is simply hard to make a perfect tutorial prepared for all conditions that can be found on multiple operating systems. Students found videos very helpful, as they may perform the learning process when they choose. Skipping all the errors faced with the initial demo helps students keep their motivation and drive. #### Virtual machines Another significant improvement is to prepare a virtual image of the operating system, with a setup demo and environment for the students. In the simplest case, students only need to start their integrated development environment (IDE) and click a button to see the demo running. Later, they can install the demo in their own environment, but only after they have a running example in place and hands-on experience. #### The right technology My last semester, with a course on a slightly different topic, I came across a significant improvement, and perhaps something that changes enterprise development forever. Enterprise microservice architecture came recently as the answer to cloud-based demands. Eclipse [MicroProfile][6] is perhaps the right ingredient to teach enterprise development. It allows developers to only include technologies that are needed for the particular application. The idea is to run an enterprise application from a JAR file that contains only the needed libraries. This practice allows running the application from outside of the container. One could see it as a configurable microcontainer that includes the minimum setup for your JAR and runs as a server. This is exactly what we need to simplify our coursework. We no longer need to explain all of the technologies provided by enterprise containers, and can instead focus our attention on a much smaller set. This can bring us quickly to the point we want to make in academia. We can focus on teaching our students components, and skip the necessity of container knowledge and complicated redeploys. While everyone loves standards, it seems that the [Spring][7] framework—a strong competitor to enterprise Java—predated the idea of running applications outside of a container almost by a decade. Thus, to get to the point in an academic environment, it might be the right way to go (on the other hand, such a choice sacrifices the standardized technology agreed upon by the main industry players). ### Next steps What should we do for our next course? First, know who your audience is and whether they are mature enough to learn enterprise technology. A simple evaluation test can tell more. With a large class audience, you should consider including video tutorials. Otherwise, labs could turn into underprovisioned debugging sessions. With video tutorials used as homework, you make use of the time for lectures and labs more effectively and cover other important topics. If you are expecting troubles with operating systems, consider making a virtual machine image, or prepare [Docker][8] images for particular pieces such as the database, etc. Most importantly, keep innovating since technologies come and leave, such as the [JRebel][9] (for hot deploying changes) academic license that is no longer available. Fortunately, recent advancements in enterprise technologies bring simplifications, and learning technology will be easier for the next generations. In the end, we will be able to focus on the intended topics to take students in our intended direction. Nevertheless, starting with enterprise technology too early would be counterproductive, and no advancement can change that. -------------------------------------------------------------------------------- via: https://opensource.com/article/19/7/enterprise-technology 作者:[Tomas Cerny][a] 选题:[lujun9972][b] 译者:[译者ID](https://github.com/译者ID) 校对:[校对者ID](https://github.com/校对者ID) 本文由 [LCTT](https://github.com/LCTT/TranslateProject) 原创编译,[Linux中国](https://linux.cn/) 荣誉推出 [a]: https://opensource.com/users/tomcze [b]: https://github.com/lujun9972 [1]: https://opensource.com/sites/default/files/styles/image-full-size/public/lead-images/windows_building_sky_scale.jpg?itok=mH6CAX29 (Tall building with windows) [2]: https://maven.apache.org/index.html [3]: https://developers.redhat.com/quickstarts/eap/kitchensink/ [4]: https://reactjs.org/ [5]: https://angular.io/ [6]: https://microprofile.io/ [7]: https://spring.io/ [8]: https://www.docker.com/ [9]: https://jrebel.com/
{ "pile_set_name": "Github" }
/* Copyright (c) 2004-2010, The Dojo Foundation All Rights Reserved. Available via Academic Free License >= 2.1 OR the modified BSD license. see: http://dojotoolkit.org/license for details */ if(!dojo._hasResource["dojox.geo.charting._Feature"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. dojo._hasResource["dojox.geo.charting._Feature"] = true; dojo.provide("dojox.geo.charting._Feature"); dojo.require("dojox.gfx.fx"); dojo.declare("dojox.geo.charting._Feature", null, { _isZoomIn: false, _isFocused: false, markerText:null, constructor: function(parent, name, shapeData){ this.id = name; this.shape = parent.mapObj.createGroup(); this.parent = parent; this.mapObj = parent.mapObj; this._bbox = shapeData.bbox; this._center = shapeData.center; //TODO: fill color would be defined by charting data and legend //this._defaultFill = ["#FFCE52", "#CE6342", "#63A584"][Math.floor(Math.random() * 3)]; this._defaultFill = parent.defaultColor; this._highlightFill = parent.highlightColor; this._defaultStroke = { width: this._normalizeStrokeWeight(.5), color: "white" }; this._scale = Math.min(this.parent.containerSize.w / this._bbox.w, this.parent.containerSize.h / this._bbox.h); var shapes = (dojo.isArray(shapeData.shape[0])) ? shapeData.shape : [shapeData.shape]; dojo.forEach(shapes, function(points){ this.shape.createPolyline(points).setStroke(this._defaultStroke).setFill(this._defaultFill); }, this); }, setValue:function(value){ this.value = value; if(this.parent.series.length != 0){ for(var i = 0;i < this.parent.series.length;i++){ var range = this.parent.series[i]; if((value>=range.min)&&(value<range.max)){ this._setFillWith(range.color); this._defaultFill = range.color; } } } }, _setFillWith: function(color){ var borders = (dojo.isArray(this.shape.children)) ? this.shape.children : [this.shape.children]; dojo.forEach(borders, function(item){ item.setFill(color); }); }, _setStrokeWith: function(stroke){ var borders = (dojo.isArray(this.shape.children)) ? this.shape.children : [this.shape.children]; dojo.forEach(borders, function(item){ item.setStroke({ color: stroke.color, width: stroke.width, join: "round" }); }); }, _normalizeStrokeWeight: function(weight){ var matrix = this.shape._getRealMatrix(); return (dojox.gfx.renderer != "vml")?weight/(this.shape._getRealMatrix()||{xx:1}).xx:weight; }, _onmouseoverHandler: function(evt){ this.parent.onFeatureOver(this); this._setFillWith(this._highlightFill); this.mapObj.marker.show(this.id); }, _onmouseoutHandler: function(){ this._setFillWith(this._defaultFill); this.mapObj.marker.hide(); dojo.style("mapZoomCursor", "display", "none"); }, _onmousemoveHandler: function(evt){ if(this._isFocused){ var evt = dojo.fixEvent(evt || window.event); dojo.style("mapZoomCursor", "left", evt.pageX + 12 + "px"); dojo.style("mapZoomCursor", "top", evt.pageY + "px"); dojo.byId("mapZoomCursor").className = (this._isZoomIn)?"mapZoomOut":"mapZoomIn"; dojo.style("mapZoomCursor", "display", "block"); } }, _onclickHandler: function(){ if(!this._isFocused){ for (var name in this.mapObj.features){ if (this.mapObj.features[name] != this){ this.mapObj.features[name]._setStrokeWith(this._defaultStroke); this.mapObj.features[name]._setFillWith(this.mapObj.features[name]._defaultFill); this.mapObj.features[name]._isFocused = false; this.mapObj.features[name]._isZoomIn = false; } } this._focus(); } else if (this._isZoomIn){ this._zoomOut(); } else { this._zoomIn(); } }, _focus: function(){ this.shape._moveToFront(); this._setStrokeWith({color:"black",width:this._normalizeStrokeWeight(2)}); this.parent.onFeatureClick(this); this._isFocused = true; }, _zoomIn: function(){ var anim = dojox.gfx.fx.animateTransform({ duration: 1000, shape: this.mapObj, transform: [{ name: "translate", start: [-this.mapObj.currentBBox.x, -this.mapObj.currentBBox.y], end: [-this._bbox.x, -this._bbox.y] },{ name: "scaleAt", start: [this.mapObj.currentScale, this.mapObj.currentBBox.x, this.mapObj.currentBBox.y], end: [this._scale, this._bbox.x, this._bbox.y] }] }); dojo.connect(anim,"onEnd",this,function(){ this._setStrokeWith({color:"black",width:this._normalizeStrokeWeight(2)}); this.parent.onZoomEnd(this); }); anim.play(); this.mapObj.currentScale = this._scale; this.mapObj.currentBBox = { x: this._bbox.x, y: this._bbox.y }; this._isZoomIn = true; dojo.byId("mapZoomCursor").className = ""; }, _zoomOut: function(){ var anim = dojox.gfx.fx.animateTransform({ duration: 1000, shape: this.mapObj, transform: [{ name: "translate", start: [-this._bbox.x, -this._bbox.y], end: [-this.mapObj.boundBox[0], -this.mapObj.boundBox[1]] }, { name: "scaleAt", start: [this._scale, this._bbox.x, this._bbox.y], end: [this.mapObj.scale, this.mapObj.boundBox[0], this.mapObj.boundBox[1]] }] }); dojo.connect(anim,"onEnd",this,function(){ this._setStrokeWith({color:"black",width:this._normalizeStrokeWeight(2)}); }); anim.play(); this.mapObj.currentScale = this.mapObj.scale; this.mapObj.currentBBox = { x: this.mapObj.boundBox[0], y: this.mapObj.boundBox[1] }; this._isZoomIn = false; dojo.byId("mapZoomCursor").className = ""; }, init: function(){ this.shape.rawNode.id = this.id; this.tooltip = null; this.shape.connect("onmouseover", this, this._onmouseoverHandler); this.shape.connect("onmouseout", this, this._onmouseoutHandler); this.shape.connect("onmousemove", this, this._onmousemoveHandler); this.shape.connect("onclick", this, this._onclickHandler); } }); }
{ "pile_set_name": "Github" }
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Package request creates admissionReview request based on admission attributes. package request // import "k8s.io/apiserver/pkg/admission/plugin/webhook/request"
{ "pile_set_name": "Github" }
package(default_visibility = ["//visibility:public"]) load( "@io_bazel_rules_go//go:def.bzl", "go_library", "go_test", ) go_library( name = "go_default_library", srcs = ["util.go"], importpath = "k8s.io/kubernetes/pkg/util/metrics", deps = [ "//staging/src/k8s.io/client-go/util/flowcontrol:go_default_library", "//vendor/github.com/prometheus/client_golang/prometheus:go_default_library", ], ) go_test( name = "go_default_test", srcs = ["util_test.go"], embed = [":go_default_library"], deps = [ "//staging/src/k8s.io/client-go/util/flowcontrol:go_default_library", ], ) filegroup( name = "package-srcs", srcs = glob(["**"]), tags = ["automanaged"], visibility = ["//visibility:private"], ) filegroup( name = "all-srcs", srcs = [":package-srcs"], tags = ["automanaged"], )
{ "pile_set_name": "Github" }
class CreatePollAnswers < ActiveRecord::Migration def self.up create_table :poll_answers do |t| t.integer :poll_id t.integer :poll_option_id t.string :answerer_id t.text :comment t.timestamps end end def self.down drop_table :poll_answers end end
{ "pile_set_name": "Github" }
##ld1_advsimd_sngl_execute CheckFPAdvSIMDEnabled64(); bits(64) address; bits(64) offs; bits(128) rval; bits(esize) element; integer s; constant integer ebytes = esize / 8; if n == 31 then address = SP[]; else address = X[n]; end offs = Zeros(64); if replicate then // load and replicate to all elements for s = 0 to selem-1 element = Mem[address + offs, ebytes, AccType_VEC]; // replicate to fill 128- or 64-bit register V[t] = Replicate(element, datasize / esize); offs = offs + ebytes; t = (t + 1) MOD 32; else // load/store one element per register for s = 0 to selem-1 rval = V[t]; if memop == MemOp_LOAD then // insert into one lane of 128-bit register Elem[rval, index, esize] = Mem[address + offs, ebytes, AccType_VEC]; V[t] = rval; else // memop == MemOp_STORE // extract from one lane of 128-bit register Mem[address + offs, ebytes, AccType_VEC] = Elem[rval, index, esize]; end offs = offs + ebytes; t = (t + 1) MOD 32; end if wback then if m != 31 then offs = X[m]; end if n == 31 then SP[] = address + offs; else X[n] = address + offs; end end @@
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <root> <!-- Microsoft ResX Schema Version 2.0 The primary goals of this format is to allow a simple XML format that is mostly human readable. The generation and parsing of the various data types are done through the TypeConverter classes associated with the data types. Example: ... ado.net/XML headers & schema ... <resheader name="resmimetype">text/microsoft-resx</resheader> <resheader name="version">2.0</resheader> <resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader> <resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader> <data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data> <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data> <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64"> <value>[base64 mime encoded serialized .NET Framework object]</value> </data> <data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64"> <value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value> <comment>This is a comment</comment> </data> There are any number of "resheader" rows that contain simple name/value pairs. Each data row contains a name, and value. The row also contains a type or mimetype. Type corresponds to a .NET class that support text/value conversion through the TypeConverter architecture. Classes that don't support this are serialized and stored with the mimetype set. The mimetype is used for serialized objects, and tells the ResXResourceReader how to depersist the object. This is currently not extensible. For a given mimetype the value must be set accordingly: Note - application/x-microsoft.net.object.binary.base64 is the format that the ResXResourceWriter will generate, however the reader can read any of the formats listed below. mimetype: application/x-microsoft.net.object.binary.base64 value : The object must be serialized with : System.Serialization.Formatters.Binary.BinaryFormatter : and then encoded with base64 encoding. mimetype: application/x-microsoft.net.object.soap.base64 value : The object must be serialized with : System.Runtime.Serialization.Formatters.Soap.SoapFormatter : and then encoded with base64 encoding. mimetype: application/x-microsoft.net.object.bytearray.base64 value : The object must be serialized into a byte array : using a System.ComponentModel.TypeConverter : and then encoded with base64 encoding. --> <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata"> <xsd:element name="root" msdata:IsDataSet="true"> <xsd:complexType> <xsd:choice maxOccurs="unbounded"> <xsd:element name="metadata"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" /> </xsd:sequence> <xsd:attribute name="name" type="xsd:string" /> <xsd:attribute name="type" type="xsd:string" /> <xsd:attribute name="mimetype" type="xsd:string" /> </xsd:complexType> </xsd:element> <xsd:element name="assembly"> <xsd:complexType> <xsd:attribute name="alias" type="xsd:string" /> <xsd:attribute name="name" type="xsd:string" /> </xsd:complexType> </xsd:element> <xsd:element name="data"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" /> <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" /> </xsd:sequence> <xsd:attribute name="name" type="xsd:string" msdata:Ordinal="1" /> <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" /> <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" /> </xsd:complexType> </xsd:element> <xsd:element name="resheader"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" /> </xsd:sequence> <xsd:attribute name="name" type="xsd:string" use="required" /> </xsd:complexType> </xsd:element> </xsd:choice> </xsd:complexType> </xsd:element> </xsd:schema> <resheader name="resmimetype"> <value>text/microsoft-resx</value> </resheader> <resheader name="version"> <value>2.0</value> </resheader> <resheader name="reader"> <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value> </resheader> <resheader name="writer"> <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value> </resheader> </root>
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*- import mock import pytest import unittest from nose.tools import * # noqa from tests.base import OsfTestCase, get_default_metaschema from osf_tests.factories import ProjectFactory, UserFactory, DraftRegistrationFactory from framework.auth import Auth from addons.base.tests.models import (OAuthAddonNodeSettingsTestSuiteMixin, OAuthAddonUserSettingTestSuiteMixin) from addons.gitlab.exceptions import NotFoundError from addons.gitlab.models import NodeSettings from addons.gitlab.tests.factories import ( GitLabAccountFactory, GitLabNodeSettingsFactory, GitLabUserSettingsFactory ) from .utils import create_mock_gitlab mock_gitlab = create_mock_gitlab() pytestmark = pytest.mark.django_db class TestNodeSettings(OAuthAddonNodeSettingsTestSuiteMixin, unittest.TestCase): short_name = 'gitlab' full_name = 'GitLab' ExternalAccountFactory = GitLabAccountFactory NodeSettingsFactory = GitLabNodeSettingsFactory NodeSettingsClass = NodeSettings UserSettingsFactory = GitLabUserSettingsFactory ## Mixin Overrides ## def _node_settings_class_kwargs(self, node, user_settings): return { 'user_settings': self.user_settings, 'repo': 'mock', 'user': 'abc', 'owner': self.node, 'repo_id': '123' } def test_set_folder(self): # GitLab doesn't use folderpicker, and the nodesettings model # does not need a `set_repo` method pass def test_serialize_settings(self): # GitLab's serialized_settings are a little different from # common storage addons. settings = self.node_settings.serialize_waterbutler_settings() expected = {'host': 'some-super-secret', 'owner': 'abc', 'repo': 'mock', 'repo_id': '123'} assert_equal(settings, expected) @mock.patch( 'addons.gitlab.models.UserSettings.revoke_remote_oauth_access', mock.PropertyMock() ) def test_complete_has_auth_not_verified(self): super(TestNodeSettings, self).test_complete_has_auth_not_verified() @mock.patch('addons.gitlab.api.GitLabClient.repos') def test_to_json(self, mock_repos): mock_repos.return_value = {} super(TestNodeSettings, self).test_to_json() @mock.patch('addons.gitlab.api.GitLabClient.repos') def test_to_json_user_is_owner(self, mock_repos): mock_repos.return_value = {} result = self.node_settings.to_json(self.user) assert_true(result['user_has_auth']) assert_equal(result['gitlab_user'], 'abc') assert_true(result['is_owner']) assert_true(result['valid_credentials']) assert_equal(result.get('gitlab_repo', None), 'mock') @mock.patch('addons.gitlab.api.GitLabClient.repos') def test_to_json_user_is_not_owner(self, mock_repos): mock_repos.return_value = {} not_owner = UserFactory() result = self.node_settings.to_json(not_owner) assert_false(result['user_has_auth']) assert_equal(result['gitlab_user'], 'abc') assert_false(result['is_owner']) assert_true(result['valid_credentials']) assert_equal(result.get('repo_names', None), None) class TestUserSettings(OAuthAddonUserSettingTestSuiteMixin, unittest.TestCase): short_name = 'gitlab' full_name = 'GitLab' ExternalAccountFactory = GitLabAccountFactory class TestCallbacks(OsfTestCase): def setUp(self): super(TestCallbacks, self).setUp() self.project = ProjectFactory.build() self.consolidated_auth = Auth(self.project.creator) self.project.creator.save() self.non_authenticator = UserFactory() self.non_authenticator.save() self.project.save() self.project.add_contributor( contributor=self.non_authenticator, auth=self.consolidated_auth, ) self.project.add_addon('gitlab', auth=self.consolidated_auth) self.project.creator.add_addon('gitlab') self.external_account = GitLabAccountFactory() self.project.creator.external_accounts.add(self.external_account) self.project.creator.save() self.node_settings = self.project.get_addon('gitlab') self.user_settings = self.project.creator.get_addon('gitlab') self.node_settings.user_settings = self.user_settings self.node_settings.user = 'Queen' self.node_settings.repo = 'Sheer-Heart-Attack' self.node_settings.external_account = self.external_account self.node_settings.save() self.node_settings.set_auth @mock.patch('addons.gitlab.api.GitLabClient.repo') def test_before_make_public(self, mock_repo): mock_repo.side_effect = NotFoundError result = self.node_settings.before_make_public(self.project) assert_is(result, None) def test_before_page_load_not_contributor(self): message = self.node_settings.before_page_load(self.project, UserFactory()) assert_false(message) def test_before_page_load_not_logged_in(self): message = self.node_settings.before_page_load(self.project, None) assert_false(message) def test_before_remove_contributor_authenticator(self): message = self.node_settings.before_remove_contributor( self.project, self.project.creator ) assert_true(message) def test_before_remove_contributor_not_authenticator(self): message = self.node_settings.before_remove_contributor( self.project, self.non_authenticator ) assert_false(message) def test_after_remove_contributor_authenticator_self(self): message = self.node_settings.after_remove_contributor( self.project, self.project.creator, self.consolidated_auth ) assert_equal( self.node_settings.user_settings, None ) assert_true(message) assert_not_in('You can re-authenticate', message) def test_after_remove_contributor_authenticator_not_self(self): auth = Auth(user=self.non_authenticator) message = self.node_settings.after_remove_contributor( self.project, self.project.creator, auth ) assert_equal( self.node_settings.user_settings, None ) assert_true(message) assert_in('You can re-authenticate', message) def test_after_remove_contributor_not_authenticator(self): self.node_settings.after_remove_contributor( self.project, self.non_authenticator, self.consolidated_auth ) assert_not_equal( self.node_settings.user_settings, None, ) def test_after_fork_authenticator(self): fork = ProjectFactory() clone = self.node_settings.after_fork( self.project, fork, self.project.creator, ) assert_equal( self.node_settings.user_settings, clone.user_settings, ) def test_after_fork_not_authenticator(self): fork = ProjectFactory() clone = self.node_settings.after_fork( self.project, fork, self.non_authenticator, ) assert_equal( clone.user_settings, None, ) def test_after_delete(self): self.project.remove_node(Auth(user=self.project.creator)) # Ensure that changes to node settings have been saved self.node_settings.reload() assert_true(self.node_settings.user_settings is None) @mock.patch('website.archiver.tasks.archive') def test_does_not_get_copied_to_registrations(self, mock_archive): registration = self.project.register_node( schema=get_default_metaschema(), auth=Auth(user=self.project.creator), draft_registration=DraftRegistrationFactory(branched_from=self.project), ) assert_false(registration.has_addon('gitlab')) class TestGitLabNodeSettings(unittest.TestCase): def setUp(self): super(TestGitLabNodeSettings, self).setUp() self.user = UserFactory() self.user.add_addon('gitlab') self.user_settings = self.user.get_addon('gitlab') self.external_account = GitLabAccountFactory() self.user_settings.owner.external_accounts.add(self.external_account) self.user_settings.owner.save() self.node_settings = GitLabNodeSettingsFactory(user_settings=self.user_settings) @mock.patch('addons.gitlab.api.GitLabClient.delete_hook') def test_delete_hook_no_hook(self, mock_delete_hook): res = self.node_settings.delete_hook() assert_false(res) assert_false(mock_delete_hook.called)
{ "pile_set_name": "Github" }
import logging import os import django # Base paths DJANGO_ROOT = os.path.dirname(os.path.realpath(django.__file__)) SITE_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) # Debugging DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', '[email protected]'), ) MANAGERS = ADMINS # Database # Note: DATABASE_USER and DATABASE_PASSWORD are defined in the staging and # production settings.py files. For local use, either define them in # local_settings.py or ignore to use your local user. DATABASE_ENGINE = 'postgresql_psycopg2' DATABASE_HOST = 'localhost' DATABASE_PORT = '5432' DATABASE_NAME = '$(project)' # Local time TIME_ZONE = 'America/Chicago' # Local language LANGUAGE_CODE = 'en-us' # Site framework SITE_ID = 1 # Internationalization USE_I18N = False # Absolute path to the directory that holds media. MEDIA_ROOT = os.path.join(SITE_ROOT, 'assets') # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash if there is a path component (optional in other cases). # Examples: "http://media.lawrence.com", "http://example.com/media/" MEDIA_URL = '' # URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a # trailing slash. # Examples: "http://foo.com/media/", "/media/". ADMIN_MEDIA_PREFIX = '/media/' # Make this unique, and don't share it with anybody. SECRET_KEY = ${repr($secret_key)} # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.load_template_source', 'django.template.loaders.app_directories.load_template_source', 'django.template.loaders.eggs.load_template_source', ) TEMPLATE_CONTEXT_PROCESSORS = ( ) MIDDLEWARE_CLASSES = ( 'django.middleware.gzip.GZipMiddleware', 'django.middleware.cache.UpdateCacheMiddleware', 'django.middleware.common.CommonMiddleware', 'debug_toolbar.middleware.DebugToolbarMiddleware', 'django.middleware.cache.FetchFromCacheMiddleware', ) ROOT_URLCONF = '$(project).configs.common.urls' TEMPLATE_DIRS = ( os.path.join(SITE_ROOT, 'templates') ) INSTALLED_APPS = ( 'django.contrib.contenttypes', 'django.contrib.sites', 'django.contrib.admin', 'django.contrib.admindocs', 'django.contrib.humanize', 'django.contrib.gis', 'django.contrib.sitemaps', ) # Predefined domain MY_SITE_DOMAIN = 'localhost:8000' # Email # run "python -m smtpd -n -c DebuggingServer localhost:1025" to see outgoing # messages dumped to the terminal EMAIL_HOST = 'localhost' EMAIL_PORT = 1025 DEFAULT_FROM_EMAIL = 'do.not.reply@$(production_domain)' # Caching CACHE_MIDDLEWARE_KEY_PREFIX='$(project)' CACHE_MIDDLEWARE_SECONDS=90 * 60 # 90 minutes CACHE_BACKEND="dummy:///" # Logging logging.basicConfig( level=logging.DEBUG, ) # Allow for local (per-user) override try: from local_settings import * except ImportError: pass
{ "pile_set_name": "Github" }
/* * Copyright (c) 2000-2007 Apple Inc. All rights reserved. * * @APPLE_OSREFERENCE_LICENSE_HEADER_START@ * * This file contains Original Code and/or Modifications of Original Code * as defined in and that are subject to the Apple Public Source License * Version 2.0 (the 'License'). You may not use this file except in * compliance with the License. The rights granted to you under the License * may not be used to create, or enable the creation or redistribution of, * unlawful or unlicensed copies of an Apple operating system, or to * circumvent, violate, or enable the circumvention or violation of, any * terms of an Apple operating system software license agreement. * * Please obtain a copy of the License at * http://www.opensource.apple.com/apsl/ and read it before using this file. * * The Original Code and all software distributed under the License are * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES, * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT. * Please see the License for the specific language governing rights and * limitations under the License. * * @APPLE_OSREFERENCE_LICENSE_HEADER_END@ */ #include <mach/mach_types.h> #include <mach/vm_attributes.h> #include <mach/vm_param.h> #include <vm/pmap.h> #include <mach/thread_status.h> #include <mach-o/loader.h> #include <mach/vm_region.h> #include <mach/vm_statistics.h> #include <vm/vm_kern.h> #include <vm/vm_object.h> #include <vm/vm_protos.h> #include <kdp/kdp_core.h> #include <kdp/kdp_udp.h> #include <kdp/kdp_internal.h> #include <arm/misc_protos.h> #include <arm/caches_internal.h> #include <arm/cpu_data_internal.h> pmap_t kdp_pmap = 0; boolean_t kdp_trans_off; boolean_t kdp_read_io = 0; pmap_paddr_t kdp_vtophys(pmap_t pmap, vm_offset_t va); /* * kdp_vtophys */ pmap_paddr_t kdp_vtophys( pmap_t pmap, vm_offset_t va) { pmap_paddr_t pa; ppnum_t pp; /* Ensure that the provided va resides within the provided pmap range. */ if(!pmap || ((pmap != kernel_pmap) && ((va < pmap->min) || (va >= pmap->max)))) { #ifdef KDP_VTOPHYS_DEBUG printf("kdp_vtophys(%08x, %016lx) not in range %08x .. %08x\n", (unsigned int) pmap, (unsigned long) va, (unsigned int) (pmap ? pmap->min : 0), (unsigned int) (pmap ? pmap->max : 0)); #endif return 0; /* Just return if no translation */ } pp = pmap_find_phys(pmap, va); /* Get the page number */ if (!pp) return 0; /* Just return if no translation */ pa = ((pmap_paddr_t) pp << PAGE_SHIFT) | (va & PAGE_MASK); /* Insert page offset */ return (pa); } /* * kdp_machine_vm_read * * Verify that src is valid, and physically copy len bytes from src to * dst, translating if necessary. If translation is enabled * (kdp_trans_off is 0), a non-zero kdp_pmap specifies the pmap to use * when translating src. */ mach_vm_size_t kdp_machine_vm_read( mach_vm_address_t src, caddr_t dst, mach_vm_size_t len) { addr64_t cur_virt_src, cur_virt_dst; addr64_t cur_phys_src, cur_phys_dst; mach_vm_size_t resid, cnt; pmap_t pmap; #ifdef KDP_VM_READ_DEBUG kprintf("kdp_machine_vm_read1: src %x dst %x len %x - %08X %08X\n", src, dst, len, ((unsigned long *) src)[0], ((unsigned long *) src)[1]); #endif cur_virt_src = (addr64_t) src; cur_virt_dst = (addr64_t) dst; if (kdp_trans_off) { kdp_readphysmem64_req_t rq; mach_vm_size_t ret; rq.address = src; rq.nbytes = (uint32_t)len; ret = kdp_machine_phys_read(&rq, dst, 0 /* unused */); return ret; } else { resid = len; if (kdp_pmap) pmap = kdp_pmap; /* If special pmap, use it */ else pmap = kernel_pmap; /* otherwise, use kernel's */ while (resid != 0) { /* * Always translate the destination using the * kernel_pmap. */ if ((cur_phys_dst = kdp_vtophys(kernel_pmap, cur_virt_dst)) == 0) goto exit; if ((cur_phys_src = kdp_vtophys(pmap, cur_virt_src)) == 0) goto exit; /* Attempt to ensure that there are valid translations for src and dst. */ if (!kdp_read_io && ((!pmap_valid_address(cur_phys_dst)) || (!pmap_valid_address(cur_phys_src)))) goto exit; cnt = ARM_PGBYTES - (cur_virt_src & PAGE_MASK); /* Get length left on * page */ if (cnt > (ARM_PGBYTES - (cur_virt_dst & PAGE_MASK))) cnt = ARM_PGBYTES - (cur_virt_dst & PAGE_MASK); if (cnt > resid) cnt = resid; #ifdef KDP_VM_READ_DEBUG kprintf("kdp_machine_vm_read2: pmap %08X, virt %016LLX, phys %016LLX\n", pmap, cur_virt_src, cur_phys_src); #endif bcopy_phys(cur_phys_src, cur_phys_dst, cnt); cur_virt_src += cnt; cur_virt_dst += cnt; resid -= cnt; } } exit: #ifdef KDP_VM_READ_DEBUG kprintf("kdp_machine_vm_read: ret %08X\n", len - resid); #endif return (len - resid); } mach_vm_size_t kdp_machine_phys_read(kdp_readphysmem64_req_t *rq, caddr_t dst, uint16_t lcpu __unused) { mach_vm_address_t src = rq->address; mach_vm_size_t len = rq->nbytes; addr64_t cur_virt_dst; addr64_t cur_phys_src, cur_phys_dst; mach_vm_size_t resid = len; mach_vm_size_t cnt = 0, cnt_src, cnt_dst; #ifdef KDP_VM_READ_DEBUG kprintf("kdp_phys_read src %x dst %p len %x\n", src, dst, len); #endif cur_virt_dst = (addr64_t) dst; cur_phys_src = (addr64_t) src; while (resid != 0) { if ((cur_phys_dst = kdp_vtophys(kernel_pmap, cur_virt_dst)) == 0) goto exit; /* Get length left on page */ cnt_src = ARM_PGBYTES - (cur_phys_src & PAGE_MASK); cnt_dst = ARM_PGBYTES - (cur_phys_dst & PAGE_MASK); if (cnt_src > cnt_dst) cnt = cnt_dst; else cnt = cnt_src; if (cnt > resid) cnt = resid; bcopy_phys(cur_phys_src, cur_phys_dst, cnt); /* Copy stuff over */ cur_phys_src += cnt; cur_virt_dst += cnt; resid -= cnt; } exit: return (len - resid); } /* * kdp_vm_write */ mach_vm_size_t kdp_machine_vm_write( caddr_t src, mach_vm_address_t dst, mach_vm_size_t len) { addr64_t cur_virt_src, cur_virt_dst; addr64_t cur_phys_src, cur_phys_dst; mach_vm_size_t resid, cnt, cnt_src, cnt_dst; #ifdef KDP_VM_WRITE_DEBUG printf("kdp_vm_write: src %x dst %x len %x - %08X %08X\n", src, dst, len, ((unsigned long *) src)[0], ((unsigned long *) src)[1]); #endif cur_virt_src = (addr64_t) src; cur_virt_dst = (addr64_t) dst; resid = len; while (resid != 0) { if ((cur_phys_dst = kdp_vtophys(kernel_pmap, cur_virt_dst)) == 0) goto exit; if ((cur_phys_src = kdp_vtophys(kernel_pmap, cur_virt_src)) == 0) goto exit; /* Attempt to ensure that there are valid translations for src and dst. */ /* No support for enabling writes for an invalid translation at the moment. */ if ((!pmap_valid_address(cur_phys_dst)) || (!pmap_valid_address(cur_phys_src))) goto exit; cnt_src = ((cur_phys_src + ARM_PGBYTES) & (-ARM_PGBYTES)) - cur_phys_src; cnt_dst = ((cur_phys_dst + ARM_PGBYTES) & (-ARM_PGBYTES)) - cur_phys_dst; if (cnt_src > cnt_dst) cnt = cnt_dst; else cnt = cnt_src; if (cnt > resid) cnt = resid; #ifdef KDP_VM_WRITE_DEBUG printf("kdp_vm_write: cur_phys_src %x cur_phys_src %x len %x - %08X %08X\n", src, dst, cnt); #endif bcopy_phys(cur_phys_src, cur_phys_dst, cnt); /* Copy stuff over */ flush_dcache64(cur_phys_dst, (unsigned int)cnt, TRUE); invalidate_icache64(cur_phys_dst, (unsigned int)cnt, TRUE); cur_virt_src += cnt; cur_virt_dst += cnt; resid -= cnt; } exit: return (len - resid); } mach_vm_size_t kdp_machine_phys_write(kdp_writephysmem64_req_t *rq __unused, caddr_t src __unused, uint16_t lcpu __unused) { return 0; /* unimplemented */ } void kern_collectth_state_size(uint64_t * tstate_count, uint64_t * tstate_size) { uint64_t count = ml_get_max_cpu_number() + 1; *tstate_count = count; *tstate_size = sizeof(struct thread_command) + (sizeof(arm_state_hdr_t) #if defined(__arm64__) + ARM_THREAD_STATE64_COUNT * sizeof(uint32_t)); #else + ARM_THREAD_STATE32_COUNT * sizeof(uint32_t)); #endif } void kern_collectth_state(thread_t thread __unused, void *buffer, uint64_t size, void ** iter) { cpu_data_entry_t *cpuentryp = *iter; if (cpuentryp == NULL) cpuentryp = &CpuDataEntries[0]; if (cpuentryp == &CpuDataEntries[ml_get_max_cpu_number()]) *iter = NULL; else *iter = cpuentryp + 1; struct cpu_data *cpudatap = cpuentryp->cpu_data_vaddr; struct thread_command *tc = (struct thread_command *)buffer; arm_state_hdr_t *hdr = (arm_state_hdr_t *)(void *)(tc + 1); #if defined(__arm64__) hdr->flavor = ARM_THREAD_STATE64; hdr->count = ARM_THREAD_STATE64_COUNT; arm_thread_state64_t *state = (arm_thread_state64_t *)(void *)(hdr + 1); #else hdr->flavor = ARM_THREAD_STATE; hdr->count = ARM_THREAD_STATE_COUNT; arm_thread_state_t *state = (arm_thread_state_t *)(void *)(hdr + 1); #endif tc->cmd = LC_THREAD; tc->cmdsize = (uint32_t) size; if ((cpudatap != NULL) && (cpudatap->halt_status == CPU_HALTED_WITH_STATE)) { *state = cpudatap->halt_state; return; } if ((cpudatap == NULL) || (cpudatap->cpu_processor == NULL) || (cpudatap->cpu_processor->active_thread == NULL)) { bzero(state, hdr->count * sizeof(uint32_t)); return; } vm_offset_t kstackptr = (vm_offset_t) cpudatap->cpu_processor->active_thread->machine.kstackptr; arm_saved_state_t *saved_state = (arm_saved_state_t *) kstackptr; #if defined(__arm64__) state->fp = saved_state->ss_64.fp; state->lr = saved_state->ss_64.lr; state->sp = saved_state->ss_64.sp; state->pc = saved_state->ss_64.pc; state->cpsr = saved_state->ss_64.cpsr; bcopy(&saved_state->ss_64.x[0], &state->x[0], sizeof(state->x)); #else /* __arm64__ */ state->lr = saved_state->lr; state->sp = saved_state->sp; state->pc = saved_state->pc; state->cpsr = saved_state->cpsr; bcopy(&saved_state->r[0], &state->r[0], sizeof(state->r)); #endif /* !__arm64__ */ }
{ "pile_set_name": "Github" }
import coininfo from 'coininfo' import { Cryptos } from '../constants' export default Object.freeze({ [Cryptos.DOGE]: coininfo.dogecoin.main.toBitcoinJS(), [Cryptos.DASH]: coininfo.dash.main.toBitcoinJS(), [Cryptos.BTC]: coininfo.bitcoin.main.toBitcoinJS() })
{ "pile_set_name": "Github" }
StartChar: uni069D.init_PreYaa Encoding: 1116863 -1 3035 Width: 533 Flags: HW AnchorPoint: "TashkilAbove" 146 801 basechar 0 AnchorPoint: "TashkilBelow" 181 -327 basechar 0 LayerCount: 2 Fore Refer: 195 -1 N 1 0 0 1 250 -215 2 Refer: 188 -1 N 1 0 0 1 0 0 3 EndChar
{ "pile_set_name": "Github" }
/*! @file Defines the `BOOST_HANA_DEFINE_STRUCT` macro. @copyright Louis Dionne 2013-2016 Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE.md or copy at http://boost.org/LICENSE_1_0.txt) */ #ifndef BOOST_HANA_DEFINE_STRUCT_HPP #define BOOST_HANA_DEFINE_STRUCT_HPP #include <boost/hana/fwd/define_struct.hpp> #include <boost/hana/detail/struct_macros.hpp> #endif // !BOOST_HANA_DEFINE_STRUCT_HPP
{ "pile_set_name": "Github" }
// This is core/vnl/vnl_det.hxx #ifndef vnl_det_hxx_ #define vnl_det_hxx_ #include "vnl_det.h" template <class T> T vnl_det(T const *row0, T const *row1) { return row0[0]*row1[1] - row0[1]*row1[0]; } template <class T> T vnl_det(T const *row0, T const *row1, T const *row2) { return // the extra '+' makes it work nicely with emacs indentation. + row0[0]*row1[1]*row2[2] - row0[0]*row2[1]*row1[2] - row1[0]*row0[1]*row2[2] + row1[0]*row2[1]*row0[2] + row2[0]*row0[1]*row1[2] - row2[0]*row1[1]*row0[2]; } template <class T> T vnl_det(T const *row0, T const *row1, T const *row2, T const *row3) { return + row0[0]*row1[1]*row2[2]*row3[3] - row0[0]*row1[1]*row3[2]*row2[3] - row0[0]*row2[1]*row1[2]*row3[3] + row0[0]*row2[1]*row3[2]*row1[3] + row0[0]*row3[1]*row1[2]*row2[3] - row0[0]*row3[1]*row2[2]*row1[3] - row1[0]*row0[1]*row2[2]*row3[3] + row1[0]*row0[1]*row3[2]*row2[3] + row1[0]*row2[1]*row0[2]*row3[3] - row1[0]*row2[1]*row3[2]*row0[3] - row1[0]*row3[1]*row0[2]*row2[3] + row1[0]*row3[1]*row2[2]*row0[3] + row2[0]*row0[1]*row1[2]*row3[3] - row2[0]*row0[1]*row3[2]*row1[3] - row2[0]*row1[1]*row0[2]*row3[3] + row2[0]*row1[1]*row3[2]*row0[3] + row2[0]*row3[1]*row0[2]*row1[3] - row2[0]*row3[1]*row1[2]*row0[3] - row3[0]*row0[1]*row1[2]*row2[3] + row3[0]*row0[1]*row2[2]*row1[3] + row3[0]*row1[1]*row0[2]*row2[3] - row3[0]*row1[1]*row2[2]*row0[3] - row3[0]*row2[1]*row0[2]*row1[3] + row3[0]*row2[1]*row1[2]*row0[3]; } //-------------------------------------------------------------------------------- #define VNL_DET_INSTANTIATE(T) \ template VNL_EXPORT T vnl_det(T const *, T const *); \ template VNL_EXPORT T vnl_det(T const *, T const *, T const *); \ template VNL_EXPORT T vnl_det(T const *, T const *, T const *, T const *) #endif // vnl_det_hxx_
{ "pile_set_name": "Github" }
--- title: Filtering --- **What are filters?** With _Hot Chocolate_ filters, you can expose complex filter objects through your GraphQL API that translates to native database queries. The default filter implementation translates filters to expression trees that are applied to `IQueryable`. # Overview Filters by default work on `IQueryable` but you can also easily customize them to use other interfaces. _Hot Chocolate_ by default will inspect your .NET model and infer the possible filter operations from it. The following type would yield the following filter operations: ```csharp public class Foo { public string Bar { get; set; } } ``` ```sdl input FooFilter { bar: String bar_contains: String bar_ends_with: String bar_in: [String] bar_not: String bar_not_contains: String bar_not_ends_with: String bar_not_in: [String] bar_not_starts_with: String bar_starts_with: String AND: [FooFilter!] OR: [FooFilter!] } ``` **So how can we get started with filters?** Getting started with filters is very easy, especially if you do not want to explicitly define filters or customize anything. Hot Chocolate will infer the filters directly from your .Net Model and then use a Middleware to apply filters to `IQueryable<T>` or `IEnumerable<T>` on execution. > ⚠️ **Note:** If you use more than middleware, keep in mind that **ORDER MATTERS** _Why order matters_ <<Add link >> > ⚠️ **Note:** Be sure to install the `HotChocolate.Types.Filters` NuGet package. In the following example, the person resolver returns the `IQueryable` representing the data source. The `IQueryable` represents a not executed database query on which _Hot Chocolate_ can apply filters. **Code First** The next thing to note is the `UseFiltering` extension method which adds the filter argument to the field and a middleware that can apply those filters to the `IQueryable`. The execution engine will, in the end, execute the `IQueryable` and fetch the data. ```csharp public class QueryType : ObjectType<Query> { protected override void Configure(IObjectTypeDescriptor<Query> descriptor) { descriptor.Field(t => t.GetPersons(default)) .Type<ListType<NonNullType<PersonType>>>() .UseFiltering(); } } public class Query { public IQueryable<Person> GetPersons([Service]IPersonRepository repository) { repository.GetPersons(); } } ``` **Pure Code First** The field descriptor attribute `[UseFiltering]` does apply the extension method `UseFiltering()` on the field descriptor. ```csharp public class Query { [UseFiltering] public IQueryable<Person> GetPersons([Service]IPersonRepository repository) { repository.GetPersons(); } } ``` **Schema First** > ⚠️ **Note:** Schema first does currently not support filtering! # Customizing Filters The filter objects can be customized and you can rename and remove operations from it or define operations explicitly. Filters are input objects and are defined through a `FilterInputType<T>`. To define and customize a filter we have to inherit from `FilterInputType<T>` and configure it like any other type. ```csharp public class PersonFilterType : FilterInputType<Person> { protected override void Configure( IFilterInputTypeDescriptor<Person> descriptor) { descriptor .BindFieldsExplicitly() .Filter(t => t.Name) .BindOperationsExplicitly() .AllowEquals().Name("equals").And() .AllowContains().Name("contains").And() .AllowIn().Name("in"); } } ``` The above type defines explicitly for what fields filter operations are allowed and what filter operations are allowed. Also, the filter renames the equals filter to `equals`. To apply this filter type we just have to provide it to the `UseFiltering` extension method with as the generic type argument. ```csharp public class QueryType : ObjectType<Query> { protected override void Configure(IObjectTypeDescriptor<Query> descriptor) { descriptor.Field(t => t.GetPerson(default)) .Type<ListType<NonNullType<PersonType>>>(); .UseFiltering<PersonFilterType>() } } ``` # Sorting Like with filter support you can add sorting support to your database queries. ```csharp public class QueryType : ObjectType<Query> { protected override void Configure(IObjectTypeDescriptor<Query> descriptor) { descriptor.Field(t => t.GetPerson(default)) .Type<ListType<NonNullType<PersonType>>>(); .UseSorting() } } ``` > ⚠️ **Note**: Be sure to install the `HotChocolate.Types.Sorting` NuGet package. If you want to combine for instance paging, filtering and sorting make sure that the order is like follows: ```csharp public class QueryType : ObjectType<Query> { protected override void Configure(IObjectTypeDescriptor<Query> descriptor) { descriptor.Field(t => t.GetPerson(default)) .UsePaging<PersonType>() .UseFiltering() .UseSorting(); } } ``` **Why is order important?** Paging, filtering and sorting are modular middlewares that form the field resolver pipeline. The above example forms the following pipeline: `Paging -> Filtering -> Sorting -> Field Resolver` The paging middleware will first delegate to the next middleware, which is filtering. The filtering middleware will also first delegate to the next middleware, which is sorting. The sorting middleware will again first delegate to the next middleware, which is the actual field resolver. The field resolver will call `GetPerson` which returns in this example an `IQueryable<Person>`. The queryable represents a not yet executed database query. After the resolver has been executed and puts its result onto the middleware context the sorting middleware will apply for the sort order on the query. After the sorting middleware has been executed and updated the result on the middleware context the filtering middleware will apply its filters on the queryable and updates the result on the middleware context. After the paging middleware has been executed and updated the result on the middleware context the paging middleware will slice the data and execute the queryable which will then actually pull in data from the data source. So, if we, for instance, applied paging as our last middleware the data set would have been sliced first and then filtered which in most cases is not what we actually want. # Filter & Operations Kinds Filtering can be broken down into different kinds of filters that then have different operations. The filter kind is bound to the type. A string is fundamentally something different than an array or an object. Each filter kind has different operations that can be applied to it. Some operations are unique to a filter and some operations are shared across multiple filters. e.g. A string filter has string specific operations like `Contains` or `EndsWith` but still shares the operations `Equals` and `NotEquals` with the boolean filter. ## Filter Kinds Hot Chocolate knows following filter kinds | Kind | Operations | | ---------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | String | Equals, In, EndsWith, StartsWith, Contains, NotEquals, NotIn, NotEndsWith, NotStartsWith, NotContains | | Bool | Equals, NotEquals | | Object | Equals | | Array | Some, Any, All, None | | Comparable | Equals, In, GreaterThan, GreaterThanOrEqual, LowerThan, LowerThanOrEqual, NotEquals, NotIn, NotGreaterThan, NotGreaterThanOrEqual, NotLowerThan, NotLowerThanOrEqual | ## Operations Kinds Hot Chocolate knows following operation kinds | Kind | Operations | | ---------------------- | ----------------------------------------------------------------------------------------------------- | | Equals | Compares the equality of input value and property value | | NotEquals | negation of Equals | | In | Checks if the property value is contained in a given list of input values | | NotIn | negation of In | | GreaterThan | checks if the input value is greater than the property value | | NotGreaterThan | negation of GreaterThan | | GreaterThanOrEquals | checks if the input value is greater than or equal to the property value | | NotGreaterThanOrEquals | negation of GreaterThanOrEquals | | LowerThan | checks if the input value is lower than the property value | | NotLowerThan | negation of LowerThan | | LowerThanOrEquals | checks if the input value is lower than or equal to the property value | | NotLowerThanOrEquals | negation of LowerThanOrEquals | | EndsWith | checks if the property value ends with the input value | | NotEndsWith | negation of EndsWith | | StartsWith | checks if the property value starts with the input value | | NotStartsWith | negation of StartsWith | | Contains | checks if the input value is contained in the property value | | NotContains | negation of Contains | | Some | checks if at least one element in the collection exists | | Some | checks if at least one element of the property value meets the condition provided by the input value | | None | checks if no element of the property value meets the condition provided by the input value | | All | checks if all least one element of the property value meets the condition provided by the input value | ## Boolean Filter In this example, we look at the filter configuration of a Boolean filter. As an example, we will use the following model: ```csharp public class User { public bool IsOnline {get;set;} } public class Query : ObjectType { [UseFiltering] public IQueryable<User> GetUsers([Service]UserService users ) => users.AsQueryable(); } ``` The produced GraphQL SDL will look like the following: ```sdl type Query { users(where: UserFilter): [User] } type User { isOnline: Boolean } input UserFilter { isOnline: Boolean isOnline_not: Boolean AND: [UserFilter!] OR: [UserFilter!] } ``` ### Boolean Operation Descriptor The example above showed that configuring the operations is optional. If you want to have access to the actual field input types or allow only a subset of Boolean filters for a given property, you can configure the operation over the `IFilterInputTypeDescriptor<User>` ```csharp public class UserFilterType : FilterInputType<User> { protected override void Configure( IFilterInputTypeDescriptor<User> descriptor) { descriptor.BindFieldsExplicitly(); descriptor.Filter(x => x.Name) .AllowEquals().And() .AllowNotEquals(); } } ``` ## Comparable Filter In this example, we look at the filter configuration of a comparable filter. A comparable filter is generated for all values that implement IComparable except string and boolean. e.g. `csharp±enum`, `csharp±int`, `csharp±DateTime`... As an example we will use the following model: ```csharp public class User { public int LoggingCount {get;set;} } public class Query : ObjectType { [UseFiltering] public IQueryable<User> GetUsers([Service]UserService users ) => users.AsQueryable(); } ``` The produced GraphQL SDL will look like the following: ```sdl type Query { users(where: UserFilter): [User] } type User { loggingCount: Int } input UserFilter { loggingCount: Int loggingCount_gt: Int loggingCount_gte: Int loggingCount_in: [Int!] loggingCount_lt: Int loggingCount_lte: Int loggingCount_not: Int loggingCount_not_gt: Int loggingCount_not_gte: Int loggingCount_not_in: [Int!] loggingCount_not_lt: Int loggingCount_not_lte: Int AND: [UserFilter!] OR: [UserFilter!] } ``` ### Comparable Operation Descriptor The example above showed that configuring the operations is optional. If you want to have access to the actual field input types or allow only a subset of comparable filters for a given property, you can configure the operation over the `IFilterInputTypeDescriptor<User>` ```csharp public class UserFilterType : FilterInputType<User> { protected override void Configure( IFilterInputTypeDescriptor<User> descriptor) { descriptor.BindFieldsExplicitly(); descriptor.Filter(x => x.Name) .AllowEquals().And() .AllowNotEquals().And() .AllowGreaterThan().And() .AllowNotGreaterThan().And() .AllowGreaterThanOrEqals().And() .AllowNotGreaterThanOrEqals().And() .AllowLowerThan().And() .AllowNotLowerThan().And() .AllowLowerThanOrEqals().And() .AllowNotLowerThanOrEqals().And() .AllowIn().And() .AllowNotIn(); } } ``` ## String Filter In this example, we look at the filter configuration of a String filter. As an example we will use the following model: ```csharp public class User { public string Name {get;set;} } public class Query : ObjectType { [UseFiltering] public IQueryable<User> GetUsers([Service]UserService users ) => users.AsQueryable(); } ``` The produced GraphQL SDL will look like the following: ```sdl type Query { users(where: UserFilter): [User] } type User { name: String } input UserFilter { name: String name_contains: String name_ends_with: String name_in: [String] name_not: String name_not_contains: String name_not_ends_with: String name_not_in: [String] name_not_starts_with: String name_starts_with: String AND: [UserFilter!] OR: [UserFilter!] } ``` ### String Operation Descriptor The example above showed that configuring the operations is optional. If you want to have access to the actual field input types or allow only a subset of string filters for a given property, you can configure the operation over the `IFilterInputTypeDescriptor<User>` ```csharp public class UserFilterType : FilterInputType<User> { protected override void Configure( IFilterInputTypeDescriptor<User> descriptor) { descriptor.BindFieldsExplicitly(); descriptor.Filter(x => x.Name) .AllowEquals().And() .AllowNotEquals().And() .AllowContains().And() .AllowNotContains().And() .AllowStartsWith().And() .AllowNotStartsWith().And() .AllowEndsWith().And() .AllowNotEndsWith().And() .AllowIn().And() .AllowNotIn(); } } ``` ## Object Filter In this example, we look at the filter configuration of an object filter. An object filter is generated for all nested objects. The object filter can also be used to filter over database relations. For each nested object, filters are generated. As an example we will use the following model: ```csharp public class User { public Address Address {get;set;} } public class Address { public string Street {get;set;} public bool IsPrimary {get;set;} } public class Query : ObjectType { [UseFiltering] public IQueryable<User> GetUsers([Service]UserService users ) => users.AsQueryable(); } ``` The produced GraphQL SDL will look like the following: ```sdl type Query { users(where: UserFilter): [User] } type User { address: Address } type Address { isPrimary: Boolean street: String } input UserFilter { address: AddressFilter AND: [UserFilter!] OR: [UserFilter!] } input AddressFilter { is_primary: Boolean is_primary_not: Boolean street: String street_contains: String street_ends_with: String street_in: [String] street_not: String street_not_contains: String street_not_ends_with: String street_not_in: [String] street_not_starts_with: String street_starts_with: String AND: [AddressFilter!] OR: [AddressFilter!] } ``` ### Object Operation Descriptor The example above showed that configuring the operations is optional. If you want to have access to the actual field input types or allow only a subset of comparable filters for a given property, you can configure the operation over the `IFilterInputTypeDescriptor<User>` ```csharp public class UserFilterType : FilterInputType<User> { protected override void Configure( IFilterInputTypeDescriptor<User> descriptor) { descriptor.BindFieldsExplicitly(); descriptor.Object(x => x.Address); } } ``` **Configuring a custom nested filter type:** ```csharp public class UserFilterType : FilterInputType<User> { protected override void Configure( IFilterInputTypeDescriptor<User> descriptor) { descriptor.BindFieldsExplicitly(); descriptor.Object(x => x.Address).AllowObject<AddressFilterType>(); } } public class AddressFilterType : FilterInputType<Address> { protected override void Configure( IFilterInputTypeDescriptor<Address> descriptor) { descriptor.BindFieldsExplicitly(); descriptor.Filter(x => x.IsPrimary); } } // Or Inline public class UserFilterType : FilterInputType<User> { protected override void Configure( IFilterInputTypeDescriptor<User> descriptor) { descriptor.BindFieldsExplicitly(); descriptor.Object(x => x.Address) .AllowObject( y => y.BindFieldsExplicitly().Filter(z => z.IsPrimary)); } } ``` ## List Filter In this example, we look at the filter configuration of a list filter. List filters are generated for all nested IEnumerables. The array filter addresses scalars and object values differently. In the case of a scalar, an object type is generated to address the different operations of this scalar. If a list of strings is filtered, an object type is created to address all string operations. In case the list contains a complex object, an object filter for this object is generated. A list filter is generated for all properties that implement IEnumerable. e.g. `csharp±string[]`, `csharp±List<Foo>`, `csharp±IEnumerable<Bar>`... As an example we will use the following model: ```csharp public class User { public string[] Roles {get;set;} public IEnumerable<Address> Addresses {get;set;} } public class Address { public string Street {get;set;} public bool IsPrimary {get;set;} } public class Query : ObjectType { [UseFiltering] public IQueryable<User> GetUsers([Service]UserService users ) => users.AsQueryable(); } ``` The produced GraphQL SDL will look like the following: ```sdl type Query { users(where: UserFilter): [User] } type User { addresses: [Address] roles: [String] } type Address { isPrimary: Boolean street: String } input UserFilter { addresses_some: AddressFilter addresses_all: AddressFilter addresses_none: AddressFilter addresses_any: Boolean roles_some: ISingleFilterOfStringFilter roles_all: ISingleFilterOfStringFilter roles_none: ISingleFilterOfStringFilter roles_any: Boolean AND: [UserFilter!] OR: [UserFilter!] } input AddressFilter { is_primary: Boolean is_primary_not: Boolean street: String street_contains: String street_ends_with: String street_in: [String] street_not: String street_not_contains: String street_not_ends_with: String street_not_in: [String] street_not_starts_with: String street_starts_with: String AND: [AddressFilter!] OR: [AddressFilter!] } input ISingleFilterOfStringFilter { AND: [ISingleFilterOfStringFilter!] element: String element_contains: String element_ends_with: String element_in: [String] element_not: String element_not_contains: String46 element_not_ends_with: String element_not_in: [String] element_not_starts_with: String element_starts_with: String OR: [ISingleFilterOfStringFilter!] } ``` ### Array Operation Descriptor The example above showed that configuring the operations is optional. If you want to have access to the actual field input types or allow only a subset of array filters for a given property, you can configure the operation over the `IFilterInputTypeDescriptor<User>` ```csharp public class UserFilterType : FilterInputType<User> { protected override void Configure( IFilterInputTypeDescriptor<User> descriptor) { descriptor.BindFieldsExplicitly(); descriptor.List(x => x.Addresses) .AllowSome().And() .AlloAny().And() .AllowAll().And() .AllowNone(); descriptor.List(x => x.Roles) .AllowSome().And() .AlloAny().And() .AllowAll().And() .AllowNone(); } } ``` # Naming Conventions _Hot Chococlate_ already provides two naming schemes for filters. If you would like to define your own naming scheme or extend existing ones have a look at the documentation of <<LINk FILTER CONVENTIONS>> ## Snake Case **Configuration** You can configure the Snake Case with the `UseSnakeCase` extension method convention on the `IFilterConventionDescriptor` ```csharp public class CustomConvention : FilterConvention { protected override void Configure(IFilterConventionDescriptor descriptor) { descriptor.UseSnakeCase() } } SchemaBuilder.New().AddConvention<CustomConvention>(); // SchemaBuilder.New().AddConvention(new FilterConvention(x => x.UseSnakeCase()) ``` ```sdl input FooBarFilter { AND: [FooBarFilter!] nested: String nested_contains: String nested_ends_with: String nested_in: [String] nested_not: String nested_not_contains: String nested_not_ends_with: String **Change the name of an operation** nested_not_in: [String] nested_not_starts_with: String nested_starts_with: String OR: [FooBarFilter!] } input FooFilter { AND: [FooFilter!] bool: Boolean bool_not: Boolean comparable: Short comparableEnumerable_all: ISingleFilterOfInt16Filter comparableEnumerable_any: Boolean comparableEnumerable_none: ISingleFilterOfInt16Filter comparableEnumerable_some: ISingleFilterOfInt16Filter comparable_gt: Short comparable_gte: Short comparable_in: [Short!] comparable_lt: Short comparable_lte: Short comparable_not: Short comparable_not_gt: Short comparable_not_gte: Short comparable_not_in: [Short!] comparable_not_lt: Short comparable_not_lte: Short object: FooBarFilter OR: [FooFilter!] } input ISingleFilterOfInt16Filter { AND: [ISingleFilterOfInt16Filter!] element: Short element_gt: Short element_gte: Short element_in: [Short!] element_lt: Short element_lte: Short element_not: Short element_not_gt: Short element_not_gte: Short element_not_in: [Short!] element_not_lt: Short element_not_lte: Short OR: [ISingleFilterOfInt16Filter!] } ``` ## Pascal Case **Configuration** You can configure the Pascal Case with the `UsePascalCase` extension method convention on the `IFilterConventionDescriptor` ```csharp public class CustomConvention : FilterConvention { protected override void Configure(IFilterConventionDescriptor descriptor) { descriptor.UsePascalCase() } } SchemaBuilder.New().AddConvention<CustomConvention>(); // SchemaBuilder.New().AddConvention(new FilterConvention(x => x.UsePascalCase()) ``` ```sdl input FooBarFilter { AND: [FooBarFilter!] Nested: String Nested_Contains: String Nested_EndsWith: String Nested_In: [String] Nested_Not: String Nested_Not_Contains: String Nested_Not_EndsWith: String Nested_Not_In: [String] Nested_Not_StartsWith: String Nested_StartsWith: String OR: [FooBarFilter!] } input FooFilter { AND: [FooFilter!] Bool: Boolean Bool_Not: Boolean Comparable: Short ComparableEnumerable_All: ISingleFilterOfInt16Filter ComparableEnumerable_Any: Boolean ComparableEnumerable_None: ISingleFilterOfInt16Filter ComparableEnumerable_Some: ISingleFilterOfInt16Filter Comparable_Gt: Short Comparable_Gte: Short Comparable_In: [Short!] Comparable_Lt: Short Comparable_Lte: Short Comparable_Not: Short Comparable_Not_Gt: Short Comparable_Not_Gte: Short Comparable_Not_In: [Short!] Comparable_Not_Lt: Short Comparable_Not_Lte: Short Object: FooBarFilter OR: [FooFilter!] } input ISingleFilterOfInt16Filter { AND: [ISingleFilterOfInt16Filter!] Element: Short Element_Gt: Short Element_Gte: Short Element_In: [Short!] Element_Lt: Short Element_Lte: Short Element_Not_Gt: Short Element_Not: Short Element_Not_Gte: Short Element_Not_In: [Short!] Element_Not_Lt: Short Element_Not_Lte: Short OR: [ISingleFilterOfInt16Filter!] } ``` # Customizing Filter Hot Chocolate provides different APIs to customize filtering. You can write custom filter input types, customize the inference behavior of .NET Objects, customize the generated expression, or create a custom visitor and attach your exotic database. **As this can be a bit overwhelming the following questionnaire might help:** | | | | -------------------------------------------------------------------------------------------------------------------------- | ------------------------------- | | _You do not want all of the generated filters and only allow a particular set of filters in a specific case?_ | Custom&nbsp;FilterInputType | | _You want to change the name of a field or a whole type?_ | Custom&nbsp;FilterInputType | | _You want to change the name of the `where` argument?_ | Filter Conventions ArgumentName | | _You want to configure how the name and the description of filters are generated in general? e.g. `PascalCaseFilterType`?_ | Filter&nbsp;Conventions | | _You want to configure what filters are allowed in general?_ | Filter&nbsp;Conventions | | \_Your database provider does not support certain operations of `IQueryable` | Filter&nbsp;Conventions | | _You want to change the naming of a particular filter type? e.g._ `foo_contains` _should be_ `foo_like` | Filter&nbsp;Conventions | | _You want to customize the expression a filter is generating: e.g._ `_equals` _should not be case sensitive?_ | Expression&nbsp;Visitor&nbsp; | | _You want to create your own filter types with custom parameters and custom expressions? e.g. GeoJson?_ | Filter&nbsp;Conventions | | _You have a database client that does not support `IQueryable` and wants to generate filters for it?_ | Custom&nbsp;Visitor | # Custom&nbsp;FilterInputType Under the hood, filtering is based on top of normal _Hot Chocolate_ input types. You can easily customize them with a very familiar fluent interface. The filter input types follow the same `descriptor` scheme as you are used to from the normal filter input types. Just extend the base class `FilterInputType<T>` and override the descriptor method. ```csharp public class User { public string Name {get; set; } public string LastName {get; set; } } public class UserFilterType : FilterInputType<User> { protected override void Configure( IFilterInputTypeDescriptor<User> descriptor) { } } ``` `IFilterInputTypeDescriptor<T>` supports most of the methods of `IInputTypeDescriptor<T>` and adds the configuration interface for the filters. By default filters for all fields of the type are generated. If you do want to specify the filters by yourself you can change this behavior with `BindFields`, `BindFieldsExplicitly` or `BindFieldsImplicitly`. ```csharp public class UserFilterType : FilterInputType<User> { protected override void Configure( IFilterInputTypeDescriptor<User> descriptor) { descriptor.BindFieldsExplicitly(); descriptor.Filter(x => x.Name); } } ``` ```sdl input UserFilter { name: String name_contains: String name_ends_with: String name_in: [String] name_not: String name_not_contains: String name_not_ends_with: String name_not_in: [String] name_not_starts_with: String name_starts_with: String AND: [UserFilter!] OR: [UserFilter!] } ``` To add or customize a filter you have to use `Filter(x => x.Foo)` for scalars `List(x => x.Bar)` for lists and `Object(x => x.Baz)` for nested objects. These methods will return fluent interfaces to configure the filter for the selected field. A field has different filter operations that can be configured. You will find more about filter types and filter operations here <<LINK>> When fields are bound implicitly, meaning filters are added for all properties, you may want to hide a few fields. You can do this with `Ignore(x => Bar)`. Operations on fields can again be bound implicitly or explicitly. By default, operations are generated for all fields of the type. If you do want to specify the operations by yourself you can change this behavior with `BindFilters`, `BindFiltersExplicitly` or `BindFiltersImplicitly`. It is also possible to customize the GraphQL field of the operation further. You can change the name, add a description or directive. ```csharp public class UserFilterType : FilterInputType<User> { protected override void Configure( IFilterInputTypeDescriptor<User> descriptor) { // descriptor.BindFieldsImplicitly(); <- is already the default descriptor.Filter(x => x.Name) .BindFilterExplicitly() .AllowContains() .Description("Checks if the provided string is contained in the `Name` of a User") .And() .AllowEquals() .Name("exits_with_name") .Directive("name"); descriptor.Ignore(x => x.Bar); } } ``` ```sdl input UserFilter { exits_with_name: String @name """ Checks if the provided string is contained in the `Name` of a User """ name_contains: String AND: [UserFilter!] OR: [UserFilter!] } ``` **API Documentation** | Method | Description | | -------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------- | | `csharp±BindFields(BindingBehavior bindingBehavior)` | Defines the filter binding behavior. `Explicitly`or `Implicitly`. Default is `Implicitly` | | `csharp±BindFieldsExplicitly` | Defines that all filters have to be specified explicitly. This means that only the filters are applied that are added with `Filter(x => x.Foo)` | | `csharp±BindFieldsImplicitly` | The filter type will add filters for all compatible fields. | | `csharp±Description(string value)` | Adds explanatory text of the `FilterInputType<T>` that can be accessed via introspection. | | `csharp±Name(NameString value)` | Defines the graphql name of the `FilterInputType<T>`. | | `csharp±Ignore( Expression<Func<T, object>> property);` | Ignore the specified property. | | `csharp±Filter( Expression<Func<T, string>> property)` | Defines a string filter for the selected property. | | `csharp±Filter( Expression<Func<T, bool>> property)` | Defines a bool filter for the selected property. | | `csharp±Filter( Expression<Func<T, IComparable>> property)` | Defines a comarable filter for the selected property. | | `csharp±Object<TObject>( Expression<Func<T, TObject>> property)` | Defines a object filter for the selected property. | | `csharp±List( Expression<Func<T, IEnumerable<string>>> property)` | Defines a array string filter for the selected property. | | `csharp±List( Expression<Func<T, IEnumerable<bool>>> property)` | Defines a array bool filter for the selected property. | | `csharp±List( Expression<Func<T, IEnumerable<IComparable>>> property)` | Defines a array comarable filter for the selected property. | | `csharp±Filter<TObject>( Expression<Func<T, IEnumerable<TObject>>> property)` | Defines a array object filter for the selected property. | | `csharp±Directive<TDirective>(TDirective directiveInstance)` | Add directive `directiveInstance` to the type | | `csharp±Directive<TDirective>(TDirective directiveInstance)` | Add directive of type `TDirective` to the type | | `csharp±Directive<TDirective>(NameString name, params ArgumentNode[] arguments)` | Add directive of type `TDirective` to the type | # Filter Conventions The customization of filters with `FilterInputTypes<T>` works if you only want to customize one specific filter type. If you want to change the behavior of all filter types, you want to create a convention for your filters. The filter convention comes with a fluent interface that is close to a type descriptor. You can see the convention as a configuration object that holds the state that is used by the type system or the execution engine. ## Get Started To use a filter convention you can extend `FilterConvention` and override the `Configure` method. Alternatively, you can directly configure the convention over the constructor argument. You then have to register your custom convention on the schema builder with `AddConvention`. ```csharp public class CustomConvention : FilterConvention { protected override void Configure(IFilterConventionDescriptor descriptor) { } } SchemaBuilder.New().AddConvention<CustomConvention>(); // SchemaBuilder.New().AddConvention(new FilterConvention(x => /* Config */)); ``` ## Convention Descriptor Basics In this section, we will take a look at the basic features of the filter convention. The documentation will reference often to `descriptor`. Imagine this `descriptor` as the parameter of the Configure method of the filter convention in the following context: ```csharp public class CustomConvention : FilterConvention { protected override void Configure( /**highlight-start**/ IFilterConventionDescriptor descriptor /**highlight-end**/ ) { } } SchemaBuilder.New().AddConvention<CustomConvention>(); ``` <br/> ### Argument Name With the convention descriptor, you can easily change the argument name of the `FilterInputType`. **Configuration** ```csharp descriptor.ArgumentName("example_argument_name"); ``` **Result** ```sdl type Query { users(example_argument_name: UserFilter): [User] } ``` ### Change Name of Scalar List Type Element You can change the name of the element of the list type. **Configuration** ```csharp descriptor.ElementName("example_element_name"); ``` **Result** ```sdl input ISingleFilterOfInt16Filter { AND: [ISingleFilterOfInt16Filter!] example_element_name: Short example_element_name_gt: Short example_element_name_gte: Short example_element_name_in: [Short!] example_element_name_lt: Short example_element_name_lte: Short example_element_name_not: Short example_element_name_not_gt: Short example_element_name_not_gte: Short example_element_name_not_in: [Short!] example_element_name_not_lt: Short example_element_name_not_lte: Short OR: [ISingleFilterOfInt16Filter!] } ``` ### Configure Filter Type Name Globally To change the way filter types are named, you have to exchange the factory. You have to provide a delegate of the following type: ```csharp public delegate NameString GetFilterTypeName( IDescriptorContext context, Type entityType); ``` **Configuration** ```csharp descriptor.TypeName((context,types) => context.Naming.GetTypeName(entityType, TypeKind.Object) + "Custom"); ``` **Result** ```sdl type Query { users(where: UserCustom): [User] } ``` ### Configure Filter Description Globally To change the way filter types are named, you have to exchange the factory. You have to provide a delegate of the following type: ```csharp public delegate string GetFilterTypeDescription( IDescriptorContext context, Type entityType); ``` **Configuration** ```csharp descriptor.TypeName((context,types) => context.Naming.GetTypeDescription(entityType, TypeKind.Object); + "Custom"); ``` **Result** ```sdl """ Custom """ input UserFilter { AND: [UserFilter!] isOnline: Boolean isOnline_not: Boolean OR: [UserFilter!] } ``` ### Reset Configuration By default, all predefined values are configured. To start from scratch, you need to call `Reset()`first. **Configuration** ```csharp descriptor.Reset(); ``` **Result** > **⚠ Note:** You will need to add a complete configuration, otherwise the filter will not work as desired! ## Describe with convention With the filter convention descriptor, you have full control over what filters are inferred, their names, operations, and a lot more. The convention provides a familiar interface to the type configuration. It is recommended to first take a look at `Filter & Operations` to understand the concept of filters. This will help you understand how the filter configuration works. Filtering has two core components at its heart. First, you have the inference of filters based on .NET types. The second component is an interceptor that translates the filters to the desired output and applies it to the resolver pipeline. These two parts can (and have to) be configured completely independently. With this separation, it is possible to easily extend the behavior. The descriptor is designed to be extendable by extension methods. **It's fluent** Filter conventions are a completely fluent experience. You can write a whole configuration as a chain of method calls. This provides a very clean interface, but can, on the other hand, get messy quickly. We recommend using indentation to keep the configuration comprehensible. You can drill up with `And()`. ```csharp descriptor.Operation(FilterOperationKind.Equals).Description("has to be equal"); descriptor.Operation(FilterOperationKind.NotEquals).Description("has not to be equal"); descriptor.Type(FilterKind.Comparable).Operation(FilterOperationKind.NotEquals).Description("has to be comparable and not equal") descriptor .Operation(FilterOperationKind.Equals) .Description("has to be equal") .And() .Operation(FilterOperationKind.NotEquals) .Description("has not to be equal") .And() .Type(FilterKind.Comparable) .Operation(FilterOperationKind.NotEquals) .Description("has to be comparable and not equal") ``` ### Configuration of the type system In this section, we will focus mainly on the generation of the schema. If you are interested in changing how filters are translated to the database, you have to look here <<INSERT LINK HERE>> #### Configure Filter Operations Operations can be configured in two ways. You can configure a default configuration that applies to all operations of this kind. In this case the configuration for `FilterOperationKind.Equals` would be applied to all `FilterKind` that specify this operation. ```csharp descriptor.Operation(FilterOperationKind.Equals) ``` If you want to configure a more specific Operation e.g. `FilterOperationKind.Equal` of kind `FilterKind.String`, you can override the default behavior. ```csharp descriptor.Type(FilterKind.String).Operation(FilterOperationKind.Equals) ``` The operation descriptor allows you to configure the name, the description or even ignore an operation completely In this example, we will look at the following input type: ```sdl input UserFilter { loggingCount: Int loggingCount_gt: Int loggingCount_gte: Int loggingCount_in: [Int!] loggingCount_lt: Int loggingCount_lte: Int loggingCount_not: Int loggingCount_not_gt: Int loggingCount_not_gte: Int loggingCount_not_in: [Int!] loggingCount_not_lt: Int loggingCount_not_lte: Int name: String name_contains: String name_ends_with: String name_in: [String] name_not: String name_not_contains: String name_not_ends_with: String name_not_in: [String] name_not_starts_with: String name_starts_with: String AND: [UserFilter!] OR: [UserFilter!] } ``` ##### Change the name of an operation To change the name of an operation you need to specify a delegate of the following type: ```csharp public delegate NameString CreateFieldName( FilterFieldDefintion definition, FilterOperationKind kind); ``` **Configuration** ```csharp{1, 6} // (A) // specifies that all not equals operations should be extended with _nada descriptor .Operation(FilterOperationKind.NotEquals) .Name((def, kind) => def.Name + "_nada" ); // (B) // specifies that the not equals operations should be extended with _niente. // this overrides (A) descriptor .Type(FilterKind.Comparable) .Operation(FilterOperationKind.NotEquals) .Name((def, kind) => def.Name + "_niente" ) ``` **result** ```sdl{8,18} input UserFilter { loggingCount: Int loggingCount_gt: Int loggingCount_gte: Int loggingCount_in: [Int!] loggingCount_lt: Int loggingCount_lte: Int loggingCount_niente: Int <-- (B) loggingCount_not_gt: Int loggingCount_not_gte: Int loggingCount_not_in: [Int!] loggingCount_not_lt: Int loggingCount_not_lte: Int name: String name_contains: String name_ends_with: String name_in: [String] name_nada: String <-- (A) name_not_contains: String name_not_ends_with: String name_not_in: [String] name_not_starts_with: String name_starts_with: String AND: [UserFilter!] OR: [UserFilter!] } ``` ##### Change the description of an operation In the same way, you can configure names you can also configure the description of operations. You can either set the description for all operations of this kind or only for a specific one in combination with a filter kind. **Configuration** ```csharp descriptor .Operation(FilterOperationKind.Equals) .Description("has to be equal") .And() .Operation(FilterOperationKind.NotEquals) .Description("has not to be equal") .And() .Type(FilterKind.Comparable) .Operation(FilterOperationKind.NotEquals) .Description("has to be comparable and not equal") ``` **result** ```sdl{2-4,11-14, 20-22,27-29} input UserFilter { """ has to be equal """ loggingCount: Int loggingCount_gt: Int loggingCount_gte: Int loggingCount_in: [Int!] loggingCount_lt: Int loggingCount_lte: Int """ has to be comparable and not equal """ loggingCount_not: Int loggingCount_not_gt: Int loggingCount_not_gte: Int loggingCount_not_in: [Int!] loggingCount_not_lt: Int loggingCount_not_lte: Int """ has to be equal """ name: String name_contains: String name_ends_with: String name_in: [String] """ has not to be equal """ name_not: String name_not_contains: String name_not_ends_with: String name_not_in: [String] name_not_starts_with: String name_starts_with: String AND: [UserFilter!] OR: [UserFilter!] } ``` ##### Hide Operations _Hot Chocolate_ comes preconfigured with a set of operations. If you like to hide operations globally, you can use `Ignore` for it. If your database provider does not support certain `IQueryable` methods you can just ignore the operation. Ignored operations do not generate filter input types. There are multiple ways to ignore an operation: **Configuration** ```csharp descriptor .Ignore(FilterOperationKind.Equals) .Operation(FilterOperationKind.NotEquals) .Ignore() .And() .Type(FilterKind.Comparable) .Operation(FilterOperationKind.GreaterThanOrEqual) .Ignore(); ``` **result** ```sdl{2,4, 8,14,18} input UserFilter { ↵ loggingCount_gt: Int ↵ loggingCount_in: [Int!] loggingCount_lt: Int loggingCount_lte: Int ↵ loggingCount_not_gt: Int loggingCount_not_gte: Int loggingCount_not_in: [Int!] loggingCount_not_lt: Int loggingCount_not_lte: Int ↵ name_contains: String name_ends_with: String name_in: [String] ↵ name_not_contains: String name_not_ends_with: String name_not_in: [String] name_not_starts_with: String name_starts_with: String AND: [UserFilter!] OR: [UserFilter!] } ``` ##### Configure Implicit Filter The default binding behavior of _Hot Chocolate_ is implicit. Filter types are no exception. This first may seem like magic, but unfortunately, there is none. It is just code. With `AddImplicitFilter` you can add this pinch of magic to your extension too. The filters are created as the type is generated. For each property of a model, a list of factories is sequentially asked to create a definition. The first that can handle the property wins and creates a definition for the filter. To configure you have to use the following delegate: ```csharp public delegate bool TryCreateImplicitFilter( IDescriptorContext context, Type type, PropertyInfo property, IFilterConvention filterConventions, [NotNullWhen(true)] out FilterFieldDefintion? definition); ``` | parameter | type | description | | ------------------- | --------------------------- | --------------------------------------------------------------------------------------------------------- | | _context_ | `IDescriptorContext` | The context of the type descriptor | | _type_ | `Type` | The type of the property. `Nullable<T>` is already unwrapped (typeof(T)) | | _property_ | `PropertyInfo` | The property | | _filterConventions_ | `IFilterConvention` | The instance of the `IFilterContention`. | | _definition_ | `out FilterFieldDefintion?` | The generated definition for the property. Return null if the current factory cannot handle the property. | If you just want to build your extension for implicit bindings, you can just out a custom `FilterFieldDefinition`. It makes sense to encapsulate that logic in a FilterFieldDescriptor though. This descriptor can the
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <resources> <!-- common strings without special context --> <string name="app_name">Delta Chat</string> <string name="ok">OK</string> <string name="cancel">Annuler</string> <string name="yes">Oui</string> <string name="no">Non</string> <string name="select">Sélectionner</string> <string name="on">Allumé</string> <string name="off">Éteint</string> <string name="def">Par défaut</string> <string name="custom">Personalisé</string> <string name="none">Aucun</string> <string name="automatic">Automatique</string> <string name="strict">Stricte</string> <string name="open">Ouvert</string> <string name="join">Rejoindre</string> <string name="rejoin">Réintégrer</string> <string name="delete">Supprimer</string> <string name="info">Info</string> <string name="update">Mise à jour</string> <string name="emoji">Émoji</string> <string name="attachment">Pièce jointe</string> <string name="back">Retour</string> <string name="close">Fermer</string> <string name="forward">Faire suivre</string> <string name="archive">Archiver</string> <string name="unarchive">Désarchiver</string> <string name="mute">Sourdine</string> <string name="ephemeral_messages">Messages éphémères</string> <string name="ephemeral_messages_hint">Ces paramètres s\'appliquent à tous les participants utilisant Delta Chat. Cependant, ils peuvent copier, enregistrer et faire suivre des messages ou utiliser un autre client de messagerie.</string> <string name="save">Sauvegarder</string> <string name="chat">Discussion</string> <string name="media">Média</string> <string name="main_menu">Menu principal</string> <string name="start_chat">Commencer la discussion</string> <string name="show_password">Afficher le mot de passe</string> <string name="hide_password">Masquer le mot de passe</string> <string name="not_now">Pas maintenant</string> <string name="never">Jamais</string> <string name="one_moment">Un instant …</string> <string name="done">Fait</string> <string name="undo">Annuler</string> <!-- Translators: used eg. for the next view, could also be "continue" or so. as used in ios headers, the string should be as short as possible, though. --> <string name="next">Suivant</string> <string name="error">Erreur</string> <string name="error_x">Erreur : %1$s</string> <string name="error_no_network">Réseau inaccessible.</string> <string name="no_app_to_handle_data">Aucune application trouvée pour traiter ce type de données.</string> <string name="no_browser_installed">Aucun navigateur installé.</string> <string name="file_not_found">Impossible de trouver %1$s.</string> <string name="copied_to_clipboard">Copié dans le presse-papier.</string> <string name="contacts_headline">Contacts</string> <string name="email_address">Adresse de courriel</string> <string name="bad_email_address">Mauvaise adresse email.</string> <string name="password">Mot de passe</string> <string name="existing_password">Mot de passe existant</string> <string name="now">Maintenant</string> <!-- Translators: used as a headline in sections with actions that cannot be undone. could also be "Caution" or "Cave" or so. --> <string name="danger">Attention</string> <string name="today">Aujourd\'hui</string> <string name="yesterday">Hier</string> <string name="this_week">Cette semaine</string> <string name="this_month">Ce mois-ci</string> <string name="last_week">La semaine dernière</string> <string name="last_month">Le mois dernier</string> <!-- Translators: show beside messages that are "N minutes old". prefer a short string, prefer abbreviations if they're well-known --> <plurals name="n_minutes"> <item quantity="one">%d min</item> <item quantity="other">%d mins</item> </plurals> <!-- Translators: show beside messages that are "N hours old". prefer a short string, prefer abbreviations if they're well-known --> <plurals name="n_hours"> <item quantity="one">%d heure</item> <item quantity="other">%d heures</item> </plurals> <plurals name="n_chats"> <item quantity="one">%d discussion</item> <item quantity="other">%d discussions</item> </plurals> <plurals name="n_contacts"> <item quantity="one">%d contact</item> <item quantity="other">%d contacts</item> </plurals> <plurals name="n_messages"> <item quantity="one">%d message</item> <item quantity="other">%d messages</item> </plurals> <plurals name="n_members"> <item quantity="one">%d membre</item> <item quantity="other">%d membres</item> </plurals> <string name="self">Moi</string> <string name="draft">Brouillon</string> <string name="image">Image</string> <string name="gif">Gif</string> <string name="images">Images</string> <string name="audio">Son</string> <string name="voice_message">Message vocal</string> <string name="forwarded_message">Message transféré</string> <string name="video">Vidéo</string> <string name="documents">Documents</string> <string name="contact">Contact</string> <string name="verified_contact">Contact vérifié</string> <string name="camera">Caméra</string> <string name="location">Position</string> <string name="gallery">Galerie</string> <string name="images_and_videos">Images et vidéos</string> <string name="file">Fichier</string> <string name="files">Fichiers</string> <string name="unknown">Inconnu</string> <string name="green">Vert</string> <string name="red">Rouge</string> <string name="blue">Bleu</string> <string name="orange">Orange</string> <string name="cyan">Cyan</string> <string name="purple">Pourpre</string> <string name="magenta">Magenta</string> <string name="white">Blanc</string> <string name="small">Petit</string> <string name="normal">Normal</string> <string name="large">Grand</string> <string name="extra_large">Très grand</string> <string name="fast">Rapide</string> <string name="slow">Lent</string> <string name="message_delivered">Message transmis</string> <string name="message_read">Message lu</string> <!-- menu labels (or icon, buttons...) --> <string name="menu_new_contact">Nouveau contact</string> <string name="menu_new_chat">Nouvelle discussion</string> <string name="menu_new_group">Nouveau groupe</string> <string name="menu_new_verified_group">Nouveau groupe vérifié</string> <string name="menu_send">Envoyer</string> <string name="menu_toggle_keyboard">Basculer le clavier emoji</string> <string name="menu_edit_group">Modifier le groupe</string> <string name="menu_group_name_and_image">Nom du groupe et photo</string> <string name="menu_show_map">Montrer la carte</string> <string name="menu_show_global_map">Afficher toutes les positions</string> <string name="menu_archive_chat">Archiver la discussion</string> <string name="menu_unarchive_chat">Désarchiver la discussion</string> <string name="menu_add_attachment">Ajouter une pièce jointe</string> <string name="menu_leave_group">Quitter le groupe</string> <string name="menu_delete_chat">Effacer la discussion</string> <string name="menu_delete_messages">Effacer les messages</string> <string name="menu_delete_image">Effacer l\'image</string> <string name="menu_delete_locations">Effacer toutes les positions ? </string> <string name="menu_delete_location">Effacer cette position ?</string> <string name="menu_message_details">Détails du message</string> <string name="menu_copy_to_clipboard">Copier vers presse-papiers</string> <string name="menu_forward">Faire suivre le message</string> <string name="menu_resend">Renvoyer le message</string> <string name="menu_reply">Répondre au message</string> <string name="menu_mute">Déactiver les notifications</string> <string name="menu_unmute">Réactiver</string> <string name="menu_export_attachment">Exporter le fichier joint</string> <string name="menu_all_media">Tous les médias</string> <string name="menu_share">Partager</string> <string name="menu_block_contact">Bloquer ce contact</string> <string name="menu_unblock_contact">Débloquer ce contact</string> <string name="menu_play">Jouer</string> <string name="menu_pause">Pause</string> <string name="menu_scroll_to_bottom">Faites défiler jusqu\'en bas</string> <string name="menu_scroll_to_top">Faites défiler jusqu\'en haut</string> <string name="menu_help">Aide</string> <string name="menu_select_all">Tout sélectionner</string> <string name="menu_expand">Étendre</string> <string name="menu_edit_name">Modifier le nom</string> <string name="menu_settings">Configuration</string> <string name="menu_advanced">Avancé</string> <string name="menu_deaddrop">Demande de contact</string> <string name="menu_deaddrop_subtitle">Appuyez sur le message pour démarrer la discussion</string> <string name="menu_view_profile">Voir le profil</string> <string name="menu_zoom_in">Zoom avant</string> <string name="menu_zoom_out">Zoom arrière</string> <string name="menu_save_log">Sauvegarder le journal</string> <string name="title_share_location">Partager la position avec tous les membres du groupe</string> <string name="device_talk">Messages de l\'appareil</string> <string name="device_talk_subtitle">Messages générés localement</string> <string name="device_talk_explain">Les messages dans cette discussion sont générés localement par votre application Delta Chat. Ses créateurs l\'utilise pour informer des mises à jour et problèmes rencontrés lors de l\'utilisation.</string> <string name="device_talk_welcome_message">Bienvenue sur Delta Chat ! - Delta Chat ressemble à d\'autres applications de messagerie populaires, mais n\'implique pas un contrôle centralisé, le traçage ou la revente de vos données, de vos amis, collègues ou famille à de grandes organisations. Techniquement, Delta Chat est une application de messagerie électronique avec une interface de tchate moderne. Un courriel retoiletté si vous voulez 👻\n\nUtilisez Delta Chat avec n\'importe qui parmi des milliards de personnes : utilisez simplement leur adresse électronique. Les destinataires n\'ont pas besoin d\'installer Delta Chat, de visiter des sites web ou de s\'inscrire où que ce soit - cependant, bien sûr, s\'ils le souhaitent, vous pouvez les diriger vers 👉 https://get.delta.chat</string> <string name="edit_contact">Mettre à jour le contact</string> <!-- Translators: "Pin" here is the verb for pinning, making sth. sticky. this is NOT the appreviation for "pin number". --> <string name="pin_chat">Épingler le tchate </string> <!-- Translators: this is the opposite of "Pin chat", removing the sticky-state from a chat. --> <string name="unpin_chat">Désépingler le tchate </string> <!-- Translators: this is the verb for pinning, making sth. sticky. this is NOT the appreviation for "pin number". --> <string name="pin">Épingler</string> <!-- Translators: this is the opposite of "Pin", removing the sticky-state from sth. --> <string name="unpin">Désépingler</string> <string name="mute_for_one_hour">Déactiver pour 1 heure</string> <string name="mute_for_two_hours">Déactiver pour 2 heures</string> <string name="mute_for_one_day">Déactiver pour 1 jour</string> <string name="mute_for_seven_days">Déactiver pour 7 jours</string> <string name="mute_forever">Sourdine pour toujours</string> <string name="share_location_once">une fois</string> <string name="share_location_for_5_minutes">pour 5 minutes</string> <string name="share_location_for_30_minutes">pour 30 minutes</string> <string name="share_location_for_one_hour">pour 1 heure</string> <string name="share_location_for_two_hours">pour 2 heures</string> <string name="share_location_for_six_hours">pour 6 heures</string> <string name="file_saved_to">Fichier enregistré dans \"%1$s\".</string> <string name="videochat_invite_user_hint">Cela requiert une application compatible ou un navigateur compatible des deux côtés.</string> <string name="videochat_tap_to_join">Appuyez pour rejoindre</string> <string name="videochat_tap_to_open">Appuyez pour ouvrir</string> <!-- get confirmations --> <string name="ask_leave_group">Voulez-vous vraiment quitter ce groupe ?</string> <plurals name="ask_delete_chat"> <item quantity="one">Effacer %d tchats \?</item> <item quantity="other">Effacer %d discussions \? Elles ne seront plus montrées dans la liste de tchats, mais leurs messages resteront sur le serveur.</item> </plurals> <plurals name="ask_delete_messages"> <item quantity="one">Effacer %d messages ici et sur le serveur ?</item> <item quantity="other">Effacer %dmessages ici et sur le serveur ?</item> </plurals> <string name="ask_forward">Faire suivre les messages à %1$s \?</string> <string name="ask_export_attachment">Exporter une pièce jointe ? L\'exportation des pièces jointes permettra à toute autre application sur votre appareil d\'y accéder. Continuer ?</string> <string name="ask_block_contact">Bloquer ce contact ? Vous ne recevrez plus ses messages.</string> <string name="ask_unblock_contact">Débloquer ce contact ? Vous recevrez à nouveau ses messages.</string> <string name="ask_delete_contacts">Effacer ces contacts? Cela supprimera définitivement les contacts sélectionnés.\n\nLes contacts avec des conversations en cours et ceux venant du carnet d\'adresses du système ne peuvent pas être définitivement effacés.</string> <string name="ask_delete_contact">Supprimer le contact %1$s ?\n\nLes contacts dans les discussions en cours et les contacts du carnet d\'adresses du système ne peuvent pas être supprimés définitivement.</string> <string name="cannot_delete_contacts_in_use">Impossible d\'effacer des contacts ayant une discussion active.</string> <string name="ask_start_chat_with">Discuter avec %1$s ?</string> <!-- Translators: %1$s will be replaces by a comma separated list of names --> <string name="ask_remove_members">Retirer %1$s du groupe ?</string> <!-- contact list --> <string name="contacts_title">Contacts</string> <string name="contacts_enter_name_or_email">Entrez le nom ou l\'email</string> <string name="contacts_type_email_above">Entrez l\'email au dessus</string> <string name="contacts_empty_hint">Pas de contacts.</string> <!-- chatlist and chat view --> <plurals name="chat_archived"> <item quantity="one"> %d discussion archivée</item> <item quantity="other">%d discussions archivées</item> </plurals> <plurals name="chat_unarchived"> <item quantity="one">%d discussion désarchivée</item> <item quantity="other">%d discussions désarchivées</item> </plurals> <string name="chat_archived_chats_title">Discussions archivées</string> <string name="chat_please_enter_message">Entrez un message.</string> <string name="chat_camera_unavailable">Caméra indisponible.</string> <string name="chat_unable_to_record_audio">Impossible d\'enregistrer le son.</string> <plurals name="chat_n_new_messages"> <item quantity="one">%dnouveaux messages</item> <item quantity="other">%d nouveaux messages</item> </plurals> <string name="chat_no_messages_hint">Envoyer un message à %1$s:\n\n• C\'est OK si %2$s n\'utilise pas Delta Chat.\n\n• La remise du premier message peut prendre un moment et peut ne pas être chiffré.</string> <string name="chat_new_group_hint">Composez le premier message, permettant aux autres de répondre au sein de ce groupe. C\'est ok même si tous les membres n\'utilisent pas le Chat Delta. La transmission du premier message peut prendre un certain temps.</string> <string name="chat_record_slide_to_cancel">Glisser pour annuler</string> <string name="chat_record_explain">Appuyez et maintenez pour enregistrer un message vocal, relâchez pour l’envoyer</string> <string name="chat_no_chats_yet_title">Boîte de réception vide. Appuyez sur \"+\" pour commencer une nouvelle discussion.</string> <string name="chat_no_chats_yet_hint">Vous pouvez discuter avec d\'autres utilisateurs de Delta Chat et avec n\'importe quelle adresse e-mail.</string> <string name="chat_all_archived">Tous les tchats archivés. Appuyez sur \"+\" pour commencer un nouveau chat.</string> <string name="chat_share_with_title">Partager avec</string> <string name="chat_input_placeholder">Message</string> <string name="chat_archived_label">Archivé</string> <string name="chat_no_messages">Aucun message.</string> <string name="chat_self_talk_subtitle">Messages envoyés à moi-même</string> <string name="saved_messages">Messages sauvegardés</string> <string name="saved_messages_explain">• Transférez les messages ici pour un accès facile\n\n• Prenez des notes, des mémos vocaux\n\n• Joignez des fichiers pour les sauvegarder</string> <string name="chat_contact_request">Demande de contact</string> <string name="chat_no_contact_requests">Aucune demande de contact. Si vous voulez que les courriels classiques apparaissent ici comme demandes de contact, vous pouvez changer le paramètre correspondant dans les paramètres de l\'application.</string> <string name="retry_send">Réessayer d\'envoyer le message</string> <string name="send_failed">L’envoi du message a échoué</string> <!-- map --> <string name="filter_map_on_time">Afficher les positions dans le temps</string> <string name="show_location_traces">Afficher les traces</string> <string name="add_poi">Afficher les points d\'intérêt</string> <!-- search --> <string name="search">Chercher</string> <string name="search_explain">Rechercher des discussions, des contacts ou des messages</string> <string name="search_no_result_for_x">Aucun résultat trouvé pour \"%s\"</string> <!-- create/edit groups, contact/group profile --> <string name="group_name">Nom du groupe</string> <string name="group_avatar">Avatar du groupe</string> <string name="group_create_button">Créer un groupe</string> <string name="group_please_enter_group_name">Entrez un nom pour le groupe.</string> <string name="group_add_members">Ajouter des participants</string> <string name="group_hello_draft">Salut, je viens juste de nous créer le groupe \"%1$s\".</string> <string name="group_self_not_in_group">Vous devez être membre du groupe pour effectuer cette action.</string> <string name="profile_encryption">Chiffrement</string> <string name="profile_shared_chats">Discussions partagées</string> <string name="tab_contact">Contact</string> <string name="tab_group">Groupe</string> <string name="tab_members">Membres</string> <string name="tab_gallery">Gallerie</string> <string name="tab_docs">Documents</string> <string name="tab_links">Liens</string> <string name="tab_map">Carte</string> <string name="tab_gallery_empty_hint">Les images et vidéos partagées dans ce tchat seront affichées ici.</string> <string name="tab_docs_empty_hint">Les documents, musiques et autres fichiers partagés dans ce chat seront affichés ici.</string> <string name="media_preview">Prévisualiser le fichier multimédia</string> <string name="send_message">Envoyer message</string> <!-- welcome and login --> <string name="welcome_intro1_message">Le messager avec le plus large public au monde. Libre et indépendant.</string> <string name="login_title">Connexion</string> <string name="login_header">Connexion vers votre serveur</string> <string name="login_explain">Se connecter avec un compte de courriel existant.</string> <string name="login_subheader">Pour les fournisseurs de courriel connus, des paramètres supplémentaires sont automatiquement configurés. Parfois, IMAP doit être activé dans l\'interface Web. Consultez votre fournisseur de courriel ou vos amis pour obtenir de l\'aide.</string> <string name="login_no_servers_hint">Il n\'y a pas de serveur Delta Chat, vos données restent sur votre appareil !</string> <string name="login_inbox">Boîte de réception</string> <string name="login_imap_login">Utilisateur IMAP</string> <string name="login_imap_server">Serveur IMAP</string> <string name="login_imap_port">Port IMAP</string> <string name="login_imap_security">Sécurité IMAP</string> <string name="login_outbox">Boîte d\'envoi</string> <string name="login_smtp_login">Utilisateur SMTP</string> <string name="login_smtp_password">Mot de passe SMTP</string> <string name="login_smtp_server">Serveur SMTP</string> <string name="login_smtp_port">Port SMTP</string> <string name="login_smtp_security">Sécurité SMTP</string> <string name="login_auth_method">Méthode d\'autorisation</string> <string name="login_info_oauth2_title">Continuer avec une configuration simplifiée ?</string> <string name="login_info_oauth2_text">L\'adresse de courriel saisie prend en charge une configuration simplifiée (OAuth2.0). \n\nDans l\'étape suivante, autorisez Delta Chat à agir comme votre application de Chat avec courriel . Il n\'y a pas de serveur Delta Chat, vos données restent sur votre appareil !</string> <string name="login_certificate_checks">Vérification du certificat</string> <string name="login_error_mail">Entrez une adresse email valide</string> <string name="login_error_server">Entre un serveur / adresse IP valide</string> <string name="login_error_port">Entrez un port valide (1-65535)</string> <string name="login_error_required_fields">Entrez une adresse email et un mot de passe valides</string> <string name="import_backup_title">Importer une sauvegarde</string> <string name="import_backup_ask">Sauvegarde trouvée sur \"%1$s\".\n\nVoulez-vous importer et en utiliser toutes les données et paramètres ?</string> <string name="import_backup_no_backup_found">Aucune sauvegarde trouvée.\n\nCopiez le fichier de sauvegarde vers \"%1$s\" et essayez à nouveau. Ou bien, appuyez sur \"Démarrage communication\" pour suivre le processus normal de configuration.</string> <!-- Translators: %1$s will be replaced by the email address --> <string name="login_error_cannot_login">Connexion en tant que \"%1$s\" échouée. Vérifiez s.v.p. que votre adresse courriel et votre mot de passe sont corrects.</string> <!-- Translators: %1$s will be replaced by the server name (eg. imap.somewhere.org) and %2$s will be replaced by the human-readable response from the server. this response may be a single word or some sentences and may or may not be localized. --> <string name="login_error_server_response">Réponse de %1$s: %2$s\n\nCertains fournisseurs ajoutent des informations dans votre boite de réception; vous pouvez les vérifier par exemple dans le gestionnaire Web. Consultez votre fournisseur ou vos amis si vous rencontrez des problèmes.</string> <!-- TLS certificate checks --> <string name="accept_invalid_certificates">Accepter les certificats non valides</string> <string name="used_settings">Paramètres utilisés :</string> <string name="switch_account">Changer de compte</string> <string name="add_account">Ajouter un compte</string> <string name="delete_account">Supprimer le compte</string> <string name="delete_account_ask">Êtes-vous sûr de vouloir supprimer les données de votre compte ?</string> <string name="switching_account">Changement de compte...</string> <!-- share and forward messages --> <!-- Translators: Title shown above a chat/contact list; the user selects the recipient of the messages he wants to forward to --> <string name="forward_to">Transférer à …</string> <string name="share_abort">Partage annulé en raison de permissions manquantes.</string> <!-- preferences --> <string name="pref_using_custom">Usage de la personnalisation : %s</string> <string name="pref_using_default">Utilisation par défaut : %s</string> <string name="pref_profile_info_headline">Informations de votre profil</string> <string name="pref_profile_photo">Photo du profil</string> <string name="pref_blocked_contacts">Contacts bloqués</string> <string name="pref_profile_photo_remove_ask">Supprimer la photo de profil ?</string> <string name="pref_password_and_account_settings">Mot de passe et compte</string> <string name="pref_who_can_see_profile_explain">L\'image et le nom de votre profil seront affichés à côté de vos messages lorsque vous communiquez avec d\'autres utilisateurs. Les informations déjà envoyées ne peuvent pas être effacées ou supprimées.</string> <string name="pref_your_name">Votre nom</string> <!-- Translators: The value entered here is visible only to recipients who DO NOT use Deltachat, so its not a "Status" but the last line in the E-Mail. --> <string name="pref_default_status_label">Signature</string> <!-- Translators: The URL should not be localized, it is not clear which language the receiver prefers and the language will be detected on the server --> <string name="pref_default_status_text">Envoyé depuis Delta Chat: https://delta.chat</string> <string name="pref_enter_sends">Entrer les envois de clés</string> <string name="pref_enter_sends_explain">Appuyez sur la touche Entrée pour envoyer des messages texte</string> <string name="pref_outgoing_media_quality">Qualité du média en sortie</string> <string name="pref_outgoing_balanced">Équilibré</string> <string name="pref_outgoing_worse">Pire qualité, taille réduite</string> <string name="pref_vibrate">Vibrer</string> <string name="pref_change_secret">Modifier le secret</string> <string name="pref_change_secret_explain">Modifiez votre code PIN / modèle / empreinte digitale via les paramètres du système</string> <string name="pref_screen_security">Écran de sécurité</string> <!-- Translators: The wording must indicate that we can't guarantee that, its a System flag that we set. But the System in question must honor it. --> <string name="pref_screen_security_explain">Demande de blocage des captures d\'écran dans la liste des captures récentes et à l\'intérieur de l\'application</string> <string name="pref_screen_security_please_restart_hint">Pour appliquer les paramètres de sécurité de l\'écran, veuillez redémarrer l\'application.</string> <string name="pref_notifications">Notifications</string> <string name="pref_notifications_show">Afficher</string> <string name="pref_notifications_priority">Priorité</string> <string name="pref_led_color">Couleur de LED</string> <string name="pref_sound">Son</string> <string name="pref_silent">Silencieux</string> <string name="pref_privacy">Vie privée</string> <string name="pref_chats_and_media">Discussions et fichiers multimédia</string> <!-- Translators: "light" in the meaning "opposite of dark" --> <string name="pref_light_theme">Clair</string> <string name="pref_dark_theme">Foncé</string> <string name="pref_appearance">Apparence</string> <string name="pref_theme">Thème</string> <string name="pref_language">Langage</string> <string name="pref_incognito_keyboard">Clavier anonyme</string> <!-- Translators: Keep in mind that this is a Request - it must be clear in the wording that this cannot be enforced. --> <string name="pref_incognito_keyboard_explain">Demande clavier pour désactiver l\'apprentissage personnalisé</string> <string name="pref_read_receipts">Accusés de lecture</string> <string name="pref_read_receipts_explain">Si les accusés de lecture sont désactivés, vous ne pourrez pas voir les accusés de lecture des autres.</string> <string name="pref_manage_keys">Gérer les clés</string> <string name="pref_use_system_emoji">Utiliser les émojis système</string> <string name="pref_use_system_emoji_explain">Désactiver la prise en charge des émojis intégrés de Delta Chat</string> <string name="pref_app_access">Accès à l\'application</string> <string name="pref_communication">Communication</string> <string name="pref_chats">Discussions</string> <string name="pref_in_chat_sounds">Sons de messages entrants</string> <string name="pref_message_text_size">Taille de police des messages</string> <string name="pref_view_log">Voir le journal</string> <string name="pref_saved_log">Journal enregistré dans le dossier \"Téléchargements\"</string> <string name="pref_save_log_failed">Enregistrement du journal échoué</string> <string name="pref_log_header">Journal</string> <string name="pref_other">Autre</string> <string name="pref_backup">Sauvegarde</string> <string name="pref_backup_explain">Sauvegarder les discussions sur un stockage externe</string> <string name="pref_backup_export_explain">Une sauvegarde vous aide pour une nouvelle installation sur cet appareil ou un autre.\n\nLa sauvegarde contiendra tous les messages, contacts et discussions ainsi que vos réglages d\'Autocrypt de bout en bout. Garder le fichier de sauvegarde dans un endroit sûr ou supprimez le aussitôt utilisé.</string> <string name="pref_backup_export_start_button">Démarrer la sauvegarde</string> <string name="pref_backup_written_to_x">Sauvegarde effectuée avec succès sur %1$s</string> <string name="pref_managekeys_menu_title">Gérer les clés</string> <string name="pref_managekeys_export_secret_keys">Exporter les clés secrètes</string> <string name="pref_managekeys_export_explain">Exporter les clés secrètes vers \"%1$s\"?</string> <string name="pref_managekeys_import_secret_keys">Importer des clés secrètes</string> <string name="pref_managekeys_import_explain">Importer les clés secrètes depuis \"%1$s\"?\n\n• Les clés secrètes existantes ne seront pas détruites\n\n• La dernière clé importée sera utilisée comme nouvelle clé par défaut à moins qu\'il y ait le mot \"legacy\" dans son nom de fichier</string> <string name="pref_managekeys_secret_keys_exported_to_x">Les clés secrètes ont été écrites avec succès vers \"%1$s\".</string> <string name="pref_managekeys_secret_keys_imported_from_x">Les clés secrètes ont été importées depuis \"%1$s\".</string> <string name="pref_background">Arrière-plan</string> <string name="pref_background_btn_default">Utilisez l\'image par défaut</string> <string name="pref_background_btn_gallery">Sélectionner depuis la galerie</string> <string name="pref_imap_folder_handling">Gestion de dossier IMAP</string> <string name="pref_imap_folder_warn_disable_defaults">Si vous désactivez cette option, assurez-vous que votre serveur et vos autres clients sont configurés en conséquence.\n\nSinon les choses pourraient ne pas fonctionner du tout.</string> <string name="pref_watch_inbox_folder">Regarder le dossier Boîte de réception</string> <string name="pref_watch_sent_folder">Regarder le dossier Messages envoyés</string> <string name="pref_watch_mvbox_folder">Regarder le dossier DeltaChat</string> <string name="pref_send_copy_to_self">M\'envoyer une copie</string> <string name="pref_auto_folder_moves">Déplacer automatiquement vers le dossier DeltaChat</string> <string name="pref_auto_folder_moves_explain">Les discussions sont déplacées pour éviter d\'encombrer le dossier Boîte de réception</string> <string name="pref_show_emails">Voir les courriels classiques</string> <string name="pref_show_emails_no">Non, seulement les tchats</string> <string name="pref_show_emails_accepted_contacts">Pour les contacts acceptés</string> <string name="pref_show_emails_all">Tout</string> <string name="pref_experimental_features">Fonctionnalités expérimentales</string> <string name="pref_on_demand_location_streaming">Envoi de la géolocalisation à la demande</string> <string name="pref_background_default">Arrière-plan par défaut</string> <string name="pref_background_default_color">Couleur par défaut</string> <string name="pref_background_custom_image">Image personnalisée</string> <string name="pref_background_custom_color">Couleur personnalisée</string> <!-- automatically delete message --> <string name="delete_old_messages">Effacer les anciens messages</string> <string name="autodel_device_title">Effacer les messages sur appareil</string> <string name="autodel_server_title">Effacer les messages sur le serveur</string> <string name="after_30_seconds">Après 30 secondes</string> <string name="after_1_minute">Après 1 minute</string> <string name="autodel_after_1_hour">Après 1 heure</string> <string name="autodel_after_1_day">Après 1 jour</string> <string name="autodel_after_1_week">Après 1 semaine</string> <string name="autodel_after_4_weeks">Après 4 semaines</string> <string name="autodel_after_1_year">Après 1 année</string> <!-- autocrypt --> <string name="autocrypt">Autocrypt</string> <string name="autocrypt_explain">Autocrypt est une nouvelle spécification ouverte pour le chiffrement automatique de bout en bout des courriels.\n\nVotre configuration de bout en bout est créée automatiquement si nécessaire et vous pouvez la transférer entre les périphériques avec les messages de configuration Autocrypt.</string> <string name="autocrypt_send_asm_title">Envoyer le message de configuration Autocrypt</string> <string name="autocrypt_send_asm_explain_before">Un \"Message de configuration d\'Autocrypt\" partages en toute sécurité vos paramètres de bout-en-bout avec les autres applications conformes à Autocrypt.\n\nLa configuration sera chiffrée par un code d\'installation affiché ici et doit être saisi sur l\'autre appareil.</string> <string name="autocrypt_send_asm_button">Envoyer le message de configuration Autocrypt</string> <string name="autocrypt_send_asm_explain_after">Votre configuration vous a été envoyée. Passez à l\'autre appareil et ouvrez le message de configuration. Vous devriez être invité à entrer un code de configuration. Tapez les chiffres suivants dans l\'invite :\n\n%1$s</string> <string name="autocrypt_prefer_e2ee">Préférez le chiffrement de bout en bout</string> <string name="autocrypt_asm_subject">Message de configuration Autocrypt</string> <string name="autocrypt_asm_general_body">Ceci est le message de configuration Autocrypt utilisé pour transférer votre configuration de bout en bout entre clients. Pour déchiffrer et utiliser votre configuration, ouvrez le message dans un client compatible Autocrypt et entrez le code de configuration présenté sur le périphérique de génération.</string> <string name="autocrypt_asm_click_body">Ceci est le message de configuration Autocrypt utilisé pour transférer votre configuration de bout en bout entre clients. Pour décrypter et utiliser votre configuration, tapez ou cliquez sur ce message.</string> <string name="autocrypt_continue_transfer_title">Message de configuration Autocrypt</string> <string name="autocrypt_continue_transfer_please_enter_code">Veuillez entrer le code de configuration affiché sur l\'autre appareil.</string> <string name="autocrypt_continue_transfer_succeeded">Configuration de bout en bout transférée. Cet appareil est maintenant prêt à utiliser Autocrypt avec la même configuration que l\'autre appareil.</string> <string name="autocrypt_continue_transfer_retry">Ré-essayer</string> <string name="autocrypt_bad_setup_code">Mauvais code d\'installation. Veuillez réessayer. Si vous ne vous souvenez pas du code d\'installation, envoyez simplement un autre message d\'installation Autocrypt depuis l\'autre appareil.</string> <!-- system messages --> <string name="systemmsg_group_name_changed">Nom de groupe modifié de \"%1$s\" en \"%2$s\".</string> <string name="systemmsg_group_image_changed">Image de groupe modifiée.</string> <string name="systemmsg_group_image_deleted">Image de groupe effacée.</string> <string name="systemmsg_member_added">Membre %1$s ajouté.</string> <string name="systemmsg_member_removed">Membre %1$s retiré.</string> <string name="systemmsg_group_left">Groupe quitté.</string> <string name="systemmsg_read_receipt_subject">Accusé de lecture</string> <string name="systemmsg_read_receipt_body">Ceci est un accusé de lecture pour le message \"%1$s\".\n\nCela signifie que le message a été affiché sur l\'appareil du destinataire, pas forcément que le contenu ait été lu.</string> <string name="systemmsg_cannot_decrypt">Ce message ne peut pas être déchiffré.\n\n• Il peut déjà être utile de simplement répondre à ce message et demander à l\'expéditeur de l\'envoyer à nouveau.\n\n• Au cas où vous auriez réinstallé Delta Chat ou un autre programme de messagerie sur cet appareil ou un autre, vous voudrez peut-être envoyer un message de configuration d\'Autocrypt depuis ce dernier.</string> <!-- Translators: %1$s will be replaced by sth. as "member xy added" (trailing full-stop removed). --> <string name="systemmsg_action_by_me">%1$s par moi.</string> <!-- Translators: %1$s will be replaced by sth. as "member xy added" (trailing full-stop removed). %2$s will be replaced by the name/addr of the person who did this action. --> <string name="systemmsg_action_by_user">%1$s sur %2$s.</string> <string name="systemmsg_subject_for_new_contact">Message de %1$s</string> <string name="systemmsg_failed_sending_to">Échec de l\'envoi du message à %1$s.</string> <!-- screen lock --> <string name="screenlock_title">Verrouillage de l\'écran</string> <string name="screenlock_explain">Verrouiller l’accès avec le verrouillage d’écran Android ou l’empreinte digitale ; pour éviter d’afficher le contenu précédent, veuillez également activer « Sécurité écran »</string> <string name="screenlock_authentication_failed">L\'authentification a échoué.</string> <string name="screenlock_unlock_title">Déverrouiller DeltaChat</string> <string name="screenlock_unlock_description">Veuillez entrer votre secret défini par le système pour déverrouiller DeltaChat.</string> <string name="screenlock_inactivity_timeout">Verrouillage du délai d\'inactivité</string> <string name="screenlock_inactivity_timeout_explain">Auto-verrouillage de Delta Chat après un intervalle de temps d\'inactivité spécifié</string> <string name="screenlock_inactivity_timeout_interval">Temps pour rester actif</string> <!-- qr code stuff --> <string name="qr_code">Code QR</string> <string name="qrscan_title">Numériser le QR code</string> <string name="qrscan_hint">Placez votre caméra au dessus du QR code</string> <string name="qrscan_ask_join_group">Voulez-vous rejoindre le groupe \"%1$s\" ?</string> <string name="qrscan_fingerprint_mismatch">L\'empreinte digitale numérisée ne correspond pas à la dernière empreinte digitale vue depuis %1$s</string> <string name="qrscan_no_addr_found">Ce QR code contient une empreinte digitale mais pas d\'adresse de courriel.\n\nPour une vérification externe, veuillez d\'abord établir une connexion cryptée avec le destinataire.</string> <string name="qrscan_contains_text">Texte du QR code numérisé:\n\n%1$s</string> <string name="qrscan_contains_url">Url du QR code numérisé :\n\n%1$s</string> <string name="qrscan_fingerprint_label">Empreinte digitale</string> <string name="qrscan_x_verified_introduce_myself">%1$s vérifié, me présenter …</string> <string name="qrshow_title">QR Code d\'Invitation</string> <string name="qrshow_x_joining">%1$s rejoints.</string> <string name="qrshow_x_verified">%1$s vérifiés.</string> <string name="qrshow_x_has_joined_group">%1$s a rejoint le groupe.</string> <string name="qrshow_join_group_title">QR code d\'invitation</string> <string name="qrshow_join_group_hint">Scannez ceci pour rejoindre le groupe \"%1$s\".</string> <string name="qrshow_join_contact_title">QR code d\'invitation</string> <string name="qrshow_join_contact_hint">Scanner ceci pour créer un contact avec %1$s</string> <string name="qrshow_join_contact_no_connection_hint">\'installation du QR code nécessite une connexion Internet. Veuillez vous connecter à un réseau avant de continuer.</string> <string name="qrshow_join_contact_no_connection_toast">Pas de connexion Internet, ne peut pas effectuer la configuration du QR code.</string> <string name="qraccount_qr_code_cannot_be_used">Le code QR scanné ne peut pas être utilisé pour ouvrir un nouveau compte.</string> <string name="qraccount_use_on_new_install">Le code QR scanné permet d\'ouvrir un nouveau compte. Vous pouvez scanner le code QR lors de la mise en place d\'une nouvelle installation de Delta Chat.</string> <string name="contact_verified">%1$s vérifié.</string> <string name="contact_not_verified">Ne peut pas vérifier %1$s</string> <!-- translators: "setup" is the "encryption setup" here, as in "Autocrypt Setup Message" --> <string name="contact_setup_changed">Configuration modifiée pour %1$s</string> <string name="verified_group_explain">Les groupes vérifiés (fonction expérimentale) offrent une sécurité contre les attaques actives. Les membres sont vérifiés avec un second facteur par d\'autres membres et les messages sont toujours chiffrés de bout en bout.</string> <!-- notifications --> <string name="notify_n_messages_in_m_chats">%1$d nouveaux messages dans %2$d tchats</string> <string name="notify_mark_read">Marquer comme lu</string> <string name="notify_reply_button">Répondre</string> <string name="notify_new_message">Nouveau message</string> <string name="notify_background_connection_enabled">Connexion en arrière-plan activée</string> <string name="notify_priority_high">Haut</string> <string name="notify_priority_max">Maximum</string> <string name="notify_name_and_message">Nom et message</string> <string name="notify_name_only">Nom seulement</string> <string name="notify_no_name_or_message">Pas de nom ou de message</string> <!-- permissions --> <string name="perm_required_title">Autorisation requise</string> <string name="perm_continue">Continuez</string> <string name="perm_explain_access_to_camera_denied">Pour prendre des photos ou filmer des vidéos, aller au menu Paramètres de l\'application, sélectionnez \"Autorisations\" et autorisez \"Appareil photo\".</string> <string name="perm_explain_access_to_mic_denied">Pour envoyer des messages audio, aller dans les réglages de l\'application, sélectionner \"Autorisations\" et autoriser \"Microphone\".</string> <string name="perm_explain_access_to_storage_denied">Pour recevoir ou envoyer des fichiers, aller au menu Paramètres de l\'application, sélectionnez \"Autorisations\" et autoriser \"Stockage\".</string> <string name="perm_explain_access_to_location_denied">Pour attacher une position, aller au menu paramètres de l\'application, sélectionnez \"Autorisations\" et activez \"Position\".</string> <!-- dc_str_* resources --> <string name="encrypted_message">Message chiffré</string> <!-- strings introduced on desktop. we want to share strings between the os, in general, please do not add generic strings here --> <string name="welcome_desktop">Bienvenue sur Delta Chat</string> <string name="login_known_accounts_title_desktop">Comptes connus</string> <string name="global_menu_preferences_language_desktop">Choisissez la langue...</string> <string name="global_menu_file_desktop">Fichier</string> <string name="global_menu_file_quit_desktop">Quittez</string> <string name="global_menu_edit_desktop">Modifier</string> <string name="global_menu_edit_undo_desktop">Annuler</string> <string name="global_menu_edit_redo_desktop">Refaire</string> <string name="global_menu_edit_cut_desktop">Couper</string> <string name="global_menu_edit_copy_desktop">Copier</string> <string name="global_menu_edit_paste_desktop">Coller</string> <string name="global_menu_view_desktop">Voir</string> <string name="global_menu_view_floatontop_desktop">Flotter au dessus</string> <string name="global_menu_view_developer_desktop">Développeur</string> <string name="global_menu_view_developer_tools_desktop">Outils de développement</string> <string name="global_menu_help_desktop">Aide</string> <string name="global_menu_help_learn_desktop">En savoir plus sur Delta Chat</string> <string name="global_menu_help_contribute_desktop">Contribuer sur Github</string> <string name="global_menu_help_report_desktop">Signaler un problème</string> <string name="global_menu_help_about_desktop">À propos de Delta Chat</string> <string name="no_chat_selected_suggestion_desktop">Sélectionner une discussion ou en créer une nouvelle</string> <string name="write_message_desktop">Écrire un message</string> <string name="encryption_info_title_desktop">Information de chiffrement</string> <string name="contact_detail_title_desktop">Détail du contact</string> <string name="contact_request_title_desktop">Demande de contact</string> <string name="delete_message_desktop">Effacer les messages</string> <string name="more_info_desktop">En savoir plus</string> <string name="logout_desktop">Déconnexion</string> <string name="timestamp_format_m_desktop">MMM D</string> <string name="encryption_info_desktop">Afficher les informations de chiffrement</string> <string name="verified_desktop">vérifié</string> <string name="remove_desktop">Retirer</string> <string name="save_desktop">Sauvegardez</string> <string name="add_contact_desktop">Ajouter un contact</string> <string name="login_required_desktop">requis</string> <string name="name_desktop">Nom</string> <string name="autocrypt_key_transfer_desktop">Transfert de la clé Autocrypt</string> <string name="initiate_key_transfer_desktop">Un message de configuration Autocrypt partage en toute sécurité votre configuration de bout en bout avec d\'autres applications compatibles Autocrypt. La configuration est chiffrée par un code de configuration qui s\'affiche ici et doit être tapé sur l\'autre appareil.</string> <string name="reply_to_message_desktop">Répondre au lmessage</string> <string name="select_group_image_desktop">Choisir l\'image de groupe</string> <string name="imex_progress_title_desktop">Avancement de la sauvegarde</string> <string name="download_attachment_desktop">Télécharger la pièce jointe</string> <string name="export_backup_desktop">Exporter la sauvegarde</string> <string name="transfer_key_desktop">Transférer la clé</string> <string name="show_key_transfer_message_desktop">Votre clé vous a été envoyée. Passez à l\'autre appareil et ouvrez le message de configuration. Vous devriez être invité à entrer un code de configuration. Tapez les chiffres suivants:</string> <string name="new_message_from_desktop">Nouveau message de</string> <string name="unblock_contacts_desktop">Débloquer des contacts</string> <string name="none_blocked_desktop">Aucun contact bloqué</string> <string name="autocrypt_correct_desktop">Installation d\'Autocrypt transférée avec succès !</string> <string name="autocrypt_incorrect_desktop">Code de configuration incorrect. Veuillez réessayer.</string> <string name="create_chat_error_desktop">La discussion n’a pas pu être créée.</string> <string name="ask_delete_chat_desktop">Effacer cette discussion?</string> <string name="email_validation_failed_desktop">Adresse email requise.</string> <string name="forget_login_confirmation_desktop">Effacer cette connexion? Tout sera supprimé, y compris vos réglages bout en bout, contacts, discussions, messages et les fichiers multimédia. Cette action ne peut pas être défaite.</string> <string name="me_desktop">moi</string> <string name="in_this_group_desktop">Membres du groupe</string> <string name="not_in_this_group_desktop">Membres de groupe possibles (pas dans groupe)</string> <string name="message_detail_sent_desktop">envoyé</string> <string name="message_detail_received_desktop">reçu</string> <string name="message_detail_from_desktop">de</string> <string name="message_detail_to_desktop">à</string> <string name="menu.view.developer.open.log.folder">Ouvrir le dossier Journal</string> <string name="menu.view.developer.open.current.log.file">Ouvrir le fichier Journal courant</string> <string name="user_location_permission_explanation">DeltaChat a besoin de l\'autorisation de votre position pour l\'afficher et la partager.</string> <!-- accessibility, the general idea is to use the normal strings for accessibility hints wherever possible --> <string name="a11y_delivery_status_error">État de la livraison : Erreur</string> <string name="a11y_encryption_padlock">Cadenas de chiffrement</string> <string name="a11y_delivery_status_sending">État de la livraison : envoi en cours</string> <string name="a11y_delivery_status_draft">État de la livraison : Brouillon</string> <string name="a11y_delivery_status_delivered">État de la livraison : Livré</string> <string name="a11y_delivery_status_read">État de la livraison : Lu</string> <string name="a11y_delivery_status_invalid">Statut de livraison non valide</string> <string name="a11y_message_context_menu_btn_label">Actions sur le message</string> <string name="a11y_background_preview_label">Aperçu de l\'arrière-plan</string> <!-- iOS permissions, copy from "deltachat-ios/Info.plist", which is used on missing translations in "deltachat-ios/LANG.lproj/InfoPlist.strings" --> <string name="InfoPlist_NSCameraUsageDescription">Delta Chat utilise votre appareil photo pour pendre et envoyer des photos et des vidéos et pour scanner des QR codes.</string> <string name="InfoPlist_NSContactsUsageDescription">Delta Chat utilise vos contacts pour montrer une liste d\'adresses de courriel auxquels vous pouvez écrire. Delta Chat n\'a pas de serveur, vos contacts ne sont envoyés nulle part.</string> <string name="InfoPlist_NSMicrophoneUsageDescription">Delta Chat utilise votre micro pour enregistrer et envoyer des messages vocaux et des vidéos avec bande son.</string> <string name="InfoPlist_NSPhotoLibraryUsageDescription">Delta Chat vous laisse choisir quelles photos envoyer de votre photothèque. </string> <string name="perm_enable_bg_reminder_title">Touchez ici pour recevoir des messages pendant que DeltaChat est en arrière-plan.</string> <string name="perm_enable_bg_already_done">Vous avez déjà autorisé Delta Chat à recevoir des messages en arrière plan.\n\nSi vous ne les recevez toujours pas en arrière plan, vérifiez les paramètres de votre système.</string> </resources>
{ "pile_set_name": "Github" }
>>===== MODE =====>> citation <<===== MODE =====<< >>===== RESULT =====>> >>[0] number 10 <<===== RESULT =====<< >>===== CITATIONS =====>> [ [ { "citationID": "CITATION-1", "citationItems": [ { "id": "ITEM-1" } ], "properties": { "noteIndex": 1 } }, [], [] ] ] <<===== CITATIONS =====<< >>===== CSL =====>> <style xmlns="http://purl.org/net/xbiblio/csl" class="note" version="1.1mlz1"> <info> <title>Test fixture</title> <id>http://citationstyles.org/tests/fixture</id> <link href="http://citationstyles.org/tests/fixture" rel="self"/> <link href="http://citationstyles.org/documentation/text" rel="documentation"/> <category citation-format="author-date"/> <updated>2014-04-30T13:19:38+00:00</updated> <rights license="http://creativecommons.org/licenses/by-sa/3.0/">This work is licensed under a Creative Commons Attribution-ShareAlike 3.0 License</rights> </info> <locale> <terms> <term name="number"> <single>number</single> <multiple>numbers</multiple> </term> </terms> </locale> <citation> <layout> <group delimiter=" "> <label variable="number"/> <number variable="number"/> </group> </layout> </citation> </style> <<===== CSL =====<< >>===== INPUT =====>> [ { "id": "ITEM-1", "type": "book", "number": "10" } ] <<===== INPUT =====<<
{ "pile_set_name": "Github" }
/************************************************************************/ /* */ /* Copyright 2009 by Ullrich Koethe */ /* */ /* This file is part of the VIGRA computer vision library. */ /* The VIGRA Website is */ /* http://hci.iwr.uni-heidelberg.de/vigra/ */ /* Please direct questions, bug reports, and contributions to */ /* [email protected] or */ /* [email protected] */ /* */ /* Permission is hereby granted, free of charge, to any person */ /* obtaining a copy of this software and associated documentation */ /* files (the "Software"), to deal in the Software without */ /* restriction, including without limitation the rights to use, */ /* copy, modify, merge, publish, distribute, sublicense, and/or */ /* sell copies of the Software, and to permit persons to whom the */ /* Software is furnished to do so, subject to the following */ /* conditions: */ /* */ /* The above copyright notice and this permission notice shall be */ /* included in all copies or substantial portions of the */ /* Software. */ /* */ /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND */ /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES */ /* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND */ /* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT */ /* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, */ /* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING */ /* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR */ /* OTHER DEALINGS IN THE SOFTWARE. */ /* */ /************************************************************************/ #define PY_ARRAY_UNIQUE_SYMBOL vigranumpyfilters_PyArray_API #define NO_IMPORT_ARRAY #include <vigra/numpy_array.hxx> #include <vigra/numpy_array_converters.hxx> #include <vigra/multi_convolution.hxx> #include <vigra/boundarytensor.hxx> #include <vigra/orientedtensorfilters.hxx> #include <vigra/tensorutilities.hxx> #include <vigra/multi_tensorutilities.hxx> #include "vigranumpyscaleparam.hxx" namespace python = boost::python; namespace vigra { template < class VoxelType, unsigned int ndim > NumpyAnyArray pythonGaussianGradientND(NumpyArray<ndim, Singleband<VoxelType> > array, python::object sigma, NumpyArray<ndim, TinyVector<VoxelType, (int)ndim> > res = NumpyArray<ndim, TinyVector<VoxelType, (int)ndim> >(), python::object sigma_d = python::object(0.0), python::object step_size = python::object(1.0), double window_size = 0.0, python::object roi = python::object()) { pythonScaleParam<ndim> params(sigma, sigma_d, step_size, "gaussianGradient"); params.permuteLikewise(array); std::string description("Gaussian gradient, scale="); description += asString(sigma); ConvolutionOptions<ndim> opt(params().filterWindowSize(window_size)); if(roi != python::object()) { typedef typename MultiArrayShape<ndim>::type Shape; Shape start = array.permuteLikewise(python::extract<Shape>(roi[0])()); Shape stop = array.permuteLikewise(python::extract<Shape>(roi[1])()); opt.subarray(start, stop); res.reshapeIfEmpty(array.taggedShape().resize(stop-start).setChannelDescription(description), "gaussianGradient(): Output array has wrong shape."); } else { res.reshapeIfEmpty(array.taggedShape().setChannelDescription(description), "gaussianGradient(): Output array has wrong shape."); } { PyAllowThreads _pythread; gaussianGradientMultiArray(srcMultiArrayRange(array), destMultiArray(res), opt); } return res; } template < class VoxelType, unsigned int ndim > NumpyAnyArray pythonGaussianGradientMagnitudeND(NumpyArray<ndim, Multiband<VoxelType> > array, const ConvolutionOptions<ndim-1> & opt, NumpyArray<ndim-1, Singleband<VoxelType> > res = NumpyArray<ndim-1, Singleband<VoxelType> >()) { using namespace vigra::functor; static const int sdim = ndim - 1; std::string description("Gaussian gradient magnitude"); typedef typename MultiArrayShape<sdim>::type Shape; Shape tmpShape(array.shape().begin()); if(opt.to_point != Shape()) tmpShape = opt.to_point-opt.from_point; res.reshapeIfEmpty(array.taggedShape().resize(tmpShape).setChannelDescription(description), "gaussianGradientMagnitude(): Output array has wrong shape."); res.init(VoxelType()); { PyAllowThreads _pythread; MultiArray<sdim, TinyVector<VoxelType, sdim> > grad(tmpShape); for(int k=0; k<array.shape(sdim); ++k) { MultiArrayView<sdim, VoxelType, StridedArrayTag> barray = array.bindOuter(k); gaussianGradientMultiArray(srcMultiArrayRange(barray), destMultiArray(grad), opt); combineTwoMultiArrays(srcMultiArrayRange(grad), srcMultiArray(res), destMultiArray(res), squaredNorm(Arg1())+Arg2()); } transformMultiArray(srcMultiArrayRange(res), destMultiArray(res), sqrt(Arg1())); } return res; } template < class PixelType> NumpyAnyArray pythonRieszTransformOfLOG2D(NumpyArray<2, Singleband<PixelType> > image, double scale, unsigned int xorder, unsigned int yorder, NumpyArray<2, Singleband<PixelType> > res = NumpyArray<2, Singleband<PixelType> >()) { res.reshapeIfEmpty(image.taggedShape().setChannelDescription("Riesz transform"), "rieszTransformOfLOG2D(): Output array has wrong shape."); { PyAllowThreads _pythread; rieszTransformOfLOG(srcImageRange(image), destImage(res), scale, xorder, yorder); } return res; } template < class VoxelType, unsigned int ndim > NumpyAnyArray pythonGaussianGradientMagnitudeND(NumpyArray<ndim, Multiband<VoxelType> > volume, const ConvolutionOptions<ndim-1> & opt, NumpyArray<ndim, Multiband<VoxelType> > res = NumpyArray<ndim, Multiband<VoxelType> >()) { using namespace vigra::functor; static const int sdim = ndim - 1; std::string description("channel-wise Gaussian gradient magnitude"); typedef typename MultiArrayShape<sdim>::type Shape; Shape tmpShape(volume.shape().begin()); if(opt.to_point != Shape()) tmpShape = opt.to_point-opt.from_point; res.reshapeIfEmpty(volume.taggedShape().resize(tmpShape).setChannelDescription(description), "gaussianGradientMagnitude(): Output array has wrong shape."); { PyAllowThreads _pythread; MultiArray<sdim, TinyVector<VoxelType, sdim> > grad(tmpShape); for(int k=0; k<volume.shape(sdim); ++k) { MultiArrayView<sdim, VoxelType, StridedArrayTag> bvolume = volume.bindOuter(k); MultiArrayView<sdim, VoxelType, StridedArrayTag> bres = res.bindOuter(k); gaussianGradientMultiArray(srcMultiArrayRange(bvolume), destMultiArray(grad), opt); transformMultiArray(srcMultiArrayRange(grad), destMultiArray(bres), norm(Arg1())); } } return res; } template < class VoxelType, unsigned int ndim > NumpyAnyArray pythonGaussianGradientMagnitude(NumpyArray<ndim, Multiband<VoxelType> > volume, python::object sigma, bool accumulate, NumpyAnyArray res, python::object sigma_d, python::object step_size, double window_size = 0.0, python::object roi = python::object()) { pythonScaleParam<ndim - 1> params(sigma, sigma_d, step_size, "gaussianGradientMagnitude"); params.permuteLikewise(volume); ConvolutionOptions<ndim-1> opt(params().filterWindowSize(window_size)); typedef typename MultiArrayShape<ndim - 1>::type Shape; if(roi != python::object()) { opt.subarray(volume.permuteLikewise(python::extract<Shape>(roi[0])()), volume.permuteLikewise(python::extract<Shape>(roi[1])())); } else { opt.subarray(Shape(), Shape(volume.shape().begin())); } return accumulate ? pythonGaussianGradientMagnitudeND(volume, opt, NumpyArray<ndim-1, Singleband<VoxelType> >(res)) : pythonGaussianGradientMagnitudeND(volume, opt, NumpyArray<ndim, Multiband<VoxelType> >(res)); } template < class VoxelType, unsigned int ndim > NumpyAnyArray pythonSymmetricGradientND(NumpyArray<ndim, Singleband<VoxelType> > volume, NumpyArray<ndim, TinyVector<VoxelType, (int)ndim> > res=python::object(), python::object step_size = python::object(1.0), python::object roi = python::object()) { pythonScaleParam<ndim> params(python::object(0.0), python::object(0.0), step_size, "symmetricGradient"); params.permuteLikewise(volume); ConvolutionOptions<ndim> opt(params()); if(roi != python::object()) { typedef typename MultiArrayShape<ndim>::type Shape; Shape start = volume.permuteLikewise(python::extract<Shape>(roi[0])()); Shape stop = volume.permuteLikewise(python::extract<Shape>(roi[1])()); opt.subarray(start, stop); res.reshapeIfEmpty(volume.taggedShape().resize(stop-start).setChannelDescription("symmetric gradient"), "symmetricGradient(): Output array has wrong shape."); } else { res.reshapeIfEmpty(volume.taggedShape().setChannelDescription("symmetric gradient"), "symmetricGradient(): Output array has wrong shape."); } { PyAllowThreads _pythread; symmetricGradientMultiArray(srcMultiArrayRange(volume), destMultiArray(res), opt); } return res; } template < class VoxelType, unsigned int N > NumpyAnyArray pythonHessianOfGaussianND(NumpyArray<N, Singleband<VoxelType> > array, python::object sigma, NumpyArray<N, TinyVector<VoxelType, int(N*(N+1)/2)> > res= NumpyArray<N, TinyVector<VoxelType, int(N*(N+1)/2)> >(), python::object sigma_d = python::object(0.0), python::object step_size = python::object(1.0), double window_size = 0.0, python::object roi = python::object()) { std::string description("Hessian of Gaussian (flattened upper triangular matrix), scale="); description += asString(sigma); pythonScaleParam<N> params(sigma, sigma_d, step_size, "hessianOfGaussian"); params.permuteLikewise(array); ConvolutionOptions<N> opt(params().filterWindowSize(window_size)); if(roi != python::object()) { typedef typename MultiArrayShape<N>::type Shape; Shape start = array.permuteLikewise(python::extract<Shape>(roi[0])()); Shape stop = array.permuteLikewise(python::extract<Shape>(roi[1])()); opt.subarray(start, stop); res.reshapeIfEmpty(array.taggedShape().resize(stop-start).setChannelDescription(description), "hessianOfGaussian(): Output array has wrong shape."); } else { res.reshapeIfEmpty(array.taggedShape().setChannelDescription(description), "hessianOfGaussian(): Output array has wrong shape."); } { PyAllowThreads _pythread; hessianOfGaussianMultiArray(srcMultiArrayRange(array), destMultiArray(res), opt); } return res; } #if 0 // FIXME: this is probably no longer needed thanks to axistags template < class VoxelType> NumpyAnyArray pythonHessianOfGaussian3D(NumpyArray<3, Singleband<VoxelType> > volume, python::object sigma, NumpyArray<3, TinyVector<VoxelType, 6> > res=NumpyArray<3, TinyVector<VoxelType, 6> >(), python::object sigma_d = python::object(0.0), python::object step_size = python::object(1.0)) { pythonScaleParam<3> params(sigma, sigma_d, step_size, "hessianOfGaussian"); params.permuteLikewise(volume); std::string description("Hessian of Gaussian (flattened upper triangular matrix), scale="); description += asString(sigma); res.reshapeIfEmpty(volume.taggedShape().setChannelDescription(description), "hessianOfGaussian(): Output array has wrong shape."); { PyAllowThreads _pythread; hessianOfGaussianMultiArray(srcMultiArrayRange(volume), destMultiArray(res), params()); } return res; } template < class PixelType> NumpyAnyArray pythonHessianOfGaussian2D(NumpyArray<2, Singleband<PixelType> > image, python::object sigma, NumpyArray<2, TinyVector<PixelType, 3> > res=NumpyArray<2, TinyVector<PixelType, 3> >(), python::object sigma_d = python::object(0.0), python::object step_size = python::object(1.0)) { pythonScaleParam<2> params(sigma, sigma_d, step_size, "hessianOfGaussian"); params.permuteLikewise(image); std::string description("Hessian of Gaussian (flattened upper triangular matrix), scale="); description += asString(sigma); res.reshapeIfEmpty(image.taggedShape().setChannelDescription(description), "hessianOfGaussian(): Output array has wrong shape."); { PyAllowThreads _pythread; hessianOfGaussianMultiArray(srcMultiArrayRange(image), destMultiArray(res), params()); } return res; } #endif template <class PixelType, unsigned int N> NumpyAnyArray pythonStructureTensor(NumpyArray<N, Multiband<PixelType> > array, python::object innerScale, python::object outerScale, NumpyArray<N-1, TinyVector<PixelType, int(N*(N-1)/2)> > res=NumpyArray<N-1, TinyVector<PixelType, int(N*(N-1)/2)> >(), python::object sigma_d = python::object(0.0), python::object step_size = python::object(1.0), double window_size = 0.0, python::object roi = python::object()) { using namespace vigra::functor; static const int sdim = N - 1; std::string description("structure tensor (flattened upper triangular matrix), inner scale="); description += asString(innerScale) + ", outer scale=" + asString(outerScale); pythonScaleParam<N-1> params(innerScale, sigma_d, step_size, outerScale, "structureTensor"); params.permuteLikewise(array); ConvolutionOptions<N-1> opt(params().filterWindowSize(window_size)); if(roi != python::object()) { typedef typename MultiArrayShape<N-1>::type Shape; Shape start = array.permuteLikewise(python::extract<Shape>(roi[0])()); Shape stop = array.permuteLikewise(python::extract<Shape>(roi[1])()); opt.subarray(start, stop); res.reshapeIfEmpty(array.taggedShape().resize(stop-start).setChannelDescription(description), "structureTensor(): Output array has wrong shape."); } else { res.reshapeIfEmpty(array.taggedShape().setChannelDescription(description), "structureTensor(): Output array has wrong shape."); } { PyAllowThreads _pythread; MultiArrayView<sdim, PixelType, StridedArrayTag> band = array.bindOuter(0); structureTensorMultiArray(srcMultiArrayRange(band), destMultiArray(res), opt); if(array.shape(sdim) > 1) { MultiArray<sdim, TinyVector<PixelType, int(N*(N-1)/2)> > st(res.shape()); for(int b=1; b<array.shape(sdim); ++b) { MultiArrayView<sdim, PixelType, StridedArrayTag> band = array.bindOuter(b); structureTensorMultiArray(srcMultiArrayRange(band), destMultiArray(st), opt); combineTwoMultiArrays(srcMultiArrayRange(res), srcMultiArray(st), destMultiArray(res), Arg1() + Arg2()); } } } return res; } template < class SrcPixelType, typename DestPixelType > NumpyAnyArray pythonBoundaryTensor2D(NumpyArray<2, Singleband<SrcPixelType> > image, double scale, NumpyArray<2, TinyVector<DestPixelType, 3> > res = NumpyArray<2, TinyVector<DestPixelType, 3> >()) { std::string description("boundary tensor (flattened upper triangular matrix), scale="); description += asString(scale); res.reshapeIfEmpty(image.taggedShape().setChannelDescription(description), "boundaryTensor2D(): Output array has wrong shape."); { PyAllowThreads _pythread; boundaryTensor(srcImageRange(image), destImage(res), scale); } return res; } template < class SrcPixelType, typename DestPixelType > NumpyAnyArray pythonTensorEigenRepresentation2D(NumpyArray<2, TinyVector<SrcPixelType, 3> > image, NumpyArray<2, TinyVector<DestPixelType, 3> > res = python::object()) { std::string description("tensor eigen representation (ev1, ev2, angle)"); res.reshapeIfEmpty(image.taggedShape().setChannelDescription(description), "tensorEigenRepresentation2D(): Output array has wrong shape."); { PyAllowThreads _pythread; tensorEigenRepresentation(srcImageRange(image), destImage(res)); } return res; } // FIXME: generalize to handle non-interleaved representations template < class PixelType, unsigned int N > NumpyAnyArray pythonVectorToTensor(NumpyArray<N, TinyVector<PixelType, int(N)> > array, NumpyArray<N, TinyVector<PixelType, int(N*(N+1)/2)> > res = python::object()) { std::string description("outer product tensor (flattened upper triangular matrix)"); res.reshapeIfEmpty(array.taggedShape().setChannelDescription(description), "vectorToTensor(): Output array has wrong shape."); { PyAllowThreads _pythread; vectorToTensorMultiArray(srcMultiArrayRange(array), destMultiArray(res)); } return res; } // FIXME: generalize to handle non-interleaved representations template < class PixelType, unsigned int N > NumpyAnyArray pythonTensorTrace(NumpyArray<N, TinyVector<PixelType, int(N*(N+1)/2)> > array, NumpyArray<N, Singleband<PixelType> > res = python::object()) { std::string description("tensor trace"); res.reshapeIfEmpty(array.taggedShape().setChannelDescription(description), "tensorTrace(): Output array has wrong shape."); { PyAllowThreads _pythread; tensorTraceMultiArray(srcMultiArrayRange(array), destMultiArray(res)); } return res; } // FIXME: generalize to handle non-interleaved representations template < class PixelType, unsigned int N > NumpyAnyArray pythonTensorDeterminant(NumpyArray<N, TinyVector<PixelType, int(N*(N+1)/2)> > array, NumpyArray<N, Singleband<PixelType> > res = python::object()) { std::string description("tensor determinant"); res.reshapeIfEmpty(array.taggedShape().setChannelDescription(description), "tensorDeterminant(): Output array has wrong shape."); { PyAllowThreads _pythread; tensorDeterminantMultiArray(srcMultiArrayRange(array), destMultiArray(res)); } return res; } // FIXME: generalize to handle non-interleaved representations template < class PixelType, unsigned int N > NumpyAnyArray pythonTensorEigenvalues(NumpyArray<N, TinyVector<PixelType, int(N*(N+1)/2)> > array, NumpyArray<N, TinyVector<PixelType, int(N)> > res = python::object()) { std::string description("tensor eigenvalues"); res.reshapeIfEmpty(array.taggedShape().setChannelDescription(description), "tensorEigenvalues(): Output array has wrong shape."); { PyAllowThreads _pythread; tensorEigenvaluesMultiArray(srcMultiArrayRange(array), destMultiArray(res)); } return res; } template < class SrcPixelType, typename DestPixelType > NumpyAnyArray pythonHourGlassFilter2D(NumpyArray<2, TinyVector<SrcPixelType, 3> > image, double sigma, double rho, NumpyArray<2, TinyVector<DestPixelType, 3> > res = python::object()) { std::string description("hourglass tensor (flattened upper triangular matrix), scale="); description += asString(sigma) + ", rho=" + asString(rho); res.reshapeIfEmpty(image.taggedShape().setChannelDescription(description), "hourGlassFilter2D(): Output array has wrong shape."); { PyAllowThreads _pythread; hourGlassFilter(srcImageRange(image), destImage(res), sigma, rho); } return res; } void defineTensor() { using namespace python; docstring_options doc_options(true, true, false); def("gaussianGradient", registerConverters(&pythonGaussianGradientND<float,2>), (arg("image"), arg("sigma"), arg("out")=python::object(), arg("sigma_d")=0.0, arg("step_size")=1.0, arg("window_size")=0.0, arg("roi")=python::object()), "Calculate the gradient vector by means of a 1st derivative of " "Gaussian filter at the given scale for a 2D scalar image.\n\n" "If 'sigma' is a single value, an isotropic filter at this scale is " "applied (i.e., each dimension is filtered in the same way). " "If 'sigma' is a tuple or list of values, the amount of smoothing " "will be different for each spatial dimension.\n" "The optional 'sigma_d' (single, tuple, or list) denotes the resolution standard deviation " "per axis, the optional 'step_size' (single, tuple, or list) the distance between two adjacent " "pixels for each dimension. " "The length of the tuples or lists must be equal to the " "number of spatial dimensions.\n\n" "'window_size' and 'roi' have the same meaning as in :func:`gaussianSmoothing`.\n\n" "For details see gaussianGradientMultiArray_ and ConvolutionOptions_ in the vigra C++ documentation.\n"); def("gaussianGradient", registerConverters(&pythonGaussianGradientND<float,3>), (arg("volume"), arg("sigma"), arg("out")=python::object(), arg("sigma_d")=0.0, arg("step_size")=1.0, arg("window_size")=0.0, arg("roi")=python::object()), "Likewise for a 3D scalar volume.\n"); def("rieszTransformOfLOG2D", registerConverters(&pythonRieszTransformOfLOG2D<float>), (arg("image"), arg("scale"), arg("xorder"), arg("yorder"),arg("out")=python::object()), "Calculate Riesz transforms of the Laplacian of Gaussian.\n\n" "For details see rieszTransformOfLOG_ in the vigra C++ documentation.\n"); def("gaussianGradientMagnitude", registerConverters(&pythonGaussianGradientMagnitude<float,3>), (arg("image"), arg("sigma"), arg("accumulate")=true, arg("out")=python::object(), arg("sigma_d")=0.0, arg("step_size")=1.0, arg("window_size")=0.0, arg("roi")=python::object()), "Calculate the gradient magnitude by means of a 1st derivative of " "Gaussian filter at the given scale for a 2D scalar or multiband image.\n" "If 'accumulate' is True (the default), the gradients are accumulated (in the " "L2-norm sense) over all channels of a multi-channel array. Otherwise, " "a separate gradient magnitude is computed for each channel.\n\n" "If 'sigma' is a single value, an isotropic filter at this scale is " "applied (i.e., each dimension is filtered in the same way). " "If 'sigma' is a tuple or list of values, the amount of smoothing " "will be different for each spatial dimension.\n" "The optional 'sigma_d' (single, tuple, or list) denotes the resolution standard deviation " "per axis, the optional 'step_size' (single, tuple, or list) the distance between two adjacent " "pixels for each dimension. " "The length of the tuples or lists must be equal to the " "number of spatial dimensions.\n\n" "'window_size' and 'roi' have the same meaning as in :func:`gaussianSmoothing`.\n\n" "For details see gaussianGradientMultiArray_ and ConvolutionOptions_ in the vigra C++ documentation.\n"); def("gaussianGradientMagnitude", registerConverters(&pythonGaussianGradientMagnitude<float,4>), (arg("volume"), arg("sigma"), arg("accumulate")=true, arg("out")=python::object(), arg("sigma_d")=0.0, arg("step_size")=1.0, arg("window_size")=0.0, arg("roi")=python::object()), "Likewise for a 3D scalar or multiband volume.\n"); def("symmetricGradient", registerConverters(&pythonSymmetricGradientND<float,2>), (arg("image"), arg("out")=python::object(), arg("step_size")=1.0, arg("roi")=python::object()), "Calculate gradient of a scalar 2D image using symmetric difference filters." "\n" "The optional tuple or list 'step_size' denotes the distance between two " "adjacent pixels for each dimension; its length must be equal to the " "number of spatial dimensions.\n\n" "'roi' has the same meaning as in :func:`gaussianSmoothing`.\n\n" "For details see symmetricGradientMultiArray_ and ConvolutionOptions_ in the vigra C++ documentation.\n"); def("symmetricGradient", registerConverters(&pythonSymmetricGradientND<float,3>), (arg("volume"), arg("out")=python::object(), arg("step_size")=1.0, arg("roi")=python::object()), "Likewise for a 3D scalar volume.\n"); // FIXME: is this function still needed? def("hessianOfGaussian2D", registerConverters(&pythonHessianOfGaussianND<float, 2>), (arg("image"), arg("sigma"), arg("out")=python::object(), arg("sigma_d")=0.0, arg("step_size")=1.0, arg("window_size")=0.0, arg("roi")=python::object()), "Calculate the Hessian matrix by means of a derivative of " "Gaussian filters at the given scale for a 2D scalar image.\n" "\n" "If 'sigma' is a single value, an isotropic filter at this scale is " "applied (i.e., each dimension is filtered in the same way). " "If 'sigma' is a tuple or list of values, the amount of smoothing " "will be different for each spatial dimension.\n" "The optional 'sigma_d' (single, tuple, or list) denotes the resolution standard deviation " "per axis, the optional 'step_size' (single, tuple, or list) the distance between two adjacent " "pixels for each dimension. " "The length of the tuples or lists must be equal to the " "number of spatial dimensions.\n\n" "'window_size' and 'roi' have the same meaning as in :func:`gaussianSmoothing`.\n\n" "For details see hessianOfGaussianMultiArray_ and ConvolutionOptions_ in the vigra C++ documentation.\n"); // FIXME: is this function still needed? def("hessianOfGaussian3D", registerConverters(&pythonHessianOfGaussianND<float, 3>), (arg("volume"), arg("sigma"), arg("out")=python::object(), arg("sigma_d")=0.0, arg("step_size")=1.0, arg("window_size")=0.0, arg("roi")=python::object()), "Calculate the Hessian matrix by means of a derivative of " "Gaussian filters at the given scale for a 3D scalar image.\n" "\n" "For details see hessianOfGaussianMultiArray_ in the vigra C++ documentation.\n"); def("hessianOfGaussian", registerConverters(&pythonHessianOfGaussianND<float,2>), (arg("image"), arg("sigma"), arg("out")=python::object(), arg("sigma_d")=0.0, arg("step_size")=1.0, arg("window_size")=0.0, arg("roi")=python::object()), "Calculate the Hessian matrix by means of a derivative of " "Gaussian filters at the given scale for a 2D scalar image.\n" "\n" "If 'sigma' is a single value, an isotropic filter at this scale is " "applied (i.e., each dimension is filtered in the same way). " "If 'sigma' is a tuple or list of values, the amount of smoothing " "will be different for each spatial dimension.\n" "The optional 'sigma_d' (single, tuple, or list) denotes the resolution standard deviation " "per axis, the optional 'step_size' (single, tuple, or list) the distance between two adjacent " "pixels for each dimension. " "The length of the tuples or lists must be equal to the " "number of spatial dimensions.\n\n" "'window_size' and 'roi' have the same meaning as in :func:`gaussianSmoothing`.\n\n" "For details see hessianOfGaussianMultiArray_ in the vigra C++ documentation.\n"); def("hessianOfGaussian", registerConverters(&pythonHessianOfGaussianND<float,3>), (arg("volume"), arg("sigma"), arg("out")=python::object(), arg("sigma_d")=0.0, arg("step_size")=1.0, arg("window_size")=0.0, arg("roi")=python::object()), "Likewise for a 3D scalar or multiband volume.\n"); def("structureTensor", registerConverters(&pythonStructureTensor<float,3>), (arg("image"), arg("innerScale"), arg("outerScale"), arg("out")=python::object(), arg("sigma_d")=0.0, arg("step_size")=1.0, arg("window_size")=0.0, arg("roi")=python::object()), "Calculate the structure tensor of an image by means of Gaussian " "(derivative) filters at the given scales. If the input has multiple channels, " "the structure tensors of each channel are added to get the result.\n\n" "If 'innerScale' and 'outerScale' are single values, " "isotropic filters at these scales are " "applied (i.e., each dimension is filtered in the same way). " "If 'innerScale' and / or 'outerScale' are are tuples or lists of " "values, the amount of smoothing " "will be different for each spatial dimension.\n" "The optional 'sigma_d' (single, tuple, or list) denotes the resolution standard deviation " "per axis, the optional 'step_size' (single, tuple, or list) the distance between two adjacent " "pixels for each dimension. " "The length of the tuples or lists must be equal to the " "number of spatial dimensions.\n\n" "'window_size' and 'roi' have the same meaning as in :func:`gaussianSmoothing`.\n\n" "For details see structureTensorMultiArray_ and ConvolutionOptions_ in the vigra C++ documentation.\n"); def("structureTensor", registerConverters(&pythonStructureTensor<float,4>), (arg("volume"), arg("innerScale"), arg("outerScale"), arg("out")=python::object(), arg("sigma_d")=0.0, arg("step_size")=1.0, arg("window_size")=0.0, arg("roi")=python::object()), "Likewise for a 3D scalar or multiband volume.\n"); def("boundaryTensor2D", registerConverters(&pythonBoundaryTensor2D<float, float>), (arg("image"), arg("scale"),arg("out")=python::object()), "Calculate the boundary tensor for a scalar valued 2D image." "For details see boundaryTensor_ in the vigra C++ documentation.\n"); /** FIXME: Export of Kernel2D before def("gradientEnergyTensor2D", registerConverters(&gradientEnergyTensor2D<float,float>), (arg("image"), arg("derivKernel"), arg("smoothKernel"),arg("out")=python::object())); */ def("tensorEigenRepresentation2D", registerConverters(&pythonTensorEigenRepresentation2D<float,float>), (arg("image"),arg("out")=python::object()), "Calculate eigen representation of a symmetric 2x2 tensor.\n\n" "For details see tensorEigenRepresentation_ in the vigra C++ documentation.\n" ); def("vectorToTensor", registerConverters(&pythonVectorToTensor<float,2>), (arg("image"),arg("out")=python::object()), "Turn a 2D vector valued image (e.g. the gradient image) into " "a tensor image by computing the outer product in every pixel.\n\n" "For details see vectorToTensorMultiArray_ in the vigra C++ documentation.\n"); def("vectorToTensor", registerConverters(&pythonVectorToTensor<float,3>), (arg("volume"),arg("out")=python::object()), "Likewise for a 3D vector-valued volume.\n"); def("tensorTrace", registerConverters(&pythonTensorTrace<float,2>), (arg("image"),arg("out")=python::object()), "Calculate the trace of a 2x2 tensor image.\n\n" "For details see tensorTraceMultiArray_ in the vigra C++ documentation.\n"); def("tensorTrace", registerConverters(&pythonTensorTrace<float,3>), (arg("volume"),arg("out")=python::object()), "Likewise for a 3x3 tensor volume.\n"); def("tensorDeterminant", registerConverters(&pythonTensorDeterminant<float,2>), (arg("image"),arg("out")=python::object()), "Calculate the determinant of a 2x2 tensor image.\n\n" "For details see tensorDeterminantMultiArray_ in the vigra C++ documentation.\n"); def("tensorDeterminant", registerConverters(&pythonTensorDeterminant<float,3>), (arg("volume"),arg("out")=python::object()), "Likewise for a 3x3 tensor volume.\n"); def("tensorEigenvalues", registerConverters(&pythonTensorEigenvalues<float,2>), (arg("image"),arg("out")=python::object()), "Calculate the eigenvalues in each pixel/voxel of a 2x2 tensor image.\n\n" "For details see tensorEigenvaluesMultiArray_ in the vigra C++ documentation.\n"); def("tensorEigenvalues", registerConverters(&pythonTensorEigenvalues<float,3>), (arg("volume"),arg("out")=python::object()), "Likewise for a 3x3 tensor volume.\n"); def("hourGlassFilter2D", registerConverters(&pythonHourGlassFilter2D<float,float>), (arg("image"), arg("sigma"), arg("rho"),arg("out")=python::object()), "Anisotropic tensor smoothing with the hourglass filter. \n\n" "For details see hourGlassFilter_ in the vigra C++ documentation.\n"); /* Wee, tons of errors here def("ellipticGaussian2D", registerConverters(&ellipticGaussian2D<float,float>), (arg("image"), arg("sigmamax"), arg("sigmamin"),arg("out")=python::object())); def("ellipticGaussian2D", registerConverters(&ellipticGaussian2D<float,float>), (arg("image"), arg("sigmamax"), arg("sigmamin"),arg("out")=python::object())); */ } } // namespace vigra
{ "pile_set_name": "Github" }
// (C) Copyright John Maddock 2005. // Use, modification and distribution are subject to the Boost Software License, // Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt). // // See http://www.boost.org/libs/type_traits for most recent version including documentation. #ifndef BOOST_TT_IS_SIGNED_HPP_INCLUDED #define BOOST_TT_IS_SIGNED_HPP_INCLUDED #include <boost/type_traits/is_integral.hpp> #include <boost/type_traits/remove_cv.hpp> #include <boost/type_traits/is_enum.hpp> #include <boost/type_traits/detail/ice_or.hpp> // should be the last #include #include <boost/type_traits/detail/bool_trait_def.hpp> namespace boost { #if !defined( __CODEGEARC__ ) namespace detail{ #if !(defined(__EDG_VERSION__) && __EDG_VERSION__ <= 238) && !defined(BOOST_NO_INCLASS_MEMBER_INITIALIZATION) template <class T> struct is_signed_values { // // Note that we cannot use BOOST_STATIC_CONSTANT here, using enum's // rather than "real" static constants simply doesn't work or give // the correct answer. // typedef typename remove_cv<T>::type no_cv_t; static const no_cv_t minus_one = (static_cast<no_cv_t>(-1)); static const no_cv_t zero = (static_cast<no_cv_t>(0)); }; template <class T> struct is_signed_helper { typedef typename remove_cv<T>::type no_cv_t; BOOST_STATIC_CONSTANT(bool, value = (!(::boost::detail::is_signed_values<T>::minus_one > boost::detail::is_signed_values<T>::zero))); }; template <bool integral_type> struct is_signed_select_helper { template <class T> struct rebind { typedef is_signed_helper<T> type; }; }; template <> struct is_signed_select_helper<false> { template <class T> struct rebind { typedef false_type type; }; }; template <class T> struct is_signed_imp { typedef is_signed_select_helper< ::boost::type_traits::ice_or< ::boost::is_integral<T>::value, ::boost::is_enum<T>::value>::value > selector; typedef typename selector::template rebind<T> binder; typedef typename binder::type type; BOOST_STATIC_CONSTANT(bool, value = type::value); }; #else template <class T> struct is_signed_imp : public false_type{}; template <> struct is_signed_imp<signed char> : public true_type{}; template <> struct is_signed_imp<const signed char> : public true_type{}; template <> struct is_signed_imp<volatile signed char> : public true_type{}; template <> struct is_signed_imp<const volatile signed char> : public true_type{}; template <> struct is_signed_imp<short> : public true_type{}; template <> struct is_signed_imp<const short> : public true_type{}; template <> struct is_signed_imp<volatile short> : public true_type{}; template <> struct is_signed_imp<const volatile short> : public true_type{}; template <> struct is_signed_imp<int> : public true_type{}; template <> struct is_signed_imp<const int> : public true_type{}; template <> struct is_signed_imp<volatile int> : public true_type{}; template <> struct is_signed_imp<const volatile int> : public true_type{}; template <> struct is_signed_imp<long> : public true_type{}; template <> struct is_signed_imp<const long> : public true_type{}; template <> struct is_signed_imp<volatile long> : public true_type{}; template <> struct is_signed_imp<const volatile long> : public true_type{}; #ifdef BOOST_HAS_LONG_LONG template <> struct is_signed_imp<long long> : public true_type{}; template <> struct is_signed_imp<const long long> : public true_type{}; template <> struct is_signed_imp<volatile long long> : public true_type{}; template <> struct is_signed_imp<const volatile long long> : public true_type{}; #endif #if defined(CHAR_MIN) && (CHAR_MIN != 0) template <> struct is_signed_imp<char> : public true_type{}; template <> struct is_signed_imp<const char> : public true_type{}; template <> struct is_signed_imp<volatile char> : public true_type{}; template <> struct is_signed_imp<const volatile char> : public true_type{}; #endif #if defined(WCHAR_MIN) && (WCHAR_MIN != 0) template <> struct is_signed_imp<wchar_t> : public true_type{}; template <> struct is_signed_imp<const wchar_t> : public true_type{}; template <> struct is_signed_imp<volatile wchar_t> : public true_type{}; template <> struct is_signed_imp<const volatile wchar_t> : public true_type{}; #endif #endif } #endif // !defined( __CODEGEARC__ ) #if defined( __CODEGEARC__ ) BOOST_TT_AUX_BOOL_TRAIT_DEF1(is_signed,T,__is_signed(T)) #else BOOST_TT_AUX_BOOL_TRAIT_DEF1(is_signed,T,::boost::detail::is_signed_imp<T>::value) #endif } // namespace boost #include <boost/type_traits/detail/bool_trait_undef.hpp> #endif // BOOST_TT_IS_MEMBER_FUNCTION_POINTER_HPP_INCLUDED
{ "pile_set_name": "Github" }
// Package natives provides native packages via a virtual filesystem. // // See documentation of parseAndAugment in github.com/gopherjs/gopherjs/build // for explanation of behavior used to augment the native packages using the files // in src subfolder. package natives //go:generate vfsgendev -source="github.com/gopherjs/gopherjs/compiler/natives".FS
{ "pile_set_name": "Github" }
<?php /* * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * This software consists of voluntary contributions made by many individuals * and is licensed under the MIT license. For more information, see * <http://www.doctrine-project.org>. */ namespace Doctrine\Common\Annotations; /** * Description of AnnotationException * * @since 2.0 * @author Benjamin Eberlei <[email protected]> * @author Guilherme Blanco <[email protected]> * @author Jonathan Wage <[email protected]> * @author Roman Borschel <[email protected]> */ class AnnotationException extends \Exception { /** * Creates a new AnnotationException describing a Syntax error. * * @param string $message Exception message * @return AnnotationException */ public static function syntaxError($message) { return new self('[Syntax Error] ' . $message); } /** * Creates a new AnnotationException describing a Semantical error. * * @param string $message Exception message * @return AnnotationException */ public static function semanticalError($message) { return new self('[Semantical Error] ' . $message); } /** * Creates a new AnnotationException describing a constant semantical error. * * @since 2.3 * @param string $identifier * @param string $context * @return AnnotationException */ public static function semanticalErrorConstants($identifier, $context = null) { return self::semanticalError(sprintf( "Couldn't find constant %s%s", $identifier, $context ? ", $context." : "." )); } /** * Creates a new AnnotationException describing an error which occurred during * the creation of the annotation. * * @since 2.2 * @param string $message * @return AnnotationException */ public static function creationError($message) { return new self('[Creation Error] ' . $message); } /** * Creates a new AnnotationException describing an type error of an attribute. * * @since 2.2 * @param string $attributeName * @param string $annotationName * @param string $context * @param string $expected * @param mixed $actual * @return AnnotationException */ public static function typeError($attributeName, $annotationName, $context, $expected, $actual) { return new self(sprintf( '[Type Error] Attribute "%s" of @%s declared on %s expects %s, but got %s.', $attributeName, $annotationName, $context, $expected, is_object($actual) ? 'an instance of '.get_class($actual) : gettype($actual) )); } /** * Creates a new AnnotationException describing an required error of an attribute. * * @since 2.2 * @param string $attributeName * @param string $annotationName * @param string $context * @param string $expected * @return AnnotationException */ public static function requiredError($attributeName, $annotationName, $context, $expected) { return new self(sprintf( '[Type Error] Attribute "%s" of @%s declared on %s expects %s. This value should not be null.', $attributeName, $annotationName, $context, $expected )); } }
{ "pile_set_name": "Github" }
;--------------------------------------- ; ; animation cel data ; hand_data:: byte both + 0 byte 0b00000000 byte no_animation byte no_cont byte 216+down,216+down,184 byte 0b11100000 word hand_data_a - hand_data word hand_data_b - hand_data word hand_data_c - hand_data hand_data_a: byte 0x05, 0x30, 0x07, 0x45, 0x00, 0x00 byte run,0x80+6 byte 2 byte 9 byte run,4,37 byte 137 byte 137 byte 106 byte 101 byte 101 byte run,13,149 byte run,3,37 byte run,3,9 byte run,3,2 byte run,0x80+15 byte 170 byte 86 byte run,6,85 byte 149 byte 101 byte 89 byte run,9,86 byte 89 byte 89 byte 86 byte run,10,85 byte 149 byte 149 byte 37 byte 37 byte 9 byte 2 byte run,0x80+5 byte 10 byte 37 byte 149 byte 149 byte 85 byte 85 byte 149 byte 101 byte 89 byte 86 byte run,6,85 byte 149 byte 106 byte run,5,85 byte 86 byte 86 byte 89 byte 89 byte 169 byte run,14,85 byte 169 byte 2 byte run,0x80+2 byte 2 byte 9 byte 137 byte 101 byte run,3,89 byte 86 byte 86 byte run,3,85 byte 149 byte 149 byte 170 byte run,4,149 byte run,3,85 byte 106 byte 149 byte 149 byte run,20,85 byte 165 byte 9 byte 2 byte 168 byte 86 byte run,7,85 byte run,4,149 byte 165 byte 90 byte run,7,85 byte 169 byte 86 byte run,24,85 hand_data_b: byte 0x05, 0x2e, 0x0c, 0x45, 0x00, 0x00 byte run,3,128 byte run,3,96 byte run,6,88 byte 168 byte 86 byte run,32,85 byte run,0x80+14 byte 128 byte 96 byte 88 byte 86 byte run,28,85 byte run,0x80+18 byte 128 byte 128 byte 96 byte 96 byte run,3,88 byte run,3,86 byte run,18,85 byte run,0x80+28 byte 128 byte 128 byte 96 byte 96 byte run,3,88 byte run,3,86 byte run,8,85 byte run,0x80+38 byte run,3,128 byte run,3,96 byte 88 byte 88 hand_data_c: byte 0x03, 0x1f, 0x04, 0x30, 0x00, 0x00 byte 10 byte 37 byte run,6,149 byte run,3,37 byte 9 byte 9 byte 2 byte 2 byte run,0x80+17 byte 128 byte 96 byte 96 byte 88 byte 88 byte 86 byte 86 byte run,7,85 byte 149 byte 149 byte 37 byte 37 byte 9 byte 9 byte 2 byte 2 byte run,0x80+16 byte 128 byte 128 byte 96 byte 96 byte 88 byte 88 byte 86 byte 86 byte run,7,85 byte 149 byte 149 byte 37 byte 37 byte 9 byte 9 byte 2 byte 2
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html> <head> <meta charset="UTF-8"> <title>Nested Tabs - jQuery EasyUI Demo</title> <link rel="stylesheet" type="text/css" href="../../themes/default/easyui.css"> <link rel="stylesheet" type="text/css" href="../../themes/icon.css"> <link rel="stylesheet" type="text/css" href="../demo.css"> <script type="text/javascript" src="../../jquery.min.js"></script> <script type="text/javascript" src="../../jquery.easyui.min.js"></script> </head> <body> <h2>Nested Tabs</h2> <div class="demo-info"> <div class="demo-tip icon-tip"></div> <div>The tab panel can contain sub tabs or other components.</div> </div> <div style="margin:10px 0;"></div> <div class="easyui-tabs" data-options="tools:'#tab-tools'" style="width:700px;height:250px"> <div title="Sub Tabs" style="padding:10px;"> <div class="easyui-tabs" data-options="fit:true,plain:true"> <div title="Title1" style="padding:10px;">Content 1</div> <div title="Title2" style="padding:10px;">Content 2</div> <div title="Title3" style="padding:10px;">Content 3</div> </div> </div> <div title="Ajax" data-options="href:'../tabs/_content.html',closable:true" style="padding:10px"></div> <div title="Iframe" data-options="closable:true" style="overflow:hidden"> <iframe scrolling="yes" frameborder="0" src="http://www.jeasyui.com/forum/index.php" style="width:100%;height:100%;"></iframe> </div> <div title="DataGrid" data-options="closable:true" style="padding:10px"> <table class="easyui-datagrid" data-options="fit:true,singleSelect:true,rownumbers:true"> <thead> <tr> <th data-options="field:'f1',width:100">Title1</th> <th data-options="field:'f2',width:100">Title2</th> <th data-options="field:'f3',width:100">Title3</th> </tr> </thead> <tbody> <tr> <td>d11</td> <td>d12</td> <td>d13</td> </tr> <tr> <td>d21</td> <td>d22</td> <td>d23</td> </tr> </tbody> </table> </div> </div> </body> </html>
{ "pile_set_name": "Github" }
require 'find' require 'pathname' require 'rake' require 'rspec/core/rake_task' desc "Verify puppet templates" task :template_verify do pwd = ENV["PWD"] erb_file_paths = [] Find.find(pwd) do |path| erb_file_paths << path if path =~ /.*\.erb$/ end exit_code = 0 erb_file_paths.each do |erbfile| pwdpath = Pathname.new(pwd) pn = Pathname.new(erbfile) rel_path = pn.relative_path_from(pwdpath) result = `erb -P -x -T '-' #{erbfile} | ruby -c` puts "Verifying #{rel_path}.... #{result}" if $?.exitstatus != 0 exit_code = 1 end end exit exit_code end
{ "pile_set_name": "Github" }
/* NUGET: BEGIN LICENSE TEXT * * Microsoft grants you the right to use these script files for the sole * purpose of either: (i) interacting through your browser with the Microsoft * website or online service, subject to the applicable licensing or use * terms; or (ii) using the files as included with a Microsoft product subject * to that product's license terms. Microsoft reserves all other rights to the * files not expressly granted by Microsoft, whether by implication, estoppel * or otherwise. Insofar as a script file is dual licensed under GPL, * Microsoft neither took the code under GPL nor distributes it thereunder but * under the terms set out in this paragraph. All notices and licenses * below are for informational purposes only. * * Copyright (c) Faruk Ates, Paul Irish, Alex Sexton; http://www.modernizr.com/license/ * * Includes matchMedia polyfill; Copyright (c) 2010 Filament Group, Inc; http://opensource.org/licenses/MIT * * Includes material adapted from ES5-shim https://github.com/kriskowal/es5-shim/blob/master/es5-shim.js; Copyright 2009-2012 by contributors; http://opensource.org/licenses/MIT * * Includes material from css-support; Copyright (c) 2005-2012 Diego Perini; https://github.com/dperini/css-support/blob/master/LICENSE * * NUGET: END LICENSE TEXT */ /*! * Modernizr v2.6.2 * www.modernizr.com * * Copyright (c) Faruk Ates, Paul Irish, Alex Sexton * Available under the BSD and MIT licenses: www.modernizr.com/license/ */ /* * Modernizr tests which native CSS3 and HTML5 features are available in * the current UA and makes the results available to you in two ways: * as properties on a global Modernizr object, and as classes on the * <html> element. This information allows you to progressively enhance * your pages with a granular level of control over the experience. * * Modernizr has an optional (not included) conditional resource loader * called Modernizr.load(), based on Yepnope.js (yepnopejs.com). * To get a build that includes Modernizr.load(), as well as choosing * which tests to include, go to www.modernizr.com/download/ * * Authors Faruk Ates, Paul Irish, Alex Sexton * Contributors Ryan Seddon, Ben Alman */ window.Modernizr = (function( window, document, undefined ) { var version = '2.6.2', Modernizr = {}, /*>>cssclasses*/ // option for enabling the HTML classes to be added enableClasses = true, /*>>cssclasses*/ docElement = document.documentElement, /** * Create our "modernizr" element that we do most feature tests on. */ mod = 'modernizr', modElem = document.createElement(mod), mStyle = modElem.style, /** * Create the input element for various Web Forms feature tests. */ inputElem /*>>inputelem*/ = document.createElement('input') /*>>inputelem*/ , /*>>smile*/ smile = ':)', /*>>smile*/ toString = {}.toString, // TODO :: make the prefixes more granular /*>>prefixes*/ // List of property values to set for css tests. See ticket #21 prefixes = ' -webkit- -moz- -o- -ms- '.split(' '), /*>>prefixes*/ /*>>domprefixes*/ // Following spec is to expose vendor-specific style properties as: // elem.style.WebkitBorderRadius // and the following would be incorrect: // elem.style.webkitBorderRadius // Webkit ghosts their properties in lowercase but Opera & Moz do not. // Microsoft uses a lowercase `ms` instead of the correct `Ms` in IE8+ // erik.eae.net/archives/2008/03/10/21.48.10/ // More here: github.com/Modernizr/Modernizr/issues/issue/21 omPrefixes = 'Webkit Moz O ms', cssomPrefixes = omPrefixes.split(' '), domPrefixes = omPrefixes.toLowerCase().split(' '), /*>>domprefixes*/ /*>>ns*/ ns = {'svg': 'http://www.w3.org/2000/svg'}, /*>>ns*/ tests = {}, inputs = {}, attrs = {}, classes = [], slice = classes.slice, featureName, // used in testing loop /*>>teststyles*/ // Inject element with style element and some CSS rules injectElementWithStyles = function( rule, callback, nodes, testnames ) { var style, ret, node, docOverflow, div = document.createElement('div'), // After page load injecting a fake body doesn't work so check if body exists body = document.body, // IE6 and 7 won't return offsetWidth or offsetHeight unless it's in the body element, so we fake it. fakeBody = body || document.createElement('body'); if ( parseInt(nodes, 10) ) { // In order not to give false positives we create a node for each test // This also allows the method to scale for unspecified uses while ( nodes-- ) { node = document.createElement('div'); node.id = testnames ? testnames[nodes] : mod + (nodes + 1); div.appendChild(node); } } // <style> elements in IE6-9 are considered 'NoScope' elements and therefore will be removed // when injected with innerHTML. To get around this you need to prepend the 'NoScope' element // with a 'scoped' element, in our case the soft-hyphen entity as it won't mess with our measurements. // msdn.microsoft.com/en-us/library/ms533897%28VS.85%29.aspx // Documents served as xml will throw if using &shy; so use xml friendly encoded version. See issue #277 style = ['&#173;','<style id="s', mod, '">', rule, '</style>'].join(''); div.id = mod; // IE6 will false positive on some tests due to the style element inside the test div somehow interfering offsetHeight, so insert it into body or fakebody. // Opera will act all quirky when injecting elements in documentElement when page is served as xml, needs fakebody too. #270 (body ? div : fakeBody).innerHTML += style; fakeBody.appendChild(div); if ( !body ) { //avoid crashing IE8, if background image is used fakeBody.style.background = ''; //Safari 5.13/5.1.4 OSX stops loading if ::-webkit-scrollbar is used and scrollbars are visible fakeBody.style.overflow = 'hidden'; docOverflow = docElement.style.overflow; docElement.style.overflow = 'hidden'; docElement.appendChild(fakeBody); } ret = callback(div, rule); // If this is done after page load we don't want to remove the body so check if body exists if ( !body ) { fakeBody.parentNode.removeChild(fakeBody); docElement.style.overflow = docOverflow; } else { div.parentNode.removeChild(div); } return !!ret; }, /*>>teststyles*/ /*>>mq*/ // adapted from matchMedia polyfill // by Scott Jehl and Paul Irish // gist.github.com/786768 testMediaQuery = function( mq ) { var matchMedia = window.matchMedia || window.msMatchMedia; if ( matchMedia ) { return matchMedia(mq).matches; } var bool; injectElementWithStyles('@media ' + mq + ' { #' + mod + ' { position: absolute; } }', function( node ) { bool = (window.getComputedStyle ? getComputedStyle(node, null) : node.currentStyle)['position'] == 'absolute'; }); return bool; }, /*>>mq*/ /*>>hasevent*/ // // isEventSupported determines if a given element supports the given event // kangax.github.com/iseventsupported/ // // The following results are known incorrects: // Modernizr.hasEvent("webkitTransitionEnd", elem) // false negative // Modernizr.hasEvent("textInput") // in Webkit. github.com/Modernizr/Modernizr/issues/333 // ... isEventSupported = (function() { var TAGNAMES = { 'select': 'input', 'change': 'input', 'submit': 'form', 'reset': 'form', 'error': 'img', 'load': 'img', 'abort': 'img' }; function isEventSupported( eventName, element ) { element = element || document.createElement(TAGNAMES[eventName] || 'div'); eventName = 'on' + eventName; // When using `setAttribute`, IE skips "unload", WebKit skips "unload" and "resize", whereas `in` "catches" those var isSupported = eventName in element; if ( !isSupported ) { // If it has no `setAttribute` (i.e. doesn't implement Node interface), try generic element if ( !element.setAttribute ) { element = document.createElement('div'); } if ( element.setAttribute && element.removeAttribute ) { element.setAttribute(eventName, ''); isSupported = is(element[eventName], 'function'); // If property was created, "remove it" (by setting value to `undefined`) if ( !is(element[eventName], 'undefined') ) { element[eventName] = undefined; } element.removeAttribute(eventName); } } element = null; return isSupported; } return isEventSupported; })(), /*>>hasevent*/ // TODO :: Add flag for hasownprop ? didn't last time // hasOwnProperty shim by kangax needed for Safari 2.0 support _hasOwnProperty = ({}).hasOwnProperty, hasOwnProp; if ( !is(_hasOwnProperty, 'undefined') && !is(_hasOwnProperty.call, 'undefined') ) { hasOwnProp = function (object, property) { return _hasOwnProperty.call(object, property); }; } else { hasOwnProp = function (object, property) { /* yes, this can give false positives/negatives, but most of the time we don't care about those */ return ((property in object) && is(object.constructor.prototype[property], 'undefined')); }; } // Adapted from ES5-shim https://github.com/kriskowal/es5-shim/blob/master/es5-shim.js // es5.github.com/#x15.3.4.5 if (!Function.prototype.bind) { Function.prototype.bind = function bind(that) { var target = this; if (typeof target != "function") { throw new TypeError(); } var args = slice.call(arguments, 1), bound = function () { if (this instanceof bound) { var F = function(){}; F.prototype = target.prototype; var self = new F(); var result = target.apply( self, args.concat(slice.call(arguments)) ); if (Object(result) === result) { return result; } return self; } else { return target.apply( that, args.concat(slice.call(arguments)) ); } }; return bound; }; } /** * setCss applies given styles to the Modernizr DOM node. */ function setCss( str ) { mStyle.cssText = str; } /** * setCssAll extrapolates all vendor-specific css strings. */ function setCssAll( str1, str2 ) { return setCss(prefixes.join(str1 + ';') + ( str2 || '' )); } /** * is returns a boolean for if typeof obj is exactly type. */ function is( obj, type ) { return typeof obj === type; } /** * contains returns a boolean for if substr is found within str. */ function contains( str, substr ) { return !!~('' + str).indexOf(substr); } /*>>testprop*/ // testProps is a generic CSS / DOM property test. // In testing support for a given CSS property, it's legit to test: // `elem.style[styleName] !== undefined` // If the property is supported it will return an empty string, // if unsupported it will return undefined. // We'll take advantage of this quick test and skip setting a style // on our modernizr element, but instead just testing undefined vs // empty string. // Because the testing of the CSS property names (with "-", as // opposed to the camelCase DOM properties) is non-portable and // non-standard but works in WebKit and IE (but not Gecko or Opera), // we explicitly reject properties with dashes so that authors // developing in WebKit or IE first don't end up with // browser-specific content by accident. function testProps( props, prefixed ) { for ( var i in props ) { var prop = props[i]; if ( !contains(prop, "-") && mStyle[prop] !== undefined ) { return prefixed == 'pfx' ? prop : true; } } return false; } /*>>testprop*/ // TODO :: add testDOMProps /** * testDOMProps is a generic DOM property test; if a browser supports * a certain property, it won't return undefined for it. */ function testDOMProps( props, obj, elem ) { for ( var i in props ) { var item = obj[props[i]]; if ( item !== undefined) { // return the property name as a string if (elem === false) return props[i]; // let's bind a function if (is(item, 'function')){ // default to autobind unless override return item.bind(elem || obj); } // return the unbound function or obj or value return item; } } return false; } /*>>testallprops*/ /** * testPropsAll tests a list of DOM properties we want to check against. * We specify literally ALL possible (known and/or likely) properties on * the element including the non-vendor prefixed one, for forward- * compatibility. */ function testPropsAll( prop, prefixed, elem ) { var ucProp = prop.charAt(0).toUpperCase() + prop.slice(1), props = (prop + ' ' + cssomPrefixes.join(ucProp + ' ') + ucProp).split(' '); // did they call .prefixed('boxSizing') or are we just testing a prop? if(is(prefixed, "string") || is(prefixed, "undefined")) { return testProps(props, prefixed); // otherwise, they called .prefixed('requestAnimationFrame', window[, elem]) } else { props = (prop + ' ' + (domPrefixes).join(ucProp + ' ') + ucProp).split(' '); return testDOMProps(props, prefixed, elem); } } /*>>testallprops*/ /** * Tests * ----- */ // The *new* flexbox // dev.w3.org/csswg/css3-flexbox tests['flexbox'] = function() { return testPropsAll('flexWrap'); }; // The *old* flexbox // www.w3.org/TR/2009/WD-css3-flexbox-20090723/ tests['flexboxlegacy'] = function() { return testPropsAll('boxDirection'); }; // On the S60 and BB Storm, getContext exists, but always returns undefined // so we actually have to call getContext() to verify // github.com/Modernizr/Modernizr/issues/issue/97/ tests['canvas'] = function() { var elem = document.createElement('canvas'); return !!(elem.getContext && elem.getContext('2d')); }; tests['canvastext'] = function() { return !!(Modernizr['canvas'] && is(document.createElement('canvas').getContext('2d').fillText, 'function')); }; // webk.it/70117 is tracking a legit WebGL feature detect proposal // We do a soft detect which may false positive in order to avoid // an expensive context creation: bugzil.la/732441 tests['webgl'] = function() { return !!window.WebGLRenderingContext; }; /* * The Modernizr.touch test only indicates if the browser supports * touch events, which does not necessarily reflect a touchscreen * device, as evidenced by tablets running Windows 7 or, alas, * the Palm Pre / WebOS (touch) phones. * * Additionally, Chrome (desktop) used to lie about its support on this, * but that has since been rectified: crbug.com/36415 * * We also test for Firefox 4 Multitouch Support. * * For more info, see: modernizr.github.com/Modernizr/touch.html */ tests['touch'] = function() { var bool; if(('ontouchstart' in window) || window.DocumentTouch && document instanceof DocumentTouch) { bool = true; } else { injectElementWithStyles(['@media (',prefixes.join('touch-enabled),('),mod,')','{#modernizr{top:9px;position:absolute}}'].join(''), function( node ) { bool = node.offsetTop === 9; }); } return bool; }; // geolocation is often considered a trivial feature detect... // Turns out, it's quite tricky to get right: // // Using !!navigator.geolocation does two things we don't want. It: // 1. Leaks memory in IE9: github.com/Modernizr/Modernizr/issues/513 // 2. Disables page caching in WebKit: webk.it/43956 // // Meanwhile, in Firefox < 8, an about:config setting could expose // a false positive that would throw an exception: bugzil.la/688158 tests['geolocation'] = function() { return 'geolocation' in navigator; }; tests['postmessage'] = function() { return !!window.postMessage; }; // Chrome incognito mode used to throw an exception when using openDatabase // It doesn't anymore. tests['websqldatabase'] = function() { return !!window.openDatabase; }; // Vendors had inconsistent prefixing with the experimental Indexed DB: // - Webkit's implementation is accessible through webkitIndexedDB // - Firefox shipped moz_indexedDB before FF4b9, but since then has been mozIndexedDB // For speed, we don't test the legacy (and beta-only) indexedDB tests['indexedDB'] = function() { return !!testPropsAll("indexedDB", window); }; // documentMode logic from YUI to filter out IE8 Compat Mode // which false positives. tests['hashchange'] = function() { return isEventSupported('hashchange', window) && (document.documentMode === undefined || document.documentMode > 7); }; // Per 1.6: // This used to be Modernizr.historymanagement but the longer // name has been deprecated in favor of a shorter and property-matching one. // The old API is still available in 1.6, but as of 2.0 will throw a warning, // and in the first release thereafter disappear entirely. tests['history'] = function() { return !!(window.history && history.pushState); }; tests['draganddrop'] = function() { var div = document.createElement('div'); return ('draggable' in div) || ('ondragstart' in div && 'ondrop' in div); }; // FF3.6 was EOL'ed on 4/24/12, but the ESR version of FF10 // will be supported until FF19 (2/12/13), at which time, ESR becomes FF17. // FF10 still uses prefixes, so check for it until then. // for more ESR info, see: mozilla.org/en-US/firefox/organizations/faq/ tests['websockets'] = function() { return 'WebSocket' in window || 'MozWebSocket' in window; }; // css-tricks.com/rgba-browser-support/ tests['rgba'] = function() { // Set an rgba() color and check the returned value setCss('background-color:rgba(150,255,150,.5)'); return contains(mStyle.backgroundColor, 'rgba'); }; tests['hsla'] = function() { // Same as rgba(), in fact, browsers re-map hsla() to rgba() internally, // except IE9 who retains it as hsla setCss('background-color:hsla(120,40%,100%,.5)'); return contains(mStyle.backgroundColor, 'rgba') || contains(mStyle.backgroundColor, 'hsla'); }; tests['multiplebgs'] = function() { // Setting multiple images AND a color on the background shorthand property // and then querying the style.background property value for the number of // occurrences of "url(" is a reliable method for detecting ACTUAL support for this! setCss('background:url(https://),url(https://),red url(https://)'); // If the UA supports multiple backgrounds, there should be three occurrences // of the string "url(" in the return value for elemStyle.background return (/(url\s*\(.*?){3}/).test(mStyle.background); }; // this will false positive in Opera Mini // github.com/Modernizr/Modernizr/issues/396 tests['backgroundsize'] = function() { return testPropsAll('backgroundSize'); }; tests['borderimage'] = function() { return testPropsAll('borderImage'); }; // Super comprehensive table about all the unique implementations of // border-radius: muddledramblings.com/table-of-css3-border-radius-compliance tests['borderradius'] = function() { return testPropsAll('borderRadius'); }; // WebOS unfortunately false positives on this test. tests['boxshadow'] = function() { return testPropsAll('boxShadow'); }; // FF3.0 will false positive on this test tests['textshadow'] = function() { return document.createElement('div').style.textShadow === ''; }; tests['opacity'] = function() { // Browsers that actually have CSS Opacity implemented have done so // according to spec, which means their return values are within the // range of [0.0,1.0] - including the leading zero. setCssAll('opacity:.55'); // The non-literal . in this regex is intentional: // German Chrome returns this value as 0,55 // github.com/Modernizr/Modernizr/issues/#issue/59/comment/516632 return (/^0.55$/).test(mStyle.opacity); }; // Note, Android < 4 will pass this test, but can only animate // a single property at a time // daneden.me/2011/12/putting-up-with-androids-bullshit/ tests['cssanimations'] = function() { return testPropsAll('animationName'); }; tests['csscolumns'] = function() { return testPropsAll('columnCount'); }; tests['cssgradients'] = function() { /** * For CSS Gradients syntax, please see: * webkit.org/blog/175/introducing-css-gradients/ * developer.mozilla.org/en/CSS/-moz-linear-gradient * developer.mozilla.org/en/CSS/-moz-radial-gradient * dev.w3.org/csswg/css3-images/#gradients- */ var str1 = 'background-image:', str2 = 'gradient(linear,left top,right bottom,from(#9f9),to(white));', str3 = 'linear-gradient(left top,#9f9, white);'; setCss( // legacy webkit syntax (FIXME: remove when syntax not in use anymore) (str1 + '-webkit- '.split(' ').join(str2 + str1) + // standard syntax // trailing 'background-image:' prefixes.join(str3 + str1)).slice(0, -str1.length) ); return contains(mStyle.backgroundImage, 'gradient'); }; tests['cssreflections'] = function() { return testPropsAll('boxReflect'); }; tests['csstransforms'] = function() { return !!testPropsAll('transform'); }; tests['csstransforms3d'] = function() { var ret = !!testPropsAll('perspective'); // Webkit's 3D transforms are passed off to the browser's own graphics renderer. // It works fine in Safari on Leopard and Snow Leopard, but not in Chrome in // some conditions. As a result, Webkit typically recognizes the syntax but // will sometimes throw a false positive, thus we must do a more thorough check: if ( ret && 'webkitPerspective' in docElement.style ) { // Webkit allows this media query to succeed only if the feature is enabled. // `@media (transform-3d),(-webkit-transform-3d){ ... }` injectElementWithStyles('@media (transform-3d),(-webkit-transform-3d){#modernizr{left:9px;position:absolute;height:3px;}}', function( node, rule ) { ret = node.offsetLeft === 9 && node.offsetHeight === 3; }); } return ret; }; tests['csstransitions'] = function() { return testPropsAll('transition'); }; /*>>fontface*/ // @font-face detection routine by Diego Perini // javascript.nwbox.com/CSSSupport/ // false positives: // WebOS github.com/Modernizr/Modernizr/issues/342 // WP7 github.com/Modernizr/Modernizr/issues/538 tests['fontface'] = function() { var bool; injectElementWithStyles('@font-face {font-family:"font";src:url("https://")}', function( node, rule ) { var style = document.getElementById('smodernizr'), sheet = style.sheet || style.styleSheet, cssText = sheet ? (sheet.cssRules && sheet.cssRules[0] ? sheet.cssRules[0].cssText : sheet.cssText || '') : ''; bool = /src/i.test(cssText) && cssText.indexOf(rule.split(' ')[0]) === 0; }); return bool; }; /*>>fontface*/ // CSS generated content detection tests['generatedcontent'] = function() { var bool; injectElementWithStyles(['#',mod,'{font:0/0 a}#',mod,':after{content:"',smile,'";visibility:hidden;font:3px/1 a}'].join(''), function( node ) { bool = node.offsetHeight >= 3; }); return bool; }; // These tests evaluate support of the video/audio elements, as well as // testing what types of content they support. // // We're using the Boolean constructor here, so that we can extend the value // e.g. Modernizr.video // true // Modernizr.video.ogg // 'probably' // // Codec values from : github.com/NielsLeenheer/html5test/blob/9106a8/index.html#L845 // thx to NielsLeenheer and zcorpan // Note: in some older browsers, "no" was a return value instead of empty string. // It was live in FF3.5.0 and 3.5.1, but fixed in 3.5.2 // It was also live in Safari 4.0.0 - 4.0.4, but fixed in 4.0.5 tests['video'] = function() { var elem = document.createElement('video'), bool = false; // IE9 Running on Windows Server SKU can cause an exception to be thrown, bug #224 try { if ( bool = !!elem.canPlayType ) { bool = new Boolean(bool); bool.ogg = elem.canPlayType('video/ogg; codecs="theora"') .replace(/^no$/,''); // Without QuickTime, this value will be `undefined`. github.com/Modernizr/Modernizr/issues/546 bool.h264 = elem.canPlayType('video/mp4; codecs="avc1.42E01E"') .replace(/^no$/,''); bool.webm = elem.canPlayType('video/webm; codecs="vp8, vorbis"').replace(/^no$/,''); } } catch(e) { } return bool; }; tests['audio'] = function() { var elem = document.createElement('audio'), bool = false; try { if ( bool = !!elem.canPlayType ) { bool = new Boolean(bool); bool.ogg = elem.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,''); bool.mp3 = elem.canPlayType('audio/mpeg;') .replace(/^no$/,''); // Mimetypes accepted: // developer.mozilla.org/En/Media_formats_supported_by_the_audio_and_video_elements // bit.ly/iphoneoscodecs bool.wav = elem.canPlayType('audio/wav; codecs="1"') .replace(/^no$/,''); bool.m4a = ( elem.canPlayType('audio/x-m4a;') || elem.canPlayType('audio/aac;')) .replace(/^no$/,''); } } catch(e) { } return bool; }; // In FF4, if disabled, window.localStorage should === null. // Normally, we could not test that directly and need to do a // `('localStorage' in window) && ` test first because otherwise Firefox will // throw bugzil.la/365772 if cookies are disabled // Also in iOS5 Private Browsing mode, attempting to use localStorage.setItem // will throw the exception: // QUOTA_EXCEEDED_ERRROR DOM Exception 22. // Peculiarly, getItem and removeItem calls do not throw. // Because we are forced to try/catch this, we'll go aggressive. // Just FWIW: IE8 Compat mode supports these features completely: // www.quirksmode.org/dom/html5.html // But IE8 doesn't support either with local files tests['localstorage'] = function() { try { localStorage.setItem(mod, mod); localStorage.removeItem(mod); return true; } catch(e) { return false; } }; tests['sessionstorage'] = function() { try { sessionStorage.setItem(mod, mod); sessionStorage.removeItem(mod); return true; } catch(e) { return false; } }; tests['webworkers'] = function() { return !!window.Worker; }; tests['applicationcache'] = function() { return !!window.applicationCache; }; // Thanks to Erik Dahlstrom tests['svg'] = function() { return !!document.createElementNS && !!document.createElementNS(ns.svg, 'svg').createSVGRect; }; // specifically for SVG inline in HTML, not within XHTML // test page: paulirish.com/demo/inline-svg tests['inlinesvg'] = function() { var div = document.createElement('div'); div.innerHTML = '<svg/>'; return (div.firstChild && div.firstChild.namespaceURI) == ns.svg; }; // SVG SMIL animation tests['smil'] = function() { return !!document.createElementNS && /SVGAnimate/.test(toString.call(document.createElementNS(ns.svg, 'animate'))); }; // This test is only for clip paths in SVG proper, not clip paths on HTML content // demo: srufaculty.sru.edu/david.dailey/svg/newstuff/clipPath4.svg // However read the comments to dig into applying SVG clippaths to HTML content here: // github.com/Modernizr/Modernizr/issues/213#issuecomment-1149491 tests['svgclippaths'] = function() { return !!document.createElementNS && /SVGClipPath/.test(toString.call(document.createElementNS(ns.svg, 'clipPath'))); }; /*>>webforms*/ // input features and input types go directly onto the ret object, bypassing the tests loop. // Hold this guy to execute in a moment. function webforms() { /*>>input*/ // Run through HTML5's new input attributes to see if the UA understands any. // We're using f which is the <input> element created early on // Mike Taylr has created a comprehensive resource for testing these attributes // when applied to all input types: // miketaylr.com/code/input-type-attr.html // spec: www.whatwg.org/specs/web-apps/current-work/multipage/the-input-element.html#input-type-attr-summary // Only input placeholder is tested while textarea's placeholder is not. // Currently Safari 4 and Opera 11 have support only for the input placeholder // Both tests are available in feature-detects/forms-placeholder.js Modernizr['input'] = (function( props ) { for ( var i = 0, len = props.length; i < len; i++ ) { attrs[ props[i] ] = !!(props[i] in inputElem); } if (attrs.list){ // safari false positive's on datalist: webk.it/74252 // see also github.com/Modernizr/Modernizr/issues/146 attrs.list = !!(document.createElement('datalist') && window.HTMLDataListElement); } return attrs; })('autocomplete autofocus list placeholder max min multiple pattern required step'.split(' ')); /*>>input*/ /*>>inputtypes*/ // Run through HTML5's new input types to see if the UA understands any. // This is put behind the tests runloop because it doesn't return a // true/false like all the other tests; instead, it returns an object // containing each input type with its corresponding true/false value // Big thanks to @miketaylr for the html5 forms expertise. miketaylr.com/ Modernizr['inputtypes'] = (function(props) { for ( var i = 0, bool, inputElemType, defaultView, len = props.length; i < len; i++ ) { inputElem.setAttribute('type', inputElemType = props[i]); bool = inputElem.type !== 'text'; // We first check to see if the type we give it sticks.. // If the type does, we feed it a textual value, which shouldn't be valid. // If the value doesn't stick, we know there's input sanitization which infers a custom UI if ( bool ) { inputElem.value = smile; inputElem.style.cssText = 'position:absolute;visibility:hidden;'; if ( /^range$/.test(inputElemType) && inputElem.style.WebkitAppearance !== undefined ) { docElement.appendChild(inputElem); defaultView = document.defaultView; // Safari 2-4 allows the smiley as a value, despite making a slider bool = defaultView.getComputedStyle && defaultView.getComputedStyle(inputElem, null).WebkitAppearance !== 'textfield' && // Mobile android web browser has false positive, so must // check the height to see if the widget is actually there. (inputElem.offsetHeight !== 0); docElement.removeChild(inputElem); } else if ( /^(search|tel)$/.test(inputElemType) ){ // Spec doesn't define any special parsing or detectable UI // behaviors so we pass these through as true // Interestingly, opera fails the earlier test, so it doesn't // even make it here. } else if ( /^(url|email)$/.test(inputElemType) ) { // Real url and email support comes with prebaked validation. bool = inputElem.checkValidity && inputElem.checkValidity() === false; } else { // If the upgraded input compontent rejects the :) text, we got a winner bool = inputElem.value != smile; } } inputs[ props[i] ] = !!bool; } return inputs; })('search tel url email datetime date month week time datetime-local number range color'.split(' ')); /*>>inputtypes*/ } /*>>webforms*/ // End of test definitions // ----------------------- // Run through all tests and detect their support in the current UA. // todo: hypothetically we could be doing an array of tests and use a basic loop here. for ( var feature in tests ) { if ( hasOwnProp(tests, feature) ) { // run the test, throw the return value into the Modernizr, // then based on that boolean, define an appropriate className // and push it into an array of classes we'll join later. featureName = feature.toLowerCase(); Modernizr[featureName] = tests[feature](); classes.push((Modernizr[featureName] ? '' : 'no-') + featureName); } } /*>>webforms*/ // input tests need to run. Modernizr.input || webforms(); /*>>webforms*/ /** * addTest allows the user to define their own feature tests * the result will be added onto the Modernizr object, * as well as an appropriate className set on the html element * * @param feature - String naming the feature * @param test - Function returning true if feature is supported, false if not */ Modernizr.addTest = function ( feature, test ) { if ( typeof feature == 'object' ) { for ( var key in feature ) { if ( hasOwnProp( feature, key ) ) { Modernizr.addTest( key, feature[ key ] ); } } } else { feature = feature.toLowerCase(); if ( Modernizr[feature] !== undefined ) { // we're going to quit if you're trying to overwrite an existing test // if we were to allow it, we'd do this: // var re = new RegExp("\\b(no-)?" + feature + "\\b"); // docElement.className = docElement.className.replace( re, '' ); // but, no rly, stuff 'em. return Modernizr; } test = typeof test == 'function' ? test() : test; if (typeof enableClasses !== "undefined" && enableClasses) { docElement.className += ' ' + (test ? '' : 'no-') + feature; } Modernizr[feature] = test; } return Modernizr; // allow chaining. }; // Reset modElem.cssText to nothing to reduce memory footprint. setCss(''); modElem = inputElem = null; /*>>shiv*/ /*! HTML5 Shiv v3.6.1 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed */ ;(function(window, document) { /*jshint evil:true */ /** Preset options */ var options = window.html5 || {}; /** Used to skip problem elements */ var reSkip = /^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i; /** Not all elements can be cloned in IE **/ var saveClones = /^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i; /** Detect whether the browser supports default html5 styles */ var supportsHtml5Styles; /** Name of the expando, to work with multiple documents or to re-shiv one document */ var expando = '_html5shiv'; /** The id for the the documents expando */ var expanID = 0; /** Cached data for each document */ var expandoData = {}; /** Detect whether the browser supports unknown elements */ var supportsUnknownElements; (function() { try { var a = document.createElement('a'); a.innerHTML = '<xyz></xyz>'; //if the hidden property is implemented we can assume, that the browser supports basic HTML5 Styles supportsHtml5Styles = ('hidden' in a); supportsUnknownElements = a.childNodes.length == 1 || (function() { // assign a false positive if unable to shiv (document.createElement)('a'); var frag = document.createDocumentFragment(); return ( typeof frag.cloneNode == 'undefined' || typeof frag.createDocumentFragment == 'undefined' || typeof frag.createElement == 'undefined' ); }()); } catch(e) { supportsHtml5Styles = true; supportsUnknownElements = true; } }()); /*--------------------------------------------------------------------------*/ /** * Creates a style sheet with the given CSS text and adds it to the document. * @private * @param {Document} ownerDocument The document. * @param {String} cssText The CSS text. * @returns {StyleSheet} The style element. */ function addStyleSheet(ownerDocument, cssText) { var p = ownerDocument.createElement('p'), parent = ownerDocument.getElementsByTagName('head')[0] || ownerDocument.documentElement; p.innerHTML = 'x<style>' + cssText + '</style>'; return parent.insertBefore(p.lastChild, parent.firstChild); } /** * Returns the value of `html5.elements` as an array. * @private * @returns {Array} An array of shived element node names. */ function getElements() { var elements = html5.elements; return typeof elements == 'string' ? elements.split(' ') : elements; } /** * Returns the data associated to the given document * @private * @param {Document} ownerDocument The document. * @returns {Object} An object of data. */ function getExpandoData(ownerDocument) { var data = expandoData[ownerDocument[expando]]; if (!data) { data = {}; expanID++; ownerDocument[expando] = expanID; expandoData[expanID] = data; } return data; } /** * returns a shived element for the given nodeName and document * @memberOf html5 * @param {String} nodeName name of the element * @param {Document} ownerDocument The context document. * @returns {Object} The shived element. */ function createElement(nodeName, ownerDocument, data){ if (!ownerDocument) { ownerDocument = document; } if(supportsUnknownElements){ return ownerDocument.createElement(nodeName); } if (!data) { data = getExpandoData(ownerDocument); } var node; if (data.cache[nodeName]) { node = data.cache[nodeName].cloneNode(); } else if (saveClones.test(nodeName)) { node = (data.cache[nodeName] = data.createElem(nodeName)).cloneNode(); } else { node = data.createElem(nodeName); } // Avoid adding some elements to fragments in IE < 9 because // * Attributes like `name` or `type` cannot be set/changed once an element // is inserted into a document/fragment // * Link elements with `src` attributes that are inaccessible, as with // a 403 response, will cause the tab/window to crash // * Script elements appended to fragments will execute when their `src` // or `text` property is set return node.canHaveChildren && !reSkip.test(nodeName) ? data.frag.appendChild(node) : node; } /** * returns a shived DocumentFragment for the given document * @memberOf html5 * @param {Document} ownerDocument The context document. * @returns {Object} The shived DocumentFragment. */ function createDocumentFragment(ownerDocument, data){ if (!ownerDocument) { ownerDocument = document; } if(supportsUnknownElements){ return ownerDocument.createDocumentFragment(); } data = data || getExpandoData(ownerDocument); var clone = data.frag.cloneNode(), i = 0, elems = getElements(), l = elems.length; for(;i<l;i++){ clone.createElement(elems[i]); } return clone; } /** * Shivs the `createElement` and `createDocumentFragment` methods of the document. * @private * @param {Document|DocumentFragment} ownerDocument The document. * @param {Object} data of the document. */ function shivMethods(ownerDocument, data) { if (!data.cache) { data.cache = {}; data.createElem = ownerDocument.createElement; data.createFrag = ownerDocument.createDocumentFragment; data.frag = data.createFrag(); } ownerDocument.createElement = function(nodeName) { //abort shiv if (!html5.shivMethods) { return data.createElem(nodeName); } return createElement(nodeName, ownerDocument, data); }; ownerDocument.createDocumentFragment = Function('h,f', 'return function(){' + 'var n=f.cloneNode(),c=n.createElement;' + 'h.shivMethods&&(' + // unroll the `createElement` calls getElements().join().replace(/\w+/g, function(nodeName) { data.createElem(nodeName); data.frag.createElement(nodeName); return 'c("' + nodeName + '")'; }) + ');return n}' )(html5, data.frag); } /*--------------------------------------------------------------------------*/ /** * Shivs the given document. * @memberOf html5 * @param {Document} ownerDocument The document to shiv. * @returns {Document} The shived document. */ function shivDocument(ownerDocument) { if (!ownerDocument) { ownerDocument = document; } var data = getExpandoData(ownerDocument); if (html5.shivCSS && !supportsHtml5Styles && !data.hasCSS) { data.hasCSS = !!addStyleSheet(ownerDocument, // corrects block display not defined in IE6/7/8/9 'article,aside,figcaption,figure,footer,header,hgroup,nav,section{display:block}' + // adds styling not present in IE6/7/8/9 'mark{background:#FF0;color:#000}' ); } if (!supportsUnknownElements) { shivMethods(ownerDocument, data); } return ownerDocument; } /*--------------------------------------------------------------------------*/ /** * The `html5` object is exposed so that more elements can be shived and * existing shiving can be detected on iframes. * @type Object * @example * * // options can be changed before the script is included * html5 = { 'elements': 'mark section', 'shivCSS': false, 'shivMethods': false }; */ var html5 = { /** * An array or space separated string of node names of the elements to shiv. * @memberOf html5 * @type Array|String */ 'elements': options.elements || 'abbr article aside audio bdi canvas data datalist details figcaption figure footer header hgroup mark meter nav output progress section summary time video', /** * A flag to indicate that the HTML5 style sheet should be inserted. * @memberOf html5 * @type Boolean */ 'shivCSS': (options.shivCSS !== false), /** * Is equal to true if a browser supports creating unknown/HTML5 elements * @memberOf html5 * @type boolean */ 'supportsUnknownElements': supportsUnknownElements, /** * A flag to indicate that the document's `createElement` and `createDocumentFragment` * methods should be overwritten. * @memberOf html5 * @type Boolean */ 'shivMethods': (options.shivMethods !== false), /** * A string to describe the type of `html5` object ("default" or "default print"). * @memberOf html5 * @type String */ 'type': 'default', // shivs the document according to the specified `html5` object options 'shivDocument': shivDocument, //creates a shived element createElement: createElement, //creates a shived documentFragment createDocumentFragment: createDocumentFragment }; /*--------------------------------------------------------------------------*/ // expose html5 window.html5 = html5; // shiv the document shivDocument(document); }(this, document)); /*>>shiv*/ // Assign private properties to the return object with prefix Modernizr._version = version; // expose these for the plugin API. Look in the source for how to join() them against your input /*>>prefixes*/ Modernizr._prefixes = prefixes; /*>>prefixes*/ /*>>domprefixes*/ Modernizr._domPrefixes = domPrefixes; Modernizr._cssomPrefixes = cssomPrefixes; /*>>domprefixes*/ /*>>mq*/ // Modernizr.mq tests a given media query, live against the current state of the window // A few important notes: // * If a browser does not support media queries at all (eg. oldIE) the mq() will always return false // * A max-width or orientation query will be evaluated against the current state, which may change later. // * You must specify values. Eg. If you are testing support for the min-width media query use: // Modernizr.mq('(min-width:0)') // usage: // Modernizr.mq('only screen and (max-width:768)') Modernizr.mq = testMediaQuery; /*>>mq*/ /*>>hasevent*/ // Modernizr.hasEvent() detects support for a given event, with an optional element to test on // Modernizr.hasEvent('gesturestart', elem) Modernizr.hasEvent = isEventSupported; /*>>hasevent*/ /*>>testprop*/ // Modernizr.testProp() investigates whether a given style property is recognized // Note that the property names must be provided in the camelCase variant. // Modernizr.testProp('pointerEvents') Modernizr.testProp = function(prop){ return testProps([prop]); }; /*>>testprop*/ /*>>testallprops*/ // Modernizr.testAllProps() investigates whether a given style property, // or any of its vendor-prefixed variants, is recognized // Note that the property names must be provided in the camelCase variant. // Modernizr.testAllProps('boxSizing') Modernizr.testAllProps = testPropsAll; /*>>testallprops*/ /*>>teststyles*/ // Modernizr.testStyles() allows you to add custom styles to the document and test an element afterwards // Modernizr.testStyles('#modernizr { position:absolute }', function(elem, rule){ ... }) Modernizr.testStyles = injectElementWithStyles; /*>>teststyles*/ /*>>prefixed*/ // Modernizr.prefixed() returns the prefixed or nonprefixed property name variant of your input // Modernizr.prefixed('boxSizing') // 'MozBoxSizing' // Properties must be passed as dom-style camelcase, rather than `box-sizing` hypentated style. // Return values will also be the camelCase variant, if you need to translate that to hypenated style use: // // str.replace(/([A-Z])/g, function(str,m1){ return '-' + m1.toLowerCase(); }).replace(/^ms-/,'-ms-'); // If you're trying to ascertain which transition end event to bind to, you might do something like... // // var transEndEventNames = { // 'WebkitTransition' : 'webkitTransitionEnd', // 'MozTransition' : 'transitionend', // 'OTransition' : 'oTransitionEnd', // 'msTransition' : 'MSTransitionEnd', // 'transition' : 'transitionend' // }, // transEndEventName = transEndEventNames[ Modernizr.prefixed('transition') ]; Modernizr.prefixed = function(prop, obj, elem){ if(!obj) { return testPropsAll(prop, 'pfx'); } else { // Testing DOM property e.g. Modernizr.prefixed('requestAnimationFrame', window) // 'mozRequestAnimationFrame' return testPropsAll(prop, obj, elem); } }; /*>>prefixed*/ /*>>cssclasses*/ // Remove "no-js" class from <html> element, if it exists: docElement.className = docElement.className.replace(/(^|\s)no-js(\s|$)/, '$1$2') + // Add the new classes to the <html> element. (enableClasses ? ' js ' + classes.join(' ') : ''); /*>>cssclasses*/ return Modernizr; })(this, this.document);
{ "pile_set_name": "Github" }
// cgo -godefs -- -Wall -Werror -static -I/tmp/include linux/types.go | go run mkpost.go // Code generated by the command above; see README.md. DO NOT EDIT. // +build riscv64,linux package unix const ( SizeofPtr = 0x8 SizeofLong = 0x8 ) type ( _C_long int64 ) type Timespec struct { Sec int64 Nsec int64 } type Timeval struct { Sec int64 Usec int64 } type Timex struct { Modes uint32 Offset int64 Freq int64 Maxerror int64 Esterror int64 Status int32 Constant int64 Precision int64 Tolerance int64 Time Timeval Tick int64 Ppsfreq int64 Jitter int64 Shift int32 Stabil int64 Jitcnt int64 Calcnt int64 Errcnt int64 Stbcnt int64 Tai int32 _ [44]byte } type Time_t int64 type Tms struct { Utime int64 Stime int64 Cutime int64 Cstime int64 } type Utimbuf struct { Actime int64 Modtime int64 } type Rusage struct { Utime Timeval Stime Timeval Maxrss int64 Ixrss int64 Idrss int64 Isrss int64 Minflt int64 Majflt int64 Nswap int64 Inblock int64 Oublock int64 Msgsnd int64 Msgrcv int64 Nsignals int64 Nvcsw int64 Nivcsw int64 } type Stat_t struct { Dev uint64 Ino uint64 Mode uint32 Nlink uint32 Uid uint32 Gid uint32 Rdev uint64 _ uint64 Size int64 Blksize int32 _ int32 Blocks int64 Atim Timespec Mtim Timespec Ctim Timespec _ [2]int32 } type Dirent struct { Ino uint64 Off int64 Reclen uint16 Type uint8 Name [256]uint8 _ [5]byte } type Flock_t struct { Type int16 Whence int16 Start int64 Len int64 Pid int32 _ [4]byte } const ( FADV_DONTNEED = 0x4 FADV_NOREUSE = 0x5 ) type RawSockaddr struct { Family uint16 Data [14]uint8 } type RawSockaddrAny struct { Addr RawSockaddr Pad [96]uint8 } type Iovec struct { Base *byte Len uint64 } type Msghdr struct { Name *byte Namelen uint32 Iov *Iovec Iovlen uint64 Control *byte Controllen uint64 Flags int32 _ [4]byte } type Cmsghdr struct { Len uint64 Level int32 Type int32 } const ( SizeofIovec = 0x10 SizeofMsghdr = 0x38 SizeofCmsghdr = 0x10 ) const ( SizeofSockFprog = 0x10 ) type PtraceRegs struct { Pc uint64 Ra uint64 Sp uint64 Gp uint64 Tp uint64 T0 uint64 T1 uint64 T2 uint64 S0 uint64 S1 uint64 A0 uint64 A1 uint64 A2 uint64 A3 uint64 A4 uint64 A5 uint64 A6 uint64 A7 uint64 S2 uint64 S3 uint64 S4 uint64 S5 uint64 S6 uint64 S7 uint64 S8 uint64 S9 uint64 S10 uint64 S11 uint64 T3 uint64 T4 uint64 T5 uint64 T6 uint64 } type FdSet struct { Bits [16]int64 } type Sysinfo_t struct { Uptime int64 Loads [3]uint64 Totalram uint64 Freeram uint64 Sharedram uint64 Bufferram uint64 Totalswap uint64 Freeswap uint64 Procs uint16 Pad uint16 Totalhigh uint64 Freehigh uint64 Unit uint32 _ [0]uint8 _ [4]byte } type Ustat_t struct { Tfree int32 Tinode uint64 Fname [6]uint8 Fpack [6]uint8 _ [4]byte } type EpollEvent struct { Events uint32 _ int32 Fd int32 Pad int32 } const ( POLLRDHUP = 0x2000 ) type Sigset_t struct { Val [16]uint64 } const _C__NSIG = 0x41 type Termios struct { Iflag uint32 Oflag uint32 Cflag uint32 Lflag uint32 Line uint8 Cc [19]uint8 Ispeed uint32 Ospeed uint32 } type Taskstats struct { Version uint16 Ac_exitcode uint32 Ac_flag uint8 Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 Blkio_count uint64 Blkio_delay_total uint64 Swapin_count uint64 Swapin_delay_total uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 Ac_sched uint8 Ac_pad [3]uint8 _ [4]byte Ac_uid uint32 Ac_gid uint32 Ac_pid uint32 Ac_ppid uint32 Ac_btime uint32 Ac_etime uint64 Ac_utime uint64 Ac_stime uint64 Ac_minflt uint64 Ac_majflt uint64 Coremem uint64 Virtmem uint64 Hiwater_rss uint64 Hiwater_vm uint64 Read_char uint64 Write_char uint64 Read_syscalls uint64 Write_syscalls uint64 Read_bytes uint64 Write_bytes uint64 Cancelled_write_bytes uint64 Nvcsw uint64 Nivcsw uint64 Ac_utimescaled uint64 Ac_stimescaled uint64 Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 Thrashing_count uint64 Thrashing_delay_total uint64 Ac_btime64 uint64 } type cpuMask uint64 const ( _NCPUBITS = 0x40 ) const ( CBitFieldMaskBit0 = 0x1 CBitFieldMaskBit1 = 0x2 CBitFieldMaskBit2 = 0x4 CBitFieldMaskBit3 = 0x8 CBitFieldMaskBit4 = 0x10 CBitFieldMaskBit5 = 0x20 CBitFieldMaskBit6 = 0x40 CBitFieldMaskBit7 = 0x80 CBitFieldMaskBit8 = 0x100 CBitFieldMaskBit9 = 0x200 CBitFieldMaskBit10 = 0x400 CBitFieldMaskBit11 = 0x800 CBitFieldMaskBit12 = 0x1000 CBitFieldMaskBit13 = 0x2000 CBitFieldMaskBit14 = 0x4000 CBitFieldMaskBit15 = 0x8000 CBitFieldMaskBit16 = 0x10000 CBitFieldMaskBit17 = 0x20000 CBitFieldMaskBit18 = 0x40000 CBitFieldMaskBit19 = 0x80000 CBitFieldMaskBit20 = 0x100000 CBitFieldMaskBit21 = 0x200000 CBitFieldMaskBit22 = 0x400000 CBitFieldMaskBit23 = 0x800000 CBitFieldMaskBit24 = 0x1000000 CBitFieldMaskBit25 = 0x2000000 CBitFieldMaskBit26 = 0x4000000 CBitFieldMaskBit27 = 0x8000000 CBitFieldMaskBit28 = 0x10000000 CBitFieldMaskBit29 = 0x20000000 CBitFieldMaskBit30 = 0x40000000 CBitFieldMaskBit31 = 0x80000000 CBitFieldMaskBit32 = 0x100000000 CBitFieldMaskBit33 = 0x200000000 CBitFieldMaskBit34 = 0x400000000 CBitFieldMaskBit35 = 0x800000000 CBitFieldMaskBit36 = 0x1000000000 CBitFieldMaskBit37 = 0x2000000000 CBitFieldMaskBit38 = 0x4000000000 CBitFieldMaskBit39 = 0x8000000000 CBitFieldMaskBit40 = 0x10000000000 CBitFieldMaskBit41 = 0x20000000000 CBitFieldMaskBit42 = 0x40000000000 CBitFieldMaskBit43 = 0x80000000000 CBitFieldMaskBit44 = 0x100000000000 CBitFieldMaskBit45 = 0x200000000000 CBitFieldMaskBit46 = 0x400000000000 CBitFieldMaskBit47 = 0x800000000000 CBitFieldMaskBit48 = 0x1000000000000 CBitFieldMaskBit49 = 0x2000000000000 CBitFieldMaskBit50 = 0x4000000000000 CBitFieldMaskBit51 = 0x8000000000000 CBitFieldMaskBit52 = 0x10000000000000 CBitFieldMaskBit53 = 0x20000000000000 CBitFieldMaskBit54 = 0x40000000000000 CBitFieldMaskBit55 = 0x80000000000000 CBitFieldMaskBit56 = 0x100000000000000 CBitFieldMaskBit57 = 0x200000000000000 CBitFieldMaskBit58 = 0x400000000000000 CBitFieldMaskBit59 = 0x800000000000000 CBitFieldMaskBit60 = 0x1000000000000000 CBitFieldMaskBit61 = 0x2000000000000000 CBitFieldMaskBit62 = 0x4000000000000000 CBitFieldMaskBit63 = 0x8000000000000000 ) type SockaddrStorage struct { Family uint16 _ [118]uint8 _ uint64 } type HDGeometry struct { Heads uint8 Sectors uint8 Cylinders uint16 Start uint64 } type Statfs_t struct { Type int64 Bsize int64 Blocks uint64 Bfree uint64 Bavail uint64 Files uint64 Ffree uint64 Fsid Fsid Namelen int64 Frsize int64 Flags int64 Spare [4]int64 } type TpacketHdr struct { Status uint64 Len uint32 Snaplen uint32 Mac uint16 Net uint16 Sec uint32 Usec uint32 _ [4]byte } const ( SizeofTpacketHdr = 0x20 ) type RTCPLLInfo struct { Ctrl int32 Value int32 Max int32 Min int32 Posmult int32 Negmult int32 Clock int64 } type BlkpgPartition struct { Start int64 Length int64 Pno int32 Devname [64]uint8 Volname [64]uint8 _ [4]byte } const ( BLKPG = 0x1269 ) type XDPUmemReg struct { Addr uint64 Len uint64 Size uint32 Headroom uint32 Flags uint32 _ [4]byte } type CryptoUserAlg struct { Name [64]uint8 Driver_name [64]uint8 Module_name [64]uint8 Type uint32 Mask uint32 Refcnt uint32 Flags uint32 } type CryptoStatAEAD struct { Type [64]uint8 Encrypt_cnt uint64 Encrypt_tlen uint64 Decrypt_cnt uint64 Decrypt_tlen uint64 Err_cnt uint64 } type CryptoStatAKCipher struct { Type [64]uint8 Encrypt_cnt uint64 Encrypt_tlen uint64 Decrypt_cnt uint64 Decrypt_tlen uint64 Verify_cnt uint64 Sign_cnt uint64 Err_cnt uint64 } type CryptoStatCipher struct { Type [64]uint8 Encrypt_cnt uint64 Encrypt_tlen uint64 Decrypt_cnt uint64 Decrypt_tlen uint64 Err_cnt uint64 } type CryptoStatCompress struct { Type [64]uint8 Compress_cnt uint64 Compress_tlen uint64 Decompress_cnt uint64 Decompress_tlen uint64 Err_cnt uint64 } type CryptoStatHash struct { Type [64]uint8 Hash_cnt uint64 Hash_tlen uint64 Err_cnt uint64 } type CryptoStatKPP struct { Type [64]uint8 Setsecret_cnt uint64 Generate_public_key_cnt uint64 Compute_shared_secret_cnt uint64 Err_cnt uint64 } type CryptoStatRNG struct { Type [64]uint8 Generate_cnt uint64 Generate_tlen uint64 Seed_cnt uint64 Err_cnt uint64 } type CryptoStatLarval struct { Type [64]uint8 } type CryptoReportLarval struct { Type [64]uint8 } type CryptoReportHash struct { Type [64]uint8 Blocksize uint32 Digestsize uint32 } type CryptoReportCipher struct { Type [64]uint8 Blocksize uint32 Min_keysize uint32 Max_keysize uint32 } type CryptoReportBlkCipher struct { Type [64]uint8 Geniv [64]uint8 Blocksize uint32 Min_keysize uint32 Max_keysize uint32 Ivsize uint32 } type CryptoReportAEAD struct { Type [64]uint8 Geniv [64]uint8 Blocksize uint32 Maxauthsize uint32 Ivsize uint32 } type CryptoReportComp struct { Type [64]uint8 } type CryptoReportRNG struct { Type [64]uint8 Seedsize uint32 } type CryptoReportAKCipher struct { Type [64]uint8 } type CryptoReportKPP struct { Type [64]uint8 } type CryptoReportAcomp struct { Type [64]uint8 } type LoopInfo struct { Number int32 Device uint32 Inode uint64 Rdevice uint32 Offset int32 Encrypt_type int32 Encrypt_key_size int32 Flags int32 Name [64]uint8 Encrypt_key [32]uint8 Init [2]uint64 Reserved [4]uint8 _ [4]byte } type TIPCSubscr struct { Seq TIPCServiceRange Timeout uint32 Filter uint32 Handle [8]uint8 } type TIPCSIOCLNReq struct { Peer uint32 Id uint32 Linkname [68]uint8 } type TIPCSIOCNodeIDReq struct { Peer uint32 Id [16]uint8 }
{ "pile_set_name": "Github" }
angular.module('nirc') .factory('Notification', function($q, $rootScope) { var supported = ('Notification' in window), nativeNote = window.Notification, Notification = {}; Notification.notify = function(title, opts) { var d = $q.defer(); if (!supported) { d.reject(); return d.promise; } opts = opts || {}; var n = new nativeNote(title, opts); n.onclick = function() { $rootScope.$apply(d.resolve.bind(d)); }; /* if timeout provided, auto-close the notification */ if (opts.timeout) { n.onshow = function() { window.setTimeout(function() { n.close(); d.reject(); /* user didn't click. */ $rootScope.$apply(); }, opts.timeout); }; } return d.promise; }; Notification.request = function() { var d = $q.defer(); if (!supported) { d.reject(); return d.promise; } nativeNote.requestPermission(function(result) { if (result === 'granted') { d.resolve(result); } else { d.reject(result); } $rootScope.$apply(); }); return d.promise; }; return Notification; });
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <!-- ********************************** DO NOT EDIT ********************************** This file will be replaced during upgrades so DO NOT EDIT this file. If you need to adjust the process, reading and understanding this file is the first step. In most cases, the adjustments can be achieved by setting properties or providing one of the "hooks" in the form of a "-before-" or "-after-" target. Whenever possible, look for one of these solutions. Failing that, you can copy whole targets to your build.xml file and it will overrride the target provided here. Doing that can create problems for upgrading to newer versions of Cmd so it is not recommended but it will be easier to manage than editing this file in most cases. --> <project name="x-app-build-impl" default=".help"> <!-- =============================================================== helper targets for ant integrations with IDE's (human readable target names) =============================================================== --> <target name="-before-build-testing"/> <target name="-build-testing" depends="testing,build"/> <target name="-after-build-testing"/> <target name="build-testing" depends="-before-build-testing, -build-testing, -after-build-testing"/> <target name="Build - Testing" description="Create a Testing build of this project" depends="build-testing"/> <target name="-before-build-production"/> <target name="-build-production" depends="production,build"/> <target name="-after-build-production"/> <target name="build-production" depends="-before-build-production, -build-production, -after-build-production"/> <target name="Build - Production" description="Create a Production build of this project" depends="build-production"/> <target name="-before-build-native"/> <target name="-build-native" depends="native,build"/> <target name="-after-build-native"/> <target name="build-native" depends="-before-build-native, -build-native, -after-build-native"/> <target name="Build - Native" description="Create a Native build of this project" depends="build-native"/> <target name="-before-start-local-webserver"/> <target name="-start-local-webserver" depends="init"> <x-launch-terminal> <![CDATA[ ${cmd.dir}/sencha fs web -port=${build.web.port} start -map=${build.web.root} ]]> </x-launch-terminal> </target> <target name="-after-start-local-webserver"/> <target name="start-local-webserver" depends="-before-start-local-webserver, -start-local-webserver, -after-start-local-webserver"/> <target name="WebServer - Start Local" description="Starts a local webserver for this project" depends="start-local-webserver"/> <target name="-before-compass-watch"/> <target name="-compass-watch" depends="init"> <x-launch-terminal> compass watch ${app.sass.dir} </x-launch-terminal> </target> <target name="-after-compass-watch"/> <target name="compass-watch" depends="-before-compass-watch, -compass-watch, -after-compass-watch"/> <target name="Compass - Watch" description="Opens terminal and watches for SASS updates" depends="compass-watch"/> <!-- =============================================================== environment setters =============================================================== --> <target name="production" description="Sets the build environment to production."> <property name="build.environment" value="production"/> </target> <target name="testing" description="Sets the build environment to testing."> <property name="build.environment" value="testing"/> </target> <target name="native" description="Sets the build environment to native."> <property name="build.environment" value="native"/> </target> <target name="package" description="Sets the build environment to package."> <property name="build.environment" value="package"/> </target> <target name="development" description="Sets the build environment to development."> <property name="build.environment" value="development"/> </target> <!-- =============================================================== Find Cmd uses targets from find-cmd-impl.xml to detect the current install of Sencha Cmd =============================================================== --> <import file="${basedir}/.sencha/app/find-cmd-impl.xml"/> <target name="init-cmd" depends="find-cmd-in-path, find-cmd-in-environment, find-cmd-in-shell"> <echo>Using Sencha Cmd from ${cmd.dir} for ${ant.file}</echo> <!-- load the sencha.jar ant task definitions. NOTE: the 'loaderref' attribute stores this task def's class loader on the project by that name, so it will be sharable across sub-projects. This fixes out-of-memory issues, as well as increases performance. To supoprt this, it is recommended that any customizations that use 'ant' or 'antcall' tasks set 'inheritrefs=true' on those tasks, in order to propagate the senchaloader reference to those subprojects. The sencha 'x-ant-call' task, which extends 'antcall' and defaults 'inheritrefs' to true, may be used in place of antcall in build process customizations. --> <taskdef resource="com/sencha/ant/antlib.xml" classpath="${cmd.dir}/sencha.jar" loaderref="senchaloader"/> <!-- Some operations require sencha.jar in the current java classpath, so this will extend the java.lang.Thread#contextClassLoader with the specified java classpath entries --> <x-extend-classpath> <jar path="${cmd.dir}/sencha.jar"/> </x-extend-classpath> </target> <!-- =============================================================== Init uses targets from init-impl.xml to load Sencha Cmd config system properties and ant task definitions =============================================================== --> <import file="${basedir}/.sencha/app/init-impl.xml"/> <import file="${basedir}/.sencha/app/resolve-impl.xml"/> <target name="init" depends="init-local, init-cmd, -before-init, -init, -after-init, -before-init-defaults, -init-defaults, -after-init-defaults, -init-compiler, -init-native-package"/> <!-- =============================================================== Build this is the starting point for the build process. The 'depends' attribute on the -build target controls the ordering of the different build phases =============================================================== --> <target name="-before-build"/> <target name="-build" depends="refresh, resolve, js, resources, slice, sass, page, native-package"/> <target name="-after-build"/> <target name="build" depends="init,-before-build,-build,-after-build" description="Builds the application"/> <!-- =============================================================== Clean removes all artifacts from the output build directories =============================================================== --> <target name="-before-clean"/> <target name="-clean"> <delete dir="${app.output.base}"/> <delete dir="${build.temp.dir}"/> </target> <target name="-after-clean"/> <target name="clean" depends="init" description="Removes all build output produced by the 'build' target"> <x-ant-call unless="skip.clean"> <target name="-before-clean"/> <target name="-clean"/> <target name="-after-clean"/> </x-ant-call> </target> <!-- =============================================================== Watch uses targets from watch-impl.xml to initiate the application watch process using instrumented state from the compiler =============================================================== --> <import file="${basedir}/.sencha/app/watch-impl.xml"/> <target name="-watch-init"> <property name="app.watch.enabled" value="true"/> </target> <target name="watch" depends="-watch-init,development,init" description="Starts Watch to keep your app ready for dev mode"> <x-ant-call> <param name="build.id" value="${build.id}"/> <param name="build.name" value="${build.name}"/> <target name="-before-watch"/> <target name="-watch"/> <target name="-after-watch"/> </x-ant-call> </target> <!-- =============================================================== JS uses targets from js-impl.xml to produce the output js files containing needed application and framework js classes =============================================================== --> <import file="${basedir}/.sencha/app/js-impl.xml"/> <target name="js" depends="init" description="Builds the output javascript file(s)"> <x-ant-call unless="skip.js"> <target name="-before-js"/> <target name="-js"/> <target name="-after-js"/> </x-ant-call> </target> <!-- =============================================================== Sass uses targets from sass-impl.xml to produce the output css files for the application's styling =============================================================== --> <import file="${basedir}/.sencha/app/sass-impl.xml"/> <target name="sass" depends="init" description="Builds the Sass files using Compass."> <x-ant-call unless="skip.sass"> <target name="-before-sass"/> <target name="-sass"/> <target name="-after-sass"/> <target name="refresh"/> </x-ant-call> </target> <!-- =============================================================== Resources uses targets from resources-impl.xml to copy resources from the application and required packages to the output directory =============================================================== --> <import file="${basedir}/.sencha/app/resources-impl.xml"/> <target name="resources" depends="init" description="Copy resources to build folder."> <x-ant-call unless="skip.resources"> <target name="-before-resources"/> <!-- Legacy targets: --> <target name="-before-inherit-resources"/> <target name="-before-copy-resources"/> <target name="-resources"/> <!-- Legacy targets: --> <target name="-after-copy-resources"/> <target name="-after-inherit-resources"/> <target name="-after-resources"/> </x-ant-call> </target> <!-- =============================================================== Slice uses targets from slice-impl.xml to extract theme images from the application for use with older browsers that don't support modern css features =============================================================== --> <import file="${basedir}/.sencha/app/slice-impl.xml"/> <target name="slice" depends="init" description="Slices CSS3 theme to produce non-CSS3 images and sprites."> <x-ant-call unless="skip.slice"> <target name="-before-slice"/> <target name="-slice"/> <target name="-after-slice"/> </x-ant-call> </target> <!-- Theme - this is a legacy support target for extjs 4.1 apps. It redirects to the "slice" ant target --> <target name="theme" depends="init" description="Builds the application's theme(s) images using the slicer (Ext JS 4.1 only)."> <x-ant-call unless="skip.theme"> <target name="-before-theme"/> <target name="slice"/> <target name="-after-theme"/> </x-ant-call> </target> <!-- Refresh Theme - uses targets from refresh-impl.xml to rebuild the current theme --> <target name="refresh-theme" depends="init" description="Rebuilds the currently enabled app theme (Ext JS 4.1 only)."> <x-ant-call unless="skip.theme"> <target name="-before-refresh-theme"/> <target name="-refresh-theme"/> <target name="-after-refresh-theme"/> </x-ant-call> </target> <!-- =============================================================== Refresh uses targets from refresh-impl.xml to generate bootstrapping information for the application =============================================================== --> <import file="${basedir}/.sencha/app/refresh-impl.xml"/> <target name="refresh" depends="init" description="Refreshes the application bootstrap data."> <x-ant-call unless="skip.refresh"> <target name="-before-refresh"/> <target name="-refresh"/> <target name="-after-refresh"/> </x-ant-call> </target> <!-- =============================================================== Page uses targets from page-impl.xml to generate the output markup file and associated microloader / app manifest =============================================================== --> <import file="${basedir}/.sencha/app/page-impl.xml"/> <target name="page" depends="init" description="Builds the application's HTML page."> <x-ant-call unless="skip.page"> <target name="-before-page"/> <target name="-page"/> <target name="-after-page"/> </x-ant-call> </target> <!-- =============================================================== Resolve uses targets from resolve-impl.xml to detect dynamic app dependencies using phantomjs =============================================================== --> <target name="resolve" depends="init" description="Resolve application dependencies dynamically."> <x-ant-call unless="skip.resolve"> <target name="-before-resolve"/> <target name="-resolve"/> <target name="-after-resolve"/> </x-ant-call> </target> <!-- =============================================================== Native Package uses targets from packager-impl.xml to run native packager applications to produce stand-alone installable web apps from this built Cmd application =============================================================== --> <import file="${basedir}/.sencha/app/packager-impl.xml"/> <import file="${basedir}/.sencha/app/cordova-impl.xml"/> <import file="${basedir}/.sencha/app/phonegap-impl.xml"/> <target name="native-package" depends="init" description="Builds native packages of the application"> <x-ant-call unless="skip.native-package"> <target name="-before-native-package"/> <target name="-native-package"/> <target name="-after-native-package"/> </x-ant-call> </target> <target name="-before-publish"/> <target name="-after-publish"/> <target name="-publish"> <property name="app.manager.file" value="${app.output.base}"/> <x-sencha-command> <![CDATA[ manager version create -id=${app.manager.id} -name=${app.version} -host=${app.manager.host} -secret=${app.manager.secret} -apiKey=${app.manager.apiKey} -file=${app.manager.file} ]]> </x-sencha-command> </target> <target name="publish" depends="init,-before-publish,-publish,-after-publish" description="Publish app to Sencha Web Application Manager"/> <!-- =============================================================== Build Dependencies uses the compiler to build metadata files for all detected file-to-file dependencies =============================================================== --> <target name="build-dependencies" depends="init, -detect-app-build-properties"> <x-compile refid="${compiler.ref.id}"> <![CDATA[ restore page and meta -infoType=Dependencies -basePath=${build.dir} -tpl={0} -out=${build.dir}/dependencies.json and meta -infoType=AppManifest -basePath=${build.dir} -tpl={0} -out=${build.dir}/bootsequence.json ]]> </x-compile> </target> <!-- =============================================================== Help - properties displays all current ant properties =============================================================== --> <target name=".props" depends="init" description="Lists all properties defined for the build"> <echoproperties/> </target> <!-- =============================================================== Help - docs displays the help message =============================================================== --> <target name=".help" depends="init" description="Provides help on the build script"> <x-get-project-targets property="help.message"/> <echo><![CDATA[${help.message} This is the main build script for your application. The following properties can be used to disable certain steps in the build process. * skip.page Do not build the HTML page. * skip.js Do not build the output js code file(s) * skip.resources Do not copy resources to the build directory * skip.sass Do not build the SASS. * skip.slice Do not build the theme image slicer. Most build options are controlled by the app manifest. For details see: ${basedir}/app.json For more specific controls over the build process, see: ${basedir}/.sencha/app/defaults.properties For details about how these options affect your build, see ${basedir}/.sencha/app/build-impl.xml These options can be stored in a local.properties file in this folder or in the local.properties file in the workspace. This file should not be included in source control. Alternatively, these can be supplied on the command line. For example: ant -Dskip.sass=1 build To see all currently defined properties, do this: ant .props ]]></echo> </target> </project>
{ "pile_set_name": "Github" }
--- upgrade: - | The ``context`` field for patch checks must now be slug, or a string consisting of only ASCII letters, numbers, underscores or hyphens. While older, non-slugified strings won't cause issues, any scripts creating contexts must be updated where necessary.
{ "pile_set_name": "Github" }
package cloudphoto //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at // //http://www.apache.org/licenses/LICENSE-2.0 // //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. // // Code generated by Alibaba Cloud SDK Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. // AxisInListFaces is a nested struct in cloudphoto response type AxisInListFaces struct { Axis []string `json:"Axis" xml:"Axis"` }
{ "pile_set_name": "Github" }
################################################################################ # # squeezelite # ################################################################################ SQUEEZELITE_VERSION = v1.8 SQUEEZELITE_SITE = $(call github,robadenshi,squeezelite,$(SQUEEZELITE_VERSION)) SQUEEZELITE_LICENSE = GPL-3.0 SQUEEZELITE_LICENSE_FILES = LICENSE.txt SQUEEZELITE_DEPENDENCIES = alsa-lib flac libmad libvorbis faad2 mpg123 SQUEEZELITE_MAKE_OPTS = -DLINKALL ifeq ($(BR2_PACKAGE_SQUEEZELITE_FFMPEG),y) SQUEEZELITE_DEPENDENCIES += ffmpeg SQUEEZELITE_MAKE_OPTS += -DFFMPEG endif ifeq ($(BR2_PACKAGE_SQUEEZELITE_DSD),y) SQUEEZELITE_MAKE_OPTS += -DDSD endif ifeq ($(BR2_PACKAGE_SQUEEZELITE_RESAMPLE),y) SQUEEZELITE_DEPENDENCIES += libsoxr SQUEEZELITE_MAKE_OPTS += -DRESAMPLE endif ifeq ($(BR2_PACKAGE_SQUEEZELITE_VISEXPORT),y) SQUEEZELITE_MAKE_OPTS += -DVISEXPORT endif define SQUEEZELITE_BUILD_CMDS $(TARGET_MAKE_ENV) $(MAKE) $(TARGET_CONFIGURE_OPTS) \ OPTS="$(SQUEEZELITE_MAKE_OPTS)" -C $(@D) all endef define SQUEEZELITE_INSTALL_TARGET_CMDS $(INSTALL) -D -m 0755 $(@D)/squeezelite \ $(TARGET_DIR)/usr/bin/squeezelite endef $(eval $(generic-package))
{ "pile_set_name": "Github" }
/* * FDBRecordStoreScanLimitTest.java * * This source file is part of the FoundationDB open source project * * Copyright 2015-2019 Apple Inc. and the FoundationDB project authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.apple.foundationdb.record.provider.foundationdb.limits; import com.apple.foundationdb.record.EvaluationContext; import com.apple.foundationdb.record.ExecuteProperties; import com.apple.foundationdb.record.IndexScanType; import com.apple.foundationdb.record.IsolationLevel; import com.apple.foundationdb.record.RecordCoreException; import com.apple.foundationdb.record.RecordCursor; import com.apple.foundationdb.record.RecordCursorIterator; import com.apple.foundationdb.record.RecordCursorResult; import com.apple.foundationdb.record.RecordCursorVisitor; import com.apple.foundationdb.record.RecordStoreState; import com.apple.foundationdb.record.ScanLimitReachedException; import com.apple.foundationdb.record.ScanProperties; import com.apple.foundationdb.record.TestRecords1Proto; import com.apple.foundationdb.record.cursors.BaseCursor; import com.apple.foundationdb.record.logging.KeyValueLogMessage; import com.apple.foundationdb.record.logging.LogMessageKeys; import com.apple.foundationdb.record.logging.TestLogMessageKeys; import com.apple.foundationdb.record.provider.foundationdb.FDBQueriedRecord; import com.apple.foundationdb.record.provider.foundationdb.FDBRecordContext; import com.apple.foundationdb.record.provider.foundationdb.FDBRecordStore; import com.apple.foundationdb.record.provider.foundationdb.FDBStoreTimer; import com.apple.foundationdb.record.provider.foundationdb.FDBStoredRecord; import com.apple.foundationdb.record.provider.foundationdb.SplitHelper; import com.apple.foundationdb.record.provider.foundationdb.cursors.ProbableIntersectionCursor; import com.apple.foundationdb.record.query.RecordQuery; import com.apple.foundationdb.record.query.expressions.Query; import com.apple.foundationdb.record.query.plan.RecordQueryPlanner; import com.apple.foundationdb.record.query.plan.ScanComparisons; import com.apple.foundationdb.record.query.plan.plans.QueryPlan; import com.apple.foundationdb.record.query.plan.plans.RecordQueryIndexPlan; import com.apple.foundationdb.record.query.plan.plans.RecordQueryPlan; import com.apple.foundationdb.record.query.plan.plans.RecordQueryPlanWithNoChildren; import com.apple.test.BooleanSource; import com.apple.test.Tags; import com.google.common.base.Strings; import com.google.protobuf.Message; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Optional; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Stream; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; /** * Tests for scan limits in {@link FDBRecordStore}. */ @Tag(Tags.RequiresFDB) @TestInstance(TestInstance.Lifecycle.PER_CLASS) public class FDBRecordStoreScanLimitTest extends FDBRecordStoreLimitTestBase { @Nonnull private static final Logger LOGGER = LoggerFactory.getLogger(FDBRecordStoreScanLimitTest.class); @BeforeAll public void init() { clearAndInitialize(); } @BeforeEach private void setupRecordStore() throws Exception { setupSimpleRecordStore(); } private Optional<Integer> getRecordScanned(FDBRecordContext context) { if (context.getTimer() == null) { return Optional.empty(); } return Optional.of(context.getTimer().getCount(FDBStoreTimer.Counts.LOAD_KEY_VALUE)); } private Optional<Integer> getRecordsScannedByPlan(RecordQueryPlan plan) throws Exception { return getRecordsScannedByPlan(plan, ExecuteProperties.SERIAL_EXECUTE); } private Optional<Integer> getRecordsScannedByPlan(RecordQueryPlan plan, ExecuteProperties executeProperties) throws Exception { try (FDBRecordContext context = openContext()) { openSimpleRecordStore(context); if (context.getTimer() != null) { context.getTimer().reset(); } try (RecordCursorIterator<FDBQueriedRecord<Message>> cursor = recordStore.executeQuery(plan, null, executeProperties).asIterator()) { while (cursor.hasNext()) { cursor.next().getRecord(); } Optional<Integer> scanned = getRecordScanned(context); if (context.getTimer() != null) { context.getTimer().reset(); } return scanned; } } } private void assertNumberOfRecordsScanned(int expected, Function<byte[], RecordCursor<FDBQueriedRecord<Message>>> cursorFunction, boolean failOnLimitReached, String message) throws Exception { try (FDBRecordContext context = openContext()) { openSimpleRecordStore(context); if (context.getTimer() != null) { context.getTimer().reset(); } try (RecordCursor<FDBQueriedRecord<Message>> cursor = cursorFunction.apply(null)) { boolean caughtScanLimitReached = false; RecordCursorResult<FDBQueriedRecord<Message>> result = null; try { do { result = cursor.getNext(); } while (result.hasNext()); } catch (RecordCoreException ex) { if (failOnLimitReached && ex.getCause() instanceof ScanLimitReachedException) { caughtScanLimitReached = true; } else { throw ex; } } if (failOnLimitReached && !caughtScanLimitReached) { assertNotEquals(RecordCursor.NoNextReason.SCAN_LIMIT_REACHED, result.getNoNextReason()); } Optional<Integer> scanned = getRecordScanned(context); if (context.getTimer() != null) { context.getTimer().reset(); } int overrun = BaseCursorCountVisitor.getCount(cursor); scanned.ifPresent(value -> assertThat(message, value, lessThanOrEqualTo(expected + overrun))); } } } private void assertNumberOfRecordsScanned(int expected, RecordQueryPlan plan, ExecuteProperties executeProperties, String message) throws Exception { assertNumberOfRecordsScanned(expected, continuation -> recordStore.executeQuery(plan, null, executeProperties), executeProperties.isFailOnScanLimitReached(), message); } private int getMaximumToScan(QueryPlan<?> plan) throws Exception { if (plan instanceof RecordQueryPlanWithNoChildren) { try (FDBRecordContext context = openContext()) { openSimpleRecordStore(context); RecordQueryPlanWithNoChildren planWithNoChildren = (RecordQueryPlanWithNoChildren) plan; try (RecordCursorIterator<FDBQueriedRecord<Message>> cursor = recordStore.executeQuery(planWithNoChildren, null, ExecuteProperties.SERIAL_EXECUTE).asIterator()) { int maximumToScan = 0; while (cursor.hasNext()) { FDBQueriedRecord<Message> record = cursor.next(); maximumToScan += record.getStoredRecord().getKeyCount() + (record.getStoredRecord().isVersionedInline() ? 1 : 0); } return maximumToScan; } } } int maximumToScan = 0; for (QueryPlan<?> child : plan.getQueryPlanChildren()) { maximumToScan += getMaximumToScan(child); } return maximumToScan; } public Stream<Arguments> plansWithoutFail() { return plans(false); } public Stream<Arguments> plansWithFails() throws Exception { return Stream.of(Boolean.FALSE, Boolean.TRUE).flatMap(fail -> Stream.concat(plans(fail), unorderedPlans(fail))); } @ParameterizedTest(name = "testPlans() [{index}] {0} {1}") @MethodSource("plansWithFails") public void testPlans(String description, boolean fail, RecordQueryPlan plan) throws Exception { // include a scanLimit of 0, in which case all progress happens via the first "free" key-value scan. LOGGER.info(KeyValueLogMessage.of("running plan to check scan limit failures", LogMessageKeys.DESCRIPTION, description, LogMessageKeys.PLAN, plan, TestLogMessageKeys.FAIL, fail)); int maximumToScan = getMaximumToScan(plan); for (int limit = 0; limit <= maximumToScan * 2; limit = limit * 2 + 1) { assertNumberOfRecordsScanned(limit, plan, ExecuteProperties.newBuilder().setFailOnScanLimitReached(fail).setScannedRecordsLimit(limit).build(), "should be limited by record scan limit"); } for (int limit = maximumToScan + 1; limit <= 100; limit++) { assertNumberOfRecordsScanned(maximumToScan, plan, ExecuteProperties.newBuilder().setFailOnScanLimitReached(fail).setScannedRecordsLimit(limit).build(), "should not be limited by record scan limit"); } } @ParameterizedTest(name = "plansByContinuation() [{index}] {0}") @MethodSource("plansWithoutFail") public void plansByContinuation(String description, boolean fail, RecordQueryPlan plan) throws Exception { int maximumToScan = getMaximumToScan(plan); // include a scanLimit of 0, in which case all progress happens via the first "free" key-value scan. for (int scanLimit = 0; scanLimit <= maximumToScan * 2; scanLimit = 2 * scanLimit + 1) { final Function<FDBQueriedRecord<Message>, Long> getRecNo = r -> { TestRecords1Proto.MySimpleRecord.Builder record = TestRecords1Proto.MySimpleRecord.newBuilder(); record.mergeFrom(r.getRecord()); return record.getRecNo(); }; final ExecuteProperties.Builder properties = ExecuteProperties.newBuilder().setScannedRecordsLimit(scanLimit); try (FDBRecordContext context = openContext()) { openSimpleRecordStore(context); final List<Long> allAtOnce; try (RecordCursor<FDBQueriedRecord<Message>> cursor = recordStore.executeQuery(plan)) { allAtOnce = cursor.map(getRecNo).asList().get(); } final List<Long> byContinuation = new ArrayList<>(); byte[] continuation = null; do { try (RecordCursor<FDBQueriedRecord<Message>> cursor = recordStore.executeQuery(plan, continuation, properties.build())) { if (context.getTimer() != null) { context.getTimer().reset(); } RecordCursorResult<FDBQueriedRecord<Message>> result; while ((result = cursor.getNext()).hasNext()) { byContinuation.add(getRecNo.apply(result.get())); } continuation = result.getContinuation().toBytes(); int overrun = BaseCursorCountVisitor.getCount(cursor); Optional<Integer> recordScanned = getRecordScanned(context); if (recordScanned.isPresent()) { assertThat(recordScanned.get(), lessThanOrEqualTo(Math.min(scanLimit + overrun, maximumToScan))); } } } while (continuation != null); assertEquals(allAtOnce, byContinuation); } } } @ParameterizedTest(name = "unorderedIntersectionWithScanLimit [fail = {0}]") @BooleanSource public void unorderedIntersectionWithScanLimit(boolean fail) throws Exception { // TODO: When there is an UnorderedIntersectionPlan (or whatever) add that to the unordered plans stream RecordQueryPlanner planner = new RecordQueryPlanner(simpleMetaData(NO_HOOK), new RecordStoreState(null, null)); RecordQueryPlan leftPlan = planner.plan(RecordQuery.newBuilder() .setRecordType("MySimpleRecord") .setFilter(Query.field("str_value_indexed").startsWith("ev")) .build() ); RecordQueryPlan rightPlan = planner.plan(RecordQuery.newBuilder() .setRecordType("MySimpleRecord") .setFilter(Query.field("num_value_3_indexed").lessThanOrEquals(1)) .build() ); int maximumToScan; try (FDBRecordContext context = openContext()) { openSimpleRecordStore(context); maximumToScan = recordStore.executeQuery(leftPlan).getCount().get() + recordStore.executeQuery(rightPlan).getCount().get(); } for (int limit = 0; limit < 3 * maximumToScan; limit = 2 * limit + 1) { final int finalLimit = limit; Function<byte[], RecordCursor<FDBQueriedRecord<Message>>> cursorFunction = (continuation) -> { ExecuteProperties executeProperties = ExecuteProperties.newBuilder() .setScannedRecordsLimit(finalLimit) .setFailOnScanLimitReached(fail) .build(); return ProbableIntersectionCursor.create( record -> record.getPrimaryKey().getItems(), Arrays.asList( leftContinuation -> leftPlan.execute(recordStore, EvaluationContext.EMPTY, leftContinuation, executeProperties), rightContinuation -> rightPlan.execute(recordStore, EvaluationContext.EMPTY, rightContinuation, executeProperties) ), continuation, recordStore.getTimer()); }; assertNumberOfRecordsScanned(limit, cursorFunction, fail, "should" + (limit >= maximumToScan ? "not " : "") + " be limited by record scan limit"); } } @Test public void testSplitContinuation() throws Exception { try (FDBRecordContext context = openContext()) { openSimpleRecordStore(context, TEST_SPLIT_HOOK); recordStore.deleteAllRecords(); // Undo setupRecordStore(). commit(context); } final String bigValue = Strings.repeat("X", SplitHelper.SPLIT_RECORD_SIZE + 10); final String smallValue = Strings.repeat("Y", 5); final List<FDBStoredRecord<Message>> createdRecords = new ArrayList<>(); createdRecords.add(saveAndSplitSimpleRecord(1L, smallValue, 1)); createdRecords.add(saveAndSplitSimpleRecord(2L, smallValue, 2)); createdRecords.add(saveAndSplitSimpleRecord(3L, bigValue, 3)); createdRecords.add(saveAndSplitSimpleRecord(4L, smallValue, 4)); createdRecords.add(saveAndSplitSimpleRecord(5L, bigValue, 5)); createdRecords.add(saveAndSplitSimpleRecord(6L, bigValue, 6)); createdRecords.add(saveAndSplitSimpleRecord(7L, smallValue, 7)); createdRecords.add(saveAndSplitSimpleRecord(8L, smallValue, 8)); createdRecords.add(saveAndSplitSimpleRecord(9L, smallValue, 9)); // Scan one record at a time using continuations final List<FDBStoredRecord<Message>> scannedRecords = new ArrayList<>(); try (FDBRecordContext context = openContext()) { openSimpleRecordStore(context, TEST_SPLIT_HOOK); Supplier<ScanProperties> props = () -> new ScanProperties(ExecuteProperties.newBuilder() .setScannedRecordsLimit(0) .setIsolationLevel(IsolationLevel.SERIALIZABLE) .build()); RecordCursorIterator<FDBStoredRecord<Message>> messageCursor = recordStore.scanRecords(null, props.get()).asIterator(); while (messageCursor.hasNext()) { scannedRecords.add(messageCursor.next()); messageCursor = recordStore.scanRecords(messageCursor.getContinuation(), props.get()).asIterator(); } commit(context); } assertEquals(createdRecords, scannedRecords); } @ParameterizedTest @ValueSource(ints = {2, 5, 10, 20}) // for this test, the scan limit must divide 100 public void testExecuteStateReset(int scanLimit) throws Exception { final RecordQueryPlan plan = new RecordQueryIndexPlan("MySimpleRecord$str_value_indexed", IndexScanType.BY_VALUE, ScanComparisons.EMPTY, false); ExecuteProperties properties = ExecuteProperties.newBuilder().setScannedRecordsLimit(scanLimit).build(); try (FDBRecordContext context = openContext()) { openSimpleRecordStore(context); byte[] continuation = null; do { try (RecordCursorIterator<FDBQueriedRecord<Message>> cursor = recordStore.executeQuery(plan, continuation, properties).asIterator()) { int retrieved = 0; while (cursor.hasNext()) { cursor.next(); retrieved++; } continuation = cursor.getContinuation(); if (continuation != null) { // if this is our last call, we might retrieve 0 results assertEquals(scanLimit, retrieved); } properties = properties.resetState(); } } while (continuation != null); } } private static class BaseCursorCountVisitor implements RecordCursorVisitor { private int keyValueCursorCount = 0; @Override public boolean visitEnter(RecordCursor<?> cursor) { if (cursor instanceof BaseCursor) { keyValueCursorCount++; } return true; } @Override public boolean visitLeave(RecordCursor<?> cursor) { return true; } public static int getCount(RecordCursor<?> cursor) { BaseCursorCountVisitor visitor = new BaseCursorCountVisitor(); cursor.accept(visitor); return visitor.keyValueCursorCount; } } }
{ "pile_set_name": "Github" }
[ new MagicCDA() { @Override public void modPowerToughness(final MagicGame game, final MagicPlayer player, final MagicPermanent permanent, final MagicPowerToughness pt) { final MagicCardList cardList = new MagicCardList(player.getGraveyard()); cardList.addAll(player.getOpponent().getGraveyard()); int types = 0; for (final MagicType type : MagicType.ALL_CARD_TYPES) { for (final MagicCard card : cardList) { if (card.hasType(type)) { types++; break; } } } pt.set(types, types + 1); } } ]
{ "pile_set_name": "Github" }
package com.afollestad.appthemeengine.tagprocessors; import android.content.Context; import android.content.res.ColorStateList; import android.graphics.Color; import android.support.annotation.ColorInt; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.design.widget.TextInputLayout; import android.view.View; import android.widget.Button; import android.widget.TextView; import com.afollestad.appthemeengine.util.ATEUtil; import com.afollestad.appthemeengine.util.TextInputLayoutUtil; /** * @author Aidan Follestad (afollestad) */ public class TextColorTagProcessor extends TagProcessor { public static final String PREFIX = "text_color"; public static final String LINK_PREFIX = "text_color_link"; public static final String HINT_PREFIX = "text_color_hint"; private final boolean mLinkMode; private final boolean mHintMode; public TextColorTagProcessor(boolean links, boolean hints) { mLinkMode = links; mHintMode = hints; } @Override public boolean isTypeSupported(@NonNull View view) { return view instanceof TextView; } // TODO is dependent parameter needed? private static ColorStateList getTextSelector(@ColorInt int color, View view, boolean dependent) { if (dependent) color = ATEUtil.isColorLight(color) ? Color.BLACK : Color.WHITE; return new ColorStateList(new int[][]{ new int[]{-android.R.attr.state_enabled}, new int[]{android.R.attr.state_enabled} }, new int[]{ // Buttons are gray when disabled, so the text needs to be black view instanceof Button ? Color.BLACK : ATEUtil.adjustAlpha(color, 0.3f), color }); } @Override public void process(@NonNull Context context, @Nullable String key, @NonNull View view, @NonNull String suffix) { final TextView tv = (TextView) view; final ColorResult result = getColorFromSuffix(context, key, view, suffix); if (result == null) return; if (mHintMode) result.adjustAlpha(0.5f); final ColorStateList sl = getTextSelector(result.getColor(), view, false); if (mLinkMode) { tv.setLinkTextColor(sl); } else if (mHintMode) { tv.setHintTextColor(sl); // Sets parent TextInputLayout hint color if (view.getParent() != null && view.getParent() instanceof TextInputLayout) { final TextInputLayout til = (TextInputLayout) view.getParent(); TextInputLayoutUtil.setHint(til, result.getColor()); } } else { tv.setTextColor(sl); } } }
{ "pile_set_name": "Github" }
sha256:a507d28fae8457404ef6316034d5693c6883caeb081d584b5ecaa36cbcc4ef50
{ "pile_set_name": "Github" }
from .debug_driver import DebugDriver, DebugState class DummyDebugDriver(DebugDriver): """ Stub implementation of the debug driver """ def __init__(self): super().__init__() self.status = DebugState.STOPPED def run(self): self.status = DebugState.RUNNING def restart(self): self.status = DebugState.RUNNING def step(self): pass def stop(self): self.status = DebugState.STOPPED def get_status(self): return self.status def get_registers(self, registers): return {r: 0 for r in registers} def get_pc(self): return 0 def get_fp(self): return 0 def set_breakpoint(self, address): pass def clear_breakpoint(self, address): pass def read_mem(self, address, size): return bytes(size) def write_mem(self, address, data): pass def update_status(self): pass
{ "pile_set_name": "Github" }
// -------------------------------------------------------------------------------------------------------------------- // <copyright file="IReportLogic.cs" company="Brandon Seydel"> // N/A // </copyright> // -------------------------------------------------------------------------------------------------------------------- using System.Collections.Generic; using System.Threading.Tasks; using MailChimp.Net.Core; using MailChimp.Net.Models; namespace MailChimp.Net.Interfaces { /// <summary> /// The ReportLogic interface. /// </summary> public interface IReportLogic { Task<AbuseReportResponse> GetAbuseReportsAsync(string campaignId, BaseRequest request = null); Task<AbuseReport> GetAbuseReportAsync(string campaignId, string reportId, BaseRequest request = null); Task<SentToResponse> GetSentToRecipientsResponseAsync(string campaignId, QueryableBaseRequest request = null); Task<IEnumerable<Report>> GetAllReportsAsync(ReportRequest request = null); Task<IEnumerable<Advice>> GetCampaignAdviceAsync(string campaignId, BaseRequest request = null); Task<IEnumerable<Open>> GetCampaignOpenReportAsync(string campaignId, QueryableBaseRequest request = null); Task<int> GetCampaignOpenReportCountAsync(string campaignId, QueryableBaseRequest request = null); Task<IEnumerable<UrlClicked>> GetClickReportAsync(string campaignId, QueryableBaseRequest request = null); Task<UrlClicked> GetClickReportDetailsAsync(string campaignId, string linkId, BaseRequest request = null); Task<ClickMember> GetClickReportMemberAsync(string campaignId, string linkId, string emailAddressOrHash, BaseRequest request = null); Task<IEnumerable<ClickMember>> GetClickReportMembersAsync(string campaignId, string linkId, QueryableBaseRequest request = null); Task<IEnumerable<Domain>> GetDomainPerformanceAsync(string campaignId, BaseRequest request = null); Task<EepUrlActivity> GetEepUrlReportAsync(string campaignId, BaseRequest request = null); Task<IEnumerable<EmailActivity>> GetEmailActivitiesAsync(string campaignId, QueryableBaseRequest request = null); Task<EmailResponse> GetEmailActivitiesResponseAsync(string campaignId, QueryableBaseRequest request = null); Task<EmailActivity> GetEmailActivityAsync(string campaignId, string emailAddressOrHash, BaseRequest request = null); Task<IEnumerable<OpenLocation>> GetLocationsAsync(string campaignId, BaseRequest request = null); Task<Report> GetReportAsync(string campaignId, BaseRequest request = null); Task<ReportResponse> GetResponseAsync(ReportRequest request = null); Task<SentTo> GetSentToRecipientAsync(string campaignId, string emailAddressOrHash, QueryableBaseRequest request = null); Task<IEnumerable<SentTo>> GetSentToRecipientsAsync(string campaignId, QueryableBaseRequest request = null); Task<IEnumerable<Report>> GetSubReportAsync(string campaignId, BaseRequest request = null); Task<Unsubscribe> GetUnsubscriberAsync(string campaignId, string emailAddressOrHash, BaseRequest request = null); Task<IEnumerable<Unsubscribe>> GetUnsubscribesAsync(string campaignId, QueryableBaseRequest request = null); Task<int> GetUnsubscribesCountAsync(string campaignId, QueryableBaseRequest request = null); } }
{ "pile_set_name": "Github" }
polygon 1 1.269375E+01 4.180566E+01 1.269377E+01 4.180559E+01 1.269370E+01 4.180548E+01 1.269349E+01 4.180503E+01 1.269285E+01 4.180431E+01 1.269276E+01 4.180419E+01 1.269374E+01 4.180394E+01 1.269427E+01 4.180373E+01 1.269522E+01 4.180289E+01 1.269511E+01 4.180273E+01 1.269474E+01 4.180215E+01 1.269423E+01 4.180142E+01 1.269402E+01 4.180112E+01 1.269561E+01 4.180048E+01 1.269696E+01 4.180020E+01 1.269858E+01 4.179973E+01 1.270006E+01 4.179939E+01 1.270022E+01 4.179935E+01 1.270303E+01 4.179897E+01 1.271057E+01 4.179876E+01 1.271858E+01 4.179986E+01 1.272431E+01 4.180062E+01 1.272393E+01 4.180582E+01 1.272750E+01 4.180944E+01 1.272792E+01 4.181031E+01 1.272793E+01 4.181034E+01 1.272543E+01 4.182092E+01 1.272378E+01 4.182570E+01 1.271652E+01 4.183357E+01 1.271538E+01 4.183456E+01 1.271498E+01 4.183537E+01 1.271442E+01 4.183748E+01 1.271220E+01 4.183776E+01 1.271175E+01 4.183781E+01 1.271090E+01 4.183523E+01 1.271084E+01 4.183511E+01 1.270940E+01 4.183483E+01 1.270951E+01 4.183424E+01 1.270875E+01 4.183403E+01 1.270742E+01 4.183367E+01 1.270706E+01 4.183641E+01 1.270658E+01 4.183715E+01 1.270598E+01 4.183833E+01 1.270555E+01 4.183924E+01 1.270506E+01 4.183965E+01 1.270425E+01 4.184026E+01 1.270321E+01 4.184000E+01 1.270226E+01 4.183815E+01 1.270239E+01 4.183754E+01 1.270310E+01 4.183501E+01 1.270409E+01 4.183283E+01 1.270441E+01 4.183204E+01 1.270506E+01 4.183044E+01 1.270515E+01 4.182992E+01 1.270521E+01 4.182942E+01 1.270275E+01 4.182216E+01 1.270152E+01 4.182153E+01 1.270160E+01 4.182107E+01 1.270162E+01 4.182107E+01 1.270173E+01 4.182099E+01 1.270190E+01 4.182016E+01 1.270204E+01 4.181971E+01 1.270205E+01 4.181942E+01 1.270193E+01 4.181906E+01 1.270182E+01 4.181818E+01 1.270190E+01 4.181781E+01 1.270241E+01 4.181730E+01 1.270265E+01 4.181694E+01 1.270266E+01 4.181667E+01 1.270191E+01 4.181663E+01 1.270204E+01 4.181494E+01 1.270212E+01 4.181448E+01 1.270164E+01 4.181424E+01 1.270148E+01 4.181426E+01 1.270206E+01 4.181403E+01 1.270258E+01 4.181358E+01 1.270269E+01 4.181308E+01 1.270220E+01 4.181305E+01 1.270202E+01 4.181299E+01 1.270161E+01 4.181250E+01 1.270101E+01 4.181182E+01 1.270068E+01 4.181172E+01 1.270046E+01 4.181172E+01 1.270004E+01 4.181198E+01 1.269961E+01 4.181236E+01 1.269791E+01 4.181434E+01 1.269869E+01 4.181455E+01 1.269923E+01 4.181479E+01 1.269969E+01 4.181517E+01 1.269986E+01 4.181528E+01 1.269892E+01 4.181641E+01 1.270082E+01 4.181725E+01 1.270062E+01 4.181745E+01 1.269876E+01 4.181973E+01 1.269464E+01 4.181784E+01 1.269303E+01 4.181716E+01 1.269251E+01 4.181704E+01 1.269173E+01 4.181703E+01 1.269039E+01 4.181725E+01 1.268994E+01 4.181724E+01 1.268918E+01 4.181708E+01 1.268861E+01 4.181682E+01 1.268795E+01 4.181640E+01 1.268760E+01 4.181604E+01 1.268731E+01 4.181550E+01 1.268707E+01 4.181480E+01 1.268693E+01 4.181461E+01 1.268625E+01 4.181429E+01 1.268568E+01 4.181399E+01 1.268551E+01 4.181375E+01 1.268544E+01 4.181352E+01 1.268535E+01 4.181249E+01 1.268553E+01 4.181239E+01 1.268592E+01 4.181128E+01 1.268627E+01 4.181062E+01 1.268652E+01 4.181016E+01 1.268650E+01 4.180991E+01 1.268650E+01 4.180984E+01 1.268689E+01 4.180979E+01 1.268745E+01 4.180963E+01 1.268777E+01 4.180955E+01 1.268820E+01 4.180957E+01 1.268849E+01 4.180947E+01 1.268934E+01 4.180909E+01 1.268940E+01 4.180905E+01 1.268991E+01 4.180873E+01 1.269005E+01 4.180865E+01 1.269076E+01 4.180827E+01 1.269140E+01 4.180816E+01 1.269183E+01 4.180777E+01 1.269220E+01 4.180705E+01 1.269247E+01 4.180672E+01 1.269272E+01 4.180642E+01 1.269298E+01 4.180627E+01 1.269370E+01 4.180585E+01 1.269375E+01 4.180566E+01 END END
{ "pile_set_name": "Github" }
/* Front end styles */ .wp-block-jetpack-event-countdown { text-align: center; position: relative; .event-countdown__counter { text-transform: uppercase; font-size: 16px; font-weight: 600; } .event-countdown__counter span { margin: 0 1em; display: inline-flex; align-items: center; } .event-countdown__counter span strong { margin-right: 8px; } .event-countdown__day { line-height: 1; font-weight: 900; display: block; } .event-countdown__hour, .event-countdown__minute, .event-countdown__second { min-width: 1.5em; // A min-width means 2 digit numbers don't cause too much resizing. text-align: right; } .event-countdown__counter p, .event-countdown__event-title p { margin-top: 1rem; margin-bottom: 1rem; } // It's time! .event-countdown__counter.event-countdown__counter-stopped p, .event-countdown__counter.event-countdown__counter-stopped span, .event-countdown__counter.event-countdown__counter-stopped strong { font-size: 100%; display: inline; } .event-countdown__counter-stopped + .event-countdown__event-title { position: absolute; left: 1em; right: 1em; top: 25%; } } // Base font sizes. .wp-block-jetpack-event-countdown { .event-countdown__counter span strong { font-size: 24px; } .event-countdown__day { font-size: 56px; } .event-countdown__event-title { font-size: 36px; } } // Font size beyond mobile. @media only screen and ( min-width: 600px ) { .wp-block-jetpack-event-countdown { .event-countdown__counter span strong { font-size: 36px; } .event-countdown__day { font-size: 96px; } .event-countdown__event-title { font-size: 48px; } } } /* Hidden timestamp used by the script to extract the event time. */ .event-countdown__date { display: none; } /** * Fireworks */ $ecf_color1: #fff922; $ecf_color2: #685fd7; $ecf_color3: #ecfec7; $ecf_color4: #a3fdec; $ecf_color5: #feb352; $ecf_color6: #b82806; // Contain the fireworks, so they never cause scrollbars to appear. .event-countdown__fireworks { position: relative; overflow: hidden; z-index: -1; padding-top: 50%; // Make responsive. > .event-countdown__fireworks-before, > .event-countdown__fireworks-after { will-change: transform; position: absolute; top: 0; width: 4px; height: 4px; border-radius: 50%; animation: 1s event_countdown_bang ease-out infinite backwards, 1s event_countdown_gravity ease-in infinite backwards, 5s event_countdown_position linear infinite backwards; mix-blend-mode: overlay; box-shadow: -120px -218.66667px $ecf_color1, 248px -16.66667px $ecf_color3, 190px 16.33333px $ecf_color1, -113px -308.66667px $ecf_color5, -109px -287.66667px $ecf_color6, -50px -313.66667px $ecf_color5, 226px -31.66667px $ecf_color5, 180px -351.66667px $ecf_color5, -12px -338.66667px $ecf_color3, 220px -388.66667px $ecf_color3, -69px -27.66667px $ecf_color5, -111px -339.66667px $ecf_color1, 155px -237.66667px $ecf_color2, -152px -380.66667px $ecf_color2, -50px -37.66667px $ecf_color2, -95px -175.66667px $ecf_color3, -88px 10.33333px $ecf_color1, 112px -309.66667px $ecf_color1, 69px -415.66667px $ecf_color5, 168px -100.66667px $ecf_color5, -244px 24.33333px $ecf_color6, 97px -325.66667px $ecf_color5, -211px -182.66667px $ecf_color3, 236px -126.66667px $ecf_color4, 140px -196.66667px $ecf_color1, 125px -175.66667px $ecf_color2, 118px -381.66667px $ecf_color5, 144px -111.66667px $ecf_color6, 36px -78.66667px $ecf_color4, -63px -196.66667px $ecf_color2, -218px -227.66667px $ecf_color3, -134px -377.66667px $ecf_color4, -36px -412.66667px $ecf_color4, 209px -106.66667px $ecf_color3, 91px -278.66667px $ecf_color1, -22px -191.66667px $ecf_color3, 139px -392.66667px $ecf_color3, 56px -2.66667px $ecf_color1, -156px -276.66667px $ecf_color5, -163px -233.66667px $ecf_color3, -238px -346.66667px $ecf_color3, 62px -363.66667px $ecf_color1, 244px -170.66667px $ecf_color1, 224px -142.66667px $ecf_color2, 141px -208.66667px $ecf_color2, 211px -285.66667px $ecf_color5, 181px -128.66667px $ecf_color1, 90px -123.66667px $ecf_color2, 189px 70.33333px $ecf_color3, -18px -383.66667px $ecf_color3, 100px -6.66667px $ecf_color5; } > .event-countdown__fireworks-after { animation-delay: 1.25s, 1.25s, 1.25s; animation-duration: 1.25s, 1.25s, 6.25s; } } @keyframes event_countdown_bang { from { box-shadow: 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white, 0 0 white; } } @keyframes event_countdown_gravity { to { transform: translateY( 200px ); -webkit-transform: translateY( 200px ); opacity: 0; } } @keyframes event_countdown_position { 0%, 19.9% { margin-top: 10%; margin-left: 40%; } 20%, 39.9% { margin-top: 40%; margin-left: 30%; } 40%, 59.9% { margin-top: 20%; margin-left: 70%; } 60%, 79.9% { margin-top: 30%; margin-left: 20%; } 80%, 99.9% { margin-top: 30%; margin-left: 80%; } }
{ "pile_set_name": "Github" }
#!/usr/bin/env python # Copyright 2013 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Unit tests for the input.py file.""" import gyp.input import unittest import sys class TestFindCycles(unittest.TestCase): def setUp(self): self.nodes = {} for x in ('a', 'b', 'c', 'd', 'e'): self.nodes[x] = gyp.input.DependencyGraphNode(x) def _create_dependency(self, dependent, dependency): dependent.dependencies.append(dependency) dependency.dependents.append(dependent) def test_no_cycle_empty_graph(self): for label, node in self.nodes.iteritems(): self.assertEquals([], node.FindCycles()) def test_no_cycle_line(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['b'], self.nodes['c']) self._create_dependency(self.nodes['c'], self.nodes['d']) for label, node in self.nodes.iteritems(): self.assertEquals([], node.FindCycles()) def test_no_cycle_dag(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['a'], self.nodes['c']) self._create_dependency(self.nodes['b'], self.nodes['c']) for label, node in self.nodes.iteritems(): self.assertEquals([], node.FindCycles()) def test_cycle_self_reference(self): self._create_dependency(self.nodes['a'], self.nodes['a']) self.assertEquals([[self.nodes['a'], self.nodes['a']]], self.nodes['a'].FindCycles()) def test_cycle_two_nodes(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['b'], self.nodes['a']) self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]], self.nodes['a'].FindCycles()) self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]], self.nodes['b'].FindCycles()) def test_two_cycles(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['b'], self.nodes['a']) self._create_dependency(self.nodes['b'], self.nodes['c']) self._create_dependency(self.nodes['c'], self.nodes['b']) cycles = self.nodes['a'].FindCycles() self.assertTrue( [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles) self.assertTrue( [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles) self.assertEquals(2, len(cycles)) def test_big_cycle(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['b'], self.nodes['c']) self._create_dependency(self.nodes['c'], self.nodes['d']) self._create_dependency(self.nodes['d'], self.nodes['e']) self._create_dependency(self.nodes['e'], self.nodes['a']) self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['c'], self.nodes['d'], self.nodes['e'], self.nodes['a']]], self.nodes['a'].FindCycles()) if __name__ == '__main__': unittest.main()
{ "pile_set_name": "Github" }
//////////////////////////////////////////////////////////////////////////////// /// DISCLAIMER /// /// Copyright 2014-2020 ArangoDB GmbH, Cologne, Germany /// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany /// /// Licensed under the Apache License, Version 2.0 (the "License"); /// you may not use this file except in compliance with the License. /// You may obtain a copy of the License at /// /// http://www.apache.org/licenses/LICENSE-2.0 /// /// Unless required by applicable law or agreed to in writing, software /// distributed under the License is distributed on an "AS IS" BASIS, /// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. /// See the License for the specific language governing permissions and /// limitations under the License. /// /// Copyright holder is ArangoDB GmbH, Cologne, Germany /// /// @author Jan Steemann //////////////////////////////////////////////////////////////////////////////// #include "Section.h" #include "ApplicationFeatures/ShellColorsFeature.h" #include "ProgramOptions/Option.h" #include <iostream> using namespace arangodb::options; // whether or not the section has (displayable) options bool Section::hasOptions() const { if (!hidden) { for (auto const& it : options) { if (!it.second.hasFlag(arangodb::options::Flags::Hidden)) { return true; } } } return false; } // print help for a section // the special search string "." will show help for all sections, even if hidden void Section::printHelp(std::string const& search, size_t tw, size_t ow, bool colors) const { if (search != "." && (hidden || !hasOptions())) { return; } if (colors) { std::cout << "Section '" << ShellColorsFeature::SHELL_COLOR_BRIGHT << displayName() << ShellColorsFeature::SHELL_COLOR_RESET << "' (" << description << ")" << std::endl; } else { std::cout << "Section '" << displayName() << "' (" << description << ")" << std::endl; } // propagate print command to options for (auto const& it : options) { it.second.printHelp(search, tw, ow, colors); } std::cout << std::endl; } // determine display width for a section size_t Section::optionsWidth() const { size_t width = 0; if (!hidden) { for (auto const& it : options) { width = (std::max)(width, it.second.optionsWidth()); } } return width; }
{ "pile_set_name": "Github" }
#!/bin/bash if grep -q 'source /opt/autoenv/activate.sh' ~/.bashrc; then echo -e "\033[31m 正在自动载入 python 环境 \033[0m" else echo -e "\033[31m 不支持自动升级,请参考 http://docs.jumpserver.org/zh/docs/upgrade.html 手动升级 \033[0m" exit 0 fi source ~/.bashrc cd `dirname $0`/ && cd .. && ./jms stop jumpserver_backup=/tmp/jumpserver_backup$(date -d "today" +"%Y%m%d_%H%M%S") mkdir -p $jumpserver_backup cp -r ./* $jumpserver_backup echo -e "\033[31m 是否需要备份Jumpserver数据库 \033[0m" stty erase ^H read -p "确认备份请按Y,否则按其他键跳过备份 " a if [ "$a" == y -o "$a" == Y ];then echo -e "\033[31m 正在备份数据库 \033[0m" echo -e "\033[31m 请手动输入数据库信息 \033[0m" read -p '请输入Jumpserver数据库ip:' DB_HOST read -p '请输入Jumpserver数据库端口:' DB_PORT read -p '请输入Jumpserver数据库名称:' DB_NAME read -p '请输入有权限导出数据库的用户:' DB_USER read -p '请输入该用户的密码:' DB_PASSWORD mysqldump -h$DB_HOST -P$DB_PORT -u$DB_USER -p$DB_PASSWORD $DB_NAME > /$jumpserver_backup/$DB_NAME$(date -d "today" +"%Y%m%d_%H%M%S").sql || { echo -e "\033[31m 备份数据库失败,请检查输入是否有误 \033[0m" exit 1 } echo -e "\033[31m 备份数据库完成 \033[0m" else echo -e "\033[31m 已取消备份数据库操作 \033[0m" fi git pull && pip install -r requirements/requirements.txt && cd utils && sh make_migrations.sh cd .. && ./jms start all -d echo -e "\033[31m 请检查jumpserver是否启动成功 \033[0m" echo -e "\033[31m 备份文件存放于$jumpserver_backup目录 \033[0m" stty erase ^? exit 0
{ "pile_set_name": "Github" }
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.CustomSearch.Types.Product -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- module Network.Google.CustomSearch.Types.Product where import Network.Google.CustomSearch.Types.Sum import Network.Google.Prelude -- -- /See:/ 'promotionImage' smart constructor. data PromotionImage = PromotionImage' { _piHeight :: !(Maybe (Textual Int32)) , _piWidth :: !(Maybe (Textual Int32)) , _piSource :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'PromotionImage' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'piHeight' -- -- * 'piWidth' -- -- * 'piSource' promotionImage :: PromotionImage promotionImage = PromotionImage' {_piHeight = Nothing, _piWidth = Nothing, _piSource = Nothing} piHeight :: Lens' PromotionImage (Maybe Int32) piHeight = lens _piHeight (\ s a -> s{_piHeight = a}) . mapping _Coerce piWidth :: Lens' PromotionImage (Maybe Int32) piWidth = lens _piWidth (\ s a -> s{_piWidth = a}) . mapping _Coerce piSource :: Lens' PromotionImage (Maybe Text) piSource = lens _piSource (\ s a -> s{_piSource = a}) instance FromJSON PromotionImage where parseJSON = withObject "PromotionImage" (\ o -> PromotionImage' <$> (o .:? "height") <*> (o .:? "width") <*> (o .:? "source")) instance ToJSON PromotionImage where toJSON PromotionImage'{..} = object (catMaybes [("height" .=) <$> _piHeight, ("width" .=) <$> _piWidth, ("source" .=) <$> _piSource]) -- -- /See:/ 'context' smart constructor. data Context = Context' { _cFacets :: !(Maybe [[ContextFacetsItemItem]]) , _cTitle :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'Context' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'cFacets' -- -- * 'cTitle' context :: Context context = Context' {_cFacets = Nothing, _cTitle = Nothing} cFacets :: Lens' Context [[ContextFacetsItemItem]] cFacets = lens _cFacets (\ s a -> s{_cFacets = a}) . _Default . _Coerce cTitle :: Lens' Context (Maybe Text) cTitle = lens _cTitle (\ s a -> s{_cTitle = a}) instance FromJSON Context where parseJSON = withObject "Context" (\ o -> Context' <$> (o .:? "facets" .!= mempty) <*> (o .:? "title")) instance ToJSON Context where toJSON Context'{..} = object (catMaybes [("facets" .=) <$> _cFacets, ("title" .=) <$> _cTitle]) -- -- /See:/ 'searchQueries' smart constructor. newtype SearchQueries = SearchQueries' { _sqAddtional :: HashMap Text [Query] } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'SearchQueries' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'sqAddtional' searchQueries :: HashMap Text [Query] -- ^ 'sqAddtional' -> SearchQueries searchQueries pSqAddtional_ = SearchQueries' {_sqAddtional = _Coerce # pSqAddtional_} sqAddtional :: Lens' SearchQueries (HashMap Text [Query]) sqAddtional = lens _sqAddtional (\ s a -> s{_sqAddtional = a}) . _Coerce instance FromJSON SearchQueries where parseJSON = withObject "SearchQueries" (\ o -> SearchQueries' <$> (parseJSONObject o)) instance ToJSON SearchQueries where toJSON = toJSON . _sqAddtional -- -- /See:/ 'resultPagemapAdditionalItem' smart constructor. newtype ResultPagemapAdditionalItem = ResultPagemapAdditionalItem' { _rpaiAddtional :: HashMap Text JSONValue } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ResultPagemapAdditionalItem' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rpaiAddtional' resultPagemapAdditionalItem :: HashMap Text JSONValue -- ^ 'rpaiAddtional' -> ResultPagemapAdditionalItem resultPagemapAdditionalItem pRpaiAddtional_ = ResultPagemapAdditionalItem' {_rpaiAddtional = _Coerce # pRpaiAddtional_} rpaiAddtional :: Lens' ResultPagemapAdditionalItem (HashMap Text JSONValue) rpaiAddtional = lens _rpaiAddtional (\ s a -> s{_rpaiAddtional = a}) . _Coerce instance FromJSON ResultPagemapAdditionalItem where parseJSON = withObject "ResultPagemapAdditionalItem" (\ o -> ResultPagemapAdditionalItem' <$> (parseJSONObject o)) instance ToJSON ResultPagemapAdditionalItem where toJSON = toJSON . _rpaiAddtional -- -- /See:/ 'searchURL' smart constructor. data SearchURL = SearchURL' { _suType :: !Text , _suTemplate :: !Text } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'SearchURL' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'suType' -- -- * 'suTemplate' searchURL :: SearchURL searchURL = SearchURL' { _suType = "application/json" , _suTemplate = "https://www.googleapis.com/customsearch/v1?q={searchTerms}&num={count?}&start={startIndex?}&lr={language?}&safe={safe?}&cx={cx?}&sort={sort?}&filter={filter?}&gl={gl?}&cr={cr?}&googlehost={googleHost?}&c2coff={disableCnTwTranslation?}&hq={hq?}&hl={hl?}&siteSearch={siteSearch?}&siteSearchFilter={siteSearchFilter?}&exactTerms={exactTerms?}&excludeTerms={excludeTerms?}&linkSite={linkSite?}&orTerms={orTerms?}&relatedSite={relatedSite?}&dateRestrict={dateRestrict?}&lowRange={lowRange?}&highRange={highRange?}&searchType={searchType}&fileType={fileType?}&rights={rights?}&imgSize={imgSize?}&imgType={imgType?}&imgColorType={imgColorType?}&imgDominantColor={imgDominantColor?}&alt=json" } suType :: Lens' SearchURL Text suType = lens _suType (\ s a -> s{_suType = a}) suTemplate :: Lens' SearchURL Text suTemplate = lens _suTemplate (\ s a -> s{_suTemplate = a}) instance FromJSON SearchURL where parseJSON = withObject "SearchURL" (\ o -> SearchURL' <$> (o .:? "type" .!= "application/json") <*> (o .:? "template" .!= "https://www.googleapis.com/customsearch/v1?q={searchTerms}&num={count?}&start={startIndex?}&lr={language?}&safe={safe?}&cx={cx?}&sort={sort?}&filter={filter?}&gl={gl?}&cr={cr?}&googlehost={googleHost?}&c2coff={disableCnTwTranslation?}&hq={hq?}&hl={hl?}&siteSearch={siteSearch?}&siteSearchFilter={siteSearchFilter?}&exactTerms={exactTerms?}&excludeTerms={excludeTerms?}&linkSite={linkSite?}&orTerms={orTerms?}&relatedSite={relatedSite?}&dateRestrict={dateRestrict?}&lowRange={lowRange?}&highRange={highRange?}&searchType={searchType}&fileType={fileType?}&rights={rights?}&imgSize={imgSize?}&imgType={imgType?}&imgColorType={imgColorType?}&imgDominantColor={imgDominantColor?}&alt=json")) instance ToJSON SearchURL where toJSON SearchURL'{..} = object (catMaybes [Just ("type" .= _suType), Just ("template" .= _suTemplate)]) -- -- /See:/ 'searchSpelling' smart constructor. data SearchSpelling = SearchSpelling' { _ssCorrectedQuery :: !(Maybe Text) , _ssHTMLCorrectedQuery :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'SearchSpelling' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'ssCorrectedQuery' -- -- * 'ssHTMLCorrectedQuery' searchSpelling :: SearchSpelling searchSpelling = SearchSpelling' {_ssCorrectedQuery = Nothing, _ssHTMLCorrectedQuery = Nothing} ssCorrectedQuery :: Lens' SearchSpelling (Maybe Text) ssCorrectedQuery = lens _ssCorrectedQuery (\ s a -> s{_ssCorrectedQuery = a}) ssHTMLCorrectedQuery :: Lens' SearchSpelling (Maybe Text) ssHTMLCorrectedQuery = lens _ssHTMLCorrectedQuery (\ s a -> s{_ssHTMLCorrectedQuery = a}) instance FromJSON SearchSpelling where parseJSON = withObject "SearchSpelling" (\ o -> SearchSpelling' <$> (o .:? "correctedQuery") <*> (o .:? "htmlCorrectedQuery")) instance ToJSON SearchSpelling where toJSON SearchSpelling'{..} = object (catMaybes [("correctedQuery" .=) <$> _ssCorrectedQuery, ("htmlCorrectedQuery" .=) <$> _ssHTMLCorrectedQuery]) -- -- /See:/ 'resultImage' smart constructor. data ResultImage = ResultImage' { _riThumbnailLink :: !(Maybe Text) , _riHeight :: !(Maybe (Textual Int32)) , _riByteSize :: !(Maybe (Textual Int32)) , _riContextLink :: !(Maybe Text) , _riThumbnailHeight :: !(Maybe (Textual Int32)) , _riWidth :: !(Maybe (Textual Int32)) , _riThumbnailWidth :: !(Maybe (Textual Int32)) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ResultImage' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'riThumbnailLink' -- -- * 'riHeight' -- -- * 'riByteSize' -- -- * 'riContextLink' -- -- * 'riThumbnailHeight' -- -- * 'riWidth' -- -- * 'riThumbnailWidth' resultImage :: ResultImage resultImage = ResultImage' { _riThumbnailLink = Nothing , _riHeight = Nothing , _riByteSize = Nothing , _riContextLink = Nothing , _riThumbnailHeight = Nothing , _riWidth = Nothing , _riThumbnailWidth = Nothing } riThumbnailLink :: Lens' ResultImage (Maybe Text) riThumbnailLink = lens _riThumbnailLink (\ s a -> s{_riThumbnailLink = a}) riHeight :: Lens' ResultImage (Maybe Int32) riHeight = lens _riHeight (\ s a -> s{_riHeight = a}) . mapping _Coerce riByteSize :: Lens' ResultImage (Maybe Int32) riByteSize = lens _riByteSize (\ s a -> s{_riByteSize = a}) . mapping _Coerce riContextLink :: Lens' ResultImage (Maybe Text) riContextLink = lens _riContextLink (\ s a -> s{_riContextLink = a}) riThumbnailHeight :: Lens' ResultImage (Maybe Int32) riThumbnailHeight = lens _riThumbnailHeight (\ s a -> s{_riThumbnailHeight = a}) . mapping _Coerce riWidth :: Lens' ResultImage (Maybe Int32) riWidth = lens _riWidth (\ s a -> s{_riWidth = a}) . mapping _Coerce riThumbnailWidth :: Lens' ResultImage (Maybe Int32) riThumbnailWidth = lens _riThumbnailWidth (\ s a -> s{_riThumbnailWidth = a}) . mapping _Coerce instance FromJSON ResultImage where parseJSON = withObject "ResultImage" (\ o -> ResultImage' <$> (o .:? "thumbnailLink") <*> (o .:? "height") <*> (o .:? "byteSize") <*> (o .:? "contextLink") <*> (o .:? "thumbnailHeight") <*> (o .:? "width") <*> (o .:? "thumbnailWidth")) instance ToJSON ResultImage where toJSON ResultImage'{..} = object (catMaybes [("thumbnailLink" .=) <$> _riThumbnailLink, ("height" .=) <$> _riHeight, ("byteSize" .=) <$> _riByteSize, ("contextLink" .=) <$> _riContextLink, ("thumbnailHeight" .=) <$> _riThumbnailHeight, ("width" .=) <$> _riWidth, ("thumbnailWidth" .=) <$> _riThumbnailWidth]) -- -- /See:/ 'resultPagemap' smart constructor. newtype ResultPagemap = ResultPagemap' { _rpAddtional :: HashMap Text [ResultPagemapAdditionalItem] } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ResultPagemap' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rpAddtional' resultPagemap :: HashMap Text [ResultPagemapAdditionalItem] -- ^ 'rpAddtional' -> ResultPagemap resultPagemap pRpAddtional_ = ResultPagemap' {_rpAddtional = _Coerce # pRpAddtional_} rpAddtional :: Lens' ResultPagemap (HashMap Text [ResultPagemapAdditionalItem]) rpAddtional = lens _rpAddtional (\ s a -> s{_rpAddtional = a}) . _Coerce instance FromJSON ResultPagemap where parseJSON = withObject "ResultPagemap" (\ o -> ResultPagemap' <$> (parseJSONObject o)) instance ToJSON ResultPagemap where toJSON = toJSON . _rpAddtional -- -- /See:/ 'result' smart constructor. data Result = Result' { _rMime :: !(Maybe Text) , _rImage :: !(Maybe ResultImage) , _rPagemap :: !(Maybe ResultPagemap) , _rDisplayLink :: !(Maybe Text) , _rFileFormat :: !(Maybe Text) , _rSnippet :: !(Maybe Text) , _rKind :: !Text , _rLink :: !(Maybe Text) , _rHTMLSnippet :: !(Maybe Text) , _rHTMLFormattedURL :: !(Maybe Text) , _rCacheId :: !(Maybe Text) , _rFormattedURL :: !(Maybe Text) , _rHTMLTitle :: !(Maybe Text) , _rLabels :: !(Maybe [ResultLabelsItem]) , _rTitle :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'Result' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rMime' -- -- * 'rImage' -- -- * 'rPagemap' -- -- * 'rDisplayLink' -- -- * 'rFileFormat' -- -- * 'rSnippet' -- -- * 'rKind' -- -- * 'rLink' -- -- * 'rHTMLSnippet' -- -- * 'rHTMLFormattedURL' -- -- * 'rCacheId' -- -- * 'rFormattedURL' -- -- * 'rHTMLTitle' -- -- * 'rLabels' -- -- * 'rTitle' result :: Result result = Result' { _rMime = Nothing , _rImage = Nothing , _rPagemap = Nothing , _rDisplayLink = Nothing , _rFileFormat = Nothing , _rSnippet = Nothing , _rKind = "customsearch#result" , _rLink = Nothing , _rHTMLSnippet = Nothing , _rHTMLFormattedURL = Nothing , _rCacheId = Nothing , _rFormattedURL = Nothing , _rHTMLTitle = Nothing , _rLabels = Nothing , _rTitle = Nothing } rMime :: Lens' Result (Maybe Text) rMime = lens _rMime (\ s a -> s{_rMime = a}) rImage :: Lens' Result (Maybe ResultImage) rImage = lens _rImage (\ s a -> s{_rImage = a}) rPagemap :: Lens' Result (Maybe ResultPagemap) rPagemap = lens _rPagemap (\ s a -> s{_rPagemap = a}) rDisplayLink :: Lens' Result (Maybe Text) rDisplayLink = lens _rDisplayLink (\ s a -> s{_rDisplayLink = a}) rFileFormat :: Lens' Result (Maybe Text) rFileFormat = lens _rFileFormat (\ s a -> s{_rFileFormat = a}) rSnippet :: Lens' Result (Maybe Text) rSnippet = lens _rSnippet (\ s a -> s{_rSnippet = a}) rKind :: Lens' Result Text rKind = lens _rKind (\ s a -> s{_rKind = a}) rLink :: Lens' Result (Maybe Text) rLink = lens _rLink (\ s a -> s{_rLink = a}) rHTMLSnippet :: Lens' Result (Maybe Text) rHTMLSnippet = lens _rHTMLSnippet (\ s a -> s{_rHTMLSnippet = a}) rHTMLFormattedURL :: Lens' Result (Maybe Text) rHTMLFormattedURL = lens _rHTMLFormattedURL (\ s a -> s{_rHTMLFormattedURL = a}) rCacheId :: Lens' Result (Maybe Text) rCacheId = lens _rCacheId (\ s a -> s{_rCacheId = a}) rFormattedURL :: Lens' Result (Maybe Text) rFormattedURL = lens _rFormattedURL (\ s a -> s{_rFormattedURL = a}) rHTMLTitle :: Lens' Result (Maybe Text) rHTMLTitle = lens _rHTMLTitle (\ s a -> s{_rHTMLTitle = a}) rLabels :: Lens' Result [ResultLabelsItem] rLabels = lens _rLabels (\ s a -> s{_rLabels = a}) . _Default . _Coerce rTitle :: Lens' Result (Maybe Text) rTitle = lens _rTitle (\ s a -> s{_rTitle = a}) instance FromJSON Result where parseJSON = withObject "Result" (\ o -> Result' <$> (o .:? "mime") <*> (o .:? "image") <*> (o .:? "pagemap") <*> (o .:? "displayLink") <*> (o .:? "fileFormat") <*> (o .:? "snippet") <*> (o .:? "kind" .!= "customsearch#result") <*> (o .:? "link") <*> (o .:? "htmlSnippet") <*> (o .:? "htmlFormattedUrl") <*> (o .:? "cacheId") <*> (o .:? "formattedUrl") <*> (o .:? "htmlTitle") <*> (o .:? "labels" .!= mempty) <*> (o .:? "title")) instance ToJSON Result where toJSON Result'{..} = object (catMaybes [("mime" .=) <$> _rMime, ("image" .=) <$> _rImage, ("pagemap" .=) <$> _rPagemap, ("displayLink" .=) <$> _rDisplayLink, ("fileFormat" .=) <$> _rFileFormat, ("snippet" .=) <$> _rSnippet, Just ("kind" .= _rKind), ("link" .=) <$> _rLink, ("htmlSnippet" .=) <$> _rHTMLSnippet, ("htmlFormattedUrl" .=) <$> _rHTMLFormattedURL, ("cacheId" .=) <$> _rCacheId, ("formattedUrl" .=) <$> _rFormattedURL, ("htmlTitle" .=) <$> _rHTMLTitle, ("labels" .=) <$> _rLabels, ("title" .=) <$> _rTitle]) -- -- /See:/ 'resultLabelsItem' smart constructor. data ResultLabelsItem = ResultLabelsItem' { _rliName :: !(Maybe Text) , _rliDisplayName :: !(Maybe Text) , _rliLabelWithOp :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ResultLabelsItem' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rliName' -- -- * 'rliDisplayName' -- -- * 'rliLabelWithOp' resultLabelsItem :: ResultLabelsItem resultLabelsItem = ResultLabelsItem' {_rliName = Nothing, _rliDisplayName = Nothing, _rliLabelWithOp = Nothing} rliName :: Lens' ResultLabelsItem (Maybe Text) rliName = lens _rliName (\ s a -> s{_rliName = a}) rliDisplayName :: Lens' ResultLabelsItem (Maybe Text) rliDisplayName = lens _rliDisplayName (\ s a -> s{_rliDisplayName = a}) rliLabelWithOp :: Lens' ResultLabelsItem (Maybe Text) rliLabelWithOp = lens _rliLabelWithOp (\ s a -> s{_rliLabelWithOp = a}) instance FromJSON ResultLabelsItem where parseJSON = withObject "ResultLabelsItem" (\ o -> ResultLabelsItem' <$> (o .:? "name") <*> (o .:? "displayName") <*> (o .:? "label_with_op")) instance ToJSON ResultLabelsItem where toJSON ResultLabelsItem'{..} = object (catMaybes [("name" .=) <$> _rliName, ("displayName" .=) <$> _rliDisplayName, ("label_with_op" .=) <$> _rliLabelWithOp]) -- -- /See:/ 'searchSearchInformation' smart constructor. data SearchSearchInformation = SearchSearchInformation' { _ssiSearchTime :: !(Maybe (Textual Double)) , _ssiFormattedSearchTime :: !(Maybe Text) , _ssiTotalResults :: !(Maybe (Textual Int64)) , _ssiFormattedTotalResults :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'SearchSearchInformation' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'ssiSearchTime' -- -- * 'ssiFormattedSearchTime' -- -- * 'ssiTotalResults' -- -- * 'ssiFormattedTotalResults' searchSearchInformation :: SearchSearchInformation searchSearchInformation = SearchSearchInformation' { _ssiSearchTime = Nothing , _ssiFormattedSearchTime = Nothing , _ssiTotalResults = Nothing , _ssiFormattedTotalResults = Nothing } ssiSearchTime :: Lens' SearchSearchInformation (Maybe Double) ssiSearchTime = lens _ssiSearchTime (\ s a -> s{_ssiSearchTime = a}) . mapping _Coerce ssiFormattedSearchTime :: Lens' SearchSearchInformation (Maybe Text) ssiFormattedSearchTime = lens _ssiFormattedSearchTime (\ s a -> s{_ssiFormattedSearchTime = a}) ssiTotalResults :: Lens' SearchSearchInformation (Maybe Int64) ssiTotalResults = lens _ssiTotalResults (\ s a -> s{_ssiTotalResults = a}) . mapping _Coerce ssiFormattedTotalResults :: Lens' SearchSearchInformation (Maybe Text) ssiFormattedTotalResults = lens _ssiFormattedTotalResults (\ s a -> s{_ssiFormattedTotalResults = a}) instance FromJSON SearchSearchInformation where parseJSON = withObject "SearchSearchInformation" (\ o -> SearchSearchInformation' <$> (o .:? "searchTime") <*> (o .:? "formattedSearchTime") <*> (o .:? "totalResults") <*> (o .:? "formattedTotalResults")) instance ToJSON SearchSearchInformation where toJSON SearchSearchInformation'{..} = object (catMaybes [("searchTime" .=) <$> _ssiSearchTime, ("formattedSearchTime" .=) <$> _ssiFormattedSearchTime, ("totalResults" .=) <$> _ssiTotalResults, ("formattedTotalResults" .=) <$> _ssiFormattedTotalResults]) -- -- /See:/ 'query' smart constructor. data Query = Query' { _qImgDominantColor :: !(Maybe Text) , _qOutputEncoding :: !(Maybe Text) , _qSiteSearchFilter :: !(Maybe Text) , _qInputEncoding :: !(Maybe Text) , _qOrTerms :: !(Maybe Text) , _qSearchTerms :: !(Maybe Text) , _qStartPage :: !(Maybe (Textual Int32)) , _qRights :: !(Maybe Text) , _qCount :: !(Maybe (Textual Int32)) , _qExcludeTerms :: !(Maybe Text) , _qFileType :: !(Maybe Text) , _qSearchType :: !(Maybe Text) , _qGoogleHost :: !(Maybe Text) , _qDisableCnTwTranslation :: !(Maybe Text) , _qRelatedSite :: !(Maybe Text) , _qHl :: !(Maybe Text) , _qSort :: !(Maybe Text) , _qLanguage :: !(Maybe Text) , _qSiteSearch :: !(Maybe Text) , _qFilter :: !(Maybe Text) , _qTotalResults :: !(Maybe (Textual Int64)) , _qDateRestrict :: !(Maybe Text) , _qTitle :: !(Maybe Text) , _qLinkSite :: !(Maybe Text) , _qLowRange :: !(Maybe Text) , _qImgType :: !(Maybe Text) , _qGl :: !(Maybe Text) , _qCx :: !(Maybe Text) , _qImgColorType :: !(Maybe Text) , _qImgSize :: !(Maybe Text) , _qExactTerms :: !(Maybe Text) , _qStartIndex :: !(Maybe (Textual Int32)) , _qCr :: !(Maybe Text) , _qSafe :: !(Maybe Text) , _qHq :: !(Maybe Text) , _qHighRange :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'Query' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'qImgDominantColor' -- -- * 'qOutputEncoding' -- -- * 'qSiteSearchFilter' -- -- * 'qInputEncoding' -- -- * 'qOrTerms' -- -- * 'qSearchTerms' -- -- * 'qStartPage' -- -- * 'qRights' -- -- * 'qCount' -- -- * 'qExcludeTerms' -- -- * 'qFileType' -- -- * 'qSearchType' -- -- * 'qGoogleHost' -- -- * 'qDisableCnTwTranslation' -- -- * 'qRelatedSite' -- -- * 'qHl' -- -- * 'qSort' -- -- * 'qLanguage' -- -- * 'qSiteSearch' -- -- * 'qFilter' -- -- * 'qTotalResults' -- -- * 'qDateRestrict' -- -- * 'qTitle' -- -- * 'qLinkSite' -- -- * 'qLowRange' -- -- * 'qImgType' -- -- * 'qGl' -- -- * 'qCx' -- -- * 'qImgColorType' -- -- * 'qImgSize' -- -- * 'qExactTerms' -- -- * 'qStartIndex' -- -- * 'qCr' -- -- * 'qSafe' -- -- * 'qHq' -- -- * 'qHighRange' query :: Query query = Query' { _qImgDominantColor = Nothing , _qOutputEncoding = Nothing , _qSiteSearchFilter = Nothing , _qInputEncoding = Nothing , _qOrTerms = Nothing , _qSearchTerms = Nothing , _qStartPage = Nothing , _qRights = Nothing , _qCount = Nothing , _qExcludeTerms = Nothing , _qFileType = Nothing , _qSearchType = Nothing , _qGoogleHost = Nothing , _qDisableCnTwTranslation = Nothing , _qRelatedSite = Nothing , _qHl = Nothing , _qSort = Nothing , _qLanguage = Nothing , _qSiteSearch = Nothing , _qFilter = Nothing , _qTotalResults = Nothing , _qDateRestrict = Nothing , _qTitle = Nothing , _qLinkSite = Nothing , _qLowRange = Nothing , _qImgType = Nothing , _qGl = Nothing , _qCx = Nothing , _qImgColorType = Nothing , _qImgSize = Nothing , _qExactTerms = Nothing , _qStartIndex = Nothing , _qCr = Nothing , _qSafe = Nothing , _qHq = Nothing , _qHighRange = Nothing } qImgDominantColor :: Lens' Query (Maybe Text) qImgDominantColor = lens _qImgDominantColor (\ s a -> s{_qImgDominantColor = a}) qOutputEncoding :: Lens' Query (Maybe Text) qOutputEncoding = lens _qOutputEncoding (\ s a -> s{_qOutputEncoding = a}) qSiteSearchFilter :: Lens' Query (Maybe Text) qSiteSearchFilter = lens _qSiteSearchFilter (\ s a -> s{_qSiteSearchFilter = a}) qInputEncoding :: Lens' Query (Maybe Text) qInputEncoding = lens _qInputEncoding (\ s a -> s{_qInputEncoding = a}) qOrTerms :: Lens' Query (Maybe Text) qOrTerms = lens _qOrTerms (\ s a -> s{_qOrTerms = a}) qSearchTerms :: Lens' Query (Maybe Text) qSearchTerms = lens _qSearchTerms (\ s a -> s{_qSearchTerms = a}) qStartPage :: Lens' Query (Maybe Int32) qStartPage = lens _qStartPage (\ s a -> s{_qStartPage = a}) . mapping _Coerce qRights :: Lens' Query (Maybe Text) qRights = lens _qRights (\ s a -> s{_qRights = a}) qCount :: Lens' Query (Maybe Int32) qCount = lens _qCount (\ s a -> s{_qCount = a}) . mapping _Coerce qExcludeTerms :: Lens' Query (Maybe Text) qExcludeTerms = lens _qExcludeTerms (\ s a -> s{_qExcludeTerms = a}) qFileType :: Lens' Query (Maybe Text) qFileType = lens _qFileType (\ s a -> s{_qFileType = a}) qSearchType :: Lens' Query (Maybe Text) qSearchType = lens _qSearchType (\ s a -> s{_qSearchType = a}) qGoogleHost :: Lens' Query (Maybe Text) qGoogleHost = lens _qGoogleHost (\ s a -> s{_qGoogleHost = a}) qDisableCnTwTranslation :: Lens' Query (Maybe Text) qDisableCnTwTranslation = lens _qDisableCnTwTranslation (\ s a -> s{_qDisableCnTwTranslation = a}) qRelatedSite :: Lens' Query (Maybe Text) qRelatedSite = lens _qRelatedSite (\ s a -> s{_qRelatedSite = a}) qHl :: Lens' Query (Maybe Text) qHl = lens _qHl (\ s a -> s{_qHl = a}) qSort :: Lens' Query (Maybe Text) qSort = lens _qSort (\ s a -> s{_qSort = a}) qLanguage :: Lens' Query (Maybe Text) qLanguage = lens _qLanguage (\ s a -> s{_qLanguage = a}) qSiteSearch :: Lens' Query (Maybe Text) qSiteSearch = lens _qSiteSearch (\ s a -> s{_qSiteSearch = a}) qFilter :: Lens' Query (Maybe Text) qFilter = lens _qFilter (\ s a -> s{_qFilter = a}) qTotalResults :: Lens' Query (Maybe Int64) qTotalResults = lens _qTotalResults (\ s a -> s{_qTotalResults = a}) . mapping _Coerce qDateRestrict :: Lens' Query (Maybe Text) qDateRestrict = lens _qDateRestrict (\ s a -> s{_qDateRestrict = a}) qTitle :: Lens' Query (Maybe Text) qTitle = lens _qTitle (\ s a -> s{_qTitle = a}) qLinkSite :: Lens' Query (Maybe Text) qLinkSite = lens _qLinkSite (\ s a -> s{_qLinkSite = a}) qLowRange :: Lens' Query (Maybe Text) qLowRange = lens _qLowRange (\ s a -> s{_qLowRange = a}) qImgType :: Lens' Query (Maybe Text) qImgType = lens _qImgType (\ s a -> s{_qImgType = a}) qGl :: Lens' Query (Maybe Text) qGl = lens _qGl (\ s a -> s{_qGl = a}) qCx :: Lens' Query (Maybe Text) qCx = lens _qCx (\ s a -> s{_qCx = a}) qImgColorType :: Lens' Query (Maybe Text) qImgColorType = lens _qImgColorType (\ s a -> s{_qImgColorType = a}) qImgSize :: Lens' Query (Maybe Text) qImgSize = lens _qImgSize (\ s a -> s{_qImgSize = a}) qExactTerms :: Lens' Query (Maybe Text) qExactTerms = lens _qExactTerms (\ s a -> s{_qExactTerms = a}) qStartIndex :: Lens' Query (Maybe Int32) qStartIndex = lens _qStartIndex (\ s a -> s{_qStartIndex = a}) . mapping _Coerce qCr :: Lens' Query (Maybe Text) qCr = lens _qCr (\ s a -> s{_qCr = a}) qSafe :: Lens' Query (Maybe Text) qSafe = lens _qSafe (\ s a -> s{_qSafe = a}) qHq :: Lens' Query (Maybe Text) qHq = lens _qHq (\ s a -> s{_qHq = a}) qHighRange :: Lens' Query (Maybe Text) qHighRange = lens _qHighRange (\ s a -> s{_qHighRange = a}) instance FromJSON Query where parseJSON = withObject "Query" (\ o -> Query' <$> (o .:? "imgDominantColor") <*> (o .:? "outputEncoding") <*> (o .:? "siteSearchFilter") <*> (o .:? "inputEncoding") <*> (o .:? "orTerms") <*> (o .:? "searchTerms") <*> (o .:? "startPage") <*> (o .:? "rights") <*> (o .:? "count") <*> (o .:? "excludeTerms") <*> (o .:? "fileType") <*> (o .:? "searchType") <*> (o .:? "googleHost") <*> (o .:? "disableCnTwTranslation") <*> (o .:? "relatedSite") <*> (o .:? "hl") <*> (o .:? "sort") <*> (o .:? "language") <*> (o .:? "siteSearch") <*> (o .:? "filter") <*> (o .:? "totalResults") <*> (o .:? "dateRestrict") <*> (o .:? "title") <*> (o .:? "linkSite") <*> (o .:? "lowRange") <*> (o .:? "imgType") <*> (o .:? "gl") <*> (o .:? "cx") <*> (o .:? "imgColorType") <*> (o .:? "imgSize") <*> (o .:? "exactTerms") <*> (o .:? "startIndex") <*> (o .:? "cr") <*> (o .:? "safe") <*> (o .:? "hq") <*> (o .:? "highRange")) instance ToJSON Query where toJSON Query'{..} = object (catMaybes [("imgDominantColor" .=) <$> _qImgDominantColor, ("outputEncoding" .=) <$> _qOutputEncoding, ("siteSearchFilter" .=) <$> _qSiteSearchFilter, ("inputEncoding" .=) <$> _qInputEncoding, ("orTerms" .=) <$> _qOrTerms, ("searchTerms" .=) <$> _qSearchTerms, ("startPage" .=) <$> _qStartPage, ("rights" .=) <$> _qRights, ("count" .=) <$> _qCount, ("excludeTerms" .=) <$> _qExcludeTerms, ("fileType" .=) <$> _qFileType, ("searchType" .=) <$> _qSearchType, ("googleHost" .=) <$> _qGoogleHost, ("disableCnTwTranslation" .=) <$> _qDisableCnTwTranslation, ("relatedSite" .=) <$> _qRelatedSite, ("hl" .=) <$> _qHl, ("sort" .=) <$> _qSort, ("language" .=) <$> _qLanguage, ("siteSearch" .=) <$> _qSiteSearch, ("filter" .=) <$> _qFilter, ("totalResults" .=) <$> _qTotalResults, ("dateRestrict" .=) <$> _qDateRestrict, ("title" .=) <$> _qTitle, ("linkSite" .=) <$> _qLinkSite, ("lowRange" .=) <$> _qLowRange, ("imgType" .=) <$> _qImgType, ("gl" .=) <$> _qGl, ("cx" .=) <$> _qCx, ("imgColorType" .=) <$> _qImgColorType, ("imgSize" .=) <$> _qImgSize, ("exactTerms" .=) <$> _qExactTerms, ("startIndex" .=) <$> _qStartIndex, ("cr" .=) <$> _qCr, ("safe" .=) <$> _qSafe, ("hq" .=) <$> _qHq, ("highRange" .=) <$> _qHighRange]) -- -- /See:/ 'promotionBodyLinesItem' smart constructor. data PromotionBodyLinesItem = PromotionBodyLinesItem' { _pbliLink :: !(Maybe Text) , _pbliURL :: !(Maybe Text) , _pbliHTMLTitle :: !(Maybe Text) , _pbliTitle :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'PromotionBodyLinesItem' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'pbliLink' -- -- * 'pbliURL' -- -- * 'pbliHTMLTitle' -- -- * 'pbliTitle' promotionBodyLinesItem :: PromotionBodyLinesItem promotionBodyLinesItem = PromotionBodyLinesItem' { _pbliLink = Nothing , _pbliURL = Nothing , _pbliHTMLTitle = Nothing , _pbliTitle = Nothing } pbliLink :: Lens' PromotionBodyLinesItem (Maybe Text) pbliLink = lens _pbliLink (\ s a -> s{_pbliLink = a}) pbliURL :: Lens' PromotionBodyLinesItem (Maybe Text) pbliURL = lens _pbliURL (\ s a -> s{_pbliURL = a}) pbliHTMLTitle :: Lens' PromotionBodyLinesItem (Maybe Text) pbliHTMLTitle = lens _pbliHTMLTitle (\ s a -> s{_pbliHTMLTitle = a}) pbliTitle :: Lens' PromotionBodyLinesItem (Maybe Text) pbliTitle = lens _pbliTitle (\ s a -> s{_pbliTitle = a}) instance FromJSON PromotionBodyLinesItem where parseJSON = withObject "PromotionBodyLinesItem" (\ o -> PromotionBodyLinesItem' <$> (o .:? "link") <*> (o .:? "url") <*> (o .:? "htmlTitle") <*> (o .:? "title")) instance ToJSON PromotionBodyLinesItem where toJSON PromotionBodyLinesItem'{..} = object (catMaybes [("link" .=) <$> _pbliLink, ("url" .=) <$> _pbliURL, ("htmlTitle" .=) <$> _pbliHTMLTitle, ("title" .=) <$> _pbliTitle]) -- -- /See:/ 'promotion' smart constructor. data Promotion = Promotion' { _pImage :: !(Maybe PromotionImage) , _pDisplayLink :: !(Maybe Text) , _pBodyLines :: !(Maybe [PromotionBodyLinesItem]) , _pLink :: !(Maybe Text) , _pHTMLTitle :: !(Maybe Text) , _pTitle :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'Promotion' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'pImage' -- -- * 'pDisplayLink' -- -- * 'pBodyLines' -- -- * 'pLink' -- -- * 'pHTMLTitle' -- -- * 'pTitle' promotion :: Promotion promotion = Promotion' { _pImage = Nothing , _pDisplayLink = Nothing , _pBodyLines = Nothing , _pLink = Nothing , _pHTMLTitle = Nothing , _pTitle = Nothing } pImage :: Lens' Promotion (Maybe PromotionImage) pImage = lens _pImage (\ s a -> s{_pImage = a}) pDisplayLink :: Lens' Promotion (Maybe Text) pDisplayLink = lens _pDisplayLink (\ s a -> s{_pDisplayLink = a}) pBodyLines :: Lens' Promotion [PromotionBodyLinesItem] pBodyLines = lens _pBodyLines (\ s a -> s{_pBodyLines = a}) . _Default . _Coerce pLink :: Lens' Promotion (Maybe Text) pLink = lens _pLink (\ s a -> s{_pLink = a}) pHTMLTitle :: Lens' Promotion (Maybe Text) pHTMLTitle = lens _pHTMLTitle (\ s a -> s{_pHTMLTitle = a}) pTitle :: Lens' Promotion (Maybe Text) pTitle = lens _pTitle (\ s a -> s{_pTitle = a}) instance FromJSON Promotion where parseJSON = withObject "Promotion" (\ o -> Promotion' <$> (o .:? "image") <*> (o .:? "displayLink") <*> (o .:? "bodyLines" .!= mempty) <*> (o .:? "link") <*> (o .:? "htmlTitle") <*> (o .:? "title")) instance ToJSON Promotion where toJSON Promotion'{..} = object (catMaybes [("image" .=) <$> _pImage, ("displayLink" .=) <$> _pDisplayLink, ("bodyLines" .=) <$> _pBodyLines, ("link" .=) <$> _pLink, ("htmlTitle" .=) <$> _pHTMLTitle, ("title" .=) <$> _pTitle]) -- -- /See:/ 'search' smart constructor. data Search = Search' { _sQueries :: !(Maybe SearchQueries) , _sContext :: !(Maybe Context) , _sKind :: !Text , _sURL :: !(Maybe SearchURL) , _sItems :: !(Maybe [Result]) , _sSearchInformation :: !(Maybe SearchSearchInformation) , _sPromotions :: !(Maybe [Promotion]) , _sSpelling :: !(Maybe SearchSpelling) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'Search' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'sQueries' -- -- * 'sContext' -- -- * 'sKind' -- -- * 'sURL' -- -- * 'sItems' -- -- * 'sSearchInformation' -- -- * 'sPromotions' -- -- * 'sSpelling' search :: Search search = Search' { _sQueries = Nothing , _sContext = Nothing , _sKind = "customsearch#search" , _sURL = Nothing , _sItems = Nothing , _sSearchInformation = Nothing , _sPromotions = Nothing , _sSpelling = Nothing } sQueries :: Lens' Search (Maybe SearchQueries) sQueries = lens _sQueries (\ s a -> s{_sQueries = a}) sContext :: Lens' Search (Maybe Context) sContext = lens _sContext (\ s a -> s{_sContext = a}) sKind :: Lens' Search Text sKind = lens _sKind (\ s a -> s{_sKind = a}) sURL :: Lens' Search (Maybe SearchURL) sURL = lens _sURL (\ s a -> s{_sURL = a}) sItems :: Lens' Search [Result] sItems = lens _sItems (\ s a -> s{_sItems = a}) . _Default . _Coerce sSearchInformation :: Lens' Search (Maybe SearchSearchInformation) sSearchInformation = lens _sSearchInformation (\ s a -> s{_sSearchInformation = a}) sPromotions :: Lens' Search [Promotion] sPromotions = lens _sPromotions (\ s a -> s{_sPromotions = a}) . _Default . _Coerce sSpelling :: Lens' Search (Maybe SearchSpelling) sSpelling = lens _sSpelling (\ s a -> s{_sSpelling = a}) instance FromJSON Search where parseJSON = withObject "Search" (\ o -> Search' <$> (o .:? "queries") <*> (o .:? "context") <*> (o .:? "kind" .!= "customsearch#search") <*> (o .:? "url") <*> (o .:? "items" .!= mempty) <*> (o .:? "searchInformation") <*> (o .:? "promotions" .!= mempty) <*> (o .:? "spelling")) instance ToJSON Search where toJSON Search'{..} = object (catMaybes [("queries" .=) <$> _sQueries, ("context" .=) <$> _sContext, Just ("kind" .= _sKind), ("url" .=) <$> _sURL, ("items" .=) <$> _sItems, ("searchInformation" .=) <$> _sSearchInformation, ("promotions" .=) <$> _sPromotions, ("spelling" .=) <$> _sSpelling]) -- -- /See:/ 'contextFacetsItemItem' smart constructor. data ContextFacetsItemItem = ContextFacetsItemItem' { _cfiiAnchor :: !(Maybe Text) , _cfiiLabelWithOp :: !(Maybe Text) , _cfiiLabel :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ContextFacetsItemItem' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'cfiiAnchor' -- -- * 'cfiiLabelWithOp' -- -- * 'cfiiLabel' contextFacetsItemItem :: ContextFacetsItemItem contextFacetsItemItem = ContextFacetsItemItem' {_cfiiAnchor = Nothing, _cfiiLabelWithOp = Nothing, _cfiiLabel = Nothing} cfiiAnchor :: Lens' ContextFacetsItemItem (Maybe Text) cfiiAnchor = lens _cfiiAnchor (\ s a -> s{_cfiiAnchor = a}) cfiiLabelWithOp :: Lens' ContextFacetsItemItem (Maybe Text) cfiiLabelWithOp = lens _cfiiLabelWithOp (\ s a -> s{_cfiiLabelWithOp = a}) cfiiLabel :: Lens' ContextFacetsItemItem (Maybe Text) cfiiLabel = lens _cfiiLabel (\ s a -> s{_cfiiLabel = a}) instance FromJSON ContextFacetsItemItem where parseJSON = withObject "ContextFacetsItemItem" (\ o -> ContextFacetsItemItem' <$> (o .:? "anchor") <*> (o .:? "label_with_op") <*> (o .:? "label")) instance ToJSON ContextFacetsItemItem where toJSON ContextFacetsItemItem'{..} = object (catMaybes [("anchor" .=) <$> _cfiiAnchor, ("label_with_op" .=) <$> _cfiiLabelWithOp, ("label" .=) <$> _cfiiLabel])
{ "pile_set_name": "Github" }
# Rust YouTube JSON Webservice example You can use it for these blog posts. 1. [How to make JSON Webservice with Rust and YouTube API](https://www.steadylearner.com/blog/read/How-to-make-JSON-Webservice-with-Rust-and-YouTube-API) 2. [How to use CORS and OPTIONS HTTP request with Rust Rocket](https://www.steadylearner.com/blog/read/How-to-use-CORS-and-OPTIONS-HTTP-request-with-Rust-Rocket) 3. [How to render a YouTube vlog with Rust Yew fetch API](https://www.steadylearner.com/blog/read/How-to-render-a-YouTube-vlog-with-Rust-Yew-fetch-API) ## How to test it 1. Build /server/.env with [YouTube API](https://www.google.com/search?q=how+to+use+youtube+api+for+developers). 2. **$./install.sh** in **web** folder and **$./run-local.sh** for the full stack Rust JSON Webservice example.
{ "pile_set_name": "Github" }
var optimist = require('./../index'); var argv = optimist.usage('This is my awesome program', { 'about': { description: 'Provide some details about the author of this program', required: true, short: 'a', }, 'info': { description: 'Provide some information about the node.js agains!!!!!!', boolean: true, short: 'i' } }).argv; optimist.showHelp(); console.log('\n\nInspecting options'); console.dir(argv);
{ "pile_set_name": "Github" }
/** * HABmin - Home Automation User and Administration Interface * Designed for openHAB (www.openhab.com) * * This software is copyright of Chris Jackson under the GPL license. * Note that this licence may be changed at a later date. * * (c) 2014-2015 Chris Jackson ([email protected]) */ angular.module('sitemapTextWidget', [ 'HABmin.iconModel' ]) .directive('sitemapText', function (ImgFactory) { return { restrict: 'E', template: '<habmin-icon class="icon-lg sitemap-widget-icon" icon="{{widget.icon}}"></habmin-icon>' + '<div class="sitemap-widget-content">' + ' <span ng-style="labelColor">{{widget.label}}</span>' + ' <span class="pull-right" ng-style="valueColor">{{itemModel}}</span>' + '</div>', scope: { itemId: "@", itemModel: "=", widget: "=" }, link: function ($scope, element, attrs, controller) { if ($scope.widget.labelcolor != null) { $scope.labelColor = {color: $scope.widget.labelcolor}; } if ($scope.widget.valuecolor) { $scope.valueColor = {color: $scope.widget.valuecolor}; } // And then watch for changes $scope.$on('smarthome/items/' + $scope.itemId + "/state", function (event, state) { var num = Number(state.value); if (!isNaN(num)) { $scope.itemModel = num; } // $scope.$apply(); }); } }; }) ;
{ "pile_set_name": "Github" }
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("NotificationTask")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("NotificationTask")] [assembly: AssemblyCopyright("Copyright © 2015")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")] [assembly: ComVisible(false)]
{ "pile_set_name": "Github" }
import React, { Component } from "react"; import moment from "moment"; import { RouteComponentProps } from "react-router-dom"; import { deSig } from "~/logic/lib/util"; import { ChatHookUpdate } from "~/types/chat-hook-update"; import { Inbox, Envelope } from "~/types/chat-update"; import { Contacts } from "~/types/contact-update"; import { Path, Patp } from "~/types/noun"; import GlobalApi from "~/logic/api/global"; import { Association } from "~/types/metadata-update"; import {Group} from "~/types/group-update"; import { LocalUpdateRemoteContentPolicy } from "~/types"; import { SubmitDragger } from '~/views/components/s3-upload'; import ChatWindow from './lib/ChatWindow'; import ChatHeader from './lib/ChatHeader'; import ChatInput from "./lib/ChatInput"; type ChatScreenProps = RouteComponentProps<{ ship: Patp; station: string; }> & { chatSynced: ChatHookUpdate; station: any; association: Association; api: GlobalApi; read: number; mailboxSize: number; inbox: Inbox; contacts: Contacts; group: Group; pendingMessages: Map<Path, Envelope[]>; s3: any; popout: boolean; sidebarShown: boolean; chatInitialized: boolean; envelopes: Envelope[]; hideAvatars: boolean; hideNicknames: boolean; remoteContentPolicy: LocalUpdateRemoteContentPolicy; }; interface ChatScreenState { messages: Map<string, string>; dragover: boolean; } export class ChatScreen extends Component<ChatScreenProps, ChatScreenState> { private chatInput: React.RefObject<ChatInput>; lastNumPending = 0; activityTimeout: NodeJS.Timeout | null = null; constructor(props) { super(props); this.state = { messages: new Map(), dragover: false, }; this.chatInput = React.createRef(); moment.updateLocale("en", { calendar: { sameDay: "[Today]", nextDay: "[Tomorrow]", nextWeek: "dddd", lastDay: "[Yesterday]", lastWeek: "[Last] dddd", sameElse: "DD/MM/YYYY", }, }); } readyToUpload(): boolean { return Boolean(this.chatInput.current?.s3Uploader.current?.inputRef.current); } onDragEnter(event) { if (!this.readyToUpload() || (!event.dataTransfer.files.length && !event.dataTransfer.types.includes('Files'))) { return; } this.setState({ dragover: true }); } onDrop(event: DragEvent) { this.setState({ dragover: false }); event.preventDefault(); if (!event.dataTransfer || !event.dataTransfer.files.length) { return; } if (event.dataTransfer.items.length && !event.dataTransfer.files.length) { event.preventDefault(); return; } event.preventDefault(); this.chatInput.current?.uploadFiles(event.dataTransfer.files); } render() { const { props, state } = this; const lastMsgNum = props.envelopes.length > 0 ? props.envelopes.length : 0; const ownerContact = window.ship in props.contacts ? props.contacts[window.ship] : false; const pendingMessages = (props.pendingMessages.get(props.station) || []) .map((value) => ({ ...value, pending: true })); const isChatMissing = props.chatInitialized && !(props.station in props.inbox) && props.chatSynced && !(props.station in props.chatSynced); const isChatLoading = props.chatInitialized && !(props.station in props.inbox) && props.chatSynced && (props.station in props.chatSynced); const isChatUnsynced = props.chatSynced && !(props.station in props.chatSynced) && props.envelopes.length > 0; const unreadCount = props.mailboxSize - props.read; const unreadMsg = unreadCount > 0 && props.envelopes[unreadCount - 1]; return ( <div key={props.station} className="h-100 w-100 overflow-hidden flex flex-column relative" onDragEnter={this.onDragEnter.bind(this)} onDragOver={event => { event.preventDefault(); if ( !this.state.dragover && ( (event.dataTransfer.files.length && event.dataTransfer.files[0].kind === 'file') || (event.dataTransfer.items.length && event.dataTransfer.items[0].kind === 'file') ) ) { this.setState({ dragover: true }); } }} onDragLeave={(event) => { const over = document.elementFromPoint(event.clientX, event.clientY); if (!over || !event.currentTarget.contains(over)) { this.setState({ dragover: false }); }} } onDrop={this.onDrop.bind(this)} > {this.state.dragover ? <SubmitDragger /> : null} <ChatHeader {...props} /> <ChatWindow isChatMissing={isChatMissing} isChatLoading={isChatLoading} isChatUnsynced={isChatUnsynced} unreadCount={unreadCount} unreadMsg={unreadMsg} stationPendingMessages={pendingMessages} ship={props.match.params.ship} {...props} /> <ChatInput ref={this.chatInput} api={props.api} numMsgs={lastMsgNum} station={props.station} owner={deSig(props.match.params.ship)} ownerContact={ownerContact} envelopes={props.envelopes} contacts={props.contacts} onUnmount={(msg: string) => this.setState({ messages: this.state.messages.set(props.station, msg) })} s3={props.s3} placeholder="Message..." message={this.state.messages.get(props.station) || ""} deleteMessage={() => this.setState({ messages: this.state.messages.set(props.station, "") })} hideAvatars={props.hideAvatars} /> </div> ); } }
{ "pile_set_name": "Github" }
// Copyright 2012 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build windows,race package windows import ( "runtime" "unsafe" ) const raceenabled = true func raceAcquire(addr unsafe.Pointer) { runtime.RaceAcquire(addr) } func raceReleaseMerge(addr unsafe.Pointer) { runtime.RaceReleaseMerge(addr) } func raceReadRange(addr unsafe.Pointer, len int) { runtime.RaceReadRange(addr, len) } func raceWriteRange(addr unsafe.Pointer, len int) { runtime.RaceWriteRange(addr, len) }
{ "pile_set_name": "Github" }
# Translation of Odoo Server. # This file contains the translation of the following modules: # * partner_autocomplete # msgid "" msgstr "" "Project-Id-Version: Odoo Server 10.saas~18\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2017-09-20 09:53+0000\n" "PO-Revision-Date: 2017-09-20 09:53+0000\n" "Language-Team: Macedonian (https://www.transifex.com/odoo/teams/41243/mk/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: \n" "Language: mk\n" "Plural-Forms: nplurals=2; plural=(n % 10 == 1 && n % 100 != 11) ? 0 : 1;\n" #. module: partner_autocomplete #: model:ir.model,name:partner_autocomplete.model_res_partner msgid "Contact" msgstr "" #. module: partner_autocomplete #: model_terms:ir.ui.view,arch_db:partner_autocomplete.view_partner_form #: model_terms:ir.ui.view,arch_db:partner_autocomplete.view_partner_short_form msgid "VAT" msgstr "" #. module: partner_autocomplete #: model_terms:ir.ui.view,arch_db:partner_autocomplete.view_partner_form #: model_terms:ir.ui.view,arch_db:partner_autocomplete.view_partner_short_form msgid "e.g. BE0477472701" msgstr ""
{ "pile_set_name": "Github" }
Czech annual average consumer inflation eased slightly in 1996, not as much as original government forecasts but still pleasing analysts. The Czech Statistical Bureau said on Thursday that its key sliding average inflation figure, which uses 1993 as a base, closed the year at 8.8 percent, down from 9.1 percent for 1995. Despite missing the government's original eight percent forecast, analysts said they were optimistic the central bank's tight monetary policies were hitting the mark, bringing inflation back under control after a strong first half surge. "The final outcome on average inflation is a success because of its favourable development in the second half of the year," Martin Kupka, an economist at investment bank Patria Finance told Reuters. "Of course in the longer run, we still need to see a more rapid decrease in inflation," he added. The CSU said month-on-month inflation for December was 0.5 percent, unchanged from November, putting consumer prices 8.6 percent higher, year-on-year, steady with the previous month. CSU chairman Edvard Outrata told Reuters that 1996 core inflation remained around five percent with government price dergeulations comprising the rest. But he said 1997 inflation would rest on whether or not the government decides to take further, stronger steps on freeing up energy and housing prices. The CSU said that price increases in foodstuffs, leisure and textile sectors accounted for some 80 percent of the monthly CPI rise. The CSU said industrial output slowed in November to an increase of only 1.4 percent, year-on-year, from a 5.3 percent rise in output in October while industrial wages were 17.1 percent higher for the first 11 months of the year. After the release of the figures, Industry and Trade Minister Vladimir Dlouhy said wage growth without productivity increases stemmed from a lack of industrial restructuring. "We thought it (inflation) would be lower, and what I see, aboveall, is a fundamental wage problem," he said. "The slower restructuring in some companies...is also a source of inflation, because it is generating of inflationary money, mainly through rising wages, without a respective effect (in output)" Dlouhy added. The government battled wage growth throughout the economy with wage controls, ended in 1995, which tied rising wages to correspnding increases in industrial output. Pay rises in 1996, not matched by an increase in productivity, have accelerated an already large trade deficit, by sparking domestic demand which has in turn caused higher inflation and made exported goods less competitive. Boris Gomez of ING Barings Capital Markets warned attempts to further force down inflation in 1997 may be thwarted if wage growth cannot be brought under control. "The December CPI figure was positive since even though there a strong Christmas shopping spree by Czechs...But we still are concerned about the effects of wage growth since it will be tough for the government to cap it," he said. Most analysts agreed that the central bank would continue its tight monetary policies at least for the first quarter of 1997, in order to keep a lid on inflationary pressures. The Czech crown too is expected to remain stable in the short-term despite lingering problems in forcing inflation down. Traders said the crown was unaffected by December's CPI figures since they were in line with forecasts. The crown was fixed by the cental bank on Thursday at 3.72 percent above its dollar/mark basket midpoint, slightly stronger than its +3.57 percent fixing the previous day. -- Prague Newsroom, 42-2-2423-0003
{ "pile_set_name": "Github" }
/* * Copyright 2014-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.net.flow; import org.onlab.util.Identifier; /** * Representation of a Flow ID. */ public final class FlowId extends Identifier<Long> { private FlowId(long id) { super(id); } /** * Creates a flow ID from a long value. * * @param id long value * @return flow ID */ public static FlowId valueOf(long id) { return new FlowId(id); } /** * Gets the flow ID value. * * @return flow ID value as long */ public long value() { return this.identifier; } @Override public String toString() { return Long.toHexString(identifier); } }
{ "pile_set_name": "Github" }
@model OrderModel @inject Grand.Services.Security.IPermissionService permissionService @{ //page title ViewBag.Title = T("Admin.Orders.EditOrderDetails").Text; //has "Manage Documents" permission? var canManageDocuments = await permissionService.Authorize(Grand.Services.Security.StandardPermissionProvider.ManageDocuments); var canManageGenericAttributes = await permissionService.Authorize(Grand.Services.Security.StandardPermissionProvider.ManageGenericAttributes); } <form asp-area="@Constants.AreaAdmin" asp-controller="Order" asp-action="Edit" method="post" id="order-form"> <div class="row"> <div class="col-md-12"> <div class="x_panel light form-fit"> <div class="x_title"> <div class="caption"> <i class="icon-basket"></i> @T("Admin.Orders.EditOrderDetails") - @Model.OrderNumber <small><i class="fa fa-arrow-circle-left"></i>@Html.ActionLink(T("Admin.Orders.BackToList").Text, "List")</small> </div> <div class="actions"> <div class="btn-group btn-group-devided util-btn-margin-bottom-5"> <a href="@Url.Action("PdfInvoice", new { orderId = Model.Id })" class="btn purple"> <i class="fa fa-file-pdf-o"></i> @T("Admin.Orders.PdfInvoice") </a> @if (!Model.IsLoggedInAsVendor) { <span id="order-delete" class="btn red"><i class="fa fa-trash-o"></i> @T("Admin.Common.Delete")</span> } <input type="submit" id="btnRefreshPage" style="display: none" /> <script> $(document).ready(function () { $('#btnRefreshPage').click(function () { //refresh page location.reload(); }); }); </script> <vc:admin-widget widget-zone="order_details_buttons" additional-data="Model" /> </div> </div> </div> <div class="x_content form"> <admin-tabstrip name="order-edit" SetTabPos="true"> <items> <tabstrip-item text="@T("Admin.Orders.Info")" tab-index="0"> <content> <div> <partial name="_OrderDetails.Info" model="Model" /> </div> </content> </tabstrip-item> <tabstrip-item text="@T("Admin.Orders.Addresses")" tab-index="1"> <content> <div> <partial name="_OrderDetails.Addresses" model="Model" /> </div> </content> </tabstrip-item> <tabstrip-item text="@T("Admin.Orders.Shipments")" tab-index="2"> <content> <div> <partial name="_OrderDetails.Shipment" model="Model" /> </div> </content> </tabstrip-item> <tabstrip-item text="@T("Admin.Orders.Products")" tab-index="3"> <content> <div> <partial name="_OrderDetails.Products" model="Model" /> </div> </content> </tabstrip-item> @if (!Model.IsLoggedInAsVendor) { <tabstrip-item text="@T("Admin.Orders.OrderNotes")" tab-index="4"> <content> <div> <partial name="_OrderDetails.Notes" model="Model" /> </div> </content> </tabstrip-item> } @if (canManageDocuments) { <tabstrip-item text="@T("Admin.Orders.Documents")" tab-index="5"> <content> <div> <partial name="_OrderDetails.Documents" model="Model" /> </div> </content> </tabstrip-item> } @if (canManageGenericAttributes) { <tabstrip-item text="@T("Admin.Common.GenericAttributes.Tab")" tab-index="6"> <content> <div> <div class="form-actions"> <input type="submit" value="@T("Admin.Common.Save")" id="btnSaveGenericAttributes" name="save-generic-attributes" class="k-button" /> </div> <div class="form-horizontal"> @{ ViewData["ObjectType"] = "Order"; ViewData["TabIndex"] = 5; } <partial name="_GenericAttributes" model="Model" view-data="ViewData" /> </div> </div> </content> </tabstrip-item> } <vc:admin-widget widget-zone="order_details_tabs" additional-data="Model" /> </items> </admin-tabstrip> </div> </div> </div> </div> <div asp-validation-summary="All"></div> </form> <admin-delete-confirmation button-id="order-delete" />
{ "pile_set_name": "Github" }
// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Author: [email protected] (Anuraag Agrawal) // Author: [email protected] (Johan Tibell) #ifndef GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_SCALAR_CONTAINER_H__ #define GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_SCALAR_CONTAINER_H__ #include <Python.h> #include <memory> #include <google/protobuf/descriptor.h> #include <google/protobuf/pyext/message.h> namespace google { namespace protobuf { namespace python { typedef struct RepeatedScalarContainer : public ContainerBase { } RepeatedScalarContainer; extern PyTypeObject RepeatedScalarContainer_Type; namespace repeated_scalar_container { // Builds a RepeatedScalarContainer object, from a parent message and a // field descriptor. extern RepeatedScalarContainer* NewContainer( CMessage* parent, const FieldDescriptor* parent_field_descriptor); // Appends the scalar 'item' to the end of the container 'self'. // // Returns None if successful; returns NULL and sets an exception if // unsuccessful. PyObject* Append(RepeatedScalarContainer* self, PyObject* item); // Appends all the elements in the input iterator to the container. // // Returns None if successful; returns NULL and sets an exception if // unsuccessful. PyObject* Extend(RepeatedScalarContainer* self, PyObject* value); } // namespace repeated_scalar_container } // namespace python } // namespace protobuf } // namespace google #endif // GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_SCALAR_CONTAINER_H__
{ "pile_set_name": "Github" }
/* * Copyright 2018-present Open Networking Foundation * Copyright © 2020 camunda services GmbH ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.atomix.storage.journal.index; import io.atomix.storage.journal.Indexed; /** Journal index. */ public interface JournalIndex { /** * Adds an entry for the given index at the given position. * * @param indexed the indexed entry for which to add the entry * @param position the position of the given index */ void index(Indexed indexed, int position); /** * Looks up the position of the given index. * * @param index the index to lookup * @return the position of the given index or a lesser index */ Position lookup(long index); /** * Truncates the index to the given index, which means everything higher will be removed from the * index * * @param index the index to which to truncate the index */ void truncate(long index); /** * Compacts the index until the next stored index (exclusively), which means everything lower then * the stored index will be removed. * * <p>Example Index: {5 -> 10; 10 -> 20; 15 -> 30}, when compact is called with index 11. The next * lower stored index is 10, everything lower then this index will be removed. This means the * mapping {5 -> 10}, should be removed. * * @param index the index to which to compact the index */ void compact(long index); }
{ "pile_set_name": "Github" }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; import * as vscode from 'vscode'; import { BlockchainExplorerProvider } from '../../BlockchainExplorerProvider'; import { NodeTreeItem } from './NodeTreeItem'; import { FabricEnvironmentRegistryEntry, FabricNode } from 'ibm-blockchain-platform-common'; export class PeerTreeItem extends NodeTreeItem { contextValue: string = 'blockchain-peer-item'; constructor(provider: BlockchainExplorerProvider, public readonly peerName: string, public readonly tooltip: string, environmentRegsitryEntry: FabricEnvironmentRegistryEntry, node: FabricNode, public readonly command?: vscode.Command) { super(provider, peerName, tooltip, environmentRegsitryEntry, node, command); } }
{ "pile_set_name": "Github" }
LD := g++ CXX := g++ RM := rm -rf -include sources.mk OBJS += $(subst .cpp,.o,$(CPP_SRCS)) CPP_DEPS += $(subst .cpp,.d,$(CPP_SRCS)) EXE := .exe all: fakeit_test_application coverage: fakeit_test_application_with_coverage check: fakeit_test_application ./fakeit_tests$(EXE) fakeit_test_application: $(OBJS) @echo 'Building test application: fakeit_tests$(EXE)' @echo 'Invoking: GCC C++ Linker' $(LD) -flto -o "fakeit_tests$(EXE)" $(OBJS) @echo 'Finished building test application: fakeit_tests$(EXE)' @echo ' ' fakeit_test_application_with_coverage: $(subst .cpp,_with_coverage,$(CPP_SRCS)) @echo 'Building test application: fakeit_tests$(EXE)' @echo 'Invoking: GCC C++ Linker' $(LD) --coverage -o "fakeit_tests$(EXE)" $(OBJS) @echo 'Finished building test application: fakeit_tests$(EXE)' @echo ' ' %.o: ../tests/%.cpp @echo 'Building file: $<' @echo 'Invoking: GCC C++ Compiler' $(CXX) -flto -D__GXX_EXPERIMENTAL_CXX0X__ -I"../include" -I"../config/standalone" -O0 -g3 -Wall -Wextra -Wno-ignored-qualifiers -c -fmessage-length=0 -std=c++11 -MMD -MP -MF"$(@:%.o=%.d)" -MT"$(@:%.o=%.d)" -o "$@" "$<" @echo 'Finished building: $<' @echo ' ' %_with_coverage: ../tests/%.cpp @echo 'Building file: $<' @echo 'Invoking: GCC C++ Compiler' $(CXX) --coverage -D__GXX_EXPERIMENTAL_CXX0X__ -I"../include" -I"../config/standalone" -O0 -g3 -Wall -Wextra -Wno-ignored-qualifiers -c -fmessage-length=0 -std=c++11 -MMD -MP -MF"$(@:%_with_coverage=%.d)" -MT"$(@:%_with_coverage=%.d)" -o $(subst _with_coverage,.o,"$@") "$<" @echo 'Finished building: $<' @echo ' ' ifneq ($(MAKECMDGOALS),clean) -include $(CPP_DEPS) endif # Other Targets clean: -$(RM) $(OBJS)$(CPP_DEPS) fakeit_tests$(EXE) *.gc* -@echo ' '
{ "pile_set_name": "Github" }
/* * Copyright (c) 2006-2007 Erin Catto http://www.box2d.org * * This software is provided 'as-is', without any express or implied * warranty. In no event will the authors be held liable for any damages * arising from the use of this software. * Permission is granted to anyone to use this software for any purpose, * including commercial applications, and to alter it and redistribute it * freely, subject to the following restrictions: * 1. The origin of this software must not be misrepresented; you must not * claim that you wrote the original software. If you use this software * in a product, an acknowledgment in the product documentation would be * appreciated but is not required. * 2. Altered source versions must be plainly marked as such, and must not be * misrepresented as being the original software. * 3. This notice may not be removed or altered from any source distribution. */ #include <Box2D/Dynamics/b2Body.h> #include <Box2D/Dynamics/b2Fixture.h> #include <Box2D/Dynamics/b2World.h> #include <Box2D/Dynamics/Contacts/b2Contact.h> #include <Box2D/Dynamics/Joints/b2Joint.h> b2Body::b2Body(const b2BodyDef* bd, b2World* world) { b2Assert(bd->position.IsValid()); b2Assert(bd->linearVelocity.IsValid()); b2Assert(b2IsValid(bd->angle)); b2Assert(b2IsValid(bd->angularVelocity)); b2Assert(b2IsValid(bd->angularDamping) && bd->angularDamping >= 0.0f); b2Assert(b2IsValid(bd->linearDamping) && bd->linearDamping >= 0.0f); m_flags = 0; if (bd->bullet) { m_flags |= e_bulletFlag; } if (bd->fixedRotation) { m_flags |= e_fixedRotationFlag; } if (bd->allowSleep) { m_flags |= e_autoSleepFlag; } if (bd->awake) { m_flags |= e_awakeFlag; } if (bd->active) { m_flags |= e_activeFlag; } m_world = world; m_xf.p = bd->position; m_xf.q.Set(bd->angle); m_sweep.localCenter.SetZero(); m_sweep.c0 = m_xf.p; m_sweep.c = m_xf.p; m_sweep.a0 = bd->angle; m_sweep.a = bd->angle; m_sweep.alpha0 = 0.0f; m_jointList = NULL; m_contactList = NULL; m_prev = NULL; m_next = NULL; m_linearVelocity = bd->linearVelocity; m_angularVelocity = bd->angularVelocity; m_linearDamping = bd->linearDamping; m_angularDamping = bd->angularDamping; m_gravityScale = bd->gravityScale; m_force.SetZero(); m_torque = 0.0f; m_sleepTime = 0.0f; m_type = bd->type; if (m_type == b2_dynamicBody) { m_mass = 1.0f; m_invMass = 1.0f; } else { m_mass = 0.0f; m_invMass = 0.0f; } m_I = 0.0f; m_invI = 0.0f; m_userData = bd->userData; m_fixtureList = NULL; m_fixtureCount = 0; } b2Body::~b2Body() { // shapes and joints are destroyed in b2World::Destroy } void b2Body::SetType(b2BodyType type) { b2Assert(m_world->IsLocked() == false); if (m_world->IsLocked() == true) { return; } if (m_type == type) { return; } m_type = type; ResetMassData(); if (m_type == b2_staticBody) { m_linearVelocity.SetZero(); m_angularVelocity = 0.0f; m_sweep.a0 = m_sweep.a; m_sweep.c0 = m_sweep.c; SynchronizeFixtures(); } SetAwake(true); m_force.SetZero(); m_torque = 0.0f; // Delete the attached contacts. b2ContactEdge* ce = m_contactList; while (ce) { b2ContactEdge* ce0 = ce; ce = ce->next; m_world->m_contactManager.Destroy(ce0->contact); } m_contactList = NULL; // Touch the proxies so that new contacts will be created (when appropriate) b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase; for (b2Fixture* f = m_fixtureList; f; f = f->m_next) { int32 proxyCount = f->m_proxyCount; for (int32 i = 0; i < proxyCount; ++i) { broadPhase->TouchProxy(f->m_proxies[i].proxyId); } } } b2Fixture* b2Body::CreateFixture(const b2FixtureDef* def) { b2Assert(m_world->IsLocked() == false); if (m_world->IsLocked() == true) { return NULL; } b2BlockAllocator* allocator = &m_world->m_blockAllocator; void* memory = allocator->Allocate(sizeof(b2Fixture)); b2Fixture* fixture = new (memory) b2Fixture; fixture->Create(allocator, this, def); if (m_flags & e_activeFlag) { b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase; fixture->CreateProxies(broadPhase, m_xf); } fixture->m_next = m_fixtureList; m_fixtureList = fixture; ++m_fixtureCount; fixture->m_body = this; // Adjust mass properties if needed. if (fixture->m_density > 0.0f) { ResetMassData(); } // Let the world know we have a new fixture. This will cause new contacts // to be created at the beginning of the next time step. m_world->m_flags |= b2World::e_newFixture; return fixture; } b2Fixture* b2Body::CreateFixture(const b2Shape* shape, float32 density) { b2FixtureDef def; def.shape = shape; def.density = density; return CreateFixture(&def); } void b2Body::DestroyFixture(b2Fixture* fixture) { b2Assert(m_world->IsLocked() == false); if (m_world->IsLocked() == true) { return; } b2Assert(fixture->m_body == this); // Remove the fixture from this body's singly linked list. b2Assert(m_fixtureCount > 0); b2Fixture** node = &m_fixtureList; bool found = false; while (*node != NULL) { if (*node == fixture) { *node = fixture->m_next; found = true; break; } node = &(*node)->m_next; } // You tried to remove a shape that is not attached to this body. b2Assert(found); // Destroy any contacts associated with the fixture. b2ContactEdge* edge = m_contactList; while (edge) { b2Contact* c = edge->contact; edge = edge->next; b2Fixture* fixtureA = c->GetFixtureA(); b2Fixture* fixtureB = c->GetFixtureB(); if (fixture == fixtureA || fixture == fixtureB) { // This destroys the contact and removes it from // this body's contact list. m_world->m_contactManager.Destroy(c); } } b2BlockAllocator* allocator = &m_world->m_blockAllocator; if (m_flags & e_activeFlag) { b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase; fixture->DestroyProxies(broadPhase); } fixture->Destroy(allocator); fixture->m_body = NULL; fixture->m_next = NULL; fixture->~b2Fixture(); allocator->Free(fixture, sizeof(b2Fixture)); --m_fixtureCount; // Reset the mass data. ResetMassData(); } void b2Body::ResetMassData() { // Compute mass data from shapes. Each shape has its own density. m_mass = 0.0f; m_invMass = 0.0f; m_I = 0.0f; m_invI = 0.0f; m_sweep.localCenter.SetZero(); // Static and kinematic bodies have zero mass. if (m_type == b2_staticBody || m_type == b2_kinematicBody) { m_sweep.c0 = m_xf.p; m_sweep.c = m_xf.p; m_sweep.a0 = m_sweep.a; return; } b2Assert(m_type == b2_dynamicBody); // Accumulate mass over all fixtures. b2Vec2 localCenter = b2Vec2_zero; for (b2Fixture* f = m_fixtureList; f; f = f->m_next) { if (f->m_density == 0.0f) { continue; } b2MassData massData; f->GetMassData(&massData); m_mass += massData.mass; localCenter += massData.mass * massData.center; m_I += massData.I; } // Compute center of mass. if (m_mass > 0.0f) { m_invMass = 1.0f / m_mass; localCenter *= m_invMass; } else { // Force all dynamic bodies to have a positive mass. m_mass = 1.0f; m_invMass = 1.0f; } if (m_I > 0.0f && (m_flags & e_fixedRotationFlag) == 0) { // Center the inertia about the center of mass. m_I -= m_mass * b2Dot(localCenter, localCenter); b2Assert(m_I > 0.0f); m_invI = 1.0f / m_I; } else { m_I = 0.0f; m_invI = 0.0f; } // Move center of mass. b2Vec2 oldCenter = m_sweep.c; m_sweep.localCenter = localCenter; m_sweep.c0 = m_sweep.c = b2Mul(m_xf, m_sweep.localCenter); // Update center of mass velocity. m_linearVelocity += b2Cross(m_angularVelocity, m_sweep.c - oldCenter); } void b2Body::SetMassData(const b2MassData* massData) { b2Assert(m_world->IsLocked() == false); if (m_world->IsLocked() == true) { return; } if (m_type != b2_dynamicBody) { return; } m_invMass = 0.0f; m_I = 0.0f; m_invI = 0.0f; m_mass = massData->mass; if (m_mass <= 0.0f) { m_mass = 1.0f; } m_invMass = 1.0f / m_mass; if (massData->I > 0.0f && (m_flags & b2Body::e_fixedRotationFlag) == 0) { m_I = massData->I - m_mass * b2Dot(massData->center, massData->center); b2Assert(m_I > 0.0f); m_invI = 1.0f / m_I; } // Move center of mass. b2Vec2 oldCenter = m_sweep.c; m_sweep.localCenter = massData->center; m_sweep.c0 = m_sweep.c = b2Mul(m_xf, m_sweep.localCenter); // Update center of mass velocity. m_linearVelocity += b2Cross(m_angularVelocity, m_sweep.c - oldCenter); } bool b2Body::ShouldCollide(const b2Body* other) const { // At least one body should be dynamic. if (m_type != b2_dynamicBody && other->m_type != b2_dynamicBody) { return false; } // Does a joint prevent collision? for (b2JointEdge* jn = m_jointList; jn; jn = jn->next) { if (jn->other == other) { if (jn->joint->m_collideConnected == false) { return false; } } } return true; } void b2Body::SetTransform(const b2Vec2& position, float32 angle) { b2Assert(m_world->IsLocked() == false); if (m_world->IsLocked() == true) { return; } m_xf.q.Set(angle); m_xf.p = position; m_sweep.c = b2Mul(m_xf, m_sweep.localCenter); m_sweep.a = angle; m_sweep.c0 = m_sweep.c; m_sweep.a0 = angle; b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase; for (b2Fixture* f = m_fixtureList; f; f = f->m_next) { f->Synchronize(broadPhase, m_xf, m_xf); } } void b2Body::SynchronizeFixtures() { b2Transform xf1; xf1.q.Set(m_sweep.a0); xf1.p = m_sweep.c0 - b2Mul(xf1.q, m_sweep.localCenter); b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase; for (b2Fixture* f = m_fixtureList; f; f = f->m_next) { f->Synchronize(broadPhase, xf1, m_xf); } } void b2Body::SetActive(bool flag) { b2Assert(m_world->IsLocked() == false); if (flag == IsActive()) { return; } if (flag) { m_flags |= e_activeFlag; // Create all proxies. b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase; for (b2Fixture* f = m_fixtureList; f; f = f->m_next) { f->CreateProxies(broadPhase, m_xf); } // Contacts are created the next time step. } else { m_flags &= ~e_activeFlag; // Destroy all proxies. b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase; for (b2Fixture* f = m_fixtureList; f; f = f->m_next) { f->DestroyProxies(broadPhase); } // Destroy the attached contacts. b2ContactEdge* ce = m_contactList; while (ce) { b2ContactEdge* ce0 = ce; ce = ce->next; m_world->m_contactManager.Destroy(ce0->contact); } m_contactList = NULL; } } void b2Body::SetFixedRotation(bool flag) { bool status = (m_flags & e_fixedRotationFlag) == e_fixedRotationFlag; if (status == flag) { return; } if (flag) { m_flags |= e_fixedRotationFlag; } else { m_flags &= ~e_fixedRotationFlag; } m_angularVelocity = 0.0f; ResetMassData(); } void b2Body::Dump() { int32 bodyIndex = m_islandIndex; b2Log("{\n"); b2Log(" b2BodyDef bd;\n"); b2Log(" bd.type = b2BodyType(%d);\n", m_type); b2Log(" bd.position.Set(%.15lef, %.15lef);\n", m_xf.p.x, m_xf.p.y); b2Log(" bd.angle = %.15lef;\n", m_sweep.a); b2Log(" bd.linearVelocity.Set(%.15lef, %.15lef);\n", m_linearVelocity.x, m_linearVelocity.y); b2Log(" bd.angularVelocity = %.15lef;\n", m_angularVelocity); b2Log(" bd.linearDamping = %.15lef;\n", m_linearDamping); b2Log(" bd.angularDamping = %.15lef;\n", m_angularDamping); b2Log(" bd.allowSleep = bool(%d);\n", m_flags & e_autoSleepFlag); b2Log(" bd.awake = bool(%d);\n", m_flags & e_awakeFlag); b2Log(" bd.fixedRotation = bool(%d);\n", m_flags & e_fixedRotationFlag); b2Log(" bd.bullet = bool(%d);\n", m_flags & e_bulletFlag); b2Log(" bd.active = bool(%d);\n", m_flags & e_activeFlag); b2Log(" bd.gravityScale = %.15lef;\n", m_gravityScale); b2Log(" bodies[%d] = m_world->CreateBody(&bd);\n", m_islandIndex); b2Log("\n"); for (b2Fixture* f = m_fixtureList; f; f = f->m_next) { b2Log(" {\n"); f->Dump(bodyIndex); b2Log(" }\n"); } b2Log("}\n"); }
{ "pile_set_name": "Github" }
<?xml version="1.0 " ?> <doc></doc>
{ "pile_set_name": "Github" }
Reduce parentdirs we use, parentdirs are used for ordering Included upstream diff -u --new-file --recursive rpm-5.1.9_vanilla/lib/depends.c rpm-5.1.9_no-parentdirs/lib/depends.c --- rpm-5.1.9_vanilla/lib/depends.c 2009-04-12 19:46:17.000000000 +0000 +++ rpm-5.1.9_no-parentdirs/lib/depends.c 2009-06-13 15:21:43.504999639 +0000 @@ -2257,7 +2257,7 @@ #define isAuto(_x) ((_x) & _autobits) /*@unchecked@*/ -static int slashDepth = 100; /* #slashes pemitted in parentdir deps. */ +static int slashDepth = 2; /* #slashes pemitted in parentdir deps. */ static int countSlashes(const char * dn) /*@*/
{ "pile_set_name": "Github" }
/*---------------------------------------------------------------------------*\ ========= | \\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / O peration | \\ / A nd | Copyright (C) 2011-2013 OpenFOAM Foundation \\/ M anipulation | ------------------------------------------------------------------------------- License This file is part of OpenFOAM. OpenFOAM is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. OpenFOAM is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>. Description Writes the header description of the File to the stream associated with the File. \*---------------------------------------------------------------------------*/ #include "IOobject.H" #include "objectRegistry.H" // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // bool Foam::IOobject::writeHeader(Ostream& os, const word& type) const { if (!os.good()) { Info<< "IOobject::writeHeader(Ostream&) : " << "no stream open for write" << nl << os.info() << endl; return false; } writeBanner(os) << "FoamFile\n{\n" << " version " << os.version() << ";\n" << " format " << os.format() << ";\n" << " class " << type << ";\n"; if (note().size()) { os << " note " << note() << ";\n"; } os << " location " << instance()/db().dbDir()/local() << ";\n" << " object " << name() << ";\n" << "}" << nl; writeDivider(os) << endl; return true; } bool Foam::IOobject::writeHeader(Ostream& os) const { return writeHeader(os, type()); } // ************************************************************************* //
{ "pile_set_name": "Github" }
<resources> <dimen name="height_top_bar">46dp</dimen> <dimen name="padding_search_bar">5dp</dimen> <dimen name="height_row_weixin">59dp</dimen> <dimen name="size_avatar">50dp</dimen> <dimen name="margin_chat_activity">5dp</dimen> <dimen name="field_textsize">14sp</dimen> <dimen name="field_margin_right">5dp</dimen> <dimen name="call_button_padding_vertical">10dp</dimen> <dimen name="call_button_padding_left">25dp</dimen> <dimen name="call_button_padding_right">25dp</dimen> <dimen name="sidebar_text_size">10dp</dimen> <dimen name="activity_horizontal_margin">16dp</dimen> <dimen name="activity_vertical_margin">16dp</dimen> <dimen name="image_thumbnail_size">100dp</dimen> <dimen name="image_thumbnail_spacing">1dp</dimen> </resources>
{ "pile_set_name": "Github" }
'' FreeBASIC binding for libxcb-1.11, xcb-proto-1.11 '' '' based on the C header files: '' Copyright (C) 2006 Jeremy Kolb. '' All Rights Reserved. '' '' Permission is hereby granted, free of charge, to any person obtaining a copy '' of this software and associated documentation files (the "Software"), to deal '' in the Software without restriction, including without limitation the rights '' to use, copy, modify, merge, publish, distribute, sublicense, and/or sell '' copies of the Software, and to permit persons to whom the Software is '' furnished to do so, subject to the following conditions: '' '' The above copyright notice and this permission notice shall be included in all '' copies or substantial portions of the Software. '' '' THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR '' IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, '' FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE '' AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN '' ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION '' WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. '' '' Except as contained in this notice, the names of the authors or their '' institutions shall not be used in advertising or otherwise to promote the '' sale, use or other dealings in this Software without prior written '' authorization from the authors. '' '' translated to FreeBASIC by: '' Copyright © 2015 FreeBASIC development team #pragma once #include once "xcb.bi" #include once "xproto.bi" #include once "xcbext.bi" '' The following symbols have been renamed: '' constant XCB_XINERAMA_QUERY_VERSION => XCB_XINERAMA_QUERY_VERSION_ '' constant XCB_XINERAMA_GET_STATE => XCB_XINERAMA_GET_STATE_ '' constant XCB_XINERAMA_GET_SCREEN_COUNT => XCB_XINERAMA_GET_SCREEN_COUNT_ '' constant XCB_XINERAMA_GET_SCREEN_SIZE => XCB_XINERAMA_GET_SCREEN_SIZE_ '' constant XCB_XINERAMA_IS_ACTIVE => XCB_XINERAMA_IS_ACTIVE_ '' constant XCB_XINERAMA_QUERY_SCREENS => XCB_XINERAMA_QUERY_SCREENS_ extern "C" #define __XINERAMA_H const XCB_XINERAMA_MAJOR_VERSION = 1 const XCB_XINERAMA_MINOR_VERSION = 1 extern xcb_xinerama_id as xcb_extension_t type xcb_xinerama_screen_info_t x_org as short y_org as short width as ushort height as ushort end type type xcb_xinerama_screen_info_iterator_t data as xcb_xinerama_screen_info_t ptr as long rem index as long end type type xcb_xinerama_query_version_cookie_t sequence as ulong end type const XCB_XINERAMA_QUERY_VERSION_ = 0 type xcb_xinerama_query_version_request_t major_opcode as ubyte minor_opcode as ubyte length as ushort major as ubyte minor as ubyte end type type xcb_xinerama_query_version_reply_t response_type as ubyte pad0 as ubyte sequence as ushort length as ulong major as ushort minor as ushort end type type xcb_xinerama_get_state_cookie_t sequence as ulong end type const XCB_XINERAMA_GET_STATE_ = 1 type xcb_xinerama_get_state_request_t major_opcode as ubyte minor_opcode as ubyte length as ushort window as xcb_window_t end type type xcb_xinerama_get_state_reply_t response_type as ubyte state as ubyte sequence as ushort length as ulong window as xcb_window_t end type type xcb_xinerama_get_screen_count_cookie_t sequence as ulong end type const XCB_XINERAMA_GET_SCREEN_COUNT_ = 2 type xcb_xinerama_get_screen_count_request_t major_opcode as ubyte minor_opcode as ubyte length as ushort window as xcb_window_t end type type xcb_xinerama_get_screen_count_reply_t response_type as ubyte screen_count as ubyte sequence as ushort length as ulong window as xcb_window_t end type type xcb_xinerama_get_screen_size_cookie_t sequence as ulong end type const XCB_XINERAMA_GET_SCREEN_SIZE_ = 3 type xcb_xinerama_get_screen_size_request_t major_opcode as ubyte minor_opcode as ubyte length as ushort window as xcb_window_t screen as ulong end type type xcb_xinerama_get_screen_size_reply_t response_type as ubyte pad0 as ubyte sequence as ushort length as ulong width as ulong height as ulong window as xcb_window_t screen as ulong end type type xcb_xinerama_is_active_cookie_t sequence as ulong end type const XCB_XINERAMA_IS_ACTIVE_ = 4 type xcb_xinerama_is_active_request_t major_opcode as ubyte minor_opcode as ubyte length as ushort end type type xcb_xinerama_is_active_reply_t response_type as ubyte pad0 as ubyte sequence as ushort length as ulong state as ulong end type type xcb_xinerama_query_screens_cookie_t sequence as ulong end type const XCB_XINERAMA_QUERY_SCREENS_ = 5 type xcb_xinerama_query_screens_request_t major_opcode as ubyte minor_opcode as ubyte length as ushort end type type xcb_xinerama_query_screens_reply_t response_type as ubyte pad0 as ubyte sequence as ushort length as ulong number as ulong pad1(0 to 19) as ubyte end type declare sub xcb_xinerama_screen_info_next(byval i as xcb_xinerama_screen_info_iterator_t ptr) declare function xcb_xinerama_screen_info_end(byval i as xcb_xinerama_screen_info_iterator_t) as xcb_generic_iterator_t declare function xcb_xinerama_query_version(byval c as xcb_connection_t ptr, byval major as ubyte, byval minor as ubyte) as xcb_xinerama_query_version_cookie_t declare function xcb_xinerama_query_version_unchecked(byval c as xcb_connection_t ptr, byval major as ubyte, byval minor as ubyte) as xcb_xinerama_query_version_cookie_t declare function xcb_xinerama_query_version_reply(byval c as xcb_connection_t ptr, byval cookie as xcb_xinerama_query_version_cookie_t, byval e as xcb_generic_error_t ptr ptr) as xcb_xinerama_query_version_reply_t ptr declare function xcb_xinerama_get_state(byval c as xcb_connection_t ptr, byval window as xcb_window_t) as xcb_xinerama_get_state_cookie_t declare function xcb_xinerama_get_state_unchecked(byval c as xcb_connection_t ptr, byval window as xcb_window_t) as xcb_xinerama_get_state_cookie_t declare function xcb_xinerama_get_state_reply(byval c as xcb_connection_t ptr, byval cookie as xcb_xinerama_get_state_cookie_t, byval e as xcb_generic_error_t ptr ptr) as xcb_xinerama_get_state_reply_t ptr declare function xcb_xinerama_get_screen_count(byval c as xcb_connection_t ptr, byval window as xcb_window_t) as xcb_xinerama_get_screen_count_cookie_t declare function xcb_xinerama_get_screen_count_unchecked(byval c as xcb_connection_t ptr, byval window as xcb_window_t) as xcb_xinerama_get_screen_count_cookie_t declare function xcb_xinerama_get_screen_count_reply(byval c as xcb_connection_t ptr, byval cookie as xcb_xinerama_get_screen_count_cookie_t, byval e as xcb_generic_error_t ptr ptr) as xcb_xinerama_get_screen_count_reply_t ptr declare function xcb_xinerama_get_screen_size(byval c as xcb_connection_t ptr, byval window as xcb_window_t, byval screen as ulong) as xcb_xinerama_get_screen_size_cookie_t declare function xcb_xinerama_get_screen_size_unchecked(byval c as xcb_connection_t ptr, byval window as xcb_window_t, byval screen as ulong) as xcb_xinerama_get_screen_size_cookie_t declare function xcb_xinerama_get_screen_size_reply(byval c as xcb_connection_t ptr, byval cookie as xcb_xinerama_get_screen_size_cookie_t, byval e as xcb_generic_error_t ptr ptr) as xcb_xinerama_get_screen_size_reply_t ptr declare function xcb_xinerama_is_active(byval c as xcb_connection_t ptr) as xcb_xinerama_is_active_cookie_t declare function xcb_xinerama_is_active_unchecked(byval c as xcb_connection_t ptr) as xcb_xinerama_is_active_cookie_t declare function xcb_xinerama_is_active_reply(byval c as xcb_connection_t ptr, byval cookie as xcb_xinerama_is_active_cookie_t, byval e as xcb_generic_error_t ptr ptr) as xcb_xinerama_is_active_reply_t ptr declare function xcb_xinerama_query_screens_sizeof(byval _buffer as const any ptr) as long declare function xcb_xinerama_query_screens(byval c as xcb_connection_t ptr) as xcb_xinerama_query_screens_cookie_t declare function xcb_xinerama_query_screens_unchecked(byval c as xcb_connection_t ptr) as xcb_xinerama_query_screens_cookie_t declare function xcb_xinerama_query_screens_screen_info(byval R as const xcb_xinerama_query_screens_reply_t ptr) as xcb_xinerama_screen_info_t ptr declare function xcb_xinerama_query_screens_screen_info_length(byval R as const xcb_xinerama_query_screens_reply_t ptr) as long declare function xcb_xinerama_query_screens_screen_info_iterator(byval R as const xcb_xinerama_query_screens_reply_t ptr) as xcb_xinerama_screen_info_iterator_t declare function xcb_xinerama_query_screens_reply(byval c as xcb_connection_t ptr, byval cookie as xcb_xinerama_query_screens_cookie_t, byval e as xcb_generic_error_t ptr ptr) as xcb_xinerama_query_screens_reply_t ptr end extern
{ "pile_set_name": "Github" }
def parrot(): pass
{ "pile_set_name": "Github" }
""" This module provides means to detect the App Engine environment. """ import os def is_appengine(): return "APPENGINE_RUNTIME" in os.environ def is_appengine_sandbox(): """Reports if the app is running in the first generation sandbox. The second generation runtimes are technically still in a sandbox, but it is much less restrictive, so generally you shouldn't need to check for it. see https://cloud.google.com/appengine/docs/standard/runtimes """ return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27" def is_local_appengine(): return is_appengine() and os.environ.get("SERVER_SOFTWARE", "").startswith( "Development/" ) def is_prod_appengine(): return is_appengine() and os.environ.get("SERVER_SOFTWARE", "").startswith( "Google App Engine/" ) def is_prod_appengine_mvms(): """Deprecated.""" return False
{ "pile_set_name": "Github" }