lang
stringclasses
10 values
seed
stringlengths
5
2.12k
shell
#!/bin/sh
# Fetch a pinned revision of udp2raw-tunnel and strip the first line of its
# CMakeLists.txt before building.
# Fail fast: every step depends on the previous one succeeding; the original
# had no error handling, so a failed clone/cd cascaded into later commands.
set -e

cd git
git clone https://github.com/andwxh/udp2raw-tunnel.git
cd udp2raw-tunnel
git checkout 2698ec93951258d33543816c0a54eab3d1515f49

# Drop line 1 of CMakeLists.txt, then replace the original file.
tail -n +2 CMakeLists.txt > tmp
mv tmp CMakeLists.txt
shell
#9 run configure.py python3 ./configure.py --bootstrap #10 cp sudo cp ./ninja /usr/bin #11 pip install libs pip3 install -i https://pypi.tuna.tsinghua.edu.cn/simple/ opencv-python torch torchvision --default-timeout=100 #12 install detectron2 #python3 -m pip install 'git+https://github.com/facebookresearch/detectron2.git' -i https://pypi.tuna.tsinghua.edu.cn/simple python3 -m pip install 'git+https://github.com/facebookresearch/detectron2.git' -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com #python3 -m pip install 'git+https://gitee.com/vinsonws/detectron2' -i https://pypi.tuna.tsinghua.edu.cn/simple #python3 -m pip install 'git+https://gitee.com/vinsonws/detectron2' -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com #13 install cuda --override #14 install cudnn
shell
#!/bin/sh
# Usage: call pair.sh 00:00:00:00:00:00
# Pair with the Bluetooth device whose MAC address is given as $1.
if [ -z "$1" ]; then
    echo "Usage: $0 <bluetooth-mac-address>" >&2
    exit 1
fi
# Quote the argument so an empty or odd value cannot word-split.
bluetoothctl pair "$1"
shell
#!/bin/sh
# Dump the previously fetched HTML page to stdout.
cat < /tmp/some.html
shell
PORT_ACCESS=$(nc -zvw1 ${NODE_ADDRESS} ${P2P_PORT} > /dev/null 2>&1 ; echo $?) if [ "$PORT_ACCESS" -eq "0" ]; then eval "${KILL_COMMAND}" MESSAGE=$(cat <<-EOF
shell
# Run an occultation prediction for the given right ascension / declination
# over days 296-297 of 2017, writing the result to lunar_296_297.occ.
ra=244.8213
dec=-16.4727
./occ "$ra" "$dec" Latest_TLE.txt 2017:296:00:00:00 2017:297:00:00:00 lunar_296_297.occ
shell
# Download the DBpedia description of the Space Shuttle (Turtle format)
# and count how many @prefix declarations it contains.
curl http://dbpedia.org/data/Space_Shuttle.ttl > SpaceShuttle.ttl
# grep -c counts matching lines directly; the original `cat | grep | wc -l`
# pipeline spawned two unnecessary processes.
grep -c "@prefix" SpaceShuttle.ttl
shell
#!/bin/sh
# Create a 256 MiB file of random bytes to use as compression test input.
outfile=compressfile
dd if=/dev/urandom of="$outfile" bs=1M count=256
shell
#! /bin/bash TARGET=tab_menu.zip zip $TARGET \
shell
if [ ! $# -eq 3 ]; then help exit 1 fi # Args projectID="$1" ref="$2" token="$3"
shell
#!/usr/bin/node var path = require('path') , fs = require('fs') , spawn = require('child_process').spawn , serverlist = [] //c('starting mothership'); //var mothership = spawn('node', ['../../run_mothership']); //c('mothership started'); //mothership.stdout.on('data', function(data){ // c(data); //})
shell
#!/bin/bash
#=================================================
# Description: DIY script
# License: MIT
# Author: P3TERX
# Blog: https://p3terx.com
#=================================================

# Uncomment a feed source
#sed -i 's/^#\(.*helloworld\)/\1/' feeds.conf.default

# Comment out a useless feed source.
# `&` is the portable sed token for "the whole match"; the original used
# `\0`, a GNU-only extension with the same meaning.
sed -i 's/\(.*routing\)/#&/' feeds.conf.default
shell
# Launch Confluent Replicator with the consumer/producer/replication
# configuration files for the fm-replicator cluster.
replicator_args=(
  --consumer.config source.properties
  --producer.config destination.properties
  --replication.config replicator.properties
  --cluster.id fm-replicator
)
replicator "${replicator_args[@]}"
shell
# Ensure the machines are up.
vagrant up

# Recreate an empty dist folder for build output.
rm -rf -- dist
mkdir -p -- dist
shell
if [ ! "$OPENXR_REPO" ]; then export OPENXR_REPO=$(cd .. && pwd)/OpenXR-SDK-Source fi scripts/hpp_genxr.py -registry $OPENXR_REPO/specification/registry/xr.xml -o include/openxr openxr.hpp clang-format-6.0 -style=file -i include/openxr/openxr.hpp )
shell
--threads=8 \ --weighted_loss=0 python src/train.py \ --checkpoint=telescope_0 \ --save_dir=models/telescope_1 \ --stage=1 \ --early_cutoff=25 \ --epochs=-1 \ --batch_size=16 \ --threads=8 \
shell
depends() { return 0 } # called by dracut install() {
shell
# Build the project, then install the site's dependencies.
# Abort on the first failure so a failed build or `cd site` cannot let the
# second `npm install` run against the wrong directory.
set -e

npm install
npm run icon
npm run build

cd site
npm install
shell
#!/bin/bash
# Publish the sf-jrclust image to Docker Hub.
image="magland/sf-jrclust:0.1.0"
docker push "$image"
shell
sleep 1; done for i in $(seq 60); do docker-compose exec postgresql10 psql postgres://postgres:postgresrootpassword@localhost -c "SELECT 1" && break; sleep 1; done for i in $(seq 60); do docker-compose exec mysql mysql -hmysql -uroot -pmysqlrootpassword -e "SELECT 1" && break; sleep 1; done
shell
#!/bin/bash #SBATCH --nodes=4096 export OMP_NUM_THREADS=64 export MUSPI_NUMBATIDS=203 export MUSPI_NUMINJFIFOS=3 export MUSPI_NUMRECFIFOS=3 srun --ntasks=4096 ./mpi_test-bgq-spi
shell
exit 1 fi local -r url="https://raw.githubusercontent.com/Linuxbrew/install/master/install.sh" log_info "started procedure for installing homebrew" pushd "$HOME" >/dev/null 2>&1 mkdir -p $HOME/.cache && \ log_info "running main script to install homebrew" sh -c "$(curl -fsSL $url)" # add_profile_env_var "MANPATH" '$MANPATH:/home/linuxbrew/.linuxbrew/share/man' # add_profile_env_var "INFOPATH" '$INFOPATH:/home/linuxbrew/.linuxbrew/share/info' add_to_path '/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin/' log_info "testing homebrew installation..." log_info "uninstalling cmake..." sudo apt-get remove -yqq cmake
shell
mkdir -p ${BIN_DIR} mkdir -p ${SHARE_DIR} mkdir -p ${DOC_DIR} mkdir -p ${PREFIX}/lib # Copy binaries and scripts # perl scripts use /usr/bin/env so there is no need to do modify them if [[ "$OSTYPE" == "darwin"* ]]; then find . -maxdepth 1 -type f -perm +111 -exec cp -p \{\} ${BIN_DIR} \; else find . -maxdepth 1 -type f -executable -exec cp -p \{\} ${BIN_DIR} \; fi # Compile selection functions and install them. # The SYSTEM variable is not really needed
shell
# Default-route gateway == the Docker host as seen from inside a container.
DOCKER_HOST_IP=$(route -n | awk '/UG[ \t]/{print $2}')

# Use the correct mysql instance: default to the host's port 3306 when the
# caller did not provide MYSQL_PORT.
# Quoting $MYSQL_PORT keeps the test from breaking when it is empty/unset
# (the original unquoted `[ -z $MYSQL_PORT ]` collapses to `[ -z ]`).
if [ -z "$MYSQL_PORT" ]; then
    MYSQL_PORT=$DOCKER_HOST_IP:3306
fi

# Substitute the %%MYSQL%% placeholder in every service config file.
find "$DOCKER_DIR"/*/config/*.json -type f -exec sed -i "s/%%MYSQL%%/$MYSQL_PORT/g" {} \;
shell
#!/bin/bash
# Copy $1 to the X clipboard, shell-quoted the way bash <Tab>-completes
# filenames (so paths with spaces paste back as a single argument).
# Unescaped variant would be: echo -n "$1" | xsel --clipboard
printf '%q' "$1" | xsel -b
shell
# Scaffold a Flogo application and tidy its Go module dependencies.
flogo create -f ${app_name} app
cd app/src
go mod tidy
cd ..
shell
next_version="$((current_version + 1))" plutil -replace CFBundleVersion -string "$next_version" "$INFO_PLIST" git config --local user.email "<EMAIL>" git config --local user.name "GitHub Action" git add $INFO_PLIST git commit -m "Bump bundle version to $next_version"
shell
echo "$SOLANA_CONFIG_DIR/leader.json not found, create it by running:" echo echo " ${here}/setup.sh" exit 1 } if [[ -n "$SOLANA_CUDA" ]]; then program="$solana_fullnode_cuda" else program="$solana_fullnode" fi tune_networking trap 'kill "$pid" && wait "$pid"' INT TERM
shell
# Print a substring of the string $1, starting at offset $2, of length $3.
# Fix: the original body was `${$1:$2:$3}`, which bash rejects as a
# "bad substitution" ($ cannot be nested inside ${...} like that).
# NOTE(review): if callers actually pass a *variable name* in $1, use
# indirection instead: printf '%s\n' "${!1:$2:$3}" -- confirm against callers.
function substring {
    printf '%s\n' "${1:$2:$3}"
}
shell
#!/bin/sh
# Tear down the homebase Compute Engine instance without prompting.
gcloud compute instances delete homebase --quiet
shell
# Environment variables:
#   TWINE_USERNAME: pypi username
#   TWINE_PASSWORD: <PASSWORD>
#   DRY_RUN: <bool>

# Make sure the packaging tooling is current before building distributions.
python -m pip install --upgrade pip
pip install setuptools wheel twine

# Project dependencies, when the project declares any.
if [ -f requirements.txt ]; then
    pip install -r requirements.txt
fi

# Build source and wheel distributions.
python setup.py sdist bdist_wheel
shell
# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Load into BQ ### IMPORTANT ### this files has to be created with official dsdgen generator with "-update" flag
shell
# Remove all exited containers and dangling images.
# `xargs -r` (GNU) skips the command when the list is empty, so on a clean
# host this no longer errors with "requires at least 1 argument" the way
# `docker rm $(...)` did.
docker ps -a -f status=exited -q | xargs -r docker rm
docker images -f "dangling=true" -q | xargs -r docker rmi
shell
# GNU coreutils first, plus a sha256sum alias pointing at the brew binary.
brew install coreutils
ln -s "${BREW_PREFIX}/bin/gsha256sum" "${BREW_PREFIX}/bin/sha256sum"

# Remaining utility packages, installed one by one:
#   moreutils        - sponge and friends
#   findutils        - GNU find/locate/updatedb/xargs, g-prefixed
#   gnu-sed          - GNU sed, overwriting the built-in sed  #--with-default-names
#   bash             - a modern version of Bash
#   bash-completion2 - completion support for that Bash
for formula in moreutils findutils gnu-sed bash bash-completion2; do
  brew install "$formula"
done
# Switch to using brew-installed bash as default shell
shell
# Unpack the Lambda layer archive and decompress its brotli tarball.
rm -rf -- layer
unzip layer.zip -d layer
cd layer
brotli --decompress lo.tar.br
shell
# Cross-build the Win32 package in a clean build tree.
rm -rf win32
mkdir win32
cd win32

# Remove stale precompiled .qlf files.  `find -delete` (unlike the original
# `find | xargs rm`) is a no-op when nothing matches, so the freshly created
# empty tree no longer makes `rm` fail with "missing operand".
find . -name '*.qlf' -delete

cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_TOOLCHAIN_FILE=../cmake/cross/linux_win32.cmake -G Ninja ..
ninja -j 2
cpack
shell
"path": "/organisations/create", "requestContext": {}, "httpMethod": "POST", "headers": {}, "multiValueHeaders": { }, "queryStringParameters": null, "multiValueQueryStringParameters": null, "pathParameters": null,
shell
sudo apt-get -y install 'binfmt*' sudo apt-get -y install libc6-armhf-armel-cross sudo apt-get -y install debian-keyring sudo apt-get -y install debian-archive-keyring sudo apt-get -y install emdebian-archive-keyring tee /etc/apt/sources.list.d/emdebian.list << EOF deb http://mirrors.mit.edu/debian squeeze main
shell
OS_VERSION="7.0" PREFIX="" WHOAMI_ROOT=$($PREFIX id -u); if [ $WHOAMI_ROOT -ne 0 ]; then echo "Are you running this script under root?"
shell
# --------- ------- ---- ------- ----------- ---------- ----------- ----------- # base03 '#002b36' 8/4 brblack 235 #1c1c1c 15 -12 -12 0 43 54 193 100 21 # base02 '#073642' 0/4 black 235 #262626 20 -12 -12 7 54 66 192 90 26 # base01 '#586e75' 10/7 brgreen 240 #585858 45 -07 -07 88 110 117 194 25 46 # base00 '#657b83' 11/7 bryellow 241 #626262 50 -07 -07 101 123 131 195 23 51 # base0 '#839496' 12/6 brblue 244 #808080 60 -06 -03 131 148 150 186 13 59 # base1 '#93a1a1' 14/4 brcyan 245 #8a8a8a 65 -05 -02 147 161 161 180 9 63
shell
fi DIR="/usr/local/ddos-deflate" CRONFILE="/etc/cron.d/ddos-deflate" LOGFILE="/var/log/ddos-deflate.log" printf "${RED}Uninstalling DDoS-Deflate.\n" if [ -d $DIR ]; then
shell
export NODE_3="172.16.31.10" export USER=root # The first server starts the cluster k3sup install \ --cluster \ --user $USER \ --ip $NODE_1 # The second node joins k3sup join \ --server \ --ip $NODE_2 \ --user $USER \ --server-user $USER \
shell
# Documentation:
# @raycast.author <NAME>
# @raycast.authorURL https://github.com/PSalant726
# @raycast.description Search pkg.go.dev for package documentation

# When a second argument is given it selects a version: "pkg@version".
# ${2:+@$2} expands to "@$2" only when $2 is set and non-empty.
version=${2:+@$2}
open "https://pkg.go.dev/$1$version"
shell
#!/bin/bash cd "$(dirname "${BASH_SOURCE[0]}")" \ && . "../../utils.sh" \ && . "./utils.sh" # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - print_in_purple "\n Golang\n\n" brew_install "Golang" "go" brew_install "GoLand" "goland" "--cask"
shell
#!/usr/bin/env bash
# Copyright (c) Facebook, Inc. and its affiliates.

# Clone hsthrift and install its dependencies from scratch (--nuke).
set -e

git clone https://github.com/facebookincubator/hsthrift.git
cd hsthrift
./install_deps.sh --nuke
shell
#!/bin/bash
# Shell script to start chromium pointed at the local app, fullscreen.
# Fix: the original shebang read `#!bin/bash` (missing the leading slash),
# which makes the script unrunnable as ./script on most systems.
chromium-browser http://localhost:8080 -start-fullscreen &
shell
<svg id="caca" height="150" width="500"> <ellipse class="ellip" cx="240" cy="100" rx="220" ry="30" style="fill:purple"></ellipse> <ellipse cx="220" cy="70" rx="190" ry="20" style="fill:lime"></ellipse> Sorry, your browser does not support inline SVG. </svg> </content> <step autoclean="true"> <eicommands> </eicommands> </step> <step autoclean="true"> <eicommands> <changecontent action="append"> <elements>
shell
#----------------------------------------------------------FUNCTIONS---------------------------------------------------------------- function define_Constants () { local versStamp="Version 1.0.9, 05-19-2016" loggerTag="transcode.serviceIngest"
shell
# Stream the kafdrop container's log output (Ctrl-C to stop).
docker logs -f kafdrop
shell
apt-add-repository ppa:ondrej/php5-5.6 -y apt-get update # installing xvfb, java and php apt-get install xvfb openjdk-7-jre-headless php5-cli php5-curl php5-xdebug ncurses-term unzip xfonts-100dpi xfonts-75dpi xfonts-scalable xfonts-cyrillic vim -y --no-install-recommends
shell
#!/bin/sh echo "Cloning repositories..." CODE=$HOME/Code
shell
hourly) interval=3600 ;; weekly) interval=604800
shell
# configure advertise_addr: pick the first non-docker, non-loopback
# interface and advertise its IPv4 address to Consul.
iface=$(ip link | grep -v docker -A1 | grep -v lo -A1 | head -n1 | cut -f2 -d: | tr -d '[:space:]')
# Quote ${iface} so an empty or odd value cannot word-split or glob as a
# grep pattern argument.
ip_addr=$(ip addr | grep "${iface}" | grep -oP 'inet ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)' | cut -f2 -d" ")
echo "advertise_addr = \"${ip_addr}\"" >> /etc/consul.d/server.hcl
shell
# WITHOUT arguments: # # loads legacy data into mongo database data for the DARIAH application # Legacy data consists of documents that have a field isPristine: true # # The import script removes existing pristine data from the database, # then imports the new pristine data into the database, except for those # records where a non pristine version is already in the database. # # The DARIAH app takes care that when a record is modified, the isPristine field disappears. # This script is set up to work at specific servers. # Currently it supports
shell
#!/bin/sh # iceberg-dark printf "\033]4;0;#1e2132;1;#e27878;2;#b4be82;3;#e2a478;4;#84a0c6;5;#a093c7;6;#89b8c2;7;#c6c8d1;8;#6b7089;9;#e98989;10;#c0ca8e;11;#e9b189;12;#91acd1;13;#ada0d3;14;#95c4ce;15;#d2d4de\007" printf "\033]10;#c6c8d1;#161821;#c6c8d1\007" printf "\033]17;#c6c8d1\007" printf "\033]19;#161821\007" printf "\033]5;0;#c6c8d1\007"
shell
# Copy each generated PDF guide into the published apiman.github.io tree.
for guide in crash-course developer-guide installation-guide production-guide user-guide; do
  cp "$guide/target/docbook/publish/en-US/pdf/apiman-$guide.pdf" "../apiman.github.io/latest/$guide/."
done
shell
curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash if [[ $? != 0 ]] then echo "Error: Failed to download Node.js source, configuration will now stop"
shell
mkdir -p ${test_wd} cd ${test_wd} # In case we are resuming from a previous failed run, which has left output in the directory rm *.odb || true # Create some test data cat > data.csv <<EOF integer_column:INTEGER,double_column:DOUBLE,integer_missing:INTEGER,double_missing:DOUBLE
shell
# Flutter build settings consumed by the Xcode build phase.
export FLUTTER_BUILD_DIR="build"
export SYMROOT="${SOURCE_ROOT}/../build/ios"
export FLUTTER_FRAMEWORK_DIR="/Users/mikerockett/flutter/bin/cache/artifacts/engine/ios"
export FLUTTER_BUILD_NAME="0.5.4"
export FLUTTER_BUILD_NUMBER="3"
shell
NO_INIT=0 CLEAN=0 FLAGS="" # read options TEMP=`getopt -o s:ncl: --long subdir:,flags:,no-init,clean,lab: -n $0 -- "$@"` eval set -- "$TEMP" # extract options and their arguments while true ; do case "$1" in -s|--subdir)
shell
function get_ipq40xx_device_mac () { mac=`hexdump -x -n 6 /dev/mtd7 | head -n 1 | \ awk '{printf("%s:%s:%s:%s:%s:%s\n", substr($2,3,2), \ substr($2,1,2), substr($3,3,2), substr($3,1,2), \ substr($4,3,2), substr($4,1,2));}'` echo $mac } function get_b1300_serial_num () { suff=`dd if=/dev/mtd7 bs=1 skip=$((0x30)) count=16 2>/dev/null`
shell
#PBS -N spark_wordcount #PBS -l nodes=2:ppn=2,vmem=1GB #PBS -l walltime=0:30 #PBS -j oe #PBS -o output/spark_wordcount.oe #PBS -V set -e export SPARKHPC_JAVA_CLASSPATH=${SPARKHPC_HOME}/tests/core/target/tests-core_2.10-1.0.jar export SPARKHPC_DRIVER_CLASSPATH=
shell
### #!/usr/bin/env bash remote_path=/home/app/nginx/html remote_path2=/home/app/nginx/html remote_user=app remote_ip=172.16.176.151
shell
#!/bin/bash
# Build the alertmanager image under the given Docker Hub namespace.
export USER_NAME=mzabolotnov
docker build --tag "$USER_NAME/alertmanager" monitoring/alertmanager
shell
set -e if [ ! -d "/opt/skiff" ]; then echo "Non-skiff system detected, bailing out!" exit 1 fi INIT_ONCE=/run/skiff-swap-inited if [ -f $INIT_ONCE ]; then echo "$INIT_ONCE exists, bailing out." exit 0 fi
shell
# Generate C++ topic classes from the module message definitions.
SRC_DIR="${ZDIR}/modules"
OUT_DIR="${ZDIR}/common/cpp/zsdn-commons/zsdn/topics"

echo "Topics Source Dir: ${SRC_DIR}"
echo "Topics Out Dir: ${OUT_DIR}"

# Quote the paths so a ZDIR containing spaces cannot split the arguments.
java -jar ./hierarchy-builder/hierarchy-builder/target/hierarchy-builder-0.0.1-SNAPSHOT-jar-with-dependencies.jar --source "${SRC_DIR}" -r --out "${OUT_DIR}" --language cpp -c "zsdn::modules"

echo "### Finished Building C++ Topics ###"
shell
if [ $1 -eq 200 ]; then exit 0 fi echo "$1 $2" >> zaporedje.dat }
shell
set xlabel 'simulation time' set output "${PREF}.eps" plot \ "${PREF}.gr" using 1:2 w l lw 2 title 'red', \ "${PREF}.gr" using 1:3 w l lw 2 title 'green', \ "${PREF}.gr" using 1:4 w l lw 2 title 'blue' EOF
shell
EXPLICIT_LIST=`tail -n +4 $1` # extract package name and versions for LINE in ${EXPLICIT_LIST}; do URL_NOPRO=${LINE:7} FILENAME=${URL_NOPRO##/*/} # just package name # NAME=${FILENAME%%-*} # package, version NAME=${FILENAME%-*} # package,version, hash # NAME=$FILENAME%%.tar*}
shell
echo "There are total of" $(echo $availableTeams | jq length) "teams available" fi for team in $(echo $availableTeams | jq -c '.[]'); do apps=$(o365 teams app list -i $(echo $team | jq ''.id) -a) echo "All apps in team are given below: " $(echo $team | jq ''.displayName) " " $(echo $team | jq ''.id) echo $apps
shell
#!/bin/bash
# Push every published HARMONI image tag to Docker Hub, in order.
for tag in kinetic-base kinetic-full kinetic-w2l kinetic-face_detect noetic-base noetic-full; do
  docker push "harmoniteam/harmoni:$tag"
done
# docker push harmoniteam/harmoni:noetic-w2l
shell
echo "Usage: ${0} {DOMAIN} {N_BOXES}" >&2 exit 1 fi DOMAIN=${1} N_BOXES=${2}
shell
#SBATCH -t 24:01:01 #SBATCH --mem=10000 #SBATCH -N 1 -n 1 #SBATCH -p gpu #SBATCH --gres=gpu:1
shell
# Serve the build/ directory over TLS on port 5000.
http-server build --ssl --port 5000
shell
#!/bin/sh # --batch to prevent interactive command --yes to assume "yes" for questions gpg --quiet --batch --yes --decrypt --passphrase="<PASSWORD>" \ --output ./android/keystores/release.keystore ./android/keystores/release.keystore.gpg gpg --quiet --batch --yes --decrypt --passphrase="<PASSWORD>" \ --output ./android/keystores/google-private-key.json ./android/keystores/google-private-key.json.gpg
shell
echo Linting commit messages with GitLint pipenv run gitlint --commits fbb925a...HEAD echo Finished linting commits echo echo Linting code with Flake8
shell
#!/bin/sh
# Cross-compile a SystemTap script into a loadable kernel module for mips32.
BIN="/csb/toolchains/mips32--glibc--stable-2018.11-1/bin"

SRC_FILE=$1
KERNEL=$2
# Derived module name: replace the source extension with .ko.
OUT_FILE=${SRC_FILE%.*}.ko

echo "Compiling \"${SRC_FILE}\" into \"${OUT_FILE}\""
# Quote the user-supplied paths so spaces cannot split stap's arguments.
stap -gv -a mips -p4 -r "$KERNEL" -B CROSS_COMPILE="$BIN/mips-linux-" -m "$OUT_FILE" "$SRC_FILE"
shell
pidList=$(ps aux | grep $CATALINA_HOME | grep -v grep | awk '{print $2}') for pid in $pidList; do kill -9 $pid done $CATALINA_HOME/bin/startup.sh echo -e "\033[32m ----------------------------------------------------\033[0m" echo -e "\033[32m | [$(date +'%F %T')] >>> [ReDeploy successfully.] | \033[0m" echo -e "\033[32m ----------------------------------------------------\033[0m"
shell
ENV_OPTIONS="" # Define RCLONE config if user and password are provided, # otherwise mount rclone.conf from the host if [ ${#RCLONE_USER} -gt 8 ] && [ ${#RCLONE_PASSWORD} -gt 8 ]; then ENV_OPTIONS+="-e RCLONE_CONFIG=${RCLONE_CONF_CONTAINER} \ -e RCLONE_CONFIG_RSHARE_TYPE=${RCLONE_TYPE} \ -e RCLONE_CONFIG_RSHARE_URL=${RCLONE_URL} \ -e RCLONE_CONFIG_RSHARE_VENDOR=${RCLONE_VENDOR} \ -e RCLONE_CONFIG_RSHARE_USER=${RCLONE_USER} \ -e RCLONE_CONFIG_RSHARE_PASS=${RCLONE_PASSWORD}" else ENV_OPTIONS+="-e RCLONE_CONFIG=${RCLONE_CONF_CONTAINER}" if [ -f "$RCLONE_CONF_HOST" ]; then
shell
# get token from service account # token=$(kubectl --kubeconfig ~/.kube/config_$CLUSTER_NAME -n kube-system get secret $DEFAULT_SECRET_NAME -o json | jq '.data.token' | tr -d '"' | base64 --decode) token=$(kubectl --kubeconfig $KUBECONFIG -n kube-system get secret $DEFAULT_SECRET_NAME -o json | jq '.data.token' | tr -d '"' | base64 --decode) # get current context to pull data directly from the local kube config file # context=`kubectl --kubeconfig ~/.kube/config_$CLUSTER_NAME config current-context` context=`kubectl --kubeconfig $KUBECONFIG config current-context` # get cluster name of context # name=`kubectl --kubeconfig ~/.kube/config_$CLUSTER_NAME config get-contexts $context | awk '{print $3}' | tail -n 1` name=`kubectl --kubeconfig $KUBECONFIG config get-contexts $context | awk '{print $3}' | tail -n 1` # get endpoint of current context # endpoint=`kubectl --kubeconfig ~/.kube/config_$CLUSTER_NAME config view -o jsonpath="{.clusters[?(@.name == \"$name\")].cluster.server}"`
shell
# Comment out directives the target httpd setup cannot use.
# sed reads the file itself -- the original `cat file | sed` form was a
# useless use of cat.  Writing to a .tmp file and moving it back is kept so
# a failed sed cannot truncate the original.
sed 's/Header edit/#Header edit/g' owasp-v2/optional_rules/modsecurity_crs_55_application_defects.conf > owasp-v2/optional_rules/modsecurity_crs_55_application_defects.conf.tmp
mv owasp-v2/optional_rules/modsecurity_crs_55_application_defects.conf.tmp owasp-v2/optional_rules/modsecurity_crs_55_application_defects.conf

sed 's/Header set/#Header set/g' owasp-v2/experimental_rules/modsecurity_crs_42_csp_enforcement.conf > owasp-v2/experimental_rules/modsecurity_crs_42_csp_enforcement.conf.tmp
mv owasp-v2/experimental_rules/modsecurity_crs_42_csp_enforcement.conf.tmp owasp-v2/experimental_rules/modsecurity_crs_42_csp_enforcement.conf

# Disables SecGeoLookupDb
sed 's/SecGeoLookupDb/#SecGeoLookupDb/g' owasp-v2/experimental_rules/modsecurity_crs_61_ip_forensics.conf > owasp-v2/experimental_rules/modsecurity_crs_61_ip_forensics.conf.tmp
mv owasp-v2/experimental_rules/modsecurity_crs_61_ip_forensics.conf.tmp owasp-v2/experimental_rules/modsecurity_crs_61_ip_forensics.conf
shell
head -c -1 $file > out1 || fail=1 compare exp1 out1 || fail=1 fi done Exit $fail
shell
# Get environment from common/env-config.sh . $RUNDIR/../common/env-config.sh if [ ! -f "${DOCKERKEYS}/openldap/ldap.key" ] || [ ! -f "${DOCKERKEYS}/openldap/ldap.crt" ] || [ ! -f "${DOCKERKEYS}/openldap/ca.crt" ] || [ ! -f "${DOCKERKEYS}/openldap/dhparam.pem" ] || [ ! -f "${DOCKERKEYS}/postgresql/server.pem" ] then echo "Key files not found. Restore or create keys before running this script." exit 1 fi docker network create isva docker volume create isvaconfig docker volume create libldap docker volume create libsecauthority docker volume create ldapslapd
shell
############################
# Check out the requested omnibus branch and build the integration agent.
PROJECT_DIR='/dd-agent-omnibus'
cd $PROJECT_DIR

git fetch --all
# Quote the branch name: with an empty OMNIBUS_BRANCH the unquoted form
# silently became `git checkout` / `git reset --hard origin/`, operating on
# the wrong ref instead of failing loudly.
git checkout "$OMNIBUS_BRANCH"
git reset --hard "origin/$OMNIBUS_BRANCH"

rake agent:build-integration
shell
#!/usr/bin/env bash
# Build the CWS docs-builder image and use it to regenerate the
# cloud-workload-security documentation for the current checkout.
TAG_NAME="ddagent_security_agent_docs_builder"
docker build . --file ./docs/cloud-workload-security/scripts/Dockerfile --tag "$TAG_NAME"
# Quote $(pwd) so a checkout path containing spaces still mounts correctly.
docker run --rm -v "$(pwd)":/go/src/github.com/DataDog/datadog-agent "$TAG_NAME" inv -e security-agent.generate-cws-documentation --go-generate
shell
#!/bin/sh #/Users/apple/Library/Python/2.7/bin/pyinstaller --windowed --onefile --clean --noconfirm ff777wingflex.py /Users/apple/Library/Python/2.7/bin/pyinstaller --onefile --clean --noconfirm ff777wingflex.spec pushd dist create-dmg ff777wingflexfixmac.app/ popd
shell
: retrieve latest average CO2 reading from NOAA : uses: awk, tail, curl : run: source dailyc02.sh : note: a change to echo messages onto STDERR. : to see the "canonical format", declare -f dailyco2 : date: 2020-07-20 -- do not leave data files in cwd : date: 2021-02-10 -- display data source is NOAA set -- ftp://aftp.cmdl.noaa.gov/products/trends/co2/co2_trend_gl.txt set -- $1 $(basename $1) curl -q $1 > $2 || { echo curl error 1>&2; return 1; } : tail -n 1 $2 | awk '
shell
fi if [ ! -e $TR ]; then eval $COLOR_RED echo "GPIO not setup! Failing." eval $COLOR_NORMAL fi if [ -e $TUNSLIP_LOG ]; then # some trivial log rotation mv $TUNSLIP_LOG $TUNSLIP_LOG.1
shell
# Load the eROSITA SASS development environment, then run the eFEDS
# simulation pipeline stages in order.
source /home/erosita/sw/sass-setup.sh eSASSdevel

#python create_simput.py
for stage in efeds_sixte_simulator.py prepare_event_files2.py run_esass.py; do
  python "$stage"
done
shell
ln -s ${maxlikepath}/maxlike_subaru_secure_filehandler.py . ln -s ${maxlikepath}/maxlike_voigt_simdriver.py . ln -s ${maxlikepath}/newmanstyle_batchrunner.py . ln -s ${maxlikepath}/nfwmodel2param.c . ln -s ${maxlikepath}/nfwmodel2param.pyx . ln -s ${maxlikepath}/nfwmodel2param.so . ln -s ${maxlikepath}/nfwmodel_normshapedistro.py . ln -s ${maxlikepath}/nfwmodel_sim.py .
shell
echo Usage example: allRoundtripTests xmlts_report.html echo Usage note: report will be placed under FIRTT_DATA as defined in env.sh exit fi roundtripTest.sh saxroundtrip_rtt $1 roundtripTest.sh staxroundtrip_rtt $1 roundtripTest.sh domroundtrip_rtt $1 roundtripTest.sh domsaxroundtrip_rtt $1
shell
#!/bin/bash # <NAME> # 08/12/2020 clear echo " *** SCRIPT PARA EL APAGADO DEL EQUIPO *** " echo " SELECCIONA UNA OPCIÓN:"
shell
git branch -D "${local_branch}" } trap cleanup EXIT master_hash=$(git rev-parse --short=12 "${remote_master_branch}") git merge "${remote_master_branch}" \ -m "Merge ${remote_master_branch_name} ${master_hash} into doc" dirty=y doc/support/generate.sh
shell
# Parse command-line flags:
#   -v  verbose output
#   -q  quiet mode
#   -_  debug (implies verbose)
while getopts "vq_" c
do
    case $c in
        v) verbose=1;;
        q) quiet=1;;
        _) debug=1; verbose=1;;
        # Quote $usage so a message containing globs or runs of spaces is
        # printed verbatim instead of being expanded/collapsed by the shell.
        \?) echo "$usage" >&2
            exit 2;;
    esac
done
shell
#!/bin/sh # Count lines find examples inc src test tools NativeJIT/Examples NativeJIT/inc NativeJIT/src NativeJIT/test -type f | xargs wc -l # Count files find examples inc src test tools NativeJIT/Examples NativeJIT/inc NativeJIT/src NativeJIT/test -type f | wc -l
shell
# Download and install the Linux 5.9.10 mainline kernel packages (arm64),
# then clean up the download directory.
mkdir /tmp/ubuntukernel5.9.10
# Abort if the cd failed: otherwise the downloads land in the current
# directory and `sudo dpkg -i *.deb` could install unrelated packages.
cd /tmp/ubuntukernel5.9.10 || exit 1
wget https://kernel.ubuntu.com/~kernel-ppa/mainline/v5.9.10/arm64/linux-headers-5.9.10-050910-generic_5.9.10-050910.202011221708_arm64.deb
wget https://kernel.ubuntu.com/~kernel-ppa/mainline/v5.9.10/arm64/linux-image-5.9.10-050910-generic_5.9.10-050910.202011221708_arm64.deb
wget https://kernel.ubuntu.com/~kernel-ppa/mainline/v5.9.10/arm64/linux-modules-5.9.10-050910-generic_5.9.10-050910.202011221708_arm64.deb
sudo dpkg -i *.deb
cd
rm -r /tmp/ubuntukernel5.9.10
shell
# Regenerate the host SSH keys and have the confluent API sign a host
# certificate for each public key, registering it with sshd.
rm /etc/ssh/*host*key* >& /dev/null
ssh-keygen -A
/usr/libexec/platform-python /etc/confluent/apiclient >& /dev/null
for pubkey in /etc/ssh/ssh_host*key.pub; do
    # Certificate path is derived from the key: foo.pub -> foo-cert.pub
    certfile=${pubkey/.pub/-cert.pub}
    # Quote the paths in case a key filename contains unexpected characters.
    /usr/libexec/platform-python /etc/confluent/apiclient /confluent-api/self/sshcert "$pubkey" > "$certfile"
    echo "HostCertificate $certfile" >> /etc/ssh/sshd_config
done
shell
#!/bin/bash # # This script can be sourced to ensure VCPKG_ROOT points at a bootstrapped vcpkg repository. # It will also modify the environment (if sourced) to reflect any overrides in # vcpkg triplet used neccesary to match the semantics of vcpkg-rs. if [ "$VCPKG_ROOT" == "" ]; then echo "VCPKG_ROOT must be set." exit 1
shell
#! /bin/bash
# Emit a small CSV fixture (id,name,sex,score), then the first two script
# arguments each on their own line.
echo "111,Tom,M,88"
echo "222,Marry,F,77"
echo "333,Lucy,F,66"
echo "444,Lily,F,66"
echo "555,Peter,F,66"
# printf instead of echo for the arguments: echo would misbehave if an
# argument were e.g. "-n" or contained backslash escapes.
printf '%s\n' "$1"
printf '%s\n' "$2"
shell
#!/bin/bash
# Install editor/build tooling appropriate for the current OS.
case "$OSTYPE" in
  darwin*)
    brew install cmake macvim
    ;;
  linux-gnu)
    # maybe set install location first...
    git clone https://github.com/vim/vim
    cd vim
    git pull
    ;;
esac