Columns: lang, a string with 10 classes; seed, a string of 5 to 2.12k characters.
shell
# List sample names (file basenames) from the IgBLAST TSVs
ls *tsv | awk -F "." '{print $1}' > tmp
a=`cat tmp`
mkdir IGH IGK IGL
for i in $a; do echo $i > $i.name; done
# Extract selected columns for each chain, dropping the header row
for i in $a; do cat $i.heavy_chain_igblast_db-pass.tsv | head -2 | cut -f 4,5,6,7 | grep productive -v > IGH/$i.info.hc ; done
for i in $a; do cat $i.light_chain_K_igblast_db-pass.tsv | head -2 | cut -f 4,5,7 | grep productive -v > IGK/$i.info.lck ; done
for i in $a; do cat $i.light_chain_L_igblast_db-pass.tsv | head -2 | cut -f 4,5,7 | grep productive -v > IGL/$i.info.lcl ; done
# Prefix each extract with its sample name
for i in $a; do paste $i.name IGH/$i.info.hc > IGH/$i.heavy.tsv ; done
for i in $a; do paste $i.name IGK/$i.info.lck > IGK/$i.lck.tsv ; done
for i in $a; do paste $i.name IGL/$i.info.lcl > IGL/$i.lcl.tsv ; done
# Merge per-sample files
cat IGH/*tsv > IGH.tsv
cat IGL/*tsv > IGL.tsv
cat IGK/*tsv > IGK.tsv
# Keep only rows whose second field is T or F
cat IGH.tsv | awk '$2 == "T" || $2 == "F"' > IGH_parsed.tsv
shell
#!/usr/bin/zsh
exec $HOME/mine/user/crontab/@boot --unlock imb-unlock
shell
# Invoke the Cisco Umbrella command line uninstaller as documented in this Umbrella KB article:
# https://support.umbrella.com/hc/en-us/articles/230901028-Umbrella-Roaming-Client-Uninstalling
/Applications/OpenDNS\ Roaming\ Client/rcuninstall
shell
source activate deidentify
# Disable MKL multithreading as it will actually slow down spaCy tokenization
export MKL_NUM_THREADS=1
# Specify GPU to run on
shell
#!/bin/bash
#
# visualize rigid bodies in pymol
#
cat << EOF > .tmp.pml
load $1, traj
create frame0, traj, 1, 0
bg_color white
as spheres
set sphere_scale,0.5
set specular, off
spectrum b
set sphere_scale,1,frame0
EOF
pymol .tmp.pml
shell
if [ "$1" == "$check" ]; then echo "Hello!" fi os="" if [ -f /etc/debian_version ]; then echo "its a debian box" os="debian" elif [ -f /etc/redhat-release ]; then echo "its a centos / redhat box"
shell
scaleup() {
    local deployment_name=${1:?must specify a deployment name}
    echo "Scaling up $deployment_name"
    oc scale -n hive "deployment.v1.apps/$deployment_name" --replicas=1
    oc rollout status -n hive "deployment.v1.apps/$deployment_name" -w
    if [[ "$(oc get -n hive "deployment.v1.apps/$deployment_name" -o jsonpath='{.spec.replicas}')" != "1" ]]
    then
        echo "$deployment_name has not been scaled up to 1"
    fi
}
scaleup "hive-operator"
scaleup "hive-controllers"
shell
xcodebuild -target SetupEditor -configuration Deployment
cp -Rf build/Deployment/SetupEditor.app ${OMEAppDir}
cd ${DevDir}/../utilities/OMEchpasswd
xcodebuild -alltargets clean
xcodebuild -target OMEchpasswd -configuration Deployment
cp -Rf build/Deployment/OMEchpasswd.app ${OMEAppDir}
cd ${DevDir}/../OME_Apps
xcodebuild -alltargets clean
xcodebuild -target OME_DownloadMails_ObjC -configuration Release
xcodebuild -target OME_SendMail_ObjC -configuration Release
shell
git remote rm stree-orderbundle
git remote rm stree-oauthbundle
git remote rm stree-pagebundle
git remote rm stree-paymentbundle
git remote rm stree-producerbundle
git remote rm stree-productbundle
git remote rm stree-productstatusbundle
git remote rm stree-reportbundle
git remote rm stree-reviewbundle
git remote rm stree-routingbundle
git remote rm stree-searchbundle
git remote rm stree-shippingbundle
git remote rm stree-shopbundle
shell
{ echo "<svg xmlns=\"http://www.w3.org/2000/svg\" height=\"580\" width=\"460\">" for x in {1..7}; do echo "<line x1=\"$((10+x*30))\" y1=\"10\" x2=\"$((10+x*30))\" y2=\"$((10+18*30))\" style=\"stroke:rgb(0,0,0);stroke-width:3\" />" done for y in {1..9}; do echo "<line x1=\"10\" y1=\"$((10+y*30))\" x2=\"$((10+14*30))\" y2=\"$((10+y*30))\" style=\"stroke:rgb(0,0,0);stroke-width:3\" />" done for x in {0..7}; do echo "<text x=\"$((10+$x*30+7))\" y=\"$((10+18*30+15))\" fill=\"black\">$x</text>" done for y in {0..9}; do echo "<text x=\"$((10+14*30+5))\" y=\"$((10+(17-y)*30+20))\" fill=\"black\">$y</text>" done while read x1 y1 x2 y2; do
shell
echo "Current path $CURRENT_PATH" for sf in `find "$CURRENT_PATH/home" -type f` ; do sfd="$HOME/${sf##$CURRENT_PATH/home/}" sfd_base=$(dirname "$sfd")
shell
# Set env
export TRACE_BENCH="freqmine"
input_base="/opt/parsec-3.0/pkgs/apps/freqmine/run"
# export TRACE_CMD="/opt/parsec-3.0/pkgs/apps/freqmine/inst/amd64-linux.gcc-serial/bin/freqmine $input_base/kosarak_500k.dat 410" # simmedium
export TRACE_CMD="/opt/parsec-3.0/pkgs/apps/freqmine/inst/amd64-linux.gcc-serial/bin/freqmine $input_base/kosarak_250k.dat 220" # simsmall
shell
#!/bin/bash
while true
do
    echo The current time is $(date)
    sleep 1
done
shell
#!/bin/sh
# shellcheck source=/dev/null
. "$HOME/.direnv.d/rvm.sh"
shell
set -eux
cargo fmt -- --check
# ${args} is deliberately unquoted so the empty entry expands to no arguments at all
check_args=("" "--no-default-features" "--all-features")
for args in "${check_args[@]}"; do
    cargo check ${args}
    cargo clippy ${args} -- -D warnings
done
echo PASS
shell
# skintone SVGs from source SVGs.
# Receives the build plan from helpers/export-svg-skintones.sh on stdin.
set -ueo pipefail

# Capture the build plan for replay.
# Extract only the target file column.
TARGETS=$(cut -f1 | git hash-object -w --stdin)

# Generate
git cat-file blob "$TARGETS" | xargs helpers/lib/export-svg-skintones.js

# Optimize
git cat-file blob "$TARGETS" | xargs -n1 node_modules/.bin/svgo --quiet --config helpers/beautify-svg.yml
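The replay trick above stashes stdin in git's object store and reads it back twice. A minimal illustration of the two plumbing commands, run inside any git repository (the sample tab-separated line is hypothetical):

# store a line as a blob, then print it back byte for byte
sha=$(printf 'emoji_u1f44b.svg\tbuild/skintones\n' | git hash-object -w --stdin)
git cat-file blob "$sha"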
shell
pkill -9 -f pxscene.sh
cp /var/tmp/pxscene.log $TESTRUNLOGS
grep "Failures: 0" $TESTRUNLOGS
retVal=$?
if [ "$retVal" -eq 0 ]
then
    exit 0;
else
    exit 1;
fi
shell
#!/bin/bash
set -e
if [ -f "$HOME/ompi/bin/mpicc" ]; then
    export PATH=$HOME/ompi/bin:$PATH
    export LD_LIBRARY_PATH=$HOME/ompi/lib:$LD_LIBRARY_PATH
fi
shell
}

hash unzip 2>/dev/null || {
    echo >&2 "$0: [ERROR] unzip is not installed. Aborting."
    exit 1
}

NUM_THREADS=4
ver=2.4.9

# Get correct opencv
apt-get install \
    build-essential checkinstall git cmake libfaac-dev libjack-jackd2-dev \
    libmp3lame-dev libopencore-amrnb-dev libopencore-amrwb-dev libsdl1.2-dev \
    libtheora-dev libva-dev libvdpau-dev libvorbis-dev libx11-dev libxfixes-dev \
shell
#!/bin/bash
# Bash script that pulls out commonly analyzed email header artifacts
# Author: <NAME>
# Copyright 2019, <NAME>, All rights reserved.
# Author's Personal Website: briantcarr.com
# Computer Emergency Response Team Intern at the Center for Internet Security
# Any inquiries can be directed to: <EMAIL>
# If you improve upon this script, I encourage you to share your results.
# 42 72 69 61 6e 20 54 68 6f 6d 61 73 20 43 61 72 72
# Input File
shell
#!/bin/bash
new_ver="$1"
sed -i -e "s/compose_cd_ver=\".*\"/compose_cd_ver=\"${new_ver}\"/g" compose-cd
git status
git add compose-cd
git commit -m "release: ${new_ver}"
git tag "${new_ver}"
shell
MYSQL_ESTATE_PORT=3306
MYSQL_ESTATE_USER=isucon
MYSQL_ESTATE_DBNAME=isuumo
MYSQL_ESTATE_PASS=<PASSWORD>
MYSQL_CHAIR_HOST="isu3"
MYSQL_CHAIR_PORT=3306
MYSQL_CHAIR_USER=isucon
MYSQL_CHAIR_DBNAME=isuumo
MYSQL_CHAIR_PASS=<PASSWORD>
shell
echo " Extension/layer name: ${EXTENSION_NAME}" echo " Lambda function: ${LAMBDA_FUNCTION}" echo " Self contained deployment: ${SELF_CONTAINED}" echo '' ###### END: parse command line arguments rm -rf bin rm -rf obj if [ "${SELF_CONTAINED}" = "true" ] ; then echo 'Building self-contained extension...' echo ''
shell
#!/bin/sh
docker rmi fpm:fricas
docker build -t fpm:fricas . > log
tail log
docker images
shell
#!/bin/bash
for i in `seq 1 20`
do
    echo "creating User $i"
    aws workmail create-user \
        --organization-id <YOUR_ORGANIZATION_ID> \
        --name student$i \
        --display-name Student$i \
        --password <PASSWORD_FOR_STUDENTS_ACCOUNTS>
done
echo "done :) "
shell
if [ "$#" -eq "4" ] then mkdir sssp javac -cp $PGX_HOME/lib/common/*:$PGX_HOME/lib/yarn/*:$PGX_HOME/lib/embedded/*:$PGX_HOME/lib/client/*:$PGX_HOME/third-party/* SSSP.java -d sssp/ java -Dlog4j.configuration=file:$PGX_HOME/conf/log4j.xml -cp $PGX_HOME/lib/common/*:$PGX_HOME/lib/yarn/*:$PGX_HOME/lib/embedded/*:$PGX_HOME/lib/client/*:$PGX_HOME/third-party/*:sssp SSSP $1 $2 $3 $4 rm -r sssp else echo "usage: ./SSSP.sh <graph config path> <costPropName in the .json file> <src node name> <dest node name>" fi
shell
    --prefix=/tools \
    --disable-multilib \
    --disable-nls \
    --disable-libstdcxx-threads \
    --disable-libstdcxx-pch \
    --with-gxx-include-dir=/tools/$LFS_TGT/include/c++/7.2.0
shell
# build types/tfjs.d.ts
node_modules/.bin/api-extractor run --local --verbose --config types/api-tfjs.json

# patch types/tfjs.d.ts
sed -i 's/\@tensorflow\/tfjs-core/\.\/tfjs-core/' types/tfjs.d.ts

echo "done..."
shell
python3 main.py --mode "train"
shell
echo "# $(date) Starting targets..." /usr/local/bin/aws --profile jenkins ec2 start-instances --instance-ids ${targets} echo "# $(date) Waiting for targets to start..." /usr/local/bin/aws --profile jenkins ec2 wait instance-running --instance-ids ${targets}
shell
freq_items=$(cat "$path_freq" | sort -nr | sed 's/^\([0-9]*\)\t\([0-9]*\)\t\(.*\)/\3/')

# Checks if given item is within list of frequent items
function is_freq() {
    item="$1"
    while read -r freq_item; do
        if [ "$item" == "$freq_item" ]; then
            return
        fi
    done <<< "$freq_items"
    false
}
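A hedged usage sketch for is_freq: since it reports membership via its exit status, it slots directly into an if (the items.txt input file is hypothetical):

# hypothetical driver: report which candidate items are frequent
while read -r candidate; do
    if is_freq "$candidate"; then
        echo "$candidate is frequent"
    fi
done < items.txt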
shell
xtra = length($0) - w
if (xtra > 0) {
    plen = length($2)
    split($2, pieces, "/")
    pcount = length(pieces)
    postfix = "/.../" pieces[pcount]
    remaining = length($2) - xtra - length(postfix) - length(pieces[1])
    str = pieces[1]
    for (i = 2; i < pcount; i++) {
        remaining = remaining - length(pieces[i]) - 1
        if (remaining > 0) {
shell
# open the browser and dump the secret
echo ""
echo "Use this token to log in:"
echo $TOKEN_LINE
echo $TOKEN_LINE | clip
echo ""
shell
#export LOG_ROOT=$TLSMD_ROOT/log # Bad path
export LOG_ROOT=/home/tlsmd/log

## start web-app database server daemon
$TLSMD_ROOT/bin/webtlsmdd.py > $LOG_ROOT/webtlsmdd.log 2>&1 &
sleep 1

## start the web-app tlsmd job running daemon
$TLSMD_ROOT/bin/webtlsmdrund.py > $LOG_ROOT/webtlsmdrund.log 2>&1 &
shell
#!/usr/bin/env bash
echo "┏━━━ 🧹 CLEAN ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
lerna run clean --parallel
shell
#windows
export BERT_BASE_DIR=/media/turing/D741ADF8271B9526/BASE_MODEL/Bert/uncased_L-2_H-128_A-2
# layers: mind the number of layers in the pretrained model
python extract_features.py \
    --input_file=/BASE_MODEL/Bert/english.txt \
    --output_file=/BASE_MODEL/Bert/output.json \
    --vocab_file=/BASE_MODEL/Bert/uncased_L-2_H-128_A-2/vocab.txt \
    --bert_config_file=/BASE_MODEL/Bert/uncased_L-2_H-128_A-2/bert_config.json \
    --init_checkpoint=/BASE_MODEL/Bert/uncased_L-2_H-128_A-2/bert_model.ckpt \
    --layers=-1,-2 \
    --max_seq_length=128 \
    --batch_size=8
shell
#!/usr/bin/env bash
make clean
sphinx-apidoc -o source ../GpAwsAbs -f
make html
shell
echo Increasing build number
./buildnumber --header "main/HAP/HAPBuildnumber.hpp"
make -j8 app
echo Flash
make flash
#echo Uploading
#scp build/Homekit.bin pi@homebridge:/home/pi/docker/docker-update_server/firmwares/
shell
do
    mapfile -t jobs < <( aws batch list-jobs --job-queue nas-eval --job-status $js | jq '.jobSummaryList[].jobId' )
    for j in "${jobs[@]}"
    do
        # strip the surrounding quotes that jq prints around JSON strings
        job=${j:1:-1}
        echo $job
        aws batch terminate-job --job-id $job --reason "schedule for deletion"
    done
done
shell
#!/usr/bin/env bash
BookingNonProdAccountProfile=blog-bookingnonprd
AirmilesNonProdAccountProfile=blog-airmilesnonprd
region=us-east-1

bookingAPI=$(aws cloudformation describe-stacks --stack-name booking-lambda --profile $BookingNonProdAccountProfile --region $region --query 'Stacks[0].Outputs[?OutputKey==`BookingAPI`].OutputValue' --output text)
echo -e "Booking API endpoint is: $bookingAPI"

airmilesAPI=$(aws cloudformation describe-stacks --stack-name airmiles-lambda --profile $AirmilesNonProdAccountProfile --region $region --query 'Stacks[0].Outputs[?OutputKey==`AirmileAPI`].OutputValue' --output text)
echo -e "Airmiles API endpoint is: $airmilesAPI"
shell
# Index reference genome
srun -c 1 -p medium --mem 10G -J wgsIndexGenome -o log_%j 01_scripts/06_gatk_dictionnary_reference.sh

# Find regions to re-align
srun -c 1 -p medium --mem 30G -J wgsFindIndels -o log_%j 01_scripts/07_gatk_realign_targets.sh

# Re-align around indels
shell
#!/usr/bin/env sh
set -o pipefail
xcrun simctl list devicetypes \
    | tail -n +2 \
    | sed -E "s/[[:space:]]\(.*$//"
shell
# username:
#    determines the username of the current user
# usage:
#    USERNAME=$(user_username)
function user_username() {
    whoami
}

# home_of_specific_user:
#    determines the home directory of a specific user
# usage:
#    HOME_DIR=$(user_home_of_specific_user)
function user_home_of_specific_user() {
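The record cuts off before the second function's body. A minimal sketch of a possible completion, assuming the user name is passed as $1 (an assumption, since the truncated usage comment shows no argument):

# hypothetical completion: field 6 of the passwd entry is the home directory
function user_home_of_specific_user() {
    getent passwd "$1" | cut -d: -f6
}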
shell
if test -f "requirements-vendor.txt"; then python3 -m pip install -r requirements-vendor.txt -t ./vendor fi
shell
    offer=$(pub offers show -p "$PUBLISHER" -o "$name")
    urls=$(echo "${offer}" | jq '.definition["plans"][]."microsoft-azure-corevm.vmImagesPublicAzure"[]?.osVhdUrl')
    if [[ -z $URLS ]]; then
        URLS=${urls}
    else
        URLS=${URLS}$'\n'${urls}
    fi
done

NOW=$(date +%s)
IFS=$'\n'
shell
read -sp "Password: " rootpasswd echo "Please enter USER name MySQL!" read -p "User: " db_user mysql -uroot -p${rootpasswd} -e "UPDATE mysql.user SET Password=PASSWORD('${<PASSWORD>}') WHERE User='${db_user}' AND Host='localhost';" mysql -uroot -p${rootpasswd} -e "FLUSH PRIVILEGES;"
shell
sh /scripts/postgres_backup.sh
echo "$(date '+%Y-%m-%dT%H:%M:%S') -> #################### starting tasks.py ####################"
python3 /scripts/tasks.py
shell
source "$(dirname "${BASH_SOURCE}")/../../hack/lib/init.sh" trap os::test::junit::reconcile_output EXIT os::test::junit::declare_suite_start "cmd/sdn" os::cmd::expect_success 'oc get clusternetworks' os::cmd::expect_success_and_text 'oc get clusternetwork default -o jsonpath="{.pluginName}"' 'redhat/openshift-ovs-multitenant'
shell
#!/usr/bin/env bash
hive -e "CREATE DATABASE IF NOT EXISTS ${clive_db} LOCATION '${published_bucket}/${hive_metastore_location}';"
shell
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
shell
cp -f /home/pi/scanner/scripts/config-wifi.txt /etc/network/interfaces; /home/pi/scanner/scripts/reset-wifi.sh;
shell
#!/bin/bash
wget http://www.cs.toronto.edu/~raeidsaqur/projects/mgn/data-mgn-demo.zip
unzip data-mgn-demo.zip
rm data-mgn-demo.zip
shell
echo clean partial packages
sudo apt-get autoclean

echo cleanup apt cache
sudo apt-get clean

echo clean up unused dependencies
sudo apt-get autoremove
shell
#!/bin/bash
# Convert svg to ico.
convert icon.svg -density 300 -resize 256x256 -fuzz 10% -transparent white icon-256.png
convert icon.svg -density 300 -resize 192x192 -fuzz 10% -transparent white icon-192.png
convert icon.svg -density 300 -resize 32x32 -fuzz 10% -transparent white icon-32.png
convert icon.svg -density 300 -resize 16x16 -fuzz 10% -transparent white icon-16.png
convert icon-16.png icon-32.png icon-192.png icon-256.png icon.ico
rm icon-16.png icon-32.png icon-192.png icon-256.png
shell
#!/bin/sh
if [ ! -e "/usr/local/etc/crowdsec/collections/opnsense.yaml" ]; then
    /usr/local/bin/cscli --error collections install crowdsecurity/opnsense
fi
/usr/local/bin/cscli --error hub update \
    && /usr/local/bin/cscli --error hub upgrade
service crowdsec enabled && service crowdsec restart
shell
remove_folders() {
    echo "Removing previous result directories..."
    # Reset train directory
    rm -rf ${TRAIN_DIR}
    mkdir -p ${TRAIN_DIR}
}

run_train() {
    python ${SRC_AI_MODEL}/train_gan.py \
        --codings_size ${CODINGS_SIZE} \
        --seed ${SEED} \
        --batch_size ${BATCH_SIZE} \
shell
fi echo "+) Acquiring vault-${VERSION}" wget https://github.com/hashicorp/vault/archive/v${VERSION}.tar.gz echo "+) Extracting vault-${VERSION}.tar.gz" tar zxf v${VERSION}.tar.gz echo "+) Running update_commands.rb" ./update_commands.rb echo "+) Updating the badge in the README.md" sed -i "/img.shields.io/c\[\![](https://img.shields.io/badge/Supports%20Vault%20Version-${VERSION}-blue.svg)](https://github.com/hashicorp/Vault/blob/v${VERSION}/CHANGELOG.md)" README.md
shell
set -e if [[ "$1" = 'insurance-db' ]]; then insurance-db "${@:2}" exit 1
shell
        openstack floating ip delete ${ip}
    done
}

allocate_4_floating_ip() {
    for ip in {1..4} ; do
        openstack floating ip create ${OS_FLOATING_IP_POOL}
    done
}

title() {
shell
mkdir -p {build,install}
cd build
CC=gcc cmake .. -DGMX_GIFS="$PWD/../.." \
    -DCMAKE_INSTALL_PREFIX=$PWD/../install \
shell
echo "Waiting for process to start..." sleep 2 done echo "Rabbitmq started, configuring server ..." rabbitmqctl add_user $RABBITMQ_ADMIN_USER $RABBITMQ_ADMIN_PASSWORD rabbitmqctl set_user_tags $RABBITMQ_ADMIN_USER user_tag rabbitmqctl add_vhost $RABBITMQ_ADMIN_VHOST
shell
if [ "$2" = "test" ]; then # Perform test with official scripts ./eval.sh $1 # Calculate overall F1 score python cal_total.py --exp-name=$1 else
shell
    --name duke_init_JVTC_unsupervised \
    --dataset-target dukemtmc-reid \
    --stage 2 \
    --epochs 40 \
    --init ./examples/logs/JVTC/duke/resnet50_duke075_epoch00040.pth \
    --mesh-dir ./examples/mesh/DukeMTMC/
shell
#!/bin/bash
# SPDX-license-identifier: Apache-2.0
##############################################################################
# Copyright 2019 © Samsung Electronics Co., Ltd.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################

function stop_all {
    docker-compose kill
    docker-compose down
}
shell
for f in *.h; do
    echo $f
    if [ ! -f ${f/'.h'/'.cpp'} ]; then
        ../h2cppx/h2cppx.py $f > ${f/'.h'/'.cpp'}
    fi
done
shell
#!/bin/bash
cd /lustre/atlas2/nro101/proj-shared/BigNeuron/Bin_Vaa3D_BigNeuron_version1/
for i in {1..27}
do
    /usr/bin/time -v sh /lustre/atlas2/nro101/proj-shared/BigNeuron/zhi/janelia_13k_batch_run/gaussiansmooth_set2/text_jobs/$i/0.txt
done
shell
#!/bin/bash
set -e -o pipefail -u

# link into main package repo to handle dependencies
for package in packages/*; do
    #TODO: handle docker image builds better
    ln -sf ../../${package} termux-packages/packages/
done

cd termux-packages
./build-package.sh "$@"
shell
# create local persist
sudo mkdir /local/persist/path/your_name
docker volume create -d local-persist -o mountpoint=/local/persist/path/your_name --name=your_name_devel_conda_env

# create container
docker run -d \
    -p 20001-20003:20001-20003 -p 20000:22 \
    -v /local/workspace:/workspace \
    -v your_name_devel_conda_env:/opt/conda \
    --name your_conda_gpu_env --restart always \
    --hostname your_host_name --runtime nvidia --ipc host \
    gpu/conda-torch-tensorflow:public
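A hedged sanity check, assuming the local-persist volume plugin is installed: inspect the volume and confirm its Mountpoint matches the host path created above.

# the Mountpoint field should read /local/persist/path/your_name
docker volume inspect your_name_devel_conda_env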
shell
# generate imports for haveno services and types using grpc-web
mkdir -p ./src/protobuf
cd ./src/protobuf || exit 1
protoc -I=../../../haveno/proto/src/main/proto/ \
    ../../../haveno/proto/src/main/proto/*.proto \
    --js_out=import_style=commonjs,binary:./ \
    --grpc-web_out=import_style=typescript,mode=grpcwebtext:./ || exit 1
shell
startos=$( /usr/bin/find "/Applications" -iname "startosinstall" -type f )
[ "$mode" = "upgrade" ] && msgosupgtitle="Upgrading macOS" || msgosupgtitle="Erasing macOS to $downloados"

cat <<EOF > "$pbjson"
{
    "percentage": -1,
    "title": "$msgosupgtitle",
    "message": "Preparing to $mode ..",
    "icon": "$updateicon"
shell
#!/bin/sh case "$@" in *"directory not a file"*) ;; *) exit 1 ;; esac
shell
#!/bin/bash docker run -p 80 -p 22 --env LOGSTASH_SERVER=172.16.31.10:2510 -d docker.local:5000/myassets /sbin/my_init --enable-insecure-key
shell
echo "$gem_sha1" gem_chng_log=$(echo "`date +"%a %b %d %Y"` `git config user.name` <`git config user.email`> $new_ver-1") sed -i '/^Version:/ s/[0-9].[0-9]*.[0-9]*..$/'$new_ver'/' $file sed -i '/^Release:/ s/[0-9]*%/1%/' $file sed -i '/^%define sha1\s*'"$gem_name"'/ s/=[0-9a-f]*$/='$gem_sha1'/' $file sed -i '/^%changelog/a* '"$gem_chng_log"'\n- Update to version '"$new_ver"'' $file else echo "Gem $gem_name is up to date $new_ver"
shell
parserOptions[shorts]="${SHORT_OPTS}"
parserOptions[longs]="${LONG_OPTS}"
parserOptions[cases]="${CASE1} ${SHOW_HELP} ${SSH_TO_USE}"

## Declare some variables
shell
else echo "reached $malloclib" { time -p ./binarytree-ldpreload.sh $malloclib ; } 2> $tmpfile echo "after time command" fi
shell
# Build first for project references
yarn --mutex network --frozen-lockfile --network-timeout 60000
yarn run build

# Validate existing schema
yarn validate-graphql

# Validate schema is not out of date
yarn update-graphql-schema
if [ -n "$(git status --porcelain)" ]; then
shell
if [ "$answerSettings" == "y" ]; then cp settings.js.example static/js/settings.js fi
shell
#!/bin/bash
set -e

if [[ ! -d .pyEnv ]]
then
    python3 -m virtualenv .pyEnv
fi

mkdir -p .cache/pip
user="`ls -dl .cache/pip | awk '{print $3}' | head -n 1`"
source .pyEnv/bin/activate
su $user -c "python3 -m pip install --cache .cache/pip -r docs/requirements.txt"
shell
command -v "$@" > /dev/null 2>&1 } if ! command_exists wv; then echo "Install docker" exit 1
shell
wget -v https://jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/CEMS-EFAS/EMO-5-pd_1990_2019.nc
wget -v https://jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/CEMS-EFAS/EMO-5-pr6_1990_2019.nc
wget -v https://jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/CEMS-EFAS/EMO-5-pr_1990_2019.nc
wget -v https://jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/CEMS-EFAS/EMO-5-rg_1990_2019.nc
wget -v https://jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/CEMS-EFAS/EMO-5-ta6_1990_2019.nc
shell
echo " " echo "options:" echo "-h, --help show brief help" echo "-q, --quick perform quick recon only (default: false)" echo "-d, --domain <domain> top domain to scan, can take multiple" echo " " echo "example:" echo "$0 --quick -d google.com -d uber.com"
shell
sudo chmod -R ug+w storage
sudo chmod -R ug+w bootstrap/cache
sudo chmod g+s storage
sudo chmod g+s bootstrap/cache
sudo setfacl -R -dm u:www-data:rwx storage
sudo setfacl -R -m u:www-data:rwx storage
sudo chmod -R g+w storage
shell
kubectl delete -f base/jvb-base/jvb-configmap.yaml
kubectl delete -f shard0/shard-config.yaml
kubectl delete -f base/config.yaml
kubectl delete -f shard0/jicofo.yaml
kubectl delete -f base/web-base/service.yaml
kubectl delete -f base/web-base/web-prosody.yaml
kubectl delete -f base/web-base/web-configmap.yaml
kubectl delete -f base/web-base/jicofo-configmap.yaml
kubectl delete -f shard0/shard-config.yaml
kubectl delete -f base/config.yaml
shell
source ../spec.sh
source ./module_spec.sh

PATCHESDIR="RLBFILES/patches/210120-startup"
if [ ! -d ${PATCHESDIR} ]; then
    echo "patches are missing..."
    exit 1
fi

patch ${DIR}/etc/hosts < ${PATCHESDIR}/localhost6.diff
patch ${DIR}/etc/rc.d/rc.udev < ${PATCHESDIR}/rc.udev.diff
shell
# tput setaf 3; echo 'Finishing dependency installation'
pacman -U /var/cache/pacman/pkg/*

# tput setaf 6; echo 'Ensuring we have a clean and sanitary environment'; tput reset;
# make -C neovim distclean

make -C neovim -j 12 CFLAGS='-O0 -pipe'
make -C neovim install PREFIX=/usr/local

mkdir export
make -C neovim -j 12 CFLAGS='-O0 -pipe'
make -C neovim install PREFIX=export/
tar -cf neovim.tar export/

mkdir -p /root/.config /root/.vim
shell
#!/bin/bash

# Launch the grafana server
/run.sh &

# Create the dashboards and data sources. This will run indefinitely,
# scanning for elements and adding their dashboards when created
cd dashboards && python3 create_dashboards.py
shell
        -i --inventory-file --key-file
        -m --module-name -o --only-if-changed --purge
        -s --sleep -t --tags -U --url --vault-password-file
        -v --verbose"

    if [[ "$current_word" == -* ]]; then
        COMPREPLY=( $( compgen -W "$options" -- "$current_word" ) )
    fi
}
shell
#!/usr/bin/env bash
export SQUAD_DIR=../datasets/squad
export SQUAD_VERSION=v1.1
export BERT_BASE_DIR=../pretrained/uncased_L-12_H-768_A-12

python ../create_finetuning_data.py \
    --data_file=${SQUAD_DIR}/train-${SQUAD_VERSION}.sample.json \
    --vocab_file=${BERT_BASE_DIR}/vocab.txt \
    --train_data_output_path=${SQUAD_DIR}/squad_${SQUAD_VERSION}_train.tf_record \
    --meta_data_file_path=${SQUAD_DIR}/squad_${SQUAD_VERSION}_meta_data \
    --fine_tuning_task_type=squad --max_seq_length=384
shell
    --label_smoothing \
    --noam \
    --emb_dim 300 \
    --hidden_dim 300 \
    --hop 1 \
    --heads 2 \
    --pretrain_emb \
    --model KEMP \
    --device_id 0 \
    --concept_num 1 \
    --total_concept_num 10 \
    --attn_loss \
    --pointer_gen \
    --save_path result/KEMP/ \
shell
#npm install -g @angular/cli
#npm install
#ng serve
#ng build
# To be deployed at http://localhost/AngularTemplatesCompiled/light
shell
word: opt
...
tail: [a1 a2 a3]

# The `flag` package requires all flags to appear
# before the rest of the arguments (otherwise the
# flags will be interpreted as positional arguments)
$ ./command-line-flags -word=opt a1 a2 a3 -numb=7
word: opt
numb: 42
fork: false
svar: bar
trailing: [a1 a2 a3 -numb=7]
shell
#!/bin/sh
#PBS -N the_fastest_cuda_in_the_west
#PBS -l walltime=2:00:00
#PBS -l nodes=1:r662:k20:ppn=24
shell
sub_run_path_list () {
    #echo "sub_run_path_list"
    mod_grubrc_theme_path_list "$@"
}
shell
#!/bin/bash

# Cleanup any stale mounts left from previous shutdown
# see https://bugs.launchpad.net/ubuntu/+source/docker.io/+bug/1404300
grep "mapper/docker" /proc/mounts | /usr/bin/awk '{ print $2 }' | xargs -r umount || true
shell
#!/bin/bash
gnome-terminal -x bash -c "expect commu.sh; exec bash"
gnome-terminal -x bash -c "expect track.sh; exec bash"
gnome-terminal -x bash -c "expect rostopic.sh; exec bash"
shell
#!/usr/bin/env bash
$PIN_ROOT/pin -t $PIN_ROOT/source/tools/ManualExamples/obj-intel64/pinatrace.so -- ./sort_A S; mv pinatrace.out std_sort_A.trace
$PIN_ROOT/pin -t $PIN_ROOT/source/tools/ManualExamples/obj-intel64/pinatrace.so -- ./sort_B S; mv pinatrace.out std_sort_B.trace
$PIN_ROOT/pin -t $PIN_ROOT/source/tools/ManualExamples/obj-intel64/pinatrace.so -- ./sort_A O; mv pinatrace.out o_sort_A.trace
$PIN_ROOT/pin -t $PIN_ROOT/source/tools/ManualExamples/obj-intel64/pinatrace.so -- ./sort_B O; mv pinatrace.out o_sort_B.trace
shell
LOGFILE=/sujh/logs/split-`date +%Y-%m-%d-%H-%M-%S`.log
CUDA_VISIBLE_DEVICES=0,1 nohup python -u train.py --iter=100000 >>$LOGFILE 2>&1 &
shell
sudo pacman -Syu wget git curl lynx

# Fonts
sudo pacman -Syu ttf-fira-mono ttf-fira-code ttf-fira-sans ttf-roboto ttf-roboto-mono ttf-font-awesome ttf-all-the-icons
yay -S ttf-roboto-slab

# GNOME Customization
# sudo pacman -Syu gnome-tweaks
# sudo pacman -S chrome-gnome-shell

## Export GNOME key-shortcuts
#dconf dump / | sed -n '/\[org.gnome.settings-daemon.plugins.media-keys/,/^$/p' > custom-shortcuts$(date -I).ini
shell
else
    #determine latest date of the files in this dir
    DATA_VERSION=$($GSTAT -c '%y' $(ls -t $k/*.mat | head -n1) | awk '{print $1}')
    #build sink dir
    SINK=$DATADIR/$OBSOLETEDIR/$(basename ${k/.metadata})/$DATA_VERSION
fi

#make destination
[ -d $SINK ] || $ECHO mkdir -p $SINK

if $BACK && [ -z "$(ls $k/*.mat 2> /dev/null)" ]
then
    #if the directory is empty, then just get rid of it
    echo "Directory $k is empty, removing it..."
    $ECHO rmdir $k || exit $?
else
shell
# MongoDB is a document database designed to simplify development and scaling.
# It is built on distributed file storage, written in C++, and provides web
# applications with a scalable, high-performance, highly available data store.
# Official installation docs: https://docs.mongodb.com/manual/tutorial/install-mongodb-on-ubuntu/
echo "Installing MongoDB on an Ubuntu host"
echo "Installing platform dependencies"
wget -qO - https://www.mongodb.org/static/pgp/server-5.0.asc | sudo apt-key add -
sudo apt-get install -y gnupg
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/5.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-5.0.list
sudo apt-get update && sudo apt-get upgrade
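The record stops after adding the apt repository. A hedged continuation following the same MongoDB documentation page (assuming systemd, as on stock Ubuntu 20.04):

# install the packages from the repository added above, then start and verify mongod
sudo apt-get install -y mongodb-org
sudo systemctl start mongod
sudo systemctl status mongod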