lang
stringclasses
10 values
seed
stringlengths
5
2.12k
shell
#!/bin/bash
# Run the installer and propagate its exit status if it fails.
./install.sh
retVal=$?
if [ $retVal -ne 0 ]; then
  # Diagnostics belong on stderr (the original wrote to stdout).
  echo "Error!!" >&2
  exit $retVal
fi
shell
tmux set-option -g message-style "fg=$message_fg,bg=$message_bg,$message_attr" message_command_fg=colour16 # black message_command_bg=colour160 # light yellow tmux set-option -g message-command-style "fg=$message_command_fg,bg=$message_command_bg,$message_attr"
shell
--eval-log --auto-term \ --bias 51057.171 \ --preprocessing 3 \ --uienet \ "/home/martin/dev/openvslam/assets/UIENet-CUDA-Script.pt" \ --filter \ --filter-dia 10 \ --filter-color 20 \ --filter-space 20 \
shell
<gh_stars>0 go build -o notification-splunk sudo chown root notification-splunk sudo chgrp root notification-splunk sudo cp ./notification-splunk /etc/crowdsec/plugins/
shell
apt-get install -y git apt-get install -y ant
shell
# edge-oidc domain export DOMAIN=$(kubectl get svc edge-oidc -o jsonpath="{.status.loadBalancer.ingress[*].hostname}") # keycloak http export PROVIDER_HOST=$(kubectl get svc keycloak-backend -o jsonpath="{.status.loadBalancer.ingress[*].hostname}") export PROVIDER="http://$PROVIDER_HOST:80/auth/realms/greymatter" # full edge-oidc URL export SERVICE_URL?="https://$(kubectl get svc edge-oidc -o jsonpath="{.status.loadBalancer.ingress[*].hostname}"):10808" envsubst < $original >> $templated # apply configs greymatter delete domain edge-oidc greymatter delete listener edge-oidc
shell
# Description: # # capture 'ssh -G 8.8.8.8' # Read each options # find this uncommented option in current directory # if not found, ALERT #
shell
#!/bin/bash set -x export FLAGS_call_stack_level=2 export CUDA_VISIBLE_DEVICES=0,1 python -m paddle.distributed.launch \ inference.py --model_type gpt \ --model_path ../../static/inference_model_pp1mp2/
shell
${ERROR_LOG_PATH}" set +e sh "${SHELL_FILE_PATH}" 2>>"${ERROR_LOG_PATH}" set -e
shell
REGULAR_TREE=1 LINUX_NEXT_TREE=0 # linux-next: working with the bleeding edge? if [ ${REGULAR_TREE} -eq 1 -a ${LINUX_NEXT_TREE} -eq 1 ] ; then echo "${name}: Both 'regular' and 'linux-next' can't be cloned, choose one of them pl.." exit 1 fi if [ ${REGULAR_TREE} -eq 0 -a ${LINUX_NEXT_TREE} -eq 1 ] ; then [ $# -ne 1 ] && { echo "Working with linux-next:" echo "Usage: ${name} new-branch-to-work-under" exit 1 }
shell
#!/bin/bash os=`uname` while true do result=`ps aux | grep "tomcat6" | grep -v grep | grep -v "checkit.sh"` # echo $result if [ -z "$result" ]; then if [ "Linux" == $os ]; then sudo service tomcat6 start sleep "$2" fi if [ "Darwin" == $os ];
shell
cd ~/go/src/etcd/contrib/raftexample2 rm -rf raftserver raftclient raftexample* go build -o raftserver ./server go build -o raftclient ./client
shell
ip=$1 start=$2 end=$3 options="-n -v -z -w 1" if [ $# -eq 4 ] ; then options=$options" -u" fi if [ $end -lt $start ] ; then
shell
#!/bin/bash cd "$(dirname "${BASH_SOURCE[0]}")" source bin/_initdemos.sh echo '.' echo 'Use right button to drag slider up/down.' echo '.' export PSC_LOD_LEVEL="0.1"
shell
# This file is read from command_file.yaml # Go's text/teplate echo "{{.Task.Name}}"
shell
ssh -yi '~/test3.pem' [email protected] if [ $t1 -eq 0 ]; then wifi on else wifi off fi } up () {
shell
if [ $err -eq 0 ] ; then echo "Container successfully installed" echo "-------------------------------------------------------------------------------------" date else echo "=======================================" echo "!!!!!!! Container install failed !!!!!!" echo "======================================="
shell
#!/bin/sh
# Fixed shebang: the original "#/bin/sh" is missing the '!' and is treated
# as a plain comment, so the script ran under whatever shell invoked it.
echo Hello world # This is a comment, too!
shell
evfname=$1 dmask=$2 attfname=$3 trigtime=$4 idname=$5 if [ "$#" -ne 5 ]; then njobs=$6 else njobs=20 fi workdir=$workdir$idname
shell
#!/bin/bash export WAIT_HOSTS=localhost:4748 export WAIT_HOSTS_TIMEOUT=10 export WAIT_BEFORE_HOSTS=1 export WAIT_AFTER_HOSTS=2 ./target/x86_64-unknown-linux-musl/release/wait && echo 'DOOOOOOONEEEEEE'
shell
docker run --name test-pushgateway -d -p 9091:9091 prom/pushgateway
shell
# Send the reboot command with required cookie and csrf token. reboot_ret=$(curl -i -s -H "Cookie: userid=$login_ret;" --data "model=%7B%22reboot%22%3A%221%22%7D&csrf_token=$csrf_ret" http://$modem_ip/goform/Reboot | head -1); # If the response from the reboot command is 200, everything is good. if [ "$reboot_ret" == "HTTP/1.1 200 OK" ] ; then echo "Reboot success."; # Exit with true. exit 1; else # Error on anything that isn't a 200. echo "Reboot failed: $reboot_ret"; fi else # Login error.
shell
local case=`yabar_create_case`; yabar_case_init $case $case.is "正常系:新規ファイル出力 echoコマンド" local TEMP=/tmp/aaa.txt $case.start.trace $TEMP $case.run "echo aaa >$TEMP"
shell
esac done # handle non-option arguments if [ -z "$h" ]; then
shell
# Font cask packages to install, one per line for easy diffing.
declare -a fontsArr=(
  "font-hack"
  "font-anonymous-pro"
  "font-nexa"
  "font-nunito"
  "font-source-code-pro"
)
shell
} # return the set union of U and V # where U and V are sets repsented as comma-delimited lists function set_union() { U=$1 V=$2 if [ -z "$U" ]; then echo $V elif [ -z "$V" ]; then echo $U else
shell
generateAddress() { echo "Generating an IOTA address holding all IOTAs..." local seed=$(generateSeed) echo $seed >./utils/nodex.seed # Now we run a tiny Node.js utility to get the first address to be on the snapshot docker-compose run --rm -w /usr/src/app address-generator sh -c 'npm install --prefix=/package "@iota/core" > /dev/null && node address-generator.js $(cat nodex.seed) 2> /dev/null > address.txt' } generateAddress2() { echo "Generating an IOTA address holding all IOTAs..."
shell
#!/bin/sh /usr/local/demo/bin/weston-st-egl-cube-tex -f -a
shell
work_dir="$(cd "$(dirname "$0")" ; pwd -P)/../" source "$work_dir/VMS.cfg" #still the ssh server has to be registered for the respective user for i in "${VMS[@]}" do
shell
--prefix=debian/ranger \ --build-dir=build \ --component=${comp} \ --doc-dir=usr/share/doc/ranger-doc done
shell
#!/bin/bash echo "Starting SSH ..." /usr/sbin/sshd echo "Starting Go ..." go-wrapper run
shell
run matlab -nodisplay -singleCompThread -nojvm -r "run_from_shell('ParseArgument(''f'')');" echo "$output" 1>&2 [ "${lines[*]: -3:1}" == " 0" ] [ "$status" -eq 0 ] } # # ParseArgument(3) #
shell
# execute the Streams standalone application within the debugger step "debugging standalone application '$application' ..." gdb --args $standalone $*
shell
# uninstall.sh - Remove all hucon files from the system. # # Copyright (C) 2019 <NAME> # All rights reserved. # # This software may be modified and distributed under the terms # of the BSD license. See the LICENSE file for details. # remove the hackerschool into the site packages if [ -L /usr/lib/python2.7/site-packages/hackerschool ]; then rm /usr/lib/python2.7/site-packages/hackerschool fi # remove the hucon into the site packages if [ -L /usr/lib/python2.7/site-packages/hucon ]; then
shell
set -e # Create clusterrolebbinding for default sa kubectl create clusterrolebinding gluu-cluster-admin \ --clusterrole=cluster-admin \ --serviceaccount=default:default # Install Gluu cloud native edition
shell
( ${ACS_ENGINE_PATH}aks-engine deploy -m $MODEL \ -l $DEPLOY_REGION \ -g $group\ --auth-method client_secret \ --client-id $AZURE_CLIENT_ID \ --client-secret $AZURE_CLIENT_SECRET \ --dns-prefix $group \ --subscription-id $AZURE_SUBSCRIPTION_ID \ --set windowsProfile.adminPassword=$<PASSWORD> \
shell
to_upper() case $1 in a*) _UPR=A ;; b*) _UPR=B ;; c*) _UPR=C ;; d*) _UPR=D ;; e*) _UPR=E ;; f*) _UPR=F ;; g*) _UPR=G ;; h*) _UPR=H ;; i*) _UPR=I ;; j*) _UPR=J ;; k*) _UPR=K ;; l*) _UPR=L ;;
shell
#!/bin/sh ln -s /storage/workspaces/media-server-setup/photos/move-videos.sh /storage/photos/move-videos.sh
shell
#!/bin/bash set -v ng build --prod npx ngh
shell
# -------------------------------------------- # OPTIONAL: SSL DATABASE SETTINGS # -------------------------------------------- DB_SSL=${DB_SSL:-false} DB_SSL_IS_PAAS=${DB_SSL_IS_PAAS:-false} DB_SSL_KEY_PATH=${DB_SSL_KEY_PATH:-null} DB_SSL_CERT_PATH=${DB_SSL_CERT_PATH:-null} DB_SSL_CA_PATH=${DB_SSL_CA_PATH:-null} DB_SSL_CIPHER=${DB_SSL_CIPHER:-null} # -------------------------------------------- # REQUIRED: OUTGOING MAIL SERVER SETTINGS # -------------------------------------------- MAIL_DRIVER=${MAIL_DRIVER:-smtp}
shell
# Compress $FILE into $FILE.tar.gz, then remove the original — but only if
# the archive was created successfully. The original removed $FILE
# unconditionally, which destroys the data when tar fails (e.g. disk full).
tar -czvf "${FILE}.tar.gz" "${FILE}" && rm -rf "${FILE}"
echo "DONE"
ls -lh "${FILE}.tar.gz"
shell
__enhancd::entry::git::root() { if git rev-parse --is-inside-work-tree 2> /dev/null && [[ $(git rev-parse --show-toplevel) != $PWD ]]; then echo $(git rev-parse --show-toplevel) 2> /dev/null else return fi | __enhancd::filter::exclude "true" }
shell
get_list_of_containers_to_mount container_patterns_to_mount echo_with_ts "Containers to mount:" printf "%s\n" "${container_patterns_to_mount[@]-}" container_patterns_to_mount=("${container_patterns_to_mount[@]/.blob.core.windows.net/}") container_patterns_to_mount=("${container_patterns_to_mount[@]/https:\/\//}") container_patterns_to_mount=("${container_patterns_to_mount[@]/http:\/\//}") for pattern in "${container_patterns_to_mount[@]}"; do acct_and_cont=$(expr "$pattern" : '^[-/]*\([^?]*\)') # remove leading "-" and "/", and the SAS token acct_and_cont=${acct_and_cont/%\//} # remove trailing "/"
shell
#!/bin/sh
# Gitflow guard: refuse to proceed while the working tree has untracked files.
untracked=$(git ls-files --others --exclude-standard)
if [ -n "$untracked" ]; then
  gitflow_fail "- ERROR - found untracked files"
  exit 1
else
  gitflow_ok " - ok, no untracked files"
fi
shell
TRAIN_DIR=/home/app/data/ISIC/ISIC-images-preprocessed-sorted CHECKPOINT_PATH=/tmp/my_checkpoints/inception_v3.ckpt python train_image_classifier.py \ --train_dir=${TRAIN_DIR} \ --dataset_dir=${DATASET_DIR} \ --dataset_name=flowers \ --dataset_split_name=train \ --model_name=inception_v3 \
shell
#!/usr/bin/env bash set -euo pipefail GIT_ROOT=$(git rev-parse --show-toplevel) pushd "$GIT_ROOT" > /dev/null
shell
} echo_b () { [ $# -ne 1 ] && return 0 echo -e "\033[34m$1\033[0m" }
shell
mkdir -p raw_data wget -c "https://drive.google.com/uc?export=download&id=1M4O9XZnhL1t7K3zSYQn7sapjt2nkrHQf" -O 'raw_data/test_set.json' wget -c "https://drive.google.com/uc?export=download&id=1ihc8Wcv7OZ3KWFgKags75dEEXVNAeosk" -O 'raw_data/train_set.json'
shell
# VOTING_OFFSET=6
CHUTNEY=./chutney
myname=$(basename "$0")
# Sanity checks: the chutney binary and the networks directory must exist.
[ -x "$CHUTNEY" ] || { echo "$myname: missing $CHUTNEY"; exit 1; }
[ -d networks ] || { echo "$myname: missing directory: networks"; exit 1; }
# Optional first argument selects the network flavour (default: basic).
flavour=basic; [ -n "$1" ] && { flavour=$1; shift; }
$CHUTNEY stop "networks/$flavour"
# Typo fixed: "boostrapping" -> "bootstrapping".
echo "$myname: bootstrapping network: $flavour"
$CHUTNEY configure "networks/$flavour"
shell
<gh_stars>1-10 #!/bin/bash -eux # Re-enable PostgreSQL autovacuum and fsync sudo sed -i -e 's/^autovacuum = off/#autovacuum = on/' \ -e 's/^fsync = off/#fsync = on/' /etc/postgresql/9.5/main/postgresql.conf sudo /etc/init.d/postgresql reload
shell
mv 961.mul.bam 96_A_mul.bam mv 962.ref.bam 96_B_ref.bam mv 962.alt.bam 96_B_alt.bam mv 962.unk.bam 96_B_unk.bam mv 962.mul.bam 96_B_mul.bam
shell
#!/bin/bash
# Build the SDK at the requested version.
# Usage: build.sh VERSION   (e.g. build.sh 1.4.4)
export PATH=$PATH:/usr/local/bin
set -e
# Numeric comparison (-ne) instead of string !=; usage goes to stderr.
if [ $# -ne 1 ] ; then
  echo "USAGE: $0 VERSION" >&2
  echo " e.g.: $0 1.4.4" >&2
  exit 1
fi
version=$1
echo "Building sdk $version..."
shell
<gh_stars>10-100 ASAN_OPTIONS=suppressions="tools/runners/sanitizers/asan/asan-suppressions.txt ${ASAN_OPTIONS}" \ LSAN_OPTIONS=suppressions="tools/runners/sanitizers/asan/lsan-suppressions.txt ${LSAN_OPTIONS}" \ "${@}"
shell
# Set up a Docker environment. # set -e echo "Setting up a Docker environment." docker pull ubuntu:17.10 docker-compose build
shell
cd ./build/ || exit 1 # run unit tests export SIGNAL_HANDLER_EXAMPLE_EXE="$(pwd)/SignalHandlerExample" ./ml-gridengine-executor-unit-tests --rng-seed=time || exit 1 # run integrated tests [[ -x integrated-tests ]] || ln -sf ../tests/integrated-tests/ integrated-tests || exit 1 python -m pytest integrated-tests
shell
# Print a heartbeat once per second for five seconds, then exit successfully.
for (( i = 1; i <= 5; i++ )); do
  sleep 1
  echo "Shell working $i"
done
exit 0
shell
#java -jar target/example-java-read-and-write-from-hdfs-1.0-SNAPSHOT-jar-with-dependencies.jar hdfs://localhost:51000 yarn jar target/example-java-read-and-write-from-hdfs-1.0-SNAPSHOT-jar-with-dependencies.jar
shell
export ZONES="a c f" export MIRROR_NUM_REPLICAS_PER_ZONE=2 export MIRROR_TARGET_URL="" # target is down, was https://log.certly.io export MIRROR_TARGET_PUBLIC_KEY="certly_1.pem" export MONITORING="gcm"
shell
usernameFull=$1 regionZip=$2 #BankDemo_PAC.zip unzipFolder=$3 #/home/demouser echo "Importing BankDemo region" yum install unzip -y echo "Unzipping $regionZip to $unzipFolder" unzip $regionZip -d $unzipFolder regionFolderPath=$unzipFolder/BankDemo_PAC chown -R $usernameFull $regionFolderPath chmod -R 755 $regionFolderPath echo "Importing $regionFolderPath/Repo/BNKDM.xml" runuser -l $usernameFull -c ". /opt/microfocus/EnterpriseDeveloper/bin/cobsetenv; export CCITCP2_PORT=1086; mfds /g 5 `pwd`/$regionFolderPath/Repo/BNKDM.xml D"
shell
#!/bin/bash cat gsd/dev.txt gsd/test.txt | python benchmark.py -g ja_core_news_md ja_core_news_trf cat gsd/dev.txt gsd/test.txt | python benchmark.py ja_core_news_md ja_core_news_trf
shell
#!/bin/bash cd /opt/coscale/autoscaler python autoscaler.py
shell
# Pre-commit guard: fail when any staged file ends with the given extension.
# Note: `exit 1` runs inside the pipeline's subshell; its status becomes the
# pipeline's (and thus the function's) return value, failing the hook.
check_extension(){
  # read -r added: without it, backslashes in filenames are mangled.
  git diff --cached --name-status | while read -r status file; do
    extension=$1
    if [[ $file =~ $extension$ ]] ; then
      echo "Please remove *.$extension files before committing"
      exit 1
    fi
  done
}
check_extension "$@"
shell
#!/usr/bin/env bash sudo apt-get update sudo apt-get install -y apache2
shell
set -eu cd ${BASH_SOURCE[0]%/*} >/dev/null 2>&1 readonly mydir=${PWD} cd - >/dev/null 2>&1 mkdir -p $1
shell
# install dein if [ ! -d ~/.config/nvim/bundles ]; then curl https://raw.githubusercontent.com/Shougo/dein.vim/master/bin/installer.sh > vim/installer.sh mkdir ~/.config/nvim/bundles/ sh ./vim/installer.sh ~/.config/nvim/bundles rm vim/installer.sh fi # installl tpm if [ ! -d ~/.tmux/plugins/tpm ]; then mkdir -p ~/.tmux/plugins
shell
#!/bin/bash
# Print the number of lines in popular-names.txt.
# Redirect the file into wc directly instead of the useless `cat | wc`.
wc -l < ./popular-names.txt
shell
# This script is now necessary since webpack-cli no longer accepts arbitrary # flags. # We now have to set an environment variable from the flag and then # access that variable within the node process. if [ $# -eq 2 ]; then if [ $1 == "--instance" ]; then export INSTANCE=$2 else
shell
# Commit all changes with the given message, push, then create and push a tag.
#   $1 - commit message
#   $2 - tag name
commit_message="$1"
tag="$2"
git add .
git commit -m "$commit_message"
git push origin master
# Use the named variable (the original assigned `tag` but kept using "$2"),
# and quote it so tag names with unusual characters survive.
git tag "$tag"
git push origin tag "$tag"
shell
#!/bin/bash COMMAND="${1:-GPG_AGENT_INFO=\$HOME/S.gpg-agent gpg -K}" echo ${COMMAND} docker run --rm -ti \ -v ~/.gnupg/S.gpg-agent:/root/S.gpg-agent \ -v ~/.gnupg:/root/.gnupg \ -v ${GOPATH}:/go \ -- aws-vault:yubikey bash -c "${COMMAND}"
shell
# Build with the requested AFL map size, then build llvm_mode the same way.
if [ $# -ne 1 ]; then
  echo "Usage: ./build <map_pow2>" >&2
  exit 1
fi
make clean
make MAP_SIZE_POW2="$1"
# Abort if llvm_mode is missing; otherwise the second make would silently
# re-run in the current directory.
cd llvm_mode || exit 1
make MAP_SIZE_POW2="$1"
shell
<gh_stars>0 docker-compose -f docker-compose.yaml -f docker-compose.test.yaml up -d
shell
if [ $# -ne 4 ]; then error "Uso: script nombre extensión número ruta" fi if [ ! -d $4 ]; then error "Error: el directorio no existe" fi if [ $3 -lt 1 ]; then error "Error: el numero de ficheros no puede ser menor que 1" fi for (( i = 1; i <= $3; i++ )); do name="$4/$1$i.$2" if [ $i -lt 10 ]; then
shell
#!/bin/bash # Change Password of mysqld mysqladmin -p shutdown start mysql
shell
# A script to launch the flask application. FLASK_APP="$(readlink -f API/__main__.py)" FLASK_ENV=development export FLASK_APP export FLASK_ENV flask run
shell
<gh_stars>0 mkdir -p build && cd build cmake -DCMAKE_BUILD_TYPE=Release .. && cmake --build .
shell
while true; do ./publish.sh; done
shell
<gh_stars>0 #!/bin/bash module load openmpi echo "MPI module loaded" mpicc ../../Drivers/mpiCG1Dtimer.c -lm -o 1Dtimer.exe echo "mpiCG1Dtimer.c Compiled with mpicc" sbatch ./mpiCG1Dtim_CC.sbatch echo "Job submitted via sbatch"
shell
rm -rf html rm -rf latex rm -rf *~
shell
mv build/scripts/swap.mv . mv build/scripts/swap3.mv . mv build/scripts/swap4.mv . rm -rf build echo "cleaned ./build"
shell
#!/bin/bash
# Render Dockerrun.aws.json from its template, substituting TAG with $1.
set -e
# sed reads the template directly (removed useless `cat |` stage).
# NOTE: $1 is interpolated into the sed program; a tag containing '#' or '&'
# would break the substitution.
sed "s#TAG#$1#g" Dockerrun.aws.json.tpl > Dockerrun.aws.json
shell
command -v dnf >/dev/null || return assertEquals "did not detect dnf" "dnf" "$package_manager" } function test_detect_package_manager_with_yum() { command -v dnf >/dev/null && return command -v yum >/dev/null || return assertEquals "did not detect yum" "yum" "$package_manager" }
shell
#!/bin/bash
# Fixed shebang: the original "!#/bin/bash" is reversed and is not a shebang
# at all — the first line would be executed as a history-expansion/command.
# Mirror https://www.understandmyself.com/ for offline viewing.
wget \
  --recursive \
  --no-clobber \
  --page-requisites \
  --html-extension \
  --convert-links \
  --restrict-file-names=windows \
  --domains understandmyself.com \
  --no-parent \
  https://www.understandmyself.com/
shell
# extract options and their arguments into variables. while true ; do case "$1" in -i|--bids-input) bids_input="$2" shift 2 ;; -o|--output-dir) output_dir="$2" shift 2 ;; -s|--subject-id) subject_id="$2"
shell
#!/bin/sh
# Regenerate the bundled resource from frame.svg, running from the script's
# own directory. $() replaces backticks and the path is quoted so the script
# works when its location contains spaces; abort if the cd fails so the
# output file is not created in the wrong directory.
cd "$(dirname "$0")" || exit 1
# (output filename "bunded.go" kept exactly as the project expects it)
fyne bundle -package fyne frame.svg > bunded.go
shell
#!/bin/sh
# Emit the SQL migration that adds the `concurrency` column to `status`.
printf '%s\n' 'ALTER TABLE status ADD COLUMN concurrency INTEGER DEFAULT 0;'
shell
--mode=train \ --alsologtostderr \ --gin_param="batch_size=8" \ --gin_param="train_util.train.num_steps=12000" \ --gin_param="train_util.train.steps_per_save=300" \ --gin_param="train_util.train.steps_per_summary=100" \ --gin_param="trainers.Trainer.checkpoints_to_keep=1" \ --save_dir="$RESULTS_DIR/no_crepe_base_urmp_train_ZF0Encoder_ff2" \ --gin_file=/mnt/antares_raid/home/nielsrolf/ddsp/ddsp/training/gin/models2/experiments/no_crepe/generated/no_crepe_base_urmp_train_ZF0Encoder_ff2.gin
shell
<gh_stars>0 #!/usr/bin/env bash ./monstache -f config.toml
shell
set -ex # Load the list of modified files. git fetch git branch -lr FILES=$(git diff --name-only origin/master... src/include) # Create a new temp directory. mkdir src/include2 # Copy modified files into it. for FILE in $FILES do
shell
docker build -t intro-to-ml-ops .
shell
#!/bin/sh mkdir -p third_party/lzma_sdk wget https://chromium.googlesource.com/chromium/src/+archive/refs/heads/main/third_party/lzma_sdk.tar.gz && tar xf -C third_party/lzma_sdk/ lzma_sdk.tar.gz cd third_party/lzma_sdk/ && patch -p0 < ../../lzma_sdk.patch && cd ../.. python gn/build/gen.py ninja -C ./gn/out ./gn/out/gn gen -C out/Default ninja -C out/Default
shell
#!/usr/bin/env bash set -e make -B time ./row time ./column
shell
then : "$(( __shx__heredocCount++ ))" __shx__outputScriptToEval+="${__shx__newLine}IFS= read -r -d '' __SHX_HEREDOC_$__shx__heredocCount << 'SHX_PRINT_BLOCK'${__shx__newLine}" __shx__outputScriptToEval+="$__shx__stringBuilder" __shx__outputScriptToEval+="${__shx__newLine}SHX_PRINT_BLOCK" __shx__outputScriptToEval+="${__shx__newLine}printf '%s' \"\${__SHX_HEREDOC_$__shx__heredocCount%$'\\n'}\"" __shx__outputScriptToEval+="${__shx__newLine}unset __SHX_HEREDOC_$__shx__heredocCount"
shell
libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev python-openssl # TODO: add check for existing curl https://pyenv.run | bash
shell
#!/bin/bash #gunicorn --workers 1 -b 127.0.0.1:5001 docker-server:falconApi nohup /root/anaconda3/bin/python /root/srv/docker-server.py & echo STARTED
shell
#!/usr/bin/env sh # AUTOGENERATED FILE! DO NOT MODIFY # >>>> PKGS: fonts-powerline sudo apt-get install -y \ fonts-powerline
shell
#!/bin/bash launchDockerPs() { retryCount=1 maxRetry=10 docker ps -a while [ "$retryCount" -lt "$maxRetry" ]; do echo "[$(date)] wait... =" $retryCount "/" $maxRetry retryCount=$((retryCount+1)) sleep 60
shell
SSL_PASS="${KFKCFG_SSL_KEY_PASSWORD}" mkdir -p ${SSL_FOLDER} mkdir -p ${TMP_FOLDER} echo "Retrieving certificates & PK..." aws s3 cp s3://${S3_CERTS_BUCKET}/${BROKER_ID}/cert ${TMP_FOLDER} aws s3 cp s3://${S3_CERTS_BUCKET}/${BROKER_ID}/cert-issuer ${TMP_FOLDER} aws s3 cp s3://${S3_CERTS_BUCKET}/${BROKER_ID}/key ${TMP_FOLDER} aws s3 cp s3://${S3_CERTS_BUCKET}/ca-cert ${TMP_FOLDER} # create cert full chain cat ${TMP_FOLDER}/cert ${TMP_FOLDER}/cert-issuer > ${TMP_FOLDER}/cert-chain
shell
#!/bin/bash i=1 n1=1 n2=1 c=0 while [[ $i -lt $(date +%d) ]]; do
shell
(mvn spring-boot:run -f $directory/core/pom.xml &) sleep 20 echo "-------------------------------------------------------------------" done
shell
#!/bin/sh while [ "1" == "1" ] do date cat /proc/vmstat |egrep "nr_free_pages|nr_anon_pages|nr_file_pages|nr_dirty|nr_writeback|pgpgout|pgsteal_normal|pgscan_kswapd_normal|pgscan_direct_normal|kswapd_steal" echo sleep 1 done