lang (stringclasses, 10 values); seed (stringlengths, 5 to 2.12k)
shell
set -e
export PIPENV_IGNORE_VIRTUALENVS=1
export PIPENV_VENV_IN_PROJECT=1
pipenv install --dev
pipenv run python -m mypy_boto3  # Setup [s3,...] stubs
pipenv run mypy -p src.app
shell
#!/bin/sh
/usr/local/opt/llvm/bin/llvm-objdump -color -source -disassemble target/release/noop_rust.o
shell
ROOT=$(dirname $0)/..
test -x "$ROOT/bin/python" || {
    echo "No virtual env -- creating ..."
    (
        cd $ROOT
        virtualenv .
        ./bin/pip install -r requirements.txt --build-dir $ROOT/.pip_build
shell
# run database and cache updates
php bin/dbupdate.php

# Install OKAPI
curl __FRONTEND_URL__/okapi/update?install=true
shell
UNAME=$(uname)
if [ "$UNAME" = 'Darwin' ]
then
    OUTTYPE='quartz'
else
    OUTTYPE='cairo1'
fi
python manage.py test --include_profile "$@"
./bin/profile_png.sh
[ "$UNAME" = 'Darwin' ] && open profiling/prof_db.png
shell
USAGE
  gh project num_project [num_column [num_card]] --[FLAGS]

EXAMPLES
  gh project 1\tprints columns for project with id 1
  gh project 1 2\tprints cards in column 1 (in project 1)
"
source $this_dir'help/flags.sh'
exit 0
shell
set -e
apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 1E9377A2BA9EF27F
apt-get install -y --allow-unauthenticated --no-install-recommends \
    g++-7 \
    python \
    python3-sh \
shell
# Install dependencies for gdb and m4
sudo apt install binutils binutils-dev binutils-source
sudo apt install help2man autoconf automake texinfo bison flex

# Terminal UI dependencies for GDB
sudo apt install libncurses5 libncurses5-dev libncursesw5 libncursesw5-dev

# Install m4 from source (3 years ahead of version in apt), as sketched below
cd ~/Downloads
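# A hedged sketch of the from-source m4 build that presumably follows the
# cd above -- the GNU mirror URL and the 1.4.18 version are assumptions,
# not taken from the original script.
wget https://ftp.gnu.org/gnu/m4/m4-1.4.18.tar.gz
tar xzf m4-1.4.18.tar.gz
cd m4-1.4.18
./configure
make
sudo make install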
shell
opt="--encoding=utf-8"

# Note: Makefile examples contain constructions like ${PROG} which
# look like Mako constructions, but they are not. Use --no_mako
# to turn off Mako processing.
opt="--no_mako"

rm -f *.aux

html=${name}-reveal
system doconce format html $name --pygments_html_style=perldoc --keep_pygments_html_bg --html_links_in_new_window --html_output=$html $opt
system doconce slides_html $html reveal --html_slide_theme=beige
shell
#!/usr/bin/env bash
version="$(sed -n -r 's/^version=(.*)/\1/p' library.properties)"
git tag "v$version"
files="$(git ls-files src examples README.md LICENSE library.properties keywords.txt)"
zip -r "Arduino_Library-$version.zip" $files
tar czf "Arduino_Library-$version.tar.gz" $files
shell
#!/bin/bash
# edit the input properties file to the cluster extractor
distFile=$1
clusterFile=$2
clusters=$3
newclusters=$4
outDir=$5
#sed -i "s/\(distFile *= *\).*/\1$3/" $1 > $2
#awk -F"=" -v newval="=$3" '/distFile/{$2=newval;print;next}1' $1 | awk -F"=" -v newval1="=$4" '/clusterFile/{$2=newval1;print;next}1' | awk -F"=" -v newval="=$5" '/numPoints/{$2=newval;print;next}1' | awk -F"=" -v newval="=$6" '/clusters/{$2=newval;print;next}1' | awk -F"=" -v newval="=$7" '/newclusters/{$2=newval;print;next}1'| awk -F"=" -v newval="=$8" '/joins/{$2=newval;print;next}1' | awk -F"=" -v newval="=$outdir" '/outDir/{$2=newval;print;next}1' > $2
cp=$HOME/.m2/repository/edu/indiana/soic/spidal/dapwc/1.0-ompi1.8.1/dapwc-1.0-ompi1.8.1.jar
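# The commented-out pipeline above references $3..$8 even though the script
# only defines five parameters. A hedged sketch of the same key=value rewrite
# with the arguments wired to the variables actually defined -- the template
# and output file names are assumptions:
propFile=template.properties
sed -e "s|^\(distFile *= *\).*|\1$distFile|" \
    -e "s|^\(clusterFile *= *\).*|\1$clusterFile|" \
    -e "s|^\(clusters *= *\).*|\1$clusters|" \
    -e "s|^\(newclusters *= *\).*|\1$newclusters|" \
    -e "s|^\(outDir *= *\).*|\1$outDir|" \
    "$propFile" > edited.properties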
shell
make install_sw

# libevent
rm -rf libevent
git clone https://github.com/libevent/libevent.git
cd libevent
make clean || true
export PKG_CONFIG_PATH=$OPENSSL_INSTALL_DIR/lib/pkgconfig
./autogen.sh
CPPFLAGS="-I${OPENSSL_INSTALL_DIR}/include" \
LDFLAGS="-L${OPENSSL_INSTALL_DIR}/lib" \
shell
extracted_files=$(tar -xzvf packages/jdk-8u66-linux-x64.tar.gz | wc -l)
echo "$extracted_files extracted"
ln -s jdk1.8.0_66 jdk
shell
# Get SBT / Scala build env image and run assembly (compile soctool to a fat JAR)
docker run --mount src="$(pwd)",target=/opt/workspace,type=bind eed3si9n/sbt:jdk11-alpine \
    'set assemblyOutputPath in assembly := new File("./target/soctool.jar")' \
    assembly
shell
if [ -z ${POPLAR_SDK_ENABLED+x} ]
then
    # POPLAR_ROOT=$(cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd)
    POPLAR_ROOT="$HOME/Desktop/poplar_sdk-ubuntu_18_04-2.0.0-EA.1+418-ec08eca1f6/poplar-ubuntu_18_04-2.0.0+91590-712863139b"
    echo_popart_info $POPLAR_ROOT
    read poplar_root_path
    POPART_ROOT="${poplar_root_path:-${POPLAR_ROOT}}"
    export CMAKE_PREFIX_PATH=${POPLAR_ROOT}${CMAKE_PREFIX_PATH:+:${CMAKE_PREFIX_PATH}}
    export PATH=${POPLAR_ROOT}/bin${PATH:+:${PATH}}
    export CPATH=${POPLAR_ROOT}/include${CPATH:+:${CPATH}}
    export LIBRARY_PATH=${POPLAR_ROOT}/lib${LIBRARY_PATH:+:${LIBRARY_PATH}}
shell
#!/bin/sh
mkdir build_ && cd build_
cmake \
    -DCMAKE_INSTALL_PREFIX=${PREFIX} \
    -DCMAKE_INSTALL_LIBDIR=lib \
    -DCMAKE_PREFIX_PATH=${PREFIX} \
    -DCMAKE_INSTALL_RPATH="${PREFIX}/lib" \
    -DCMAKE_BUILD_WITH_INSTALL_RPATH=ON \
    -DCMAKE_MACOSX_RPATH=ON \
    ..
make install -j${CPU_COUNT}
if [ "$CONDA_BUILD_CROSS_COMPILATION" != "1" ]; then
    ctest
fi
shell
export S3_CREDENTIALS="$(cf service-key fbi-cde-s3 colin-key | tail -n +2)"
export AWS_ACCESS_KEY_ID=$(echo "${S3_CREDENTIALS}" | jq -r .access_key_id)
export AWS_SECRET_ACCESS_KEY=$(echo "${S3_CREDENTIALS}" | jq -r .secret_access_key)
export BUCKET_NAME=$(echo "${S3_CREDENTIALS}" | jq -r .bucket)
export AWS_DEFAULT_REGION=$(echo "${S3_CREDENTIALS}" | jq -r '.region')
for i in "${arr_years[@]}"; do
    aws s3 rm --recursive s3://${BUCKET_NAME}/$i
    aws s3 cp --recursive zips/$i/ s3://${BUCKET_NAME}/$i
done
shell
export GRPC_VERBOSITY=info
export GRPC_TRACE=http
shell
# Fix Gradle compilation error
[ -z "$ANDROID_NDK_HOME" ] && export ANDROID_NDK_HOME="$(which ndk-build | sed 's@/ndk-build@@')"
cd project
./gradlew bundleRelease || exit 1
../copyAssets.sh pack-binaries-bundle app/build/outputs/bundle/release/app-release.aab
cd app/build/outputs/bundle/release || exit 1
shell
export VSIM_DIR=.
export USE_ZERO_RISCY=0
export RISCY_RV32F=0
export ZERO_RV32M=0
export ZERO_RV32E=0
export PL_NETLIST=""
export TB_TEST=""
vsim -64 -c -do 'source tcl_files/run_spi.tcl; log -r /*; run -a; exit;'
shell
"webgl_blender_model" "webgl_cube" "webgl_shader" "webgl_text" ) NG_APPS=( "binary-clock" "googlemaps" "learning" "memory" "money-spin" "weekend-countdown" )
shell
#! /bin/bash
ansible servers -m ping -u root
shell
#!/bin/bash
jid1=$(sbatch submit-job-1.sh | grep -oP '\d+'); echo $jid1
jid2=$(sbatch --dependency=afterok:$jid1 submit-job-2.sh | grep -oP '\d+'); echo $jid2
jid3=$(sbatch --dependency=afterok:$jid2 submit-job-3.sh | grep -oP '\d+'); echo $jid3
shell
# on macOS the local IP can be found using $(ipconfig getifaddr en0);
# pass it as the first argument (see the usage sketch below)
export NODE_SERVER_IP=$1
dart tool/env.dart
cd nodejs_server_test_auth_basic_and_ssl
node index.js &
flutter clean
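# Usage sketch -- the script file name is an assumption:
#   ./run_integration.sh "$(ipconfig getifaddr en0)"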
shell
    mkdir -p "$plg_dir/plg/xpto"
    athena.test.assert_exit_code "athena.docker.mount_dir_from_plugin" "xpto" "$tmp_dir"
    athena.docker.mount_dir_from_plugin "xpto" "$tmp_dir"
    athena.test.assert_value "-v $plg_dir/plg/xpto:${tmp_dir}" "$ATHENA_DOCKER_OPTS"
    ATHENA_DOCKER_OPTS="$curr_extra_opts"
    rm -r "$plg_dir"
    rm -r "$tmp_dir"
}

function testcase_athena.docker.set_no_default_router()
shell
# Based on http://docs.openstack.org/icehouse/install-guide/install/apt/content/ceilometer-install.html
export OS_SERVICE_TOKEN=keystone
export OS_SERVICE_ENDPOINT=http://keystone.openstack.sj.ifsc.edu.br:35357/v2.0
keystone user-create --name=ceilometer --pass=<PASSWORD> --email=<EMAIL>
keystone user-role-add --user=ceilometer --tenant=service --role=admin
keystone service-create --name=ceilometer --type=metering --description="Telemetry"
keystone endpoint-create --region=ifsc-sj --service-id=$(keystone service-list | awk '/ metering / {print $2}') --publicurl=http://ceilometer.openstack.sj.ifsc.edu.br:8777 --internalurl=http://ceilometer.openstack.sj.ifsc.edu.br:8777 --adminurl=http://ceilometer.openstack.sj.ifsc.edu.br:8777

# Based on http://docs.openstack.org/icehouse/install-guide/install/apt/content/ceilometer-install-swift.html
keystone role-create --name=ResellerAdmin
shell
# automatically run the HLS projects
python auto_run.py <kernel_name> --src_dir <bench_directory>/hls/src
source <vivado_installation_directory>/settings64.sh
cd <bench_directory>/hls/prj
vivado_hls -f script_0.tcl
shell
flocker-ca initialize cluster
flocker-ca create-control-certificate cluster
mv -v control-cluster.crt control-service.crt
mv -v control-cluster.key control-service.key
chmod -v 0700 /etc/flocker
shell
$HELPERS/ssh restore
shell
git checkout master
git pull
git merge --no-ff --no-verify -m "merge release/$1 into master" release/$1
npm run changelog
git add CHANGELOG.md
npm run docs
git add docs/index.html
git commit --amend --no-verify -m "merge release/$1 into master"
git tag v$1
git push --no-verify
git push origin --no-verify v$1
git checkout develop
git merge --no-ff --no-verify -m "merge release/$1 into develop" release/$1
git diff develop master -- CHANGELOG.md > patchfile
git apply patchfile
shell
        d=$f
        printf -v formatted_number "%09d" $j
        convert \( $a $c +append \) \
            \( $b $d +append \) \
            -background none -append ../tiles16/$formatted_number.png
        j=$(expr $j + 1)
    fi
    i=$(expr $i + 1)
done
shell
pushd .
cd ${PRD}
quartus_sh -t ${SCRIPTS_ROOT_DIR}/add-persona.tcl -PROJECT_NAME ${TEMPLATE_PROJECT_NAME} -PERSONA_NAME ${PERSONA_NAME} -HLS_PROJECT_DIR ${HLS_PROJECT_DIR}
popd
shell
    exit
fi

ssid=""
channel="7"
shell
if [ "$SEED" = "$(hostname)" ]; then
    echo "I AM THE SEED NODE"
    thord tendermint show-node-id > /tmp/shared/node.txt
fi

# write node account data to json file in shared directory
echo "$NODE_ADDRESS $VALIDATOR $NODE_PUB_KEY $VERSION $ADDRESS" > /tmp/shared/node_$NODE_ADDRESS.json

# wait until we have the correct number of node files in our directory before continuing
while [ "$(ls -1 /tmp/shared/node_*.json | wc -l | tr -d '[:space:]')" != "$NODES" ]; do
shell
if [ $occurrences -gt 0 ]; then
    return 0
else
    return 1
fi
shell
#!/bin/bash
for f in ../../Release_Binary/documentation/html/*.js ../../Release_Binary/documentation/html/**/*.js; do
    echo "$f"
    java -jar yuicompressor-2.4.8.jar --nomunge -o "$f" "$f"
done
shell
#!/bin/bash
envsubst < ./connect-properties/connect-standalone.properties.template > ./connect-properties/connect-standalone.properties
envsubst < ./connect-properties/sink.properties.template > ./connect-properties/sink.properties
envsubst < ./newrelic/newrelic.yml.template > ./newrelic/newrelic.yml
if [[ "$#" -eq 0 ]]
then
    echo "Starting Connect..."
    exec /opt/kafka_2.10-0.10.0.1/bin/connect-standalone.sh ./connect-properties/connect-standalone.properties ./connect-properties/sink.properties 2>&1
else
    /bin/bash -c "$*"
fi
shell
#!/usr/bin/env bash
# wrap $1 between $2_header.html and $2_footer.html, writing the result to $3
cat $2_header.html $1 $2_footer.html > $3
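# Usage sketch -- file names are assumptions: with page_header.html and
# page_footer.html present, wrap body.html into out.html:
#   ./wrap.sh body.html page out.html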
shell
    --test-input fix \
    --feat-format kaldi \
    --nj 10 \
    --epochs 40 \
    --lr 0.1 \
    --input-dim 161 \
    --filter ${filter} \
    --time-dim 1 \
shell
    URL_OS="Windows"
fi

echo "Downloading miniconda for $URL_OS"
DOWNLOAD_PATH="miniconda.sh"
if [ "${PYTHON_VERSION}" = "2.7" ]; then
    wget http://repo.continuum.io/miniconda/Miniconda-latest-$URL_OS-x86_64.sh -O ${DOWNLOAD_PATH}
    INSTALL_FOLDER="$HOME/miniconda2"
else
shell
do
    FILE_DATE=$( echo $FILE | sed 's/,/ /g' | awk '{print $1}' )
    if [ $( echo $FILE_DATE | wc -c ) -eq 8 ]
    then
        # pad the date: insert a '0' after the first two characters
        FILE_DATE=$( echo $FILE_DATE | sed 's/.\{2\}/&0/' )
shell
IMAGE_BASE_URL="${IMAGE_BASE_URL:-set in nephoria-config.sh}"
NEPHORIA_TESTCASE_MODULE="nephoria.testcases"
NEPHORIA_RESULTS_BASE="$(pwd)/nephoria_results"
NEPHORIA_OPTS="--clc ${CLC_IP} ${NEPHORIA_OPTS}"
NEPHORIA_TESTCASES=(
    "ec2.images.load_hvm_image --image-url ${IMAGE_BASE_URL}/precise-server-cloudimg-amd64-disk1.img"
    "ec2.images.load_pv_image --kernel-image-url ${IMAGE_BASE_URL}/vmlinuz-3.2.0-23-virtual --ramdisk-image-url ${IMAGE_BASE_URL}/initrd.img-3.2.0-23-virtual --disk-image-url ${IMAGE_BASE_URL}/precise-server-cloudimg-amd64-ext3.img"
    "ec2.network.net_tests_classic"
    "ec2.images.load_bfebs_image --image-url ${IMAGE_BASE_URL}/precise-server-cloudimg-amd64-disk1.img"
    "ec2.ebs.block_dev_map_suite --url ${IMAGE_BASE_URL}/precise-server-cloudimg-amd64-disk1.img"
    "ec2.ebs.legacy_ebs_test_suite"
    "ec2.images.import_instance --no-https --image-url ${IMAGE_BASE_URL}/precise-server-cloudimg-amd64-disk1.img"
    "ec2.images.import_instance --no-https --imageformat vmdk --image-url ${IMAGE_BASE_URL}/ubuntu_trusty.vmdk"
    "admintests.instance_migration"
)
shell
rm -rf stanford-corenlp-full-2015-04-20
wget http://nlp.stanford.edu/software/stanford-chinese-corenlp-2015-04-20-models.jar
wget https://raw.githubusercontent.com/stanfordnlp/CoreNLP/master/src/edu/stanford/nlp/pipeline/StanfordCoreNLP-chinese.properties
shell
fvim() {
    local file=$(fd "$BUFFER" ./ -t file -H | fzf --prompt "[file]: " --preview 'bat --color always --style header,grid --line-range :100 {}' --query "$LBUFFER")
    if [[ -n "$file" ]]; then
        BUFFER="nvim ${file}"
        zle accept-line
    fi
}
shell
#!/bin/bash
ROOT=$(dirname "${BASH_SOURCE}")
if [[ -x "${ROOT}/bin/Release/dotnet/dotnet" ]]; then
    exec ${ROOT}/bin/Release/dotnet/dotnet "$@"
elif [[ -x "${ROOT}/bin/Debug/dotnet/dotnet" ]]; then
    exec ${ROOT}/bin/Debug/dotnet/dotnet "$@"
else
    echo "You need to run 'make prepare' first."
fi
shell
calico/node
calico/node:v3.12.0
quay.io/calico/node-s390x:v3.12.0
quay.io/calico/cni-s390x:v3.12.0
calico/cni:latest
calico/cni:latest-s390x
calico/cni:v3.12.0
calico/felix:latest-s390x
calico/protoc-s390x:latest
shell
alias be='bundle exec'
alias resetdb='bundle exec rails db:drop db:create db:migrate db:seed'
alias ff='bundle exec rspec --fail-fast'
shell
TRGDIR=''
while getopts ":frs:t:h" FLAG; do
    case $FLAG in
        h)
            usage "Help message for $SCRIPT"
            ;;
        f)
            FORCEINSTALL='TRUE'
            ;;
shell
        time=$(date +%F" "%T)
        # shellcheck disable=SC2001
        echo "$line" | sed "s/^/$time [$1]: /" >> $BACKUP_TMP_LOG 2>&1
    done
}

incremental_backup() {
    bin_dir=$1
shell
#!/bin/bash
xdg-open $(stack path --local-install-root)/bin/PlagueInc.jsexe/index.html
shell
export PATH="$HOME/.heroku/autossh:$PATH"
shell
echo "Install root"
yarn
echo "Install test"
cd test
yarn
shell
#!/usr/bin/env bash
MODULE_NAME='Packaged fonts'

if distro_has_gui
then
    module_packages xfonts-terminus
else
    log_module_start
    log_info "GUI not available, skipping fonts installation"
    log_no_changes
fi
shell
#!/usr/bin/env bash
set -o nounset
set -o pipefail
curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(go env GOPATH)/bin v1.16.0
shell
#!/usr/bin/env zsh
. $(brew --prefix asdf)/libexec/asdf.sh
shell
parallel --jobs 6 < ./results/exp_readc/run-1/lustre_4n_6t_6d_1000f_617m_5i/jobs/jobs_n1.txt
shell
if (( ${#CHAPTER_TITLES[@]} > 0 ))
then
    FN log light yellow "- Already loaded"
    return
fi

if [ -f "$FULL_SOURCE_METADATA_FILE" ]
then
    FN log light yellow "- Loaded existing metadata $FULL_SOURCE_METADATA_FILE"
    source "$FULL_SOURCE_METADATA_FILE"
    return
fi
shell
# configure script. It will create shell variables for most of
# the configuration options discovered by the configure script.
# This script is intended to be included by the configure scripts
# for Tclxml extensions so that they don't have to figure this all
# out for themselves. This file does not duplicate information
# already provided by tclConfig.sh, so you may need to use that
# file in addition to this one.
#
# The information in this file is specific to a single platform.
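# A hedged sketch of how an extension's configure script might consume such
# a file -- the tclxmlConfig.sh file name, the with_tclxml variable, and the
# TCLXML_* variable names are assumptions modeled on the tclConfig.sh
# convention:
if test -f "${with_tclxml}/tclxmlConfig.sh"; then
    . "${with_tclxml}/tclxmlConfig.sh"
    # variables defined by the file are now available to configure
    LIBS="${LIBS} ${TCLXML_LIB_SPEC}"
fi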
shell
fi

while [ $# -gt 0 ]; do
    case $1 in
        -h|--help)
            print_help
            exit 1
            ;;
shell
#!/bin/bash
# use SCP to transfer files
scp -P 2222 ./minix-R3.2.1/servers/vm/region.c [email protected]:/usr/src/servers/vm/region.c
scp -P 2222 ./minix-R3.2.1/lib/libminixfs/cache.c [email protected]:/usr/src/lib/libminixfs/cache.c

# alternative: mount
mkdir mountpoint
sshfs [email protected]:/usr/src mountpoint

# in Minix 3, rebuild the system
su
cd /usr/src
make build
reboot
shell
    echo "Invalid number of arguments."
    echo "Usage: $0 <log directory> <singularity image> <job id> <job status>"
    exit 1
fi

logdir=$1
image=$2
jobid=$3
status=$4
dir=$(dirname $0)

if [ -d ${logdir}/${jobid} ]; then
    exit
fi
shell
echo "> In the future, if you want to configure SPPMon for additional"
echo "SPP servers, you can run the individual installation scripts."
echo "The scripts are located within the directory:"
echo "${path}."
echo ""
echo "> Documentation for SPPMon is available within the wiki:"
echo "https://github.com/IBM/spectrum-protect-sppmon/wiki"
echo ""
echo "> Please make sure to regularly pull SPPMon updates from git"
echo "using the command:"
echo " git pull"
shell
    --cache \
    --dir /hps/nobackup2/production/ensembl/anja/release_93/vep_dumps//dumps/qc/b11c5cc92d94fd1c21393386e1c19016 \
    --host mysql-ens-var-prod-1 \
    --port 4449 \
    --refseq \
    --user ensro \
    --input_file /hps/nobackup2/production/ensembl/anja/release_93/vep_dumps//dumps/qc/homo_sapiens_GRCh38_human_frequency_test_input.txt \
shell
./nbody -benchmark -i=1 -numbodies=327680
shell
#!/bin/sh
SCRIPT_DIR=$(dirname $0)
java -Djetty.base="$SCRIPT_DIR" -jar "$SCRIPT_DIR/../start.jar" --stop STOP.KEY=stop_jetty_plz STOP.PORT=13450
shell
export EDITOR=/usr/bin/vim
export CDPATH+=${CDPATH:+:}/run/media/$USER:/mount
shell
case $1 in
    png|text)
        echo "[INFO] Generating executable \`$1'"
        sbcl --noinform \
             --non-interactive \
             --disable-debugger \
             --eval "(require 'asdf)" \
             --eval "(push '*default-pathname-defaults* asdf:*central-registry*)" \
             --eval "(asdf:make \"cl-noise-$1/binary\")"
        ;;
    *)
        echo "[ERROR] No examples passed!"
        echo "[ERROR] Available:"
        echo "[ERROR]   png"
shell
#!/usr/bin/env bash
set -e

# Start Xvfb (the first argument to -screen is the screen number)
Xvfb -ac -screen 0 1280x2000x24 :99.0 &
export DISPLAY=:99.0
exec "$@"
shell
fi

echo "Deployment namespace $deployment_name operator namespace $operator_namespace"
./undeploy_example.sh $1
./undeploy_infrastructure.sh $deployment_name $operator_namespace
shell
# set -f # avoid globbing (expansion of *).
array=(${CODEBUILD_BUILD_ARN//:/ })   # split the build ARN on ':'
AccountId=${array[4]}                 # the fifth ARN field is the account id
echo AccountId: ${AccountId}
# unset +f
echo TargetRegion: ${AWS_REGION}
BuildSHA1=${CODEBUILD_RESOLVED_SOURCE_VERSION}
echo BuildSHA1: ${BuildSHA1}
echo "############### to the job here ###############"
#!/bin/bash
shell
# plotting experiment result for CartPole
# In the first graph, compare the learning curves (average return at each iteration)
# for the experiments prefixed with sb_. (The small batch experiments.)
python plot.py data/sb_no_rtg_dna_CartPole-v0_11-05-2020_17-13-00 data/sb_rtg_dna_CartPole-v0_11-05-2020_17-30-17 data/sb_rtg_na_CartPole-v0_11-05-2020_17-42-32 --legend sb_no_rtg_dna sb_rtg_dna sb_rtg_na
shell
#!/bin/bash -e
pkg="libelf-0.8.13"
if [ ! -f $HOME/.local/lib/pkgconfig/libelf.pc ]; then
    curl -O http://www.mr511.de/software/${pkg}.tar.gz
    tar xvzf ${pkg}.tar.gz
    cd ${pkg}
    ./configure --prefix=$HOME/.local
    make
    make install
fi
shell
########################################
# Error-checking function. Checks the exit status of the last command run.
# If non-zero, assumes something went wrong and bails out of the script.
function error_check {
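    # hedged sketch of the truncated body, reconstructed from the comment
    # above -- the message text and exit code are assumptions
    if [ $? -ne 0 ]; then
        echo "ERROR: last command exited with a non-zero status; aborting." >&2
        exit 1
    fi
}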
shell
#!/bin/bash
touch final
for file in "$@"
do
    tail -n -3 $file >> final
done
shell
    --vnet-subnet-id $subnetaksid \
    --assign-identity $identityid \
    --assign-kubelet-identity $kubeletidentityid \
    --api-server-authorized-ip-ranges $myip \
    -o table
shell
nomad::client_config /dev/null
assert "client_config_only_etcd" "$_test_" "etcd"

nomad_advertise_ip=192.168.0.103
nomad::client_config /dev/null
shell
SECRETS_FILE=${SECRETS_REPO}/default.nix
RANDOM_FILE=${SECRETS_REPO}/random

mkpasswdfn () {
shell
npx sequelize db:migrate
npx sequelize db:seed:all
npm start
shell
    echo "Please supply the client name as the first parameter"
    exit 1
fi

client_name=$1
client_folder=client-credentials/$client_name

if [ -z "$2" ]
shell
ENV HADOOP_CONF_DIR=/opt/hadoop/conf
ENV SPARK_EXTRA_CLASSPATH=/opt/hadoop/conf:/opt/hadoop/share/hadoop/hdfs/*:/opt/hadoop/share/hadoop/hdfs/lib/*:/opt/hadoop/share/hadoop/yarn/*:/opt/hadoop/share/hadoop/yarn/lib/*:/opt/hadoop/share/hadoop/mapreduce/*:/opt/hadoop/share/hadoop/mapreduce/lib/*:/opt/ozone/share/ozone/lib/ratis-thirdparty-misc-0.2.0.jar:/opt/ozone/share/ozone/lib/ratis-proto-0.4.0-a8c4ca0-SNAPSHOT.jar:/opt/ozone/share/ozone/lib/hadoop-ozone-filesystem-0.4.0-SNAPSHOT.jar
EOF
docker build -t vvaks/spark:2.4.0-3.3.0-SNAPSHOT .
docker push vvaks/spark:2.4.0-3.3.0-SNAPSHOT
}

spark_inject_hadoop_jars () {
    yes | cp -f hadoop/hadoop-dist/target/hadoop-3.3.0-SNAPSHOT/share/hadoop/common/* spark-2.4.0-bin-without-hadoop/jars/
    yes | cp -f hadoop/hadoop-dist/target/hadoop-3.3.0-SNAPSHOT/share/hadoop/common/lib/* spark-2.4.0-bin-without-hadoop/jars/
    yes | cp -f hadoop/hadoop-dist/target/hadoop-3.3.0-SNAPSHOT/share/hadoop/hdfs/* spark-2.4.0-bin-without-hadoop/jars/
    yes | cp -f hadoop/hadoop-dist/target/hadoop-3.3.0-SNAPSHOT/share/hadoop/hdfs/lib/* spark-2.4.0-bin-without-hadoop/jars/
    yes | cp -f hadoop/hadoop-dist/target/hadoop-3.3.0-SNAPSHOT/share/hadoop/mapreduce/* spark-2.4.0-bin-without-hadoop/jars/
shell
#!/bin/bash
duration=$1
sysctl net.core.busy_poll=$duration
sysctl net.core.busy_read=$duration
shell
# Single Topic
#   Pub/Sub Publisher
#   DataCatalog EntryGroup Owner
# Multiple Topic
#   Pub/Sub Editor

# authenticate
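# hedged sketch of the authentication step introduced by the comment above
# -- the key-file path and project id are placeholders, not from the original
gcloud auth activate-service-account --key-file=./service-account-key.json
gcloud config set project my-project-id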
shell
#!/bin/bash
sudo docker container exec -u www-data -d dockerlnmp_fpm_1 php /usr/share/nginx/html/rss/update.php --daemon
shell
        --test_data_file $FEVER_TEST_PATH \
        --test_result_path /ppl_results/$LM_MODEL_TYPE.$TEST_EXP_NAME.npy \
        --k $K \
        --exp_name $EXP_NAME_FOR_SAVE
    done
done
shell
cp ../../../bin/release/libLotkaVolterraPredator.so binaries/linux64/Predator.so &&
cp ../../LotkaVolterraPredator/data/modelDescription.xml . &&
if [ -f ../Predator.fmu ]; then
    rm ../Predator.fmu
fi &&
7za a ../Predator.zip . | cat > /dev/null &&
cd .. &&
mv Predator.zip Predator.fmu &&
echo "Created Predator.fmu" &&
shell
}

installK3sPrimaryNode() {
    PRIMARY_IP=$(getNodeIP $1)
    k3sup install --ip "$PRIMARY_IP" --context "$context" --user "$USER" --ssh-key "${PRIVATE_SSH_KEY_PATH}"
}

joinK3sNode() {
shell
#!/usr/bin/env bash
set -e
npm link -f
cd "$HOME"
rm -rf "$HOME/xxxxdc"
ts_project xxxxdc/yyy/zafooxxuuzz --yes
shell
cp Multeasymap_bench.demo RaiNyMore2_bench.demo build/data/demos
cd build
cmake -DCMAKE_BUILD_TYPE=Release ../DDNet-15.3.1
make -j $NUM_CPU_CORES
echo $? > ~/install-exit-status

# Make sure not to use/overwrite user config, only use our custom config
echo -e "add_path $HOME/config\nadd_path \$DATADIR\nadd_path \$CURRENTDIR" > storage.cfg
cd ..
echo "#!/bin/sh
shell
#!/bin/bash
cp -nr /oopt-gnpy/examples /shared
exec "$@"
shell
cd $(git rev-parse --show-toplevel)
watchexec -r \
    -w mesh-visualizer/src \
    -w mesh-visualizer/build.rs \
    -w mesh-visualizer/index.html \
    -w crates/blender-export-test/src/tests/suzanne.blend \
    ./mesh-visualizer/build.sh
shell
#!/usr/bin/env bash
#
# The SmartQuark Project
# Copyright (c) <NAME>, 2021.
# This software is licensed under MIT License
# See LICENSE
#

# JVM Variables
export GRAALVM_HOME=/Library/Java/JavaVirtualMachines/graalvm-ce-java11-21.1.0/Contents/Home
export JAVA_HOME=${GRAALVM_HOME}
shell
for d in */; do
    dir=${d%/}
    echo "Building $dir"
    cp -r $rootloc/ddn-agent $rootloc/sql $dir
    docker build -t djavorszky/ddn-agent-$dir:latest -t djavorszky/ddn-agent-$dir:$version $dir
shell
fi

docker run --network=lfda_default -e PG_USER="${PG_USER}" -e PG_PORT="${PG_PORT}" -e PG_HOST="${PG_HOST}" -e PG_PASS="${PG_PASS}" devstats-lfda db.sh psql postgres -c 'select 1' 1>/dev/null 2>/dev/null && exit 0

while true
do
    docker run --network=lfda_default -e PG_USER="${PG_USER}" -e PG_PORT="${PG_PORT}" -e PG_HOST="${PG_HOST}" -e PG_PASS="${PG_PASS}" devstats-lfda db.sh psql postgres -c 'select 1' 1>/dev/null 2>/dev/null
    r=$?
shell
#!/usr/bin/env bash
cp -v $PROJ_DIR/tests/data/config.cfg "$HOME/Library/Application Support/snipsync"
shell
export NODE_ENV="production"

# Use "exec" so node replaces the shell and receives signals directly
exec node dist/api/index.js
shell
## setup /etc/hosts.d
mkdir -p /etc/hosts.d \
    && [ ! -f "/etc/hosts.d/$customConf" ] \
    && cp /etc/hosts /etc/hosts.d/$customConf \
    && echo "copied initial custom.conf"

## download blocklist and compile
listURL='https://raw.githubusercontent.com/StevenBlack/hosts/master/alternates/gambling/hosts'
wget --no-verbose -O /etc/hosts.d/$blocklistConf $listURL \
    && cat /etc/hosts.d/*.conf > /etc/hosts \
    && systemctl restart systemd-networkd \
    && echo "hosts list updated"
fi
shell
    latest="4.0.1"
else
    # get latest version of codacy reporter from sonatype
    latest=$(curl "https://oss.sonatype.org/service/local/repositories/releases/content/com/codacy/codacy-coverage-reporter/maven-metadata.xml" | xpath -e "/metadata/versioning/release/text()")
fi

echo "Downloading latest version $latest of codacy reporter from sonatype"

# download latest assembly jar
mvn dependency:get dependency:copy \
    -DoutputDirectory=$HOME \
    -DoutputAbsoluteArtifactFilename=true \
    -Dmdep.stripVersion=true \
    -DrepoUrl=https://oss.sonatype.org/service/local/repositories/releases/content/ \
shell
    --query=/publisher/lib/echo.sparql \
    --results=RDF > "${TMPDIR}/pre_prod.rdf"

${JENA_ARQ} \
    --data ${TMPDIR}/external.rdf \
    --data ${TMPDIR}/pre_prod.rdf \
    --query=/publisher/lib/echo.sparql \
    --results=RDF > ${TMPDIR}/prod.rdf
shell
#!/bin/bash
DOCKER_PATH=/home/vzwingmann/budget-Prod

echo ""
echo "## Updating docker images ##"
docker-compose -f $DOCKER_PATH/docker-compose.yml pull
echo ""
shell
# `helm list -A`
# `helm delete <OLD_INSTALLATION>`
apply() {
    helm repo add jetstack https://charts.jetstack.io
    helm repo add prometheus-community https://prometheus-community.github.io/helm-charts
    helm repo add karpenter https://awslabs.github.io/karpenter/charts
    helm repo update
    helm upgrade --install cert-manager jetstack/cert-manager \
        --create-namespace \
        --namespace cert-manager \
        --version v1.1.0 \