lang (stringclasses · 10 values) | seed (stringlengths · 5–2.12k)
---|---|
shell | #!/bin/bash
ifconfig sit0 up
ifconfig sit0 inet6 tunnel fdf8:f53e:61e4::18
ifconfig sit1 up
ifconfig sit1 inet6 add fdf8:f53e:61e4::18/64
route -A inet6 add ::/0 dev sit1 |
shell | sudo find /run/containers/storage/overlay-containers/ -path "*/userdata/shm" -exec umount {} \;
sudo umount /run/netns/cni-*
sudo -E bash -c "rm -r /var/lib/containers/storage/overlay*/*"
sudo -E bash -c "rm -r /var/lib/cni/networks/*"
sudo -E bash -c "rm -r /var/run/kata-containers/*"
sudo rm -rf /var/lib/rook
sudo -E bash -c "rm -rf /var/lib/etcd"
sudo systemctl daemon-reload
sudo systemctl is-active crio && sudo systemctl stop crio
sudo systemctl is-active containerd && sudo systemctl stop containerd
sudo systemctl is-enabled crio && sudo systemctl restart crio
sudo systemctl is-enabled containerd && sudo systemctl restart containerd |
shell | "$CLOUD_REBUILD" DmSoft 32 dll release same |
shell |
echo "::group::Simple test"
$DIR/simple/test.sh
echo "::endgroup::"
echo
echo "::group::OOM test" |
shell | # If <argument> is blank or another option, print an error message and exit
# with status 1.
_require_argument() {
# Set local variables from arguments.
#
# NOTE: 'local' is a non-POSIX bash feature and keeps the variable local to
# the block of code, as defined by curly braces. It's easiest to just think
# of them as local to a function.
local _option="${1:-}"
local _argument="${2:-}"
|
shell | #!/bin/bash
# Script to resample all audio to 16KHz in advance to save time
# <NAME> 2019, MIT License
#
# Requires GNU parallel, please ensure you cite this in your research as they request
set -e #Exit on first error
cd "$1"
# ffmpeg cannot safely overwrite its own input, so write to a temp file and move it back
find ./ -type f -name '*.wav' | parallel --will-cite --progress 'ffmpeg -hide_banner -y -loglevel error -i {} -af aresample=resampler=soxr -ar 16000 {}.tmp.wav && mv {}.tmp.wav {}'
echo "Done!"
|
shell | echo " -c [CHANNEL] Channel to use (stable/beta). Defaults to stable."
echo " -d Debug mode"
echo " -u Uninstall instead"
exit 0
;;
: )
echo "Invalid option: $OPTARG requires an argument" 1>&2
;;
esac
done
SUDO=''
if [ "$(id -u)" != "0" ]; then |
shell | export MLX5_SINGLE_THREADED=1
export MLX_QP_ALLOC_TYPE="HUGE"
export MLX_CQ_ALLOC_TYPE="HUGE"
if [ "$#" -ne 1 ]; then
blue "Illegal number of parameters"
blue "Usage: ./run-machine.sh <machine-id>"
exit
fi
# With link-time optimization, main exe does not get correct permissions
chmod +x main
drop_shm |
shell | CONTAINERID=$(basename $(cat proc/self/cpuset) | cut -c 1-12)
LOG="/logs/log.txt"
case $1 in
sleep)
trap "echo $DATE $NAME $CONTAINERID Received SIGTERM >> $LOG; exit 0" SIGTERM
echo "$DATE $NAME $CONTAINERID Starting" >> $LOG
sleep 10
touch /tmp/ready |
shell | mkdir -p ./dist/
cp ./dist/index.html ../smith-app/
cp ./dist/*.js ../smith-app/
|
shell | ## because of the way loading 2html.vim behaves, I had to do all that
## nasty redirection.
done
exit 0
}
## Scratch Paper Section
# -U NONE -g -f --cmd ":set guipty" \
|
shell |
if [ -z "$launchScript" ]
then
launchScript='comp-generic.sh'
compName=generic
fi
. $componentDir/$launchScript
} |
shell | <gh_stars>1-10
#!/usr/bin/env bash
wget http://download.tensorflow.org/models/vgg_16_2016_08_28.tar.gz
tar -zxf vgg_16_2016_08_28.tar.gz
rm vgg_16_2016_08_28.tar.gz |
shell | # right foot sensor has X pointing forward, Y pointing leftward, Z pointing upward
BUFFER=30
REFRESH_RATE=10
rxplot --period=$BUFFER --buffer=$BUFFER --refresh_rate=$REFRESH_RATE --legend='fx LLEG','fx RLEG','fy LLEG','fy RLEG','fz LLEG','fz RLEG' /forceLLEG/data[0],/forceRLEG/data[0] /forceLLEG/data[1],/forceRLEG/data[1] /forceLLEG/data[2],/forceRLEG/data[2] &
|
shell | echo "Running SSO config Tool against Lightwave release"
VMIDENTITY_LIB_DIR=/opt/vmware/jars
LOG_CONFIG=$VMIDENTITY_LIB_DIR/../share/config/idm/ssoconfig.log4j2.xml
fi
CLASSPATH=$VMIDENTITY_LIB_DIR/*:$SAMLTOKEN_JAR_DIR:.:*
#unset JAVA_TOOL_OPTIONS
$JAVA_BIN $JAVA_OPTS \
-Dlog4j.configurationFile=$LOG_CONFIG \
-Dvmware.log.dir=/var/log/vmware/sso \
-cp $CLASSPATH \ |
shell | #DEVICE_TARGET=${DEVICE_TARGET:-amdgcn-amd-amdhsa}
DEVICE_ARCH=${DEVICE_ARCH:-sm_35}
#DEVICE_ARCH=${DEVICE_ARCH:-gfx803}
|
shell | else
mysql_str="--host=$mysql_host --user=$mysql_user"
fi
database=philiusData
echo "Creating database $database"
# echo "mysql $mysql_str < $schema_dir/$database.sql"
mysql $mysql_str < $schema_dir/$database.sql |
shell | #!/bin/bash
floyd run --gpu --env tensorflow-1.7 "python -m src.train --output-dir /output --model-dir /output/checkpoint --micro-step 10000" |
shell |
for i in {16817..16818}
do
qdel $i
done
|
shell | rm -rf var
./cluster.py configure
sudo supervisorctl start cluster:consul cluster:vault
./cluster.py autovault
sudo supervisorctl start cluster:nomad
|
shell | kind: Namespace
metadata:
name: prometheus
!EOF!
flux create helmrelease prometheus \
--interval=1h \
--source=HelmRepository/prometheus \
--chart=kube-prometheus-stack \
--chart-version='>34.0.0' \
--crds=CreateReplace \
--target-namespace=prometheus \
--values="${SCRIPTPATH}/prometheus-values.yaml" \
--export |
shell | #!/bin/sh
docker exec -it strapi-docker_db_1 mongo admin -u mongouser -p 'GozVHboWSdvedss' --eval "db.getSiblingDB('strapi')['core_store'].deleteMany({'key': 'plugin_content_manager_schema'})"
docker-compose down
docker-compose up -d
|
shell |
echo -e "Extracting features with vanilla models...\n"
. slurm/osvm/extract_features.sh {16904..16909} # vanilla
echo -e "\n\n\n\n" |
shell | #!/bin/bash
echo "well !! Hello there !" |
shell | Enter: ';
read var;
if [ "$var" -eq "1" ]; then
APPLY_THEMES
fi
if [ "$var" -eq "2" ]; then
BACKUP
exit 0 |
shell | <gh_stars>1-10
mono ".paket\paket.exe" "restore" |
shell | execshell "mkdir -p _build" && execshell "pushd _build"
execshell "$CMAKE .. -DCMAKE_BUILD_TYPE=$build_type -DCMAKE_PREFIX_PATH=$prefix
-DCMAKE_INSTALL_PREFIX=$prefix -DCMAKE_INSTALL_LIBDIR=lib $cmake_exdefine"
execshell "make -j$J"
[ "X$install_cmd" = "X" ] && execshell "make install" || execshell "$install_cmd"
[ "X$cmake_postprocess" != "X" ] && execshell "$cmake_postprocess"
execshell "popd"
[[ ! -z "$cmake_list_dir" ]] && execshell "popd" |
shell | #!/usr/bin/env bash
node -e "const peers = Object.entries(require('./package.json').peerDependencies || {}).map(d => d.join('@')).join(' '); if (peers.length) process.stdout.write('yarn add -P --no-lockfile ' + String(peers));" | bash |
shell | #!/bin/bash
TOP=$(cd $(dirname $0);pwd)
cd "$TOP/content"
python3 ../scripts/map_build.py "$@" |
shell | #!/bin/bash
export DEBIAN_FRONTEND=noninteractive
apt-get update &&
apt-get install -y python-pip &&
pip install azure-cli applicationinsights &&
pip install azure-batch &&
pip install azure-mgmt-storage &&
pip install setuptools &&
pip install azure &&
pip install configparser &&
|
shell |
cat <<EOF >> /etc/sysctl.conf
net.bridge.bridge-nf-call-ip6tables = 1
net.bridge.bridge-nf-call-iptables = 1
EOF
[[ $(command -v docker) ]] && systemctl enable docker
if [[ -n ${SUDO_USER} ]] && getent group docker; then usermod -aG docker "${SUDO_USER}"; fi
#curl -LsS https://raw.githubusercontent.com/openvswitch/ovs/master/utilities/ovs-docker -o /usr/local/bin/ovs-docker && chmod a+x /usr/local/bin/ovs-docker || true |
shell | make clean
make all \
CC="mpic++ ${CXXFLAGS} ${CPPFLAGS} ${LDFLAGS}" \
CXX="${CXX} ${CXXFLAGS} ${CPPFLAGS} ${LDFLAGS}"
mkdir -p "${PREFIX}/bin"
cp \
buildG* \
fullsimplify \
parsimplify \
disco* \ |
shell |
# install Conda virtual environment
conda env create -f environment.yml
eval "$(conda shell.bash hook)"
conda activate Hysia
# call build
cd ..
bash scripts/build.sh
|
shell | # Compile the module
java -jar ${STRJ} -i test09.str -la org.test09_lib -p test09 -o test09 || exit 1
# Compile the module's java
javac -cp .:${STRJ} test09/*.java || exit 1
# Run the program
java -cp .:${STRJ} -Xmx1024m test09/test09 || exit 1
rm -rf org test09 |
shell | # Author: <EMAIL>
# Date: 2018/05/14
WORK_DIR=/data/app
mkdir -p $WORK_DIR
pushd $WORK_DIR
wget --no-check-certificate -c --header "Cookie: oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/8u171-b11/512cd62ec5174c3487ac17c61aaa89e8/jdk-8u171-linux-x64.rpm
rpm -ivh jdk-8u171-linux-x64.rpm
|
shell | ros_melodic_desktop_px4
# run simulation
docker exec \
ros_melodic_desktop_px4 \ |
shell | # ApiManagementCreateAILogger
RESOURCE_GROUP="myresourcegroup"
SERVICE_NAME="myservice"
LOGGER_NAME="mylogger"
az resource create --id /subscriptions/$SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP/providers/Microsoft.ApiManagement/service/$SERVICE_NAME/loggers/$LOGGER_NAME --api-version 2019-01-01 --is-full-object --properties '
{
"properties": {
"loggerType": "applicationInsights",
"description": "adding a new logger",
"credentials": {
"instrumentationKey": "11................a1"
}
}
}
|
shell | USE_POOL_PASSWORD "${USE_POOL_PASSWORD:-no}"
"$progdir/update-secrets"
"$progdir/update-config"
# The master will crash if run as pid 1 (bug?) plus supervisor can restart
# it if it dies, and gives us the ability to run other services.
exec /usr/bin/supervisord -c /etc/supervisord.conf
|
shell |
echo -e "\e[1m\e[32mRunning test file -> $filename\e[0m"
bats -p $file
done
}
#dgr
command -v rkt >/dev/null 2>&1 || { echo >&2 "rkt not found in path"; exit 1; } |
shell | <gh_stars>0
./vendor/bin/premake/premake5 $1
|
shell |
DADAPUSH_CHANNEL_TOKEN=$1
if [ -z "$DADAPUSH_CHANNEL_TOKEN" ]
then
echo "No channel token specified"
fi
# Host |
shell | echo -en "\033[00m"
}
function _in {
# credits: https://stackoverflow.com/a/8574392
local e match="$1"
shift
for e; do [[ "$e" == "$match" ]] && return 0; done
return 1 |
shell |
*
-
+
/
$(( 1 * 1))
$(( 1 - 1))
$(( 1 + 1)) |
shell | #!/bin/bash
export LDBC_DATA_DIRECTORY=${DATAGEN_OUTPUT_DIR}/csv/raw/composite-merged-fk
echo "=====> spark-concat <====="
./spark-concat.sh ${LDBC_DATA_DIRECTORY}
echo "=====> load <====="
./load.sh ${LDBC_DATA_DIRECTORY} --no-header
echo "=====> transform <====="
./transform.sh
echo "=====> export <====="
./export.sh
echo "=====> factor tables <=====" |
shell | local bucket="$1"
local key="$2"
local versionID="$3"
aws s3api get-object --bucket "$bucket" --key "$key" --version-id "$versionID" "$tmpfile"
aws s3api put-object --bucket "$bucket" --key "$key" --body "$tmpfile"
STATE="${bucket}/${key}"
dynamodb_key="{ \"LockID\": { \"S\": \"${STATE}-md5\" } }" |
shell | #!/usr/bin/env bash
sudo mount -t cifs -o user=frederico //192.168.0.11/share /mnt/share
|
shell | /debootstrap/debootstrap --second-stage
cat <<EOT > /etc/apt/sources.list
# deb http://archive.debian.org/debian/ $distro main
deb http://archive.debian.org/debian/ $distro main non-free contrib
deb http://archive.debian.org/debian-security/ $distro/updates main non-free contrib |
shell | case "$_config_IncludeConfig_net_mAdminState-$_config_IncludeConfig_std_mAdminState" in
enabled-enabled)
INCLUDECONFIG="config-OK"
;;
enabled-disabled)
INCLUDECONFIG="config-std-DISABLED"
STATE=$STATE_WARNING
;; |
shell | #!/bin/bash
python3 gen_plot_list.py plotclip_out/ cleanmetadata_out/ LAS_FILES .las > plot_list.json
makeflow --jx main_workflow_phase2.jx --jx-args plot_list.json "$@"
|
shell | # connect to your queue manager.
###### Cleanup from previous runs
# Kill any old instances of the application
ps -ef|grep gradle | grep sample3.Application | awk '{print $2}' | xargs kill -9 >/dev/null 2>&1
# and try to clear the queue (assuming it's a local queue manager)
echo "CLEAR QLOCAL(DEV.QUEUE.1)" | runmqsc -e QM1 >/dev/null 2>&1
######
# Now run the program. Build using the gradle wrapper in parent directory
cd ../..
./gradlew -p samples/s3 bootRun
|
shell | #!/bin/sh
echo "Hello world"
sleep 1
echo "coucou"
|
shell | git fetch
git checkout 8af9b8c2b889d80c22d6bc26ba0df1afb79a30db
./configure -prefix=${CITYHASH_INSTALL_DIR} --enable-sse4.2
make -j ${NUM_PROC} CXXFLAGS="-g -O3 -msse4.2"
make install
tar cvfz ${CITYHASH_INSTALL_DIR}.tar.gz --exclude="./bin" -C ${CITYHASH_INSTALL_DIR} .
cd ${WORK_DIR} |
shell | CUDA_VISIBLE_DEVICES=0 python render.py \
weights/pretrain/celeba_noview/generator.pth \
--output_dir imgs \
--curriculum CelebA_ShadeGAN_noview \
--seeds 0 5 8 43 \
--num_steps 12 \
--sample_dist fixed \
--psi 0.5 \
--delta 0.06491 \
--image_size 256 \
--ema \
--rotate \
--relight
|
shell | if [ -z $docker_template ]
then
docker_template="Dockerfile.Template"
fi
if [ -z $(docker images | grep log-benchmark) ]
then
rm Dockerfile || true
# Replace to project name
sed "s/{{PROJECT_NAME}}/$project_name/g" templates/$docker_template > Dockerfile
docker build -t $container_name .
rm Dockerfile
fi |
shell | tokei ../android/nasa/src/main/kotlin --output json > ./build/tokei/android_nasa.json
tokei ../android/notes/src/main/kotlin --output json > ./build/tokei/android_notes.json
tokei ../android/pixelsort/src/main/kotlin --output json > ./build/tokei/android_pixelsort.json
tokei ../android/settings/src/main/kotlin --output json > ./build/tokei/android_settings.json
tokei ../android/game/src/main/kotlin --output json > ./build/tokei/android_game.json
# ios metrics with tokei
tokei ../ios/Playground/UseCases/Fibonacci --output json > ./build/tokei/ios_fibonacci.json
tokei ../ios/Playground/UseCases/Nasa --output json > ./build/tokei/ios_nasa.json
tokei ../ios/Playground/UseCases/Notes --output json > ./build/tokei/ios_notes.json
tokei ../ios/Playground/UseCases/Pixelsort --output json > ./build/tokei/ios_pixelsort.json |
shell | }
function build() {
pushd $SCRIPT_DIR
info "Building test runner..."
dotnet build -c Release
}
function start_services() {
info "Starting dependencies docker-compose $LOADDEV up -d --force-recreate..."
docker-compose $LOADDEV up -d --force-recreate |
shell | read -p "Replace the installed version with shiny-server ${SHINY_VERSION}? (y/N)" -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]];
then
echo "Aborted by user." |
shell | #!/bin/bash
for i in $(seq 1 `head -1 input.txt|wc -c`); do
sed "s/^\(.\{$(($i-1))\}\)./\1./" < input.txt | xargs -I X -n 1 bash -c '(echo -n "X ";grep -c X input.txt)'|grep 2| tr -d '.'
done |
shell | #!/bin/bash
killall node.py
rm -rf ../../logs/*
|
shell | if [ -a _build/sanitize.sh ]
then _build/sanitize.sh
fi
rm -rf _build
echo -e "\e[1m\e[33m- OCamlBuild -> main.ml\e[0m" |
shell | cat output.json | jd
|
shell | pip install openTSNE
pip install matplotlib
pip install pandas
pip install seaborn
# run script and save verbose into file
|
shell | set -o nounset
set -o pipefail
SCRIPT_ROOT=$(dirname ${BASH_SOURCE})/..
CODEGEN_PKG=${CODEGEN_PKG:-$(cd ${SCRIPT_ROOT}; ls -d -1 ./vendor/k8s.io/code-generator 2>/dev/null || echo ../code-generator)}
${CODEGEN_PKG}/generate-groups.sh "deepcopy,client,informer,lister" \
github.com/argoproj/argo-rollouts/pkg/client github.com/argoproj/argo-rollouts/pkg/apis \ |
shell | rk_image_process \
-input="/dev/video0 /dev/video1 /dev/video2 /dev/video3" \
-input_width 1280 -input_height 720 -input_format 4 \
-width 2560 -height 1440 -format 6 \
-disp -drm_conn_type="DSI" -drm_raw8_mode \
-processor="none"
|
shell | rustc part-1.rs
./part-1 < input.txt
rustc part-2-naive.rs
time ./part-2-naive < input.txt
rustc part-2.rs
time ./part-2 < input.txt
|
shell | apt update
}
function install_k8s {
print_g "Installing K8s"
/usr/bin/curl -sfL https://get.k3s.io | /bin/sh -
/usr/local/bin/kubectl cluster-info |
shell | echo PBS: execution mode is $PBS_ENVIRONMENT
echo PBS: job identifier is $PBS_JOBID
echo PBS: job name is $PBS_JOBNAME
echo PBS: job number is $JOBNUM
echo PBS: logfile is $LOGFILE
echo PBS: node file is $PBS_NODEFILE |
shell |
# Which dump to use
FILENAME="/2_backups/prod1.n45.wa.bl.uk/w3act_postgres/w3act.pgdump-20211201"
echo "Downloading $FILENAME to ${W3ACT_DUMPS_DIR}"
curl -o ${W3ACT_DUMPS_DIR}/w3act_dump.sql "http://hdfs.api.wa.bl.uk/webhdfs/v1${FILENAME}?op=OPEN&user.name=access"
|
shell |
cd "$(dirname "$0")"
echo "Leaving so soon ? Please file an issue if you encountered any difficulties."
pm2 delete Pulsar
echo "Do you want to uninstall pm2 [Y/n]?"
read -r choice
if [ "$choice" = "Y" ] || [ "$choice" = "y" ]
then
npm uninstall pm2 -g
echo "Successfully uninstalled pm2."
else
echo "Skipped." |
shell | then
IFS=' ';
read -r -a array < ${LAMMPSSRC}/version.h ;
version=`echo ${array[2]} ${array[3]} ${array[4]} | sed s/\"//g`;
cp -av ${LAMMPSDOXYFILE} ${DOXYFILE} ;
sed -i "s/LAMMPS_VERSION/${version}/g" ${DOXYFILE} ;
cp -av ${LAMMPSDEVELOPERDOXFILE} ${DEVELOPERDOXFILE} ;
sed -i "s/LAMMPS_VERSION/${version}/g" ${DEVELOPERDOXFILE} ;
${DOXYGEN} ${DOXYFILE} ; |
shell | cat Castalia-Trace.txt | grep "Aggregated Value" > output/SinkAggrValues.txt
cat Castalia-Trace.txt | grep "Sensed" | grep "App_8" > output/Node8Sensed.txt
|
shell | fi
if [ ! -d "$basedir/import/commons-compress-1.20" ]; then
mkdir -p commons-compress-1.20
wget --directory-prefix=commons-compress-1.20 https://repo1.maven.org/maven2/org/apache/commons/commons-compress/1.20/commons-compress-1.20.jar
fi
if [ ! -d "$basedir/import/org-json" ]; then
mkdir -p org-json |
shell | for FILE in *; do
if [[ "$FILE" == *".cpp" ]]; then
echo "Compiling $FILE...";
g++ "$FILE" -o "$FILE.exe"
echo "Running $FILE.exe...";
./"$FILE.exe"
fi |
shell | brew upgrade
# Install useful binaries
brew install curl
brew install coreutils
brew install tmux
brew install node
brew install ruby
brew install wget |
shell | tar xf ${TARBALL}
}
function build() {
sed -i '/skipping/d' util/packer.c &&
./configure --prefix=/usr \
--disable-static \ |
shell | assertEquals "$expected" "$actual"
}
testAuthBasicFileContentMissingParams() {
expected=""
actual=$(nginx_cfg_auth_basic_file_content)
assertEquals "$expected" "$actual"
}
testAuthBasicFile() { |
shell | #!/bin/bash
docker build -t localreg:5000/redis-commander:0.4.2 .
|
shell | CMakeArch=$4
for option in "${Options[@]}"
do
option_up=`echo $option | tr a-z A-Z`
if [[ $option_up == *"DISABLE"* ]]; then
new_option_up=${option_up/DISABLE_/}
CMAKE_OPTION="-DKokkos_ENABLE_${new_option_up}=OFF"
else |
shell | # This breaks VCS, since --as-needed requires that you order dynamic libraries
# in a particular way (namely, in the reverse order of the dependency graph)
# To fix this, we need to add --no-as-needed to the list of linker options
# when building with VCS
# See http://stackoverflow.com/questions/42113237/undefined-reference-when-linking-dynamic-library-in-ubuntu-system
VCS_FLAGS="-LDFLAGS -Wl,--no-as-needed"
### Add local overrides for libraries that vcs/dc/icc need
LD_PATH="$HOME/.local/lib/"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${LD_PATH}"
### Helper alias
alias vcs="vcs -full64 ${VCS_FLAGS}" |
shell | #!/bin/bash
# Runs as root (e.g. EC2 user data); a bare 'sudo su -' here would open a new shell instead of elevating the rest of the script
yum install -y httpd
chkconfig --level 345 httpd on
cat <<EOF > /var/www/html/index.html
<html>
<body>
<p>hostname is: $(hostname)</p>
</body>
</html>
EOF
chown -R apache:apache /var/www/html |
shell | go-bindata -o bindata/bindata.go -pkg bin templates/...
go-bindata -nocompress -o nocomp/bindata.go -pkg nocomp templates/...
go-bindata -nometadata -o nometa/bindata.go -pkg nometa templates/...
go-bindata -nocompress -nometadata -o nocompmeta/bindata.go -pkg nocompmeta templates/...
echo "Done go-bindata..." |
shell | TERMINATE=";"
# BEELINE="beeline -u 'jdbc:hive2://sandbox-hdp.hortonworks.com:2181/;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2' --showHeader=false --outputformat=tsv2 --silent"
DEFAULT_NB_DDL_SNAPSHOTS="7"
|
shell | if [ -z "$CI_TEST_OS" ]; then
echo "Env var CI_TEST_OS not supplied!"
exit 1
fi
echo "Starting $(basename $0) with CI_TEST_OS=$CI_TEST_OS"
docker run \
--volume $PWD:/host \
--workdir /host/continuous-integration \
$CI_TEST_OS \ |
shell |
create_tar () {
FILE=$1
DIR=$2
tar cf $FILE --sparse -C $DIR data
compress_tar $FILE
}
create_orig $TEST_DIR/orig
create_tar $TEST_DIR/orig.tar $TEST_DIR/orig
|
shell | # SCION upgrade version 0.9
wget https://raw.githubusercontent.com/netsec-ethz/scion-coord/master/scion_upgrade_script.sh -O upgrade.sh
chmod +x upgrade.sh
|
shell | SCREEN2="radio"
SCREEN3="roles"
SCREEN4="trash"
# Changedir
cd $HOMEPATH
# Stop running bot modules
./stop.sh
|
shell | if [ "$TRAVIS" = "true" ]; then
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
fi
export PATH=${SOURCE_DIR}/Tools/apache-ant/bin:$JAVA_HOME/bin:$PATH
fi
cd ${SOURCE_DIR} |
shell |
dir=$2
mkdir -p $dir
[ -f path.sh ] && . ./path.sh
srcdict=$1
[ ! -r $srcdict ] && echo "Missing $srcdict" && exit 1 |
shell | export TIME="$(${CMD_DATE} +%H-%M-%S)"
export TIMESTAMP="${YEAR}-${MONTH}-${DAY}-${TIME}"
# Pre-defined backup status
export BACKUP_SUCCESS='NO'
#########
# Define, check, create directories.
#
# Backup directory.
export BACKUP_DIR="${BACKUP_ROOTDIR}/ldap/${YEAR}/${MONTH}"
export BACKUP_FILE="${BACKUP_DIR}/${TIMESTAMP}.ldif"
# Find the old backup which should be removed.
export REMOVE_OLD_BACKUP='NO' |
shell |
# Clean up our mess.
"$(DIR)/recursive_umount.sh" "$work"
rm -rf "$work"
unregister_temp_file "$work" |
shell | #!/bin/bash
export CUDA_VISIBLE_DEVICES=0
dataset=cifar10
model=resnet18
xw=1
lx=0.006
ly=0.05
run_hbar -cfg config/general-hbar-xentropy-${dataset}.yaml -slmo -xw $xw -lx ${lx} -ly ${ly} -sd 444 \
-mf ${dataset}_${model}_xw_${xw}_lx_${lx}_ly_${ly}.pt
|
shell | ./lmv -r 90 -a 90 F:F+F-F-FF+F+F-F F+F+F+F
|
shell | arr=(${(s/_/)id})
echo "contest: $arr[1]"
echo "problem: $arr[2]"
if [[ -z "$arr[1]" || -z "$arr[2]" ]]; then
echo -e "\x1b[31mUsage: $0 abc001_a\x1b[m"
return 1 |
shell | #!/bin/bash
docker-compose up -d --build sales-order-system-client |
shell | # ================================================================
# Assertions
# ================================================================
./tests/run_assertions.sh
# ================================================================
# After
# ================================================================
./tests/tear_down.sh
failed=`grep "^[[:alpha:]]" ./test_results | wc -l` |
shell | # CURR_DIR=$(pwd)
# cd $HOME/my_website && bash setup.sh
# cd $CURR_DIR
# clear conda cache
conda clean -y -t |
shell | <gh_stars>0
php artisan migrate
echo "ok tous est bon"
|
shell | #!/bin/sh
# Requires watchr: https://github.com/mynyml/watchr
watchr -e 'watch(".*\.less$") { system("sless=`find -name style.less` && scss=`echo \"$sless\" | sed -e\"s/style.less/style.css/g\"` && rm -rf \"$scss\" && lessc --compress \"$sless\" > \"$scss\" && notify-send \"Watchr Less\" \" Just compiled style.less > style.css\" -t 1 -i notification-message-im ") }'
|
shell | -evalue 1e-25 \
-num_threads 8 \
-max_target_seqs 1 \
-outfmt 6
}
export -f RunBlastx
export -f RunBlastn
|
shell | docker stop centos7-systemd-nginx-sclo-php72-test && docker rm centos7-systemd-nginx-sclo-php72-test
|