lang
stringclasses 10
values | seed
stringlengths 5
2.12k
|
---|---|
shell | awk -F "," '/,,,,,,,,,/{flag=1; a=$1; next} BEGIN{OFS= ","} {$1=a FS $ (1) ; print} ; /,,,,,,,,,/{flag=0} ' $1 > $2
sed -i.old '1s;^;Category;' $2
|
shell | #!/bin/sh
# Build the "hellojee_distributable_ha" image from the Dockerfile in the
# current directory (HA-ready distributable build of the HelloJEE app).
docker build -t hellojee_distributable_ha .
|
shell |
# Install bundled fonts and zsh configuration into the current user's home.
# NOTE(review): `info` is a logging helper defined elsewhere in this setup
# script — confirm it is sourced before this block runs.
info "🔤 Adding fonts..."
# Create fonts folder if not exists
mkdir -p ~/.local/share/fonts
cp -rf ./fonts/* ~/.local/share/fonts
info "⌨️ Adding zsh files..."
# Copy the main zsh config and its supporting .zsh directory into $HOME.
cp ./zsh/.zshrc ~/
cp -rf ./zsh/.zsh ~/
|
# Append the Streamlit [server] settings to the app config.
# printf is used instead of echo because echo's handling of "\n" escapes is
# not portable: dash/POSIX sh interprets them, bash by default prints them
# literally — which would corrupt the generated TOML.
# NOTE(review): assumes $PORT is exported by the hosting platform — confirm.
printf '\n[server]\nheadless = true\nport = %s\nenableCORS = false\n' "$PORT" >> .streamlit/config.toml
|
shell | r18,10,64 \
r34,20,32 \
r50,20,16 \
r101,24,12 \
r152,30,8 ;do
i=( ${m//,/ } )
if [ ! -e model_${i[0]}/final.pth ];then
python3 aqc_training.py --net ${i[0]} --adam --n_epochs ${i[1]} --batch_size ${i[2]} model_${i[0]} 2>&1 |tee log_${i[0]}.txt
fi
done
|
shell | echo "运行在服务提供者"
vip=192.168.56.105
case $1 in
start)
echo "Start LVS"
ifconfig eth0:0 $vip broadcast $vip netmask 255.255.255.255 up
route add -host $vip dev eth0:0
echo "1" > /proc/sys/net/ipv4/conf/lo/arp_ignore
echo "2" > /proc/sys/net/ipv4/conf/lo/arp_announce
echo "1" > /proc/sys/net/ipv4/conf/all/arp_ignore
echo "1" > /proc/sys/net/ipv4/conf/all/arp_announce
;;
stop)
echo "Stop LVS"
route del -host $vip dev eth0:0 |
shell | sh nginx.sh
sh redis.sh
# Final banner: the PHP/NGINX/Redis installer scripts above have all run.
echo "INSTALL END:----"
echo "[ PHP NGINX REDIS]"
|
shell |
# Load shared helpers/config from the directory this script lives in.
# NOTE(review): AWS_ECR_URI is presumably exported by shared.sh — confirm;
# it is otherwise unset and the tag/push steps would fail.
source "${BASH_SOURCE%/*}/shared.sh"
# Abort on first failure (set after the source, so errors inside shared.sh
# are deliberately not trapped).
set -e
# Build from the repository root, one level up from this script's directory.
cd ..
sudo docker build -t crux .
# Tag and publish the image to the configured ECR repository.
sudo docker tag crux ${AWS_ECR_URI}
sudo docker push ${AWS_ECR_URI}
|
shell | <gh_stars>1-10
#!/bin/bash
# Run the local PhyML driver once for each patient found in a directory.
#
#   $1 - directory containing per-patient "<id>.list" files.
#
# Patient IDs are the numeric stems of the *.list filenames; sort -u collapses
# duplicates so each patient is processed exactly once.
echo "$1"
# Globbing replaces the fragile `ls` parsing; grep -Eo (the supported spelling
# of the deprecated `egrep --only`) extracts the numeric IDs.
for patient in $(printf '%s\n' "${1}"/*.list | grep -Eo '[0-9]+\.list' | grep -Eo '[0-9]+' | sort -u)
do
    echo "$patient"
    ./runListLocallyPhyML.bash "${1}" "$patient"
done
shell | sudo apt install fail2ban -y
# https://docs.mongodb.com/manual/tutorial/install-mongodb-on-ubuntu/
# Trust MongoDB's package-signing key, register the 3.6 repo for Ubuntu
# 16.04 (xenial), then install pinned shell/tools packages.
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2930ADAE8CAF5059EE73BB4B58712A2291FA4AD5
# 16.04
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.6 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.6.list
sudo apt update -y
sudo apt install mongodb-org-shell=3.6.5 mongodb-org-tools=3.6.5 -y
# Install some OS dependencies:
sudo apt-get install --no-install-recommends -y -q build-essential git python3-dev python3-venv
sudo apt-get install --no-install-recommends -y -q python3-pip wget |
shell | # -----------------------------------------------------------------------------
# CppAD: C++ Algorithmic Differentiation: Copyright (C) 2003-16 <NAME>
#
# CppAD is distributed under the terms of the
# Eclipse Public License Version 2.0.
#
# This Source Code may also be made available under the following
# Secondary License when the conditions for such availability set forth |
shell |
# Resolve the directory containing this script so sibling scripts can be
# invoked regardless of the caller's working directory.
ABSOLUTE_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# NOTE(review): <PASSWORD> / XXX are redacted placeholders — supply real
# secrets via a secure channel (CI secret store / environment), never by
# committing them to this file.
export KEY_PASSWORD=<PASSWORD>
export ENCRYPTION_PASSWORD=<PASSWORD>
export HOCKEY_API_TOKEN=XXX
export HOCKEY_APP_ID=XXX
# Decrypt the repository's encrypted files using the passwords exported above.
$ABSOLUTE_PATH/decrypt_files.sh
|
shell | #!/bin/bash
# DomainNet domains used as held-out targets for zero-shot + domain
# generalization training.
ZSLDG=('clipart' 'infograph' 'painting' 'quickdraw' 'sketch')
# $1 - dataset root directory.
DATA=$1
# Train once per target domain. Expansions are quoted so a data root
# containing spaces cannot split into multiple arguments.
for target in "${ZSLDG[@]}" ; do
    python -m torch.distributed.launch --nproc_per_node=1 main.py --zsl --dg --target "$target" --config_file "configs/zsl+dg/$target.json" --data_root "$DATA" --name "$target-zsldg"
done
|
shell | [[ -z "$CNFDIR" ]] && CNFDIR="$DIR/cnf"
function usage () {
echo "Usage : $0 [COMMAND] [[SERVER]] [OPTIONS]
Config helper tool+library
CLI Usage:
Commands: |
shell |
# Convert a Unix epoch timestamp to "YYYY-MM-DD HH:MM:SS TZ".
#
#   $1 - epoch seconds, optionally wrapped in double quotes (stripped).
#
# GNU date (Linux) and BSD date take the epoch via different flags, so we
# branch on the kernel name reported by uname.
function TD_unix2human() {
    local os_name
    os_name=$(uname -s)
    # Strip any surrounding double quotes from the input value.
    local ival
    ival=$(printf '%s' "${1}" | tr -d '"')
    if [[ "$os_name" == "Linux" ]];then
        # GNU date: epoch is passed as -d @SECONDS.
        date +'%F %T %Z' -d "@$ival"
    else
        # BSD date: epoch is passed as -r SECONDS.
        date -r "$ival" +'%F %T %Z'
    fi
}
VestRound=`tonos-cli run $(cat $KEYS_DIR/depool.addr) getParticipantInfo "{\"addr\":\"$(cat $KEYS_DIR/$(hostname -s).addr)\"}" --abi $DSCs_DIR/DePool.abi.json| sed -e '1,/Succeeded./d'|sed 's/Result: //'|jq '[.vestings[]]|.[0]'` |
shell | <gh_stars>1-10
#! /bin/bash
# download font
# IPAex Gothic from the IPA font distribution page:
# https://moji.or.jp/ipafont/ipafontdownload/
# curl flags: fail on HTTP errors (-f), silent (-s), but show errors (-S),
# follow redirects (-L), save under the remote filename (-O).
curl -fsSLO https://moji.or.jp/wp-content/ipafont/IPAexfont/ipaexg00301.zip
unzip ipaexg00301.zip
|
shell | # 安装iptables
yum install -y iptables
# Upgrade iptables to the latest packaged version
yum update iptables
# Install iptables-services (systemd service wrappers: start/stop/save)
yum install -y iptables-services
# 设置开机不启动 |
shell |
# Regenerate tmdata.go for package tmmock: builds `mockdata`, a map from each
# *.json filename in this directory to that file's contents embedded as a Go
# raw-string (backquoted) literal.
# NOTE(review): raw-string embedding breaks if any JSON fixture contains a
# backquote character — confirm the fixtures never do.
touch tmdata.go;echo 'package tmmock' > tmdata.go; echo 'var mockdata = map[string]string{' >> tmdata.go; for f in `ls *.json`; do echo '"'$f'":`' >> tmdata.go ;cat $f >> tmdata.go;echo '`,'>>tmdata.go ; done; echo '}'>>tmdata.go
gofmt -w tmdata.go |
shell | #!/bin/sh
echo "continue" >/tmp/custom_upgrade.status
|
shell | #!/usr/bin/env bash
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# Bootstrap Spacemacs: clone it if ~/.emacs.d is missing or is not a
# Spacemacs checkout (spacemacs.mk is a marker file of the upstream repo).
# -e replaces the obscure -a file-exists test (same semantics, clearer intent).
if [[ ! -e ~/.emacs.d || ! -e ~/.emacs.d/spacemacs.mk ]]; then
    git clone --depth=1 https://github.com/syl20bnr/spacemacs ~/.emacs.d
fi
# Link this repository's spacemacs config into place.
ln -sf "$SCRIPT_DIR/spacemacs" ~/.spacemacs
# Systemd service
# Nothing left to do if the user service is already installed.
[[ -f ~/.config/systemd/user/emacs.service ]] && exit 0
shell | do
project_dir=$(dirname "$line")
project_name=$(basename "$project_dir")
find "$project_dir" -name "*.cs" -not -path '*AssemblyInfo.cs' -not -path '*/obj/*.cs' | xargs sed -i -f xunit2nunit.sed
sed "s/test_name/$project_name/g" template/standalone/test_name.csproj > "${project_dir}/${project_name}.csproj"
sed "s/test_name/$project_name/g" template/standalone/test_name_coreclr.csproj > "${project_dir}/${project_name}_coreclr.csproj"
cp template/standalone/packages.config "$project_dir/"
echo $project_name |
shell | # https://developer.zendesk.com/rest_api/docs/core/workspaces#reorder-workspaces
# Zendesk API helper: reorder workspaces.
# Sets the globals consumed by the generic zdesk request dispatcher:
#   method - HTTP verb for the request
#   url    - endpoint path relative to the instance base URL
zdesk_workspaces_reorder () {
  url=/api/v2/workspaces/reorder.json
  method=PUT
}
shell | #!/bin/bash
source settings.sh
bash $XL_HOME/init.sh |
shell | #!/bin/sh
echo "[Application Deployment] Executed after processing application deployment"
|
shell | TOKEN="$(curl -s -L -X POST "$URL/oauth/token" -H 'Content-Type: application/x-www-form-urlencoded' -u "$CLIENT_ID:$CLIENT_SECRET" -d 'grant_type=client_credentials' | jq -r '.access_token')"
#echo $TOKEN
# check active subscription
# echo "active subscription in region $REGION: "
#curl -H "Authorization: Bearer $TOKEN" "https://saas-manager.cfapps.${REGION}.hana.ondemand.com/saas-manager/v1/application/subscriptions"
subscription=$(curl -sS -H "Authorization: Bearer $TOKEN" "https://saas-manager.cfapps.${REGION}.hana.ondemand.com/saas-manager/v1/application/subscriptions")
echo $subscription |
shell | d=$1
echo $d
echo "ls $d/train* > .tmp; cat .tmp | grep -v meta > ftrain"
ls $d/train* > .tmp; cat .tmp | grep -v meta > ftrain
echo "ls $d/val* > .tmp; cat .tmp | grep -v meta > fval"
ls $d/val* > .tmp; cat .tmp | grep -v meta > fval
echo "ls $d/val*_a_a_a > fval2" |
shell | #!/bin/bash
echo "Installing wget..."
brew install wget
|
shell | #!/bin/bash
wget -q -O - --no-check-certificate "https://genome.ucsc.edu/cgi-bin/hgBlat?userSeq=$1&type=DNA&db=$2&output=json"
|
shell | return 0
fi
# Only master
if [[ "$TRAVIS_BRANCH" != "master" ]];then
echo -e "Not master, skip deploy www\n" |
shell | <gh_stars>0
#!/bin/bash
# (C) Copyright 1996-2016 ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities |
shell | #/bin/sh/
CUR_DIR=`pwd`
hadoop jar $CUR_DIR/gaussian_blur.jar GaussianBlur $1
|
shell | #!/usr/bin/env bash
#wget https://ops.fhwa.dot.gov/freight/freight_analysis/faf/faf3/netwkdbflow/network/esri/faf3_4_esri.zip
#unzip faf3_4_esri.zip
#mv ./FAF3_4_ESRI/FAF3.4_NETWORK.DBF ./FAF3_4_ESRI/FAF3.4_Network.dbf
#
#ogr2ogr -f GeoJSON -t_srs crs:84 ./faf.geojson ./FAF3_4_ESRI/FAF3.4_Network.shp
cp /home/daniel/WebstormProjects/network_api/build_network/simple_network.geojson .
tippecanoe -f -o ../mbtiles/faf.mbtiles -z8 -y ID -M 250000 -m8 -D9 --layer=network simple_network.geojson
|
shell | #!/bin/bash
export MAVEN_OPTS="-Xmx1G"
mvn --projects=tool clean
mvn --projects=tool -DskipTests install
cp "${SRC_DIR}/tool/target/antlr4-${PKG_VERSION}-complete.jar" "${PREFIX}/lib/"
echo '#!/bin/bash' > $PREFIX/bin/antlr4
echo 'java -Xmx500M -cp "'$PREFIX'/lib/antlr4-'$PKG_VERSION'-complete.jar:$CLASSPATH" org.antlr.v4.Tool "$@"' >> $PREFIX/bin/antlr4
chmod +x "${PREFIX}/bin/antlr4"
|
shell | #sudo setterm -blank 0
#hdmi_force_hotplug=1
echo "Setting Up Driver..."
/usr/bin/sudo modprobe bcm2835-v4l2
echo "Executing OpenCV script..."
/usr/bin/python3 /home/pi/sauron/main.py |
shell |
function help {
local action_name=$1
|
shell | <gh_stars>1-10
docker build -t nuts .
docker run -v $PWD:/home/volume -it harryscholes/nuts:latest
|
shell | source "$sourceFile"
done
# Set default usage flags
quiet=false
printLog=false
logErrors=false
verbose=false
dryrun=false
force=false |
shell | PROFILES="default fast slow bifm l3"
## database command definitions
setupCommands()
{
BASECMD="${BINDIR}/lambda2 mkindexp -d db.fasta -t ${NCPU}"
case $PROFILE in |
shell | data_dir=path_to_data_file
checkpoint_dir=path_to_checkpoint_file
Step="xxxxx"
for idx in $Step
do
chmod 777 -R $work_dir/$checkpoint_dir
echo model_checkpoint_path: \"model.ckpt-$idx\" > $work_dir/$checkpoint_dir/checkpoint
cat $work_dir/$checkpoint_dir/checkpoint
echo decoding with checkpoint-$idx
CUDA_VISIBLE_DEVICES=0 python $work_dir/$code_dir/thumt/bin/translator.py \
--models transformer \
--checkpoints $work_dir/$checkpoint_dir \ |
shell |
[dropbear]
accept = 443
connect = 127.0.0.1:109
[dropbear]
accept = 777
connect = 127.0.0.1:22 |
shell | #/bin/sh
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ln -s $DIR/bin/spritex /usr/local/bin |
shell | if [ ! -s "$dist_tarball" ]; then
echo "Distribution tarball '$dist_tarball' empty or non-existent."
exit 1
fi
tar -xzf "$dist_tarball"
cd "${dist_tarball%.tar.gz}"
if [ -f Build.PL ]; then
perl Build.PL
perl ./Build
perl ./Build test |
shell | export TYPEORM_URL="postgres://root:123@localhost:5432/<%- database_name %>"
export TYPEORM_ENTITIES="src/model/*.ts"
export TYPEORM_MIGRATIONS="src/migrations/*-*.ts"
# export TYPEORM_MIGRATIONS_TABLE_NAME="migrations"
yarn typeorm migration:generate -n $1 -d migration |
shell | cd $HOME
git config --global user.email "<EMAIL>"
git config --global user.name "travis-ci"
git clone --quiet --branch=mvn-repo https://${GH_TOKEN}@github.com/CorfuDB/Corfu-Repos mvn-repo > /dev/null
cd mvn-repo
cp -Rf $HOME/mvn-repo-current/* .
git add -f .
git commit -m "Updated maven repository from travis build $TRAVIS_BUILD_NUMBER"
git push -fq origin mvn-repo > /dev/null
echo -e "Maven artifacts built and pushed to github."
fi
fi |
shell |
if [ -z "$(git ls-remote --heads https://github.com/${TRAVIS_REPO_SLUG} gh-pages)" ]; then
echo "INFO: The branch gh-pages does not exist.";
echo "INFO: Not building docs."; |
shell | KIDS_PAGES="$(seq 1 3)"
bubic_get_page_outfile "${KIDS_BASE}" "${KIDS_PAGES}" kids
#### URBAN ####
URBAN_BASE="${URL}/bicicletas/urbana.html?p="
URBAN_PAGES="$(seq 1 3)" |
shell | # Copyright 2020 Kirill 'kkm' Katsnelson
source functions.inc.sh &&
source _acquire_configuration.inc.sh &&
source _apply_configuration.inc.sh
|
shell |
LOCATION=$(dirname $0)
FOLDER=$(basename $LOCATION)
if [ "$FOLDER" != "diffs" ]
then
echo "Should be invoked outside of diffs folder"
exit 1
fi
cd $(dirname $LOCATION)
for i in `ls *template` ; do
if [ -f ${i}.orig ] ; then |
shell |
# TODO g++ -fexceptions -std=c++11 -g -I../src akin.cpp -o akin
g++ -std=c++11 -g -I../src readme.cpp -o readme
g++ -std=c++11 -g -I../src akin.cpp -o akin
g++ -std=c++11 -g -I../src get_alpha.cpp -o get_alpha |
shell |
# Check Internal IP
internalip=$(hostname -I)
echo -e '\E[32m'"Internal IP :" $reset $internalip
# Check External IP
externalip=$(wget -4qO- "http://whatismyip.akamai.com/")
echo -e '\E[32m'"External IP : $reset "$externalip |
shell | #
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part2.sh
# Description: OpenWrt DIY script part 2 (After Update feeds)
# |
shell |
name="libcrc32trim"
version="`awk '{print $NF}' VERSION`"
tar="rpmbuild/${name}-${version}.tar.gz"
rm -rf rpmbuild
mkdir rpmbuild
tar cvzf "$tar" \
--transform 's%^%'"${name}-${version}"'/%' \
crc32trim.{c,h} \
LICENSE \
Makefile \
packaging/ \ |
shell | #!/bin/bash
# Provision a site directory tree on a remote server over SSH.
#
# Positional arguments:
#   $1 SERVER  - remote host to connect to
#   $2 USER    - SSH user on the remote host
#   $3 SERVICE - service name (NOTE(review): captured but unused below — confirm)
#   $4 FOLDER  - base folder on the remote host
#   $5 SITE    - site name, created under $FOLDER
SERVER=$1
USER=$2
SERVICE=$3
FOLDER=$4
SITE=$5
echo "creando carpeta log"
# Each step authenticates with the local id_rsa key and runs via sudo remotely.
ssh -i id_rsa $USER@$SERVER sudo mkdir -p $FOLDER/$SITE/log
echo "creando carpeta public"
ssh -i id_rsa $USER@$SERVER sudo mkdir -p $FOLDER/$SITE/public
echo "dando permisos de 775"
# Make the whole site tree group-writable.
ssh -i id_rsa $USER@$SERVER sudo chmod -R 775 $FOLDER/$SITE
|
shell | #
# *********************************************************
# * Author : <NAME>
# * Email : <EMAIL>
# * Create time : 2019-10-08 17:26
# * Last modified : 2019-10-08 17:26
# * Filename : installListenMyZsh.sh
# * Description :
# *********************************************************
|
shell | build() {
cat >"${RPMBUILD_DIR}"SPECS/microshift-images.spec <<EOF
%global baseVersion ${BASE_VERSION}
EOF
for arch in $ARCHITECTURES; do |
shell | #!/bin/sh -l
# Run the Python 3 script given as $1 and capture its stdout.
result=`python3 ${1}`
# Publish it as GitHub Actions step output "result-python3".
# NOTE(review): the ::set-output workflow command is deprecated in favor of
# writing to $GITHUB_OUTPUT — confirm the runner version still supports it.
echo ::set-output name=result-python3::${result}
|
shell | --gpu 2 --manual 0
python train_teacher_cifar.py \
--arch resnet32x4_aux \
--checkpoint-dir ./checkpoint \
--data ./data \ |
shell | TUTORIAL_1_QC_DIR=${DATA_DIR}/gwas/tutorial/1_QC_GWAS
TUTORIAL_2_PS_DIR=${DATA_DIR}/gwas/tutorial/2_PS_GWAS
TUTORIAL_3_AA_DIR=${DATA_DIR}/gwas/tutorial/3_AA_GWAS
ORGANISM_RICE_3KG_DIR=${DATA_DIR}/gwas/rice-snpseek/3K_RG_29mio_biallelic_SNPs_Dataset
ORGANISM_RICE_3KG_GWAS_DIR=${DATA_DIR}/gwas/rice-snpseek/1M_GWAS_SNP_Dataset
ORGANISM_CANINE_TGT_DIR=${DATA_DIR}/gwas/canine-hayward-2016-266k4
ORGANISM_CANINE_REF_DIR=${DATA_DIR}/gwas/canine-parker-2017
BENCHMARK_METHOD_DATA_DIR=${DATA_DIR}/gwas/benchmark |
shell | cat tmp/trace.bin | ../../bin/HalideTraceViz -s 400 236 -t 1 -d 10000 -h 4 -f producer_root_y -1 1 -1 0 32 1 32 48 1 0 0 1 -f consumer_root_y -1 1 -1 0 32 1 240 64 1 0 0 1 -l producer_root_y producer 32 40 1 -l consumer_root_y consumer 240 40 1 | avconv -f rawvideo -pix_fmt bgr32 -s 400x236 -i /dev/stdin tmp/frames_%04d.tif
make_gif lesson_08_store_root_compute_y.gif 20
cat tmp/trace.bin | ../../bin/HalideTraceViz -s 400 236 -t 1 -d 10000 -h 4 -f producer_root_x -1 1 -1 0 32 1 32 48 1 0 0 1 -f consumer_root_x -1 1 -1 0 32 1 240 64 1 0 0 1 -l producer_root_x producer 32 40 1 -l consumer_root_x consumer 240 40 1 | avconv -f rawvideo -pix_fmt bgr32 -s 400x236 -i /dev/stdin tmp/frames_%04d.tif
make_gif lesson_08_store_root_compute_x.gif 20
cat tmp/trace.bin | ../../bin/HalideTraceViz -s 400 236 -t 1 -d 10000 -h 10 -f producer_tile -1 1 -1 1 16 1 32 48 1 0 0 1 -f consumer_tile -1 1 -1 0 16 1 240 64 1 0 0 1 -l producer_tile producer 32 40 1 -l consumer_tile consumer 240 40 1 | avconv -f rawvideo -pix_fmt bgr32 -s 400x236 -i /dev/stdin tmp/frames_%04d.tif
make_gif lesson_08_tile.gif 10
rm -f lesson_08_mixed.mp4
cat tmp/trace.bin | ../../bin/HalideTraceViz -s 800 400 -t 200 -d 3 -h 30 -f producer_mixed -1.5 1.5 -1 1 2 1 40 48 1 0 0 1 -f consumer_mixed -1.5 1.5 -1 0 2 1 440 48 1 0 0 1 -l producer_mixed producer 40 40 1 -l consumer_mixed consumer 440 40 1 | avconv -f rawvideo -pix_fmt bgr32 -s 800x400 -i /dev/stdin -c:v h264 lesson_08_mixed.mp4
|
shell |
RDIR="$(pwd)/release/${RELEASE_VERSION}"
mkdir -p "$RDIR"
for goarch in ""amd64 386""; do
for goos in ""linux windows darwin""; do
NAME="${PROJECT_NAME}_${RELEASE_VERSION}_${goos}_${goarch}"
CGO_ENABLED=0 GOOS=${goos} GOARCH=${goarch} go build -o ${RDIR}/${NAME}
echo ${NAME}
done |
shell | sudo apt -y install git
|
shell | #!/bin/bash -f
echo "---------------------------------------------------------------------------"
echo "-- Start the tellstick service "
echo "---------------------------------------------------------------------------"
/usr/sbin/telldusd &
echo "---------------------------------------------------------------------------"
echo "-- Start the node-red web page "
echo "---------------------------------------------------------------------------"
# cp /opt/example-flows/* /root/.node-red/lib/flows/
exec /usr/local/bin/node-red |
shell | mkdir -p ${TMP_DIR}
cp build_inner.sh ${TMP_DIR}
docker run --rm -v ${TMP_DIR}:/work -w /work emsdk ./build_inner.sh
cp ${TMP_DIR}/astyle/build/gcc/bin/libastyle.wasm ${DST_DIR}/
|
shell | NAME='funnel-server'
# Ensure that a firewall rule exists allowing HTTP traffic
gcloud compute firewall-rules create default-http --allow='tcp:80' --source-tags='http-server' --quiet
# Start the VM
gcloud compute instances create $NAME \
--scopes 'compute-rw,storage-rw' \
--zone 'us-west1-a' \
--tags 'funnel,http-server' \
--machine-type n1-standard-2 \
--image-family cos-stable \
--image-project cos-cloud \ |
shell | gcloud_user=$1
service_account_user=$2
cd /
export GIT_CONFIG_NOSYSTEM=1
export GIT_TERMINAL_PROMPT=0
orig_home=$HOME
export HOME=$tmpdir
touch $HOME/.gitconfig
ln -s $orig_home/.config $HOME/.config |
shell | #!/bin/bash
source ${HOME}/.bashrc.rdbox-hq
# |
shell | #!/bin/sh
docker build -f swb2_build_ubuntu.docker -t swb2 .
|
shell | #!/bin/bash
set -e
# Replace every occurrence of version string $1 with $2 in build.sbt,
# Markdown and Jade files across the tree (skipping vendored node_modules /
# bower_components), then in the top-level build.sbt itself.
# NOTE(review): `sed -i ''` is the BSD/macOS in-place form; GNU sed would
# misread the empty string as the script, so this appears macOS-only — confirm.
function upgrade {
echo Upgrade $1 to $2
find . -type f -name 'build.sbt' | grep -v "node_modules" | grep -v "bower_components" | xargs sed -i '' s/"$1"/"$2"/g
find . -type f -name '*.md' | grep -v "node_modules" | grep -v "bower_components" | xargs sed -i '' s/"$1"/"$2"/g
find . -type f -name '*.jade' | grep -v "node_modules" | grep -v "bower_components" | xargs sed -i '' s/"$1"/"$2"/g
sed -i '' s/"$1"/"$2"/g build.sbt
}
# Upgrade from the old to the new fintrospect version recorded in harp.json.
upgrade `./tools/jq .globals.fintrospect.old harp.json` `./tools/jq .globals.fintrospect.new harp.json`
|
shell | #!/bin/bash
# Clean Installation Rests
apt clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /usr/share/doc/*
|
shell | "client_secret_post"}' \
https://iddev.fedorainfracloud.org/openidc/Registration \
-o client_secrets.json
else
echo "client_secrets.json already exists. To regenerate, delete the file." |
shell | <gh_stars>0
#!/usr/bin/env bash
export CELLARIUM_HOME="$(pwd)"
uwsgi uwsgi.ini
|
shell | echo "Check if pure Unified Auditing is enabled..."
echo
sqlplus -s / as sysdba << EOF
set echo on
col POLICY_NAME format A20
col AUDIT_OPTION format A40
column parameter format a50
column value format a40
set PAGES 9999
set lines 120
select parameter, value from v\$option where PARAMETER = 'Unified Auditing'; |
shell | if [ -f $KALDI_ROOT/tools/env.sh ]; then
. $KALDI_ROOT/tools/env.sh
fi
if [ -f $KALDI_ROOT/tools/config/common_path.sh ]; then
. $KALDI_ROOT/tools/config/common_path.sh
fi
|
shell | #!/usr/bin/env bash
set -eo pipefail
PYTHON=$(which python3)
echo "Running migrations"
${PYTHON} manage.py migrate --no-input
echo "Done!"
|
shell |
APP_NAME="norka"
APP_DOMAIN="com.github.tenderowl.norka"
if [ -z $1 ]; then
echo "Usage: $0 lang"
exit |
shell | #!/bin/bash
for x in {1..50..1}
do
echo $x
done
|
shell | cd ~/
wget http://archive.ubuntu.com/ubuntu/pool/universe/o/openssl098/libssl0.9.8_0.9.8o-7ubuntu3.2.14.04.1_i386.deb
dpkg -i *.deb
rm *.deb
git clone https://github.com/jlgaddis/iou2net.git
ln -s /usr/lib/i386-linux-gnu/libcrypto.so.0.9.8 /usr/lib/libcrypto.so.4
chmod +x /root/iou2net/iou2net.pl
|
shell | #!/bin/bash
rm /usr/bin/keychain_dumper
ldid -Sentitlements.xml keychain_dumper
mv keychain_dumper /usr/bin
|
shell | UDEVDIR=${PKGROOT}/etc/udev/rules.d
mkdir -p "${UDEVDIR}"
cp etc-udev-rules.d-openmono.rules "${UDEVDIR}/openmono.rules"
sudo chown -R root:root "${PKGROOT}"
sudo dpkg-deb --build "${PKGROOT}"
|
shell | <gh_stars>10-100
#!/bin/bash
set -euC
poetry run python main.py dajare.EvalBertFeature --local-scheduler --rerun
poetry run python main.py dajare.EvalMultipleFeature --local-scheduler --rerun
poetry run python main.py dajare.EvalBertFeatureSampling --local-scheduler --rerun
poetry run python main.py dajare.EvalMultipleFeatureSampling --local-scheduler --rerun
|
shell | <gh_stars>10-100
#!/bin/bash
clear
python ../src/lip_movement_net.py -i ../data/dataset_source
|
shell | # bspc monitor DP-1 -d "${desktops[@]}"
bspc monitor eDP-1 --remove
xrandr --output eDP-1 --off
else
echo "Internal display config"
# xrandr --output eDP-1 --primary --mode 3840x2160
xrandr --output eDP-1 --primary --mode 1920x1080 --panning 1920x1080
if [[ ! " ${active_displays[@]} " =~ " $external_monitor " ]]; then exit; fi
bspc monitor $external_monitor -a $external_monitor
bspc monitor eDP-1 -a eDP-1 |
shell | # Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
set -x
os=`uname -s`
downloader="wget"
echo "Trying to download files from github"
header=https://github.com/downloads/tetsuok/py-srilm-interpolator
# Download only non-existent files.
for f in news-commentary-v6-lowercased-en-3gram.arpa.gz \ |
shell | <gh_stars>0
cd Accounts
chmod +x gradlew
./gradle build
cd ../Transactions
chmod +x gradlew
./gradle build |
shell | echo "FULL: Checkout commit";
git checkout af0756dddd8c4692471302d0296d3a4a5675d050 2>&1 | head -n 1;
echo "FULL: Clean up data...";
# Remove unneeded data references |
shell | #!/bin/bash
# Generate Go bindings for the HiveServer Thrift IDL and vendor them locally.
thrift -r --gen go thrift/HiveServer.thrift
cp -r gen-go/hiveserver .
# For some reason these files have to be deleted. Getting duplicate definition
rm hiveserver/tcliservice.go hiveserver/ttypes.go hiveserver/constants.go
# This file we don't need
rm -rf hiveserver/t_c_l_i_service-remote/
shell | #!/bin/sh
rm -rf prometheus/client_data/*
docker-compose build
docker-compose up
|
shell | set -x
# Announce and perform the uninstall. ${HOME:?} aborts with an error instead
# of expanding to "/code/geomancer" if HOME is ever unset or empty, guarding
# the recursive delete against wiping an unintended path.
echo "Geomancer uninstall process beginning."
rm -rf "${HOME:?}/code/geomancer"
rm -f /etc/nginx/sites-enabled/geomancer.conf
shell | bash scripts/run_cartpole_swingup.sh
bash scripts/run_finger_spin.sh
bash scripts/run_reacher_easy.sh |
shell | tcp_cc_cleanup
}
setup()
{
tcp_cc_check_support bbr
tcp_cc_setup
tst_res TINFO "emulate congestion with packet loss 0.5%" |
shell | # Copyright (C) 2014 Tresys Technology
#
# Authors: <NAME> <<EMAIL>>
# Authors: <NAME> <<EMAIL>>
#
check () {
return 0
}
|
shell | echo "NOT FOUND! Install it, run it and try again."
reqs_failed=true
fi
if $reqs_failed; then
echo |
shell | tensorboard --logdir=tb_logs/ --port 1337 --bind_all
|
shell | ((doit++))
fi
if [ -e fvsvec.rc ] && [ -e fvsvec.ccmrun.namelist.tmpl ]; then
((doit++))
fi
if [ -e oseledec.rc ] && [ -e fvoseledec.ccmrun.namelist.tmpl ]; then |
shell | done
return $found_hardtab
}
if [ $# -eq 0 ]; then
echo "Checking files staged for commit for hardtabs..."
repo_root=$(git rev-parse --show-toplevel)/
added_files=$(git diff-index --name-only --cached HEAD | sed "s:^:$repo_root:")
check-hardtabs "${added_files[@]}" | column -t
else
check-hardtabs "$@" | column -t |
shell | #
# Exit codes:
# 0: is deployable
# 1: is not deployable
# 4: probably a jq parse error (bad server response)
#
# Requires:
# curl
# jq
#
# Environment:
# CLUBHOUSE_BOUNCER_API_KEY=<alphanumeric_secret> |
shell | <gh_stars>0
set -e
docker pull ubuntu:vivid-20150421
docker tag ubuntu:vivid-20150421 reg100kondrashin.azurecr.io/oldubuntu
docker push reg100kondrashin.azurecr.io/oldubuntu
|
shell |
# Locate a Python 3 interpreter; bail out early if the platform has none.
# `command -v` is the POSIX-recommended replacement for `type` here.
if command -v python3 > /dev/null 2>&1 ; then
    PYTHON=python3
else
    # Diagnostics belong on stderr.
    echo 'Your platform is not supported : no python' >&2
    exit 1
fi
# Create and activate a project-local virtualenv, then bring pip itself up
# to date before any further installs.
$PYTHON -m venv venv
. venv/bin/activate
pip install --upgrade pip
shell | [[ "${pool_used_capacity}" -ge "${ZFS_POOL_CAPACITY_WARNING}" ]] ||
[[ "${scrub_repaired_bytes}" != "0B" ]] ||
[[ "$(echo "${scrub_age}" | awk '{print int($1)}')" -ge "${SCRUB_AGE_WARNING}" ]]; then
ui_symbol="${UI_WARNING_SYMBOL}"
else
ui_symbol=" "
fi
# Print the row with all the attributes corresponding to the pool.
printf "|%-12s %1s|%-8s|%6s|%6s|%6s|%3s%%|%8s|%6s|%5s|\n" "${pool_name}" "${ui_symbol}" "${pool_health}" \
"${read_errors}" "${write_errors}" "${checksum_errors}" "${pool_used_capacity}" "${scrub_repaired_bytes}" "${scrub_errors}" \
"${scrub_age}" >> "${EMAIL_BODY}"
done
echo "+--------------+--------+------+------+------+----+--------+------+-----+" >> "${EMAIL_BODY}"
|
shell | #!/bin/bash -x
echo "Skew-T log-P diagram"
ruby visualize/skewTlogP.rb output.dat
mv dcl_0001.png skewTlogP.png
|
Subsets and Splits